// Transforms the translation row of `bonematrix` (_41.._44, i.e. the bone's
// position in homogeneous coordinates) by `actormatrix`, producing the bone's
// position in the actor's space. Only the positional part is computed; this
// is not a full 4x4 matrix product.
Vector3A D3dMatrixMultiply(D3DXMATRIX bonematrix, D3DXMATRIX actormatrix) {
    Vector3A result;
    // Row vector (b41 b42 b43 b44) dotted with columns 1..3 of actormatrix.
    result.X = bonematrix._41 * actormatrix._11
             + bonematrix._42 * actormatrix._21
             + bonematrix._43 * actormatrix._31
             + bonematrix._44 * actormatrix._41;
    result.Y = bonematrix._41 * actormatrix._12
             + bonematrix._42 * actormatrix._22
             + bonematrix._43 * actormatrix._32
             + bonematrix._44 * actormatrix._42;
    result.Z = bonematrix._41 * actormatrix._13
             + bonematrix._42 * actormatrix._23
             + bonematrix._43 * actormatrix._33
             + bonematrix._44 * actormatrix._43;
    return result;
}
时间: 2023-06-17 11:04:05 浏览: 50
这个函数并不是完整的 4×4 矩阵相乘:它只取 bonematrix 的第四行(_41、_42、_43、_44,即矩阵中的平移/位置部分,以齐次坐标表示),分别与 actormatrix 的第一、第二、第三列做点积,并把得到的三个标量赋给 result 的 X、Y、Z 成员。换句话说,它把骨骼矩阵中的位置经 actor 矩阵变换后返回三维坐标,常用于求骨骼在角色(actor)空间或世界空间中的位置。
相关问题
在windows cpp 动态库读取test.mp4视频流到Windows窗口的离屏表面,在Qt调用该动态库。完整源码
以下是一个简单的示例代码,演示如何使用Windows动态库来读取视频流并在Qt中显示:
Windows动态库代码(video_player.cpp):
```cpp
#include <windows.h>
#include <d3d9.h>
#include <d3dx9.h>
#include "video_player.h"
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib, "d3dx9.lib")
// Window (and video frame) dimensions in pixels.
#define WINDOW_WIDTH 640
#define WINDOW_HEIGHT 480
// Size of one frame: 32-bit pixels (4 bytes each) covering the window.
#define FRAME_SIZE (WINDOW_WIDTH * WINDOW_HEIGHT * 4)
// Globals: the Direct3D object, the rendering device, the system-memory
// offscreen surface, and the heap buffer one frame is read into
// (allocated/freed by PlayVideo).
LPDIRECT3D9 g_pD3D;
LPDIRECT3DDEVICE9 g_pDevice;
LPDIRECT3DSURFACE9 g_pSurface;
BYTE* g_pFrameBuffer = NULL;
// 初始化Direct3D
// Creates the Direct3D 9 object and a windowed HAL device that renders into
// hWnd. Returns false if either creation step fails; on success the results
// are stored in g_pD3D and g_pDevice.
bool InitD3D(HWND hWnd)
{
    g_pD3D = Direct3DCreate9(D3D_SDK_VERSION);
    if (g_pD3D == NULL)
        return false;

    // Windowed presentation with the discard swap effect; the back buffer
    // format follows the current display mode (D3DFMT_UNKNOWN).
    D3DPRESENT_PARAMETERS presentParams;
    ZeroMemory(&presentParams, sizeof(presentParams));
    presentParams.Windowed = TRUE;
    presentParams.SwapEffect = D3DSWAPEFFECT_DISCARD;
    presentParams.BackBufferFormat = D3DFMT_UNKNOWN;

    HRESULT hr = g_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
                                      D3DCREATE_SOFTWARE_VERTEXPROCESSING,
                                      &presentParams, &g_pDevice);
    return SUCCEEDED(hr);
}
// 释放Direct3D资源
// Releases the offscreen surface, the device and the Direct3D object in
// dependency order, nulling each global so a second call is a no-op.
void ReleaseD3D()
{
    // Release-and-clear helper shared by all three COM globals.
    auto safeRelease = [](auto*& com) {
        if (com)
        {
            com->Release();
            com = NULL;
        }
    };
    safeRelease(g_pSurface);
    safeRelease(g_pDevice);
    safeRelease(g_pD3D);
}
// 创建离屏表面
bool CreateOffscreenSurface()
{
HRESULT hr = g_pDevice->CreateOffscreenPlainSurface(WINDOW_WIDTH, WINDOW_HEIGHT, D3DFMT_X8R8G8B8,
D3DPOOL_SYSTEMMEM, &g_pSurface, NULL);
return SUCCEEDED(hr);
}
// 读取视频帧到离屏表面
bool LoadFrameToSurface(BYTE* pBuffer)
{
D3DLOCKED_RECT LockedRect;
HRESULT hr = g_pSurface->LockRect(&LockedRect, NULL, D3DLOCK_DISCARD);
if (FAILED(hr))
return false;
BYTE* pDest = (BYTE*)LockedRect.pBits;
int nDestPitch = LockedRect.Pitch;
for (int y = 0; y < WINDOW_HEIGHT; ++y)
{
memcpy(pDest, pBuffer, WINDOW_WIDTH * 4);
pBuffer += WINDOW_WIDTH * 4;
pDest += nDestPitch;
}
hr = g_pSurface->UnlockRect();
return SUCCEEDED(hr);
}
// 在窗口中显示离屏表面
void PresentOffscreenSurface(HWND hWnd)
{
RECT rect;
GetClientRect(hWnd, &rect);
D3DXMATRIX matProj;
D3DXMatrixOrthoOffCenterLH(&matProj, 0, (FLOAT)WINDOW_WIDTH, (FLOAT)WINDOW_HEIGHT, 0, 0, 1);
g_pDevice->SetTransform(D3DTS_PROJECTION, &matProj);
g_pDevice->StretchRect(g_pSurface, NULL, g_pDevice->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO), &rect, D3DTEXF_NONE);
}
// 打开视频文件,读取视频流并显示
bool PlayVideo(HWND hWnd, const char* szFileName)
{
// 打开视频文件
FILE* file = fopen(szFileName, "rb");
if (!file)
return false;
fseek(file, 0, SEEK_END);
int nFileSize = ftell(file);
fseek(file, 0, SEEK_SET);
// 初始化视频帧缓冲区
g_pFrameBuffer = new BYTE[FRAME_SIZE];
// 初始化Direct3D
if (!InitD3D(hWnd))
return false;
// 创建离屏表面
if (!CreateOffscreenSurface())
return false;
// 读取视频流
while (true)
{
int nFrameSize;
if (fread(&nFrameSize, sizeof(nFrameSize), 1, file) != 1)
break;
if (fread(g_pFrameBuffer, nFrameSize, 1, file) != 1)
break;
// 将视频帧加载到离屏表面
if (!LoadFrameToSurface(g_pFrameBuffer))
break;
// 在窗口中显示离屏表面
PresentOffscreenSurface(hWnd);
// 等待一段时间,以便视频流显示更流畅
Sleep(33);
}
// 释放资源
delete[] g_pFrameBuffer;
ReleaseD3D();
fclose(file);
return true;
}
```
Windows动态库头文件(video_player.h):
```cpp
#ifndef VIDEO_PLAYER_H
#define VIDEO_PLAYER_H
#include <windows.h>
// Plays the length-prefixed frame stream in szFileName inside window hWnd.
// Blocks until the stream ends; returns false if the file cannot be opened
// or Direct3D initialization fails.
bool PlayVideo(HWND hWnd, const char* szFileName);
#endif // VIDEO_PLAYER_H
```
Qt代码(mainwindow.cpp):
```cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include "video_player.h"
// Builds the main window UI and adds a "Play" button wired to the playback
// slot. The button is parented to this window, so Qt owns its lifetime.
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    QPushButton* playButton = new QPushButton("Play", this);
    playButton->setGeometry(10, 10, 80, 30);
    connect(playButton, SIGNAL(clicked()), this, SLOT(OnBtnPlayClicked()));
}
// Destroys the generated UI object; child widgets are freed by Qt's
// parent-child ownership.
MainWindow::~MainWindow()
{
    delete ui;
}
// Slot for the Play button: streams "test.mp4" (resolved against the current
// working directory) into this window via the video_player DLL.
// NOTE(review): PlayVideo blocks until playback finishes, freezing the UI
// thread for the whole video - consider moving it to a worker thread.
void MainWindow::OnBtnPlayClicked()
{
    PlayVideo((HWND)winId(), "test.mp4");
}
```
请注意,此示例代码仅演示了如何在Windows和Qt中使用动态库读取视频流并在窗口中显示。实际应用中,您需要根据您的需求进行修改和调整。
ffmepg6.0录屏推流c++示例
以下是在 Windows 平台上,使用 FFmpeg 6.0 实现录屏推流的 C++ 示例代码:
```c++
#include <iostream>
#include <Windows.h>
#include <d3d9.h>
#include <d3dx9.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib, "d3dx9.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
// Capture and encoding parameters.
#define SCREEN_WIDTH 1920   // capture width in pixels
#define SCREEN_HEIGHT 1080  // capture height in pixels
#define FPS 30              // target frame rate
#define BIT_RATE 4000000    // H.264 bitrate, bits per second
#define STREAM_URL "rtmp://localhost/live/stream"
// Screen-capture-to-RTMP demo: grabs the desktop front buffer with
// Direct3D 9Ex and pushes H.264-in-FLV to STREAM_URL with FFmpeg.
//
// NOTE(review): despite the surrounding text saying "FFmpeg 6.0", the calls
// below are the pre-5.0 API: av_register_all(), avcodec_register_all(),
// AVStream::codec, av_frame_set_pts() and av_init_packet() were deprecated
// and then removed before 6.0, so this will not compile against FFmpeg 6.0
// headers as written - verify the target FFmpeg version before reusing.
// NOTE(review): every error path below returns without releasing the
// resources acquired so far (pD3D, pFormatCtx, ...).
int main()
{
    // Create the Direct3D 9Ex object.
    IDirect3D9Ex* pD3D = nullptr;
    if (FAILED(Direct3DCreate9Ex(D3D_SDK_VERSION, &pD3D)))
    {
        std::cerr << "Failed to create IDirect3D9Ex object" << std::endl;
        return -1;
    }
    // Require at least one display adapter.
    UINT adapterCount = pD3D->GetAdapterCount();
    if (adapterCount == 0)
    {
        std::cerr << "No display adapter found" << std::endl;
        return -1;
    }
    // Query the first display mode of adapter 0.
    // NOTE(review): displayMode is never used afterwards - the capture size
    // is hard-coded to SCREEN_WIDTH x SCREEN_HEIGHT instead.
    D3DDISPLAYMODE displayMode;
    if (FAILED(pD3D->EnumAdapterModes(0, D3DFMT_X8R8G8B8, 0, &displayMode)))
    {
        std::cerr << "Failed to enumerate display adapter modes" << std::endl;
        return -1;
    }
    // Create a windowed HAL device targeting the desktop window.
    D3DPRESENT_PARAMETERS d3dpp = {};
    d3dpp.Windowed = TRUE;
    d3dpp.BackBufferFormat = D3DFMT_X8R8G8B8;
    d3dpp.BackBufferWidth = SCREEN_WIDTH;
    d3dpp.BackBufferHeight = SCREEN_HEIGHT;
    d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
    IDirect3DDevice9Ex* pD3DDevice = nullptr;
    if (FAILED(pD3D->CreateDeviceEx(
        D3DADAPTER_DEFAULT,
        D3DDEVTYPE_HAL,
        GetDesktopWindow(),
        D3DCREATE_HARDWARE_VERTEXPROCESSING,
        &d3dpp,
        nullptr,
        &pD3DDevice)))
    {
        std::cerr << "Failed to create IDirect3DDevice9Ex object" << std::endl;
        return -1;
    }
    // Initialize FFmpeg.
    // NOTE(review): av_register_all()/avcodec_register_all() were removed in
    // FFmpeg 5.0; only avformat_network_init() still exists in 6.0.
    av_register_all();
    avcodec_register_all();
    avformat_network_init();
    // Allocate the FLV output context for the RTMP URL.
    AVFormatContext* pFormatCtx = nullptr;
    if (avformat_alloc_output_context2(&pFormatCtx, nullptr, "flv", STREAM_URL) < 0)
    {
        std::cerr << "Failed to allocate output context" << std::endl;
        return -1;
    }
    // Add the video stream (pCodec is still null here; the encoder is
    // looked up further down).
    AVCodec* pCodec = nullptr;
    AVStream* pStream = avformat_new_stream(pFormatCtx, pCodec);
    if (!pStream)
    {
        std::cerr << "Failed to allocate video stream" << std::endl;
        return -1;
    }
    // Configure encoder parameters.
    // NOTE(review): AVStream::codec was removed in FFmpeg 5.0; modern code
    // allocates its own AVCodecContext (avcodec_alloc_context3) and copies
    // the settings into pStream->codecpar.
    AVCodecContext* pCodecCtx = pStream->codec;
    pCodecCtx->codec_id = AV_CODEC_ID_H264;
    pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    pCodecCtx->width = SCREEN_WIDTH;
    pCodecCtx->height = SCREEN_HEIGHT;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    pCodecCtx->bit_rate = BIT_RATE;
    pCodecCtx->gop_size = FPS;
    pCodecCtx->time_base = { 1, FPS };
    pCodecCtx->level = 31;
    pCodecCtx->profile = FF_PROFILE_H264_MAIN;
    // x264 low-latency options.
    av_opt_set(pCodecCtx->priv_data, "preset", "ultrafast", 0);
    av_opt_set(pCodecCtx->priv_data, "tune", "zerolatency", 0);
    // Find and open the H.264 encoder.
    pCodec = avcodec_find_encoder(pCodecCtx->codec_id);
    if (!pCodec)
    {
        std::cerr << "Failed to find video encoder" << std::endl;
        return -1;
    }
    if (avcodec_open2(pCodecCtx, pCodec, nullptr) < 0)
    {
        std::cerr << "Failed to open video encoder" << std::endl;
        return -1;
    }
    // Open the network output.
    if (avio_open(&pFormatCtx->pb, STREAM_URL, AVIO_FLAG_WRITE) < 0)
    {
        std::cerr << "Failed to open output stream" << std::endl;
        return -1;
    }
    // Write the container header.
    if (avformat_write_header(pFormatCtx, nullptr) < 0)
    {
        std::cerr << "Failed to write file header" << std::endl;
        return -1;
    }
    // Allocate the pixel buffer and wrap it in an AVFrame.
    const auto buffer_size = av_image_get_buffer_size(pCodecCtx->pix_fmt, SCREEN_WIDTH, SCREEN_HEIGHT, 1);
    auto buffer = static_cast<uint8_t*>(av_malloc(buffer_size));
    AVFrame* pFrame = av_frame_alloc();
    if (!pFrame)
    {
        std::cerr << "Failed to allocate video frame" << std::endl;
        return -1;
    }
    av_image_fill_arrays(pFrame->data, pFrame->linesize, buffer, pCodecCtx->pix_fmt, SCREEN_WIDTH, SCREEN_HEIGHT, 1);
    // Create the system-memory surface GetFrontBufferData captures into
    // (A8R8G8B8 + D3DPOOL_SYSTEMMEM is what that call requires).
    // NOTE(review): `matrix` is set to identity but never used afterwards.
    IDirect3DSurface9* pSurface = nullptr;
    D3DXMATRIX matrix;
    D3DLOCKED_RECT lockedRect;
    pD3DDevice->CreateOffscreenPlainSurface(SCREEN_WIDTH, SCREEN_HEIGHT, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, &pSurface, nullptr);
    D3DXMatrixIdentity(&matrix);
    // Capture/encode/send loop.
    // NOTE(review): `running` is only cleared on encoder errors - there is
    // no user-facing way to stop the stream cleanly.
    bool running = true;
    while (running)
    {
        // Grab the current front buffer (the visible desktop).
        if (SUCCEEDED(pD3DDevice->GetFrontBufferData(0, pSurface)))
        {
            if (SUCCEEDED(pSurface->LockRect(&lockedRect, nullptr, D3DLOCK_READONLY)))
            {
                // NOTE(review): this loop is labelled as a YUV420P conversion
                // but is actually a raw memcpy of SCREEN_WIDTH*4 bytes of
                // BGRA per row into a YUV420P buffer whose luma stride
                // (linesize[0]) is only SCREEN_WIDTH bytes - it writes far
                // past the buffer. A real implementation must convert with
                // sws_scale() from libswscale.
                for (int y = 0; y < SCREEN_HEIGHT; y++)
                {
                    auto dest = buffer + y * pFrame->linesize[0];
                    auto src = static_cast<uint8_t*>(lockedRect.pBits) + y * lockedRect.Pitch;
                    memcpy(dest, src, SCREEN_WIDTH * 4);
                }
                // Stamp the frame with a wall-clock-derived PTS.
                // NOTE(review): av_frame_set_pts() no longer exists in
                // FFmpeg >= 5; assign pFrame->pts directly.
                av_frame_set_pts(pFrame, av_rescale_q(av_gettime(), { 1, AV_TIME_BASE }, pCodecCtx->time_base));
                // Encode the frame and write any produced packets.
                // NOTE(review): av_init_packet() is deprecated/removed in
                // newer FFmpeg; use av_packet_alloc() instead.
                AVPacket packet;
                av_init_packet(&packet);
                packet.data = nullptr;
                packet.size = 0;
                int result = avcodec_send_frame(pCodecCtx, pFrame);
                if (result == 0)
                {
                    // Drain all packets the encoder has ready.
                    while (result >= 0)
                    {
                        result = avcodec_receive_packet(pCodecCtx, &packet);
                        if (result == 0)
                        {
                            packet.stream_index = pStream->index;
                            av_interleaved_write_frame(pFormatCtx, &packet);
                            av_packet_unref(&packet);
                        }
                        else if (result == AVERROR(EAGAIN) || result == AVERROR_EOF)
                        {
                            // Encoder needs more input / is flushed.
                            break;
                        }
                        else
                        {
                            std::cerr << "Failed to encode video frame" << std::endl;
                            running = false;
                        }
                    }
                }
                else
                {
                    std::cerr << "Failed to send video frame for encoding" << std::endl;
                    running = false;
                }
                pSurface->UnlockRect();
            }
        }
        // Crude frame-rate limiter; ignores capture/encode time.
        Sleep(1000 / FPS);
    }
    // Finalize the container.
    av_write_trailer(pFormatCtx);
    // Release D3D and FFmpeg resources.
    if (pSurface)
    {
        pSurface->Release();
    }
    if (pD3DDevice)
    {
        pD3DDevice->Release();
    }
    if (pD3D)
    {
        pD3D->Release();
    }
    if (pCodecCtx)
    {
        avcodec_close(pCodecCtx);
    }
    if (pFrame)
    {
        av_frame_free(&pFrame);
    }
    if (pFormatCtx)
    {
        avio_close(pFormatCtx->pb);
        avformat_free_context(pFormatCtx);
    }
    if (buffer)
    {
        av_free(buffer);
    }
    return 0;
}
```
在这个示例中,我们使用了 Windows 平台上的 DirectX 9 API 来捕获屏幕数据,然后使用 FFmpeg 的编码器将数据编码为 H.264 格式,并通过 RTMP 协议推流到本地的流服务器。您需要根据具体的情况修改 STREAM_URL、SCREEN_WIDTH、SCREEN_HEIGHT、FPS 和 BIT_RATE 等参数。需要注意的是,示例中使用的 av_register_all()、avcodec_register_all()、AVStream::codec、av_frame_set_pts() 和 av_init_packet() 等接口在 FFmpeg 5.0 中已被移除或废弃,若要在 FFmpeg 6.0 下编译,需要改用 avcodec_alloc_context3()、codecpar、av_packet_alloc() 等新 API,并使用 libswscale 的 sws_scale() 将 BGRA 屏幕数据真正转换为 YUV420P,而不是直接 memcpy。
相关推荐
![docx](https://img-home.csdnimg.cn/images/20210720083331.png)
![docx](https://img-home.csdnimg.cn/images/20210720083331.png)
![application/x-zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![application/x-rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![application/x-rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![application/x-rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![application/msword](https://img-home.csdnimg.cn/images/20210720083327.png)
![rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![whl](https://img-home.csdnimg.cn/images/20210720083646.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![7z](https://img-home.csdnimg.cn/images/20210720083312.png)
![7z](https://img-home.csdnimg.cn/images/20210720083312.png)