In a Windows C++ dynamic library, read the test.mp4 video stream and render it to an off-screen surface of a Windows window, without blocking the UI when the function is called; invoke the library from Qt. Full source code.
Below is a simple example showing how a C++ dynamic library on Windows can read a video stream and render it to an off-screen surface of a window. The example uses FFmpeg and Direct3D 11, and the library is called from a Qt application.
1. Dynamic library code
First we write the dynamic library. The sketch below uses FFmpeg to read the video stream and uploads each decoded frame into a Direct3D 11 texture. It assumes the input stream is H.264-encoded and the output window is rendered with Direct3D 11.
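Before the implementation, it helps to see the C interface the library exports (a minimal sketch; the header name VideoRenderer.h is an assumption, but the three function names match the code below):
```cpp
// VideoRenderer.h -- exported C interface of the DLL (sketch)
#pragma once
#include <Windows.h>

extern "C" {
// Opens test.mp4, sets up the decoder and Direct3D 11; returns false on failure
__declspec(dllexport) bool initialize(HWND hwnd);
// Decodes one frame and presents it to the window passed to initialize()
__declspec(dllexport) void renderNextFrame();
// Releases all FFmpeg and Direct3D resources
__declspec(dllexport) void cleanup();
}
```
The implementation of these three functions follows: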
```cpp
#include <Windows.h>
#include <d3d11.h>
#include <dxgi.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}
#define WIDTH 640
#define HEIGHT 480
AVFormatContext *pFormatCtx = NULL;
AVCodecContext *pCodecCtx = NULL;
const AVCodec *pCodec = NULL;
AVFrame *pFrame = NULL;
AVPacket packet;
int videoStreamIndex = -1;
struct SwsContext *pSwsCtx = NULL;
ID3D11Device *pDevice = NULL;
ID3D11DeviceContext *pContext = NULL;
IDXGISwapChain *pSwapChain = NULL;
ID3D11Texture2D *pBackBuffer = NULL; // swap-chain back buffer
ID3D11Texture2D *pTexture = NULL; // CPU-writable texture the decoder fills
extern "C" __declspec(dllexport) bool initialize(HWND hwnd)
{
// Initialize FFmpeg (av_register_all() is deprecated and unnecessary in FFmpeg 4+)
avformat_network_init();
// Open video file
if (avformat_open_input(&pFormatCtx, "test.mp4", NULL, NULL) != 0)
{
return false;
}
// Retrieve stream information
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
return false;
}
// Find the first video stream
videoStreamIndex = -1;
for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++)
{
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoStreamIndex = i;
break;
}
}
if (videoStreamIndex == -1)
{
return false;
}
// Get codec parameters for video stream
AVCodecParameters *pCodecPar = pFormatCtx->streams[videoStreamIndex]->codecpar;
// Find the decoder for the video stream
pCodec = avcodec_find_decoder(pCodecPar->codec_id);
if (pCodec == NULL)
{
return false;
}
// Initialize codec context
pCodecCtx = avcodec_alloc_context3(pCodec);
if (avcodec_parameters_to_context(pCodecCtx, pCodecPar) < 0)
{
return false;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
return false;
}
// Allocate video frame
pFrame = av_frame_alloc();
if (pFrame == NULL)
{
return false;
}
// Initialize SwsContext for color conversion
pSwsCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
WIDTH, HEIGHT, AV_PIX_FMT_BGRA, SWS_FAST_BILINEAR, NULL, NULL, NULL);
if (pSwsCtx == NULL)
{
return false;
}
// Create the Direct3D 11 device, immediate context and swap chain in one call
D3D_FEATURE_LEVEL featureLevels[] = {D3D_FEATURE_LEVEL_11_0};
DXGI_SWAP_CHAIN_DESC scd = {};
scd.BufferCount = 1;
scd.BufferDesc.Width = WIDTH;
scd.BufferDesc.Height = HEIGHT;
scd.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
scd.BufferDesc.RefreshRate.Numerator = 60;
scd.BufferDesc.RefreshRate.Denominator = 1;
scd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
scd.OutputWindow = hwnd;
scd.SampleDesc.Count = 1;
scd.SampleDesc.Quality = 0;
scd.Windowed = TRUE;
if (FAILED(D3D11CreateDeviceAndSwapChain(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0,
featureLevels, 1, D3D11_SDK_VERSION, &scd, &pSwapChain, &pDevice, NULL, &pContext)))
{
return false;
}
// Keep a reference to the back buffer so decoded frames can be copied into it
pSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void **)&pBackBuffer);
// Create a CPU-writable texture that sws_scale() fills with BGRA pixels;
// it matches the back buffer exactly so CopyResource() can be used
D3D11_TEXTURE2D_DESC td = {};
td.Width = WIDTH;
td.Height = HEIGHT;
td.MipLevels = 1;
td.ArraySize = 1;
td.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
td.SampleDesc.Count = 1;
td.Usage = D3D11_USAGE_DYNAMIC; // dynamic so the CPU can Map() it every frame
td.BindFlags = D3D11_BIND_SHADER_RESOURCE;
td.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
pDevice->CreateTexture2D(&td, NULL, &pTexture);
return true;
}
extern "C" __declspec(dllexport) void renderNextFrame()
{
// Read the next packet from the video stream
while (av_read_frame(pFormatCtx, &packet) >= 0)
{
if (packet.stream_index == videoStreamIndex)
{
// Decode the video frame
if (avcodec_send_packet(pCodecCtx, &packet) == 0)
{
while (avcodec_receive_frame(pCodecCtx, pFrame) == 0)
{
// Convert the video frame to the desired format
uint8_t *data[4];
int linesize[4];
data[0] = (uint8_t *)pTexture->Map(0, D3D11_MAP_WRITE_DISCARD, 0);
linesize[0] = WIDTH * 4;
sws_scale(pSwsCtx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, data, linesize);
pTexture->Unmap(0, NULL);
// Render the video frame to the surface
pContext->ClearRenderTargetView(pSurface, DirectX::Colors::Black);
pContext->PSSetShaderResources(0, 1, &pSRV);
pContext->Draw(3, 0);
pContext->Flush();
}
}
}
av_packet_unref(&packet);
}
}
extern "C" __declspec(dllexport) void cleanup()
{
avformat_close_input(&pFormatCtx);
avformat_network_deinit();
avcodec_close(pCodecCtx);
av_frame_free(&pFrame);
sws_freeContext(pSwsCtx);
pSurface->Release();
pTexture->Release();
pSRV->Release();
pContext->Release();
pDevice->Release();
}
```
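Note that renderNextFrame() still decodes on whichever thread calls it. To fully meet the "never block the UI" requirement, the library itself could decode on a worker thread. The sketch below shows one way to do that with std::thread; the startPlayback/stopPlayback exports are hypothetical additions, not part of the code above:
```cpp
#include <atomic>
#include <chrono>
#include <thread>

static std::thread g_decodeThread;
static std::atomic<bool> g_running(false);

// Hypothetical export: decode and present frames on a background thread
extern "C" __declspec(dllexport) void startPlayback()
{
    g_running = true;
    g_decodeThread = std::thread([]() {
        while (g_running)
        {
            renderNextFrame(); // decode + present one frame
            std::this_thread::sleep_for(std::chrono::milliseconds(33)); // ~30 fps pacing
        }
    });
}

// Hypothetical export: stop the worker thread before calling cleanup()
extern "C" __declspec(dllexport) void stopPlayback()
{
    g_running = false;
    if (g_decodeThread.joinable())
        g_decodeThread.join();
}
```
With this approach only the worker thread touches the FFmpeg and Direct3D objects, which avoids cross-thread use of the immediate context; the Qt side would then call startPlayback() once instead of driving frames from a timer.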
2. Qt code
Next we write a Qt application that loads the dynamic library and drives the rendering. The sketch below creates a window and periodically asks the library to decode and present the next frame of the video stream.
```cpp
#include <QtWidgets/QApplication>
#include <QtWidgets/QMainWindow>
#include <QtCore/QTimer>
#include <Windows.h>
typedef bool (*InitializeFn)(HWND hwnd);
typedef void (*RenderNextFrameFn)();
typedef void (*CleanupFn)();
class VideoPlayer : public QMainWindow
{
public:
VideoPlayer() : m_hwnd(NULL), m_initialized(false),
m_renderNextFrameFn(NULL), m_cleanupFn(NULL), m_renderTimer(this) {}
// The library decodes one frame and presents it directly to the native
// window handle, so no QPainter drawing is needed here
virtual void paintEvent(QPaintEvent *event)
{
Q_UNUSED(event);
if (!m_initialized)
{
return;
}
m_renderNextFrameFn();
}
void startRendering()
{
// Load the dynamic library
HMODULE hModule = LoadLibrary(TEXT("VideoRenderer.dll"));
if (hModule == NULL)
{
return;
}
// Get function pointers
InitializeFn initializeFn = (InitializeFn)GetProcAddress(hModule, "initialize");
if (initializeFn == NULL)
{
return;
}
m_renderNextFrameFn = (RenderNextFrameFn)GetProcAddress(hModule, "renderNextFrame");
if (m_renderNextFrameFn == NULL)
{
return;
}
m_cleanupFn = (CleanupFn)GetProcAddress(hModule, "cleanup");
if (m_cleanupFn == NULL)
{
return;
}
// Initialize the dynamic library
m_hwnd = (HWND)winId();
if (!initializeFn(m_hwnd))
{
return;
}
m_initialized = true;
// Start rendering timer
connect(&m_renderTimer, &QTimer::timeout, this, [this]() {
update();
});
m_renderTimer.start(16);
// Register event handler for window close
qApp->installEventFilter(this);
}
bool eventFilter(QObject *obj, QEvent *event)
{
// Only react when this window itself is closing
if (obj == this && event->type() == QEvent::Close)
{
// Stop rendering timer
m_renderTimer.stop();
// Clean up dynamic library
m_initialized = false;
if (m_cleanupFn)
{
m_cleanupFn();
}
// Unregister event handler for window close
qApp->removeEventFilter(this);
}
return QMainWindow::eventFilter(obj, event);
}
private:
HWND m_hwnd;
bool m_initialized;
RenderNextFrameFn m_renderNextFrameFn;
CleanupFn m_cleanupFn;
QTimer m_renderTimer;
};
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
VideoPlayer player;
player.show();
player.startRendering();
return app.exec();
}
```
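One detail worth adding when a native API paints directly into a Qt widget is telling Qt not to back-buffer the widget itself, otherwise Qt's own painting can fight with Direct3D. A common pattern (a sketch, applied to the VideoPlayer class above) is:
```cpp
// Inside the VideoPlayer constructor: let Direct3D own the window surface
setAttribute(Qt::WA_PaintOnScreen); // Qt will not back-buffer this widget
setAttribute(Qt::WA_NativeWindow);  // force a real HWND for winId()

// And in the class declaration: disable Qt's paint engine entirely
QPaintEngine *paintEngine() const override { return nullptr; }
```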
3. Building and running
To build the project you need the FFmpeg development libraries and the Direct3D 11 headers from the Windows SDK, with their include and library paths added to the project. You also need to copy VideoRenderer.dll next to the application executable.
Since the program targets Windows, the DLL is typically built with Visual Studio (MSVC). The Qt project can be imported into Visual Studio, or built with qmake against the same compiler, with the required libraries and headers configured in the project settings.
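For the DLL, a minimal qmake project file might look like the sketch below (the FFmpeg install path C:/ffmpeg is an assumption; adjust it to your setup):
```
# VideoRenderer.pro -- sketch of a qmake project for the DLL
TEMPLATE = lib
CONFIG += c++17 dll
SOURCES += videorenderer.cpp

# FFmpeg development package (paths are assumptions)
INCLUDEPATH += C:/ffmpeg/include
LIBS += -LC:/ffmpeg/lib -lavcodec -lavformat -lavutil -lswscale

# Direct3D 11 / DXGI come from the Windows SDK
LIBS += -ld3d11 -ldxgi
```
The Qt application itself needs no FFmpeg or Direct3D libraries, since it only loads the DLL at runtime with LoadLibrary().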
After building, run the program and it will play test.mp4, displaying the video stream in the window.