FFmpeg 6.0 录屏推流 C++ 示例
时间: 2023-07-05 22:35:53 浏览: 161
以下是在 Windows 平台上,使用 FFmpeg 6.0 实现录屏推流的 C++ 示例代码:
```c++
#include <iostream>
#include <Windows.h>
#include <d3d9.h>
#include <d3dx9.h>
// FFmpeg headers are plain C; without extern "C" the C++ compiler
// name-mangles the declarations and linking fails.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
#include <libswscale/swscale.h>
}
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib, "d3dx9.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")
#define SCREEN_WIDTH 1920
#define SCREEN_HEIGHT 1080
#define FPS 30
#define BIT_RATE 4000000
#define STREAM_URL "rtmp://localhost/live/stream"
int main()
{
// 初始化 D3D9
IDirect3D9Ex* pD3D = nullptr;
if (FAILED(Direct3DCreate9Ex(D3D_SDK_VERSION, &pD3D)))
{
std::cerr << "Failed to create IDirect3D9Ex object" << std::endl;
return -1;
}
// 枚举显示器适配器
UINT adapterCount = pD3D->GetAdapterCount();
if (adapterCount == 0)
{
std::cerr << "No display adapter found" << std::endl;
return -1;
}
// 获取第一个适配器的显示模式
D3DDISPLAYMODE displayMode;
if (FAILED(pD3D->EnumAdapterModes(0, D3DFMT_X8R8G8B8, 0, &displayMode)))
{
std::cerr << "Failed to enumerate display adapter modes" << std::endl;
return -1;
}
// 创建 D3D 设备
D3DPRESENT_PARAMETERS d3dpp = {};
d3dpp.Windowed = TRUE;
d3dpp.BackBufferFormat = D3DFMT_X8R8G8B8;
d3dpp.BackBufferWidth = SCREEN_WIDTH;
d3dpp.BackBufferHeight = SCREEN_HEIGHT;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
IDirect3DDevice9Ex* pD3DDevice = nullptr;
if (FAILED(pD3D->CreateDeviceEx(
D3DADAPTER_DEFAULT,
D3DDEVTYPE_HAL,
GetDesktopWindow(),
D3DCREATE_HARDWARE_VERTEXPROCESSING,
&d3dpp,
nullptr,
&pD3DDevice)))
{
std::cerr << "Failed to create IDirect3DDevice9Ex object" << std::endl;
return -1;
}
// 初始化 FFmpeg
av_register_all();
avcodec_register_all();
avformat_network_init();
// 创建输出流
AVFormatContext* pFormatCtx = nullptr;
if (avformat_alloc_output_context2(&pFormatCtx, nullptr, "flv", STREAM_URL) < 0)
{
std::cerr << "Failed to allocate output context" << std::endl;
return -1;
}
// 创建视频流
AVCodec* pCodec = nullptr;
AVStream* pStream = avformat_new_stream(pFormatCtx, pCodec);
if (!pStream)
{
std::cerr << "Failed to allocate video stream" << std::endl;
return -1;
}
// 设置编码器参数
AVCodecContext* pCodecCtx = pStream->codec;
pCodecCtx->codec_id = AV_CODEC_ID_H264;
pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
pCodecCtx->width = SCREEN_WIDTH;
pCodecCtx->height = SCREEN_HEIGHT;
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
pCodecCtx->bit_rate = BIT_RATE;
pCodecCtx->gop_size = FPS;
pCodecCtx->time_base = { 1, FPS };
pCodecCtx->level = 31;
pCodecCtx->profile = FF_PROFILE_H264_MAIN;
av_opt_set(pCodecCtx->priv_data, "preset", "ultrafast", 0);
av_opt_set(pCodecCtx->priv_data, "tune", "zerolatency", 0);
// 打开编码器
pCodec = avcodec_find_encoder(pCodecCtx->codec_id);
if (!pCodec)
{
std::cerr << "Failed to find video encoder" << std::endl;
return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, nullptr) < 0)
{
std::cerr << "Failed to open video encoder" << std::endl;
return -1;
}
// 打开输出流
if (avio_open(&pFormatCtx->pb, STREAM_URL, AVIO_FLAG_WRITE) < 0)
{
std::cerr << "Failed to open output stream" << std::endl;
return -1;
}
// 写入文件头
if (avformat_write_header(pFormatCtx, nullptr) < 0)
{
std::cerr << "Failed to write file header" << std::endl;
return -1;
}
// 创建缓冲区
const auto buffer_size = av_image_get_buffer_size(pCodecCtx->pix_fmt, SCREEN_WIDTH, SCREEN_HEIGHT, 1);
auto buffer = static_cast<uint8_t*>(av_malloc(buffer_size));
AVFrame* pFrame = av_frame_alloc();
if (!pFrame)
{
std::cerr << "Failed to allocate video frame" << std::endl;
return -1;
}
av_image_fill_arrays(pFrame->data, pFrame->linesize, buffer, pCodecCtx->pix_fmt, SCREEN_WIDTH, SCREEN_HEIGHT, 1);
// 设置 D3D 设备参数
IDirect3DSurface9* pSurface = nullptr;
D3DXMATRIX matrix;
D3DLOCKED_RECT lockedRect;
pD3DDevice->CreateOffscreenPlainSurface(SCREEN_WIDTH, SCREEN_HEIGHT, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, &pSurface, nullptr);
D3DXMatrixIdentity(&matrix);
// 循环推流
bool running = true;
while (running)
{
// 从 D3D 设备中读取屏幕数据
if (SUCCEEDED(pD3DDevice->GetFrontBufferData(0, pSurface)))
{
if (SUCCEEDED(pSurface->LockRect(&lockedRect, nullptr, D3DLOCK_READONLY)))
{
// 将屏幕数据转换为 YUV420P 格式
for (int y = 0; y < SCREEN_HEIGHT; y++)
{
auto dest = buffer + y * pFrame->linesize[0];
auto src = static_cast<uint8_t*>(lockedRect.pBits) + y * lockedRect.Pitch;
memcpy(dest, src, SCREEN_WIDTH * 4);
}
av_frame_set_pts(pFrame, av_rescale_q(av_gettime(), { 1, AV_TIME_BASE }, pCodecCtx->time_base));
// 编码并写入数据
AVPacket packet;
av_init_packet(&packet);
packet.data = nullptr;
packet.size = 0;
int result = avcodec_send_frame(pCodecCtx, pFrame);
if (result == 0)
{
while (result >= 0)
{
result = avcodec_receive_packet(pCodecCtx, &packet);
if (result == 0)
{
packet.stream_index = pStream->index;
av_interleaved_write_frame(pFormatCtx, &packet);
av_packet_unref(&packet);
}
else if (result == AVERROR(EAGAIN) || result == AVERROR_EOF)
{
break;
}
else
{
std::cerr << "Failed to encode video frame" << std::endl;
running = false;
}
}
}
else
{
std::cerr << "Failed to send video frame for encoding" << std::endl;
running = false;
}
pSurface->UnlockRect();
}
}
// 限制帧率
Sleep(1000 / FPS);
}
// 写入文件尾
av_write_trailer(pFormatCtx);
// 释放资源
if (pSurface)
{
pSurface->Release();
}
if (pD3DDevice)
{
pD3DDevice->Release();
}
if (pD3D)
{
pD3D->Release();
}
if (pCodecCtx)
{
avcodec_close(pCodecCtx);
}
if (pFrame)
{
av_frame_free(&pFrame);
}
if (pFormatCtx)
{
avio_close(pFormatCtx->pb);
avformat_free_context(pFormatCtx);
}
if (buffer)
{
av_free(buffer);
}
return 0;
}
```
在这个示例中,我们使用了 Windows 平台上的 DirectX 9 API 来捕获屏幕数据,然后使用 FFmpeg 6.0 的编码器将数据编码为 H.264 格式,并通过 RTMP 协议推流到本地的流服务器。您需要根据具体的情况修改 STREAM_URL、SCREEN_WIDTH、SCREEN_HEIGHT、FPS 和 BIT_RATE 等参数。
阅读全文