FFmpeg 6.0 screen capture and RTMP streaming: a C++ example
Below is a simple example showing how to capture the screen via X11 with FFmpeg 6.0 in C++ and push it to an RTMP server:
#include <iostream>
#include <cstdlib>
#include <chrono>
#include <thread>
extern "C" {
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <libavutil/imgutils.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/extensions/XShm.h>
#include <sys/ipc.h>
#include <sys/shm.h>
}
#define STREAM_URL "rtmp://example.com/live/stream"
int main()
{
// Initialize X11 display
Display *disp = XOpenDisplay(NULL);
if (!disp) {
std::cerr << "Error: Could not open X11 display." << std::endl;
return EXIT_FAILURE;
}
int screen = DefaultScreen(disp);
Window root = RootWindow(disp, screen);
// Get screen dimensions
int width = XDisplayWidth(disp, screen);
int height = XDisplayHeight(disp, screen);
// Create a shared-memory XImage for fast repeated captures
XShmSegmentInfo shminfo;
XImage *ximg = XShmCreateImage(disp, DefaultVisual(disp, screen), DefaultDepth(disp, screen),
ZPixmap, nullptr, &shminfo, width, height);
if (!ximg) {
std::cerr << "Error: Could not create shared-memory XImage." << std::endl;
return EXIT_FAILURE;
}
shminfo.shmid = shmget(IPC_PRIVATE, ximg->bytes_per_line * ximg->height, IPC_CREAT | 0600);
shminfo.shmaddr = ximg->data = (char *)shmat(shminfo.shmid, nullptr, 0);
shminfo.readOnly = False;
XShmAttach(disp, &shminfo);
XSync(disp, False);
// Allocate AVFrame for video data
AVFrame *frame = av_frame_alloc();
if (!frame) {
std::cerr << "Error: Could not allocate AVFrame." << std::endl;
return EXIT_FAILURE;
}
frame->width = width;
frame->height = height;
// A typical 24-bit X11 visual delivers 32 bits per pixel (BGRX),
// which corresponds to AV_PIX_FMT_BGRA in FFmpeg
frame->format = AV_PIX_FMT_BGRA;
if (av_frame_get_buffer(frame, 32) < 0) {
std::cerr << "Error: Could not allocate video frame data." << std::endl;
return EXIT_FAILURE;
}
// Initialize FFmpeg networking (av_register_all()/avcodec_register_all()
// were removed in FFmpeg 5.0 and are no longer needed with FFmpeg 6.0)
avformat_network_init();
// Open output context
AVFormatContext *outctx = nullptr;
if (avformat_alloc_output_context2(&outctx, nullptr, "flv", STREAM_URL) < 0) {
std::cerr << "Error: Could not allocate output context." << std::endl;
return EXIT_FAILURE;
}
if (avio_open2(&outctx->pb, STREAM_URL, AVIO_FLAG_WRITE, nullptr, nullptr) < 0) {
std::cerr << "Error: Could not open output URL." << std::endl;
return EXIT_FAILURE;
}
// Add video stream
AVStream *vstream = avformat_new_stream(outctx, nullptr);
if (!vstream) {
std::cerr << "Error: Could not allocate video stream." << std::endl;
return EXIT_FAILURE;
}
vstream->id = 0;
vstream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
vstream->codecpar->codec_id = AV_CODEC_ID_H264;
vstream->codecpar->width = width;
vstream->codecpar->height = height;
vstream->codecpar->format = AV_PIX_FMT_YUV420P;
vstream->codecpar->bit_rate = 400000;
vstream->codecpar->profile = FF_PROFILE_H264_BASELINE;
// Find encoder
const AVCodec *vcodec = avcodec_find_encoder(vstream->codecpar->codec_id);
if (!vcodec) {
std::cerr << "Error: Could not find video encoder." << std::endl;
return EXIT_FAILURE;
}
// Open video encoder
AVCodecContext *vctx = avcodec_alloc_context3(vcodec);
if (!vctx) {
std::cerr << "Error: Could not allocate video encoder context." << std::endl;
return EXIT_FAILURE;
}
if (avcodec_parameters_to_context(vctx, vstream->codecpar) < 0) {
std::cerr << "Error: Could not initialize video encoder context." << std::endl;
return EXIT_FAILURE;
}
vctx->bit_rate = 400000;
vctx->time_base = {1, 25};
vctx->framerate = {25, 1};
vctx->gop_size = 10;
// The FLV muxer needs the SPS/PPS as global extradata
if (outctx->oformat->flags & AVFMT_GLOBALHEADER)
vctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
if (vstream->codecpar->codec_id == AV_CODEC_ID_H264) {
av_opt_set(vctx->priv_data, "preset", "ultrafast", 0);
av_opt_set(vctx->priv_data, "tune", "zerolatency", 0);
}
if (avcodec_open2(vctx, vcodec, nullptr) < 0) {
std::cerr << "Error: Could not open video encoder." << std::endl;
return EXIT_FAILURE;
}
// Copy the final encoder parameters (including extradata) back to the stream
avcodec_parameters_from_context(vstream->codecpar, vctx);
vstream->time_base = vctx->time_base;
// Allocate AVPacket for video data
AVPacket *vpacket = av_packet_alloc();
if (!vpacket) {
std::cerr << "Error: Could not allocate video packet." << std::endl;
return EXIT_FAILURE;
}
// Allocate AVFrame for video data after conversion to YUV420P
AVFrame *vframe = av_frame_alloc();
if (!vframe) {
std::cerr << "Error: Could not allocate video frame." << std::endl;
return EXIT_FAILURE;
}
vframe->width = width;
vframe->height = height;
vframe->format = vctx->pix_fmt;
if (av_frame_get_buffer(vframe, 32) < 0) {
std::cerr << "Error: Could not allocate video frame data." << std::endl;
return EXIT_FAILURE;
}
// Initialize swscale context for converting the captured BGRA pixels to YUV420P
SwsContext *swsctx = sws_getContext(width, height, AV_PIX_FMT_BGRA,
width, height, vctx->pix_fmt,
SWS_BICUBIC, nullptr, nullptr, nullptr);
if (!swsctx) {
std::cerr << "Error: Could not initialize swscale context." << std::endl;
return EXIT_FAILURE;
}
// Write header to output context
avformat_write_header(outctx, nullptr);
// Read and encode video frames
std::cout << "Start recording." << std::endl;
int64_t frame_index = 0;
while (true) {
// Grab a screenshot into the shared-memory XImage
XShmGetImage(disp, root, ximg, 0, 0, AllPlanes);
// Copy the captured pixels into the staging frame (assumes a 32-bit BGRA visual)
av_image_copy_plane(frame->data[0], frame->linesize[0],
(const uint8_t *)ximg->data, ximg->bytes_per_line,
width * 4, height);
// Convert BGRA to YUV420P
sws_scale(swsctx, (const uint8_t * const *)frame->data, frame->linesize,
0, height, vframe->data, vframe->linesize);
// Encode video frame; with a {1, 25} time base the pts is simply the frame count
vframe->pts = frame_index++;
int ret = avcodec_send_frame(vctx, vframe);
if (ret < 0) {
std::cerr << "Error: Could not send video frame." << std::endl;
return EXIT_FAILURE;
}
while (ret >= 0) {
ret = avcodec_receive_packet(vctx, vpacket);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
break;
else if (ret < 0) {
std::cerr << "Error: Could not receive video packet." << std::endl;
return EXIT_FAILURE;
}
av_packet_rescale_ts(vpacket, vctx->time_base, vstream->time_base);
vpacket->stream_index = vstream->index;
// Write video packet to output context
av_interleaved_write_frame(outctx, vpacket);
av_packet_unref(vpacket);
}
// Sleep for 40ms to limit framerate to 25fps
std::this_thread::sleep_for(std::chrono::milliseconds(40));
}
// Cleanup (only reached if the capture loop above is changed to terminate)
av_write_trailer(outctx);
avcodec_free_context(&vctx);
av_frame_free(&vframe);
av_packet_free(&vpacket);
av_frame_free(&frame);
avio_closep(&outctx->pb);
avformat_free_context(outctx);
XShmDetach(disp, &shminfo);
XDestroyImage(ximg);
shmdt(shminfo.shmaddr);
shmctl(shminfo.shmid, IPC_RMID, nullptr);
XCloseDisplay(disp);
return EXIT_SUCCESS;
}
This example assumes that FFmpeg 6.0 and the X11 development libraries are installed. You can compile it with the following command:
g++ -o screen_capture screen_capture.cpp -lX11 -lXext `pkg-config --cflags --libs libavutil libavcodec libavformat libswscale libswresample`
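Once the program is running, you can verify the stream by playing it back, for example with ffplay (this assumes an RTMP server is actually listening at the address configured in STREAM_URL; setting one up, e.g. nginx with the RTMP module, is outside the scope of this example):
ffplay rtmp://example.com/live/stream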
Note that this example is only a simple demonstration and does not handle every error or exceptional condition. In a real application you should add error and exception handling as appropriate for your needs.
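As one possible starting point, a small helper that turns FFmpeg's integer error codes into readable messages via av_strerror() keeps the checks from becoming repetitive. The sketch below is only an illustration and is not part of the example above; the helper name check_ff is made up:

#include <stdexcept>
#include <string>
extern "C" {
#include <libavutil/error.h>
}

// Hypothetical helper: convert a negative FFmpeg return code into an exception
// carrying a human-readable description of the error.
static int check_ff(int ret, const char *what)
{
    if (ret < 0) {
        char buf[AV_ERROR_MAX_STRING_SIZE] = {0};
        av_strerror(ret, buf, sizeof(buf));
        throw std::runtime_error(std::string(what) + ": " + buf);
    }
    return ret;
}

// Usage, for example:
// check_ff(avformat_write_header(outctx, nullptr), "avformat_write_header");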