Capturing an RTSP Video Stream with FFmpeg: C++ Implementation
Below is a C++ code example that uses FFmpeg to read an RTSP video stream:
```
#include <iostream>
#include <string>
#include <thread>
#include <mutex>
#include <chrono>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libavutil/parseutils.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}

// mutex for accessing the global frame buffer
std::mutex frame_mutex;
AVFrame* frame_buffer = nullptr;

// thread function for decoding and storing frames
void decode_thread(AVFormatContext* format_ctx, int video_stream_idx) {
    const AVCodec* codec = nullptr;
    AVCodecContext* codec_ctx = nullptr;
    AVPacket packet;
    AVFrame* frame = nullptr;

    // find decoder for the video stream
    codec = avcodec_find_decoder(format_ctx->streams[video_stream_idx]->codecpar->codec_id);
    if (!codec) {
        std::cerr << "Error: could not find decoder for video stream." << std::endl;
        return;
    }

    // allocate codec context, copy the stream parameters into it, and open the codec
    codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        std::cerr << "Error: could not allocate codec context." << std::endl;
        return;
    }
    if (avcodec_parameters_to_context(codec_ctx, format_ctx->streams[video_stream_idx]->codecpar) < 0) {
        std::cerr << "Error: could not copy codec parameters to context." << std::endl;
        avcodec_free_context(&codec_ctx);
        return;
    }
    if (avcodec_open2(codec_ctx, codec, nullptr) < 0) {
        std::cerr << "Error: could not open codec." << std::endl;
        avcodec_free_context(&codec_ctx);
        return;
    }

    // allocate frame for decoding
    frame = av_frame_alloc();
    if (!frame) {
        std::cerr << "Error: could not allocate frame." << std::endl;
        avcodec_free_context(&codec_ctx);
        return;
    }

    // read packets and decode frames
    while (av_read_frame(format_ctx, &packet) >= 0) {
        if (packet.stream_index == video_stream_idx) {
            // send packet to decoder
            if (avcodec_send_packet(codec_ctx, &packet) < 0) {
                std::cerr << "Error: could not send packet to decoder." << std::endl;
                av_packet_unref(&packet);
                break;
            }
            // receive frames from decoder
            while (avcodec_receive_frame(codec_ctx, frame) >= 0) {
                // lock the frame buffer before storing the new frame
                std::unique_lock<std::mutex> lock(frame_mutex);
                // free the previous frame if it exists
                if (frame_buffer) {
                    av_frame_free(&frame_buffer);
                }
                // store the new frame
                frame_buffer = av_frame_clone(frame);
            }
        }
        av_packet_unref(&packet);
    }

    // free resources
    avcodec_free_context(&codec_ctx);
    av_frame_free(&frame);
}

int main(int argc, char* argv[]) {
    if (argc != 2) {
        std::cerr << "Usage: " << argv[0] << " rtsp_url" << std::endl;
        return 1;
    }

    // av_register_all() is no longer required (deprecated in FFmpeg 4.0, removed in 5.0)
    avformat_network_init();

    AVFormatContext* format_ctx = nullptr;
    AVDictionary* options = nullptr;

    // open input stream
    if (avformat_open_input(&format_ctx, argv[1], nullptr, &options) < 0) {
        std::cerr << "Error: could not open input stream." << std::endl;
        return 1;
    }

    // find stream information
    if (avformat_find_stream_info(format_ctx, nullptr) < 0) {
        std::cerr << "Error: could not find stream information." << std::endl;
        return 1;
    }

    // find video stream
    int video_stream_idx = av_find_best_stream(format_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
    if (video_stream_idx < 0) {
        std::cerr << "Error: could not find video stream." << std::endl;
        return 1;
    }

    // start decoding thread
    std::thread decoder(decode_thread, format_ctx, video_stream_idx);

    // initialize scaler for converting the decoded frames to RGB,
    // using the stream parameters (frame_buffer is still null at this point)
    AVCodecParameters* codecpar = format_ctx->streams[video_stream_idx]->codecpar;
    SwsContext* scaler = sws_getContext(
        codecpar->width, codecpar->height, (AVPixelFormat)codecpar->format,
        codecpar->width, codecpar->height, AV_PIX_FMT_RGB24,
        SWS_BILINEAR, nullptr, nullptr, nullptr);

    // main loop for displaying frames (runs until the process is interrupted)
    while (true) {
        // lock the frame buffer before accessing it
        std::unique_lock<std::mutex> lock(frame_mutex);
        // display the current frame if it exists
        if (frame_buffer) {
            // allocate buffer for the packed RGB frame (3 bytes per pixel)
            uint8_t* rgb_buffer = (uint8_t*)av_malloc(frame_buffer->width * frame_buffer->height * 3);
            // convert the frame to RGB
            uint8_t* dst_data[1] = { rgb_buffer };
            int dst_linesize[1] = { 3 * frame_buffer->width };
            sws_scale(scaler, frame_buffer->data, frame_buffer->linesize, 0, frame_buffer->height,
                      dst_data, dst_linesize);
            // display the RGB frame (here you would use a GUI library or write the image to a file)
            // ...
            // free the RGB buffer
            av_free(rgb_buffer);
        }
        // unlock the frame buffer
        lock.unlock();
        // sleep for a short time to reduce CPU usage
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }

    // free resources (not reached while the loop above runs forever)
    decoder.join();
    sws_freeContext(scaler);
    avformat_close_input(&format_ctx);
    avformat_network_deinit();
    return 0;
}
```
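The `options` dictionary in `main()` is passed to `avformat_open_input()` but never populated. As a small optional sketch (assuming the camera supports TCP interleaving), the RTSP demuxer can be asked to use TCP before the input is opened, which often avoids the packet loss and smearing seen with the default UDP transport:
```
// Optional: ask FFmpeg's RTSP demuxer to use TCP instead of the default UDP
// transport. Set this before calling avformat_open_input().
av_dict_set(&options, "rtsp_transport", "tcp", 0);
if (avformat_open_input(&format_ctx, argv[1], nullptr, &options) < 0) {
    std::cerr << "Error: could not open input stream." << std::endl;
    return 1;
}
av_dict_free(&options);  // release any options the demuxer did not consume
```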
This code uses the FFmpeg libraries to pull frames from an RTSP video stream and convert them to RGB for display. A separate thread decodes the packets and stores the frames, with a mutex providing thread synchronization. The main thread repeatedly takes the latest frame from the global frame buffer and displays it, until the user interrupts the program.
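The display step itself is left as a placeholder (`// ...`) in the example. As a minimal way to verify the pipeline without a GUI library, the converted RGB buffer could be dumped to a PPM file; `save_frame_ppm` below is a hypothetical helper written only for this illustration and is not part of FFmpeg:
```
#include <cstdio>
#include <cstdint>

// Hypothetical helper for this example: write a packed RGB24 buffer as a
// binary PPM (P6) image. Assumes the buffer holds width * height * 3 bytes
// with no row padding, which matches the dst_linesize used with sws_scale().
static bool save_frame_ppm(const uint8_t* rgb, int width, int height, const char* path) {
    FILE* f = std::fopen(path, "wb");
    if (!f) {
        return false;
    }
    std::fprintf(f, "P6\n%d %d\n255\n", width, height);              // PPM header
    std::fwrite(rgb, 1, static_cast<size_t>(width) * height * 3, f); // pixel data
    std::fclose(f);
    return true;
}
```
Calling something like `save_frame_ppm(rgb_buffer, frame_buffer->width, frame_buffer->height, "frame.ppm")` at the placeholder writes out the most recent frame; a real application would instead hand the buffer to a GUI or video toolkit such as SDL or Qt.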