Encoding a YUV cv::Mat to an H.264 stream with FFmpeg (C++ code)
Below is a C++ example that encodes YUV frames held in a cv::Mat into an H.264 stream using the FFmpeg libraries:
```c++
#include <iostream>
#include <fstream>
#include <opencv2/opencv.hpp>
// FFmpeg exposes a C API, so its headers need C linkage when compiled as C++
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}

int main(int argc, char* argv[])
{
    // Check input arguments: program name plus four parameters
    if (argc < 5) {
        std::cerr << "Usage: " << argv[0] << " input_file width height output_file" << std::endl;
        return 1;
    }
    // Note: av_register_all() is no longer required (deprecated since FFmpeg 4.0, removed in 5.0)
    // Open input file
    const char* input_file = argv[1];
    int width = std::stoi(argv[2]);
    int height = std::stoi(argv[3]);
    // One I420 frame: a full-resolution Y plane followed by quarter-resolution U and V planes
    cv::Mat yuv_image(height * 3 / 2, width, CV_8UC1);
    std::ifstream input_stream(input_file, std::ios::binary);
    if (!input_stream.is_open()) {
        std::cerr << "Failed to open input file" << std::endl;
        return 1;
    }
    // Open output file
    const char* output_file = argv[4];
    AVFormatContext* format_context = nullptr;
    if (avformat_alloc_output_context2(&format_context, nullptr, nullptr, output_file) < 0) {
        std::cerr << "Failed to allocate output context" << std::endl;
        return 1;
    }
    // Create the output stream and configure the H.264 encoder
    const AVOutputFormat* output_format = format_context->oformat;
    AVStream* stream = avformat_new_stream(format_context, nullptr);
    if (!stream) {
        std::cerr << "Failed to create output stream" << std::endl;
        return 1;
    }
    stream->id = format_context->nb_streams - 1;
    const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec) {
        std::cerr << "Failed to find H.264 encoder" << std::endl;
        return 1;
    }
    // AVStream::codec no longer exists in current FFmpeg; allocate a codec context
    // and copy its parameters into the stream after opening the encoder
    AVCodecContext* codec_context = avcodec_alloc_context3(codec);
    if (!codec_context) {
        std::cerr << "Failed to allocate codec context" << std::endl;
        return 1;
    }
    codec_context->codec_id = AV_CODEC_ID_H264;
    codec_context->codec_type = AVMEDIA_TYPE_VIDEO;
    codec_context->width = width;
    codec_context->height = height;
    codec_context->pix_fmt = AV_PIX_FMT_YUV420P;
    codec_context->time_base = { 1, 25 };
    stream->time_base = codec_context->time_base;
    if (format_context->oformat->flags & AVFMT_GLOBALHEADER) {
        codec_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }
    if (avcodec_open2(codec_context, codec, nullptr) < 0) {
        std::cerr << "Failed to open codec" << std::endl;
        return 1;
    }
    if (avcodec_parameters_from_context(stream->codecpar, codec_context) < 0) {
        std::cerr << "Failed to copy codec parameters to the stream" << std::endl;
        return 1;
    }
    av_dump_format(format_context, 0, output_file, 1);
    if (!(output_format->flags & AVFMT_NOFILE)) {
        if (avio_open(&format_context->pb, output_file, AVIO_FLAG_WRITE) < 0) {
            std::cerr << "Failed to open output file" << std::endl;
            return 1;
        }
    }
    if (avformat_write_header(format_context, nullptr) < 0) {
        std::cerr << "Failed to write header" << std::endl;
        return 1;
    }
    // Initialize video converter; source and destination are both YUV420P,
    // so sws_scale only copies the planes into the (possibly padded) AVFrame buffers
    struct SwsContext* converter = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
                                                  width, height, AV_PIX_FMT_YUV420P,
                                                  0, nullptr, nullptr, nullptr);
    if (!converter) {
        std::cerr << "Failed to create video converter" << std::endl;
        return 1;
    }
    // Encode frames
    AVFrame* frame = av_frame_alloc();
    frame->width = width;
    frame->height = height;
    frame->format = codec_context->pix_fmt;
    if (av_frame_get_buffer(frame, 0) < 0) {
        std::cerr << "Failed to allocate frame buffer" << std::endl;
        return 1;
    }
    AVPacket* packet = av_packet_alloc();
    int frame_count = 0;
    while (input_stream.read(reinterpret_cast<char*>(yuv_image.data), yuv_image.total())) {
        // Plane pointers and strides of the packed I420 buffer held by the Mat
        const uint8_t* src_data[4] = {
            yuv_image.data,                           // Y plane
            yuv_image.data + width * height,          // U plane
            yuv_image.data + width * height * 5 / 4,  // V plane
            nullptr
        };
        int src_linesize[4] = { width, width / 2, width / 2, 0 };
        // Copy the YUV image into the AVFrame
        av_frame_make_writable(frame);
        sws_scale(converter, src_data, src_linesize, 0, height,
                  frame->data, frame->linesize);
        // Encode frame
        frame->pts = frame_count++;
        int result = avcodec_send_frame(codec_context, frame);
        if (result < 0) {
            std::cerr << "Failed to send frame" << std::endl;
            return 1;
        }
        while (result >= 0) {
            result = avcodec_receive_packet(codec_context, packet);
            if (result == AVERROR(EAGAIN) || result == AVERROR_EOF) {
                break;
            }
            if (result < 0) {
                std::cerr << "Failed to receive packet" << std::endl;
                return 1;
            }
            av_packet_rescale_ts(packet, codec_context->time_base, stream->time_base);
            packet->stream_index = stream->index;
            if (av_interleaved_write_frame(format_context, packet) < 0) {
                std::cerr << "Failed to write packet" << std::endl;
                return 1;
            }
            av_packet_unref(packet);
        }
    }
    // Flush encoder
    int result = avcodec_send_frame(codec_context, nullptr);
    if (result < 0) {
        std::cerr << "Failed to flush encoder" << std::endl;
        return 1;
    }
    while (result >= 0) {
        result = avcodec_receive_packet(codec_context, packet);
        if (result == AVERROR(EAGAIN) || result == AVERROR_EOF) {
            break;
        }
        if (result < 0) {
            std::cerr << "Failed to receive packet" << std::endl;
            return 1;
        }
        av_packet_rescale_ts(packet, codec_context->time_base, stream->time_base);
        packet->stream_index = stream->index;
        if (av_interleaved_write_frame(format_context, packet) < 0) {
            std::cerr << "Failed to write packet" << std::endl;
            return 1;
        }
        av_packet_unref(packet);
    }
    // Close output file and release resources
    av_write_trailer(format_context);
    if (format_context && !(output_format->flags & AVFMT_NOFILE)) {
        avio_closep(&format_context->pb);
    }
    av_packet_free(&packet);
    avcodec_free_context(&codec_context);
    avformat_free_context(format_context);
    av_frame_free(&frame);
    sws_freeContext(converter);
    input_stream.close();
    return 0;
}
```
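To build the example on a typical Linux setup, link against both OpenCV and the FFmpeg development libraries; assuming pkg-config knows about your installation, something like `g++ encode_yuv.cpp -o encode_yuv $(pkg-config --cflags --libs opencv4 libavcodec libavformat libavutil libswscale)` should work (the file name `encode_yuv.cpp` is just a placeholder).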
Note that this example assumes the input file contains raw, planar YUV 4:2:0 (I420) frames, i.e. for each frame a full-resolution Y plane followed by the quarter-resolution U and V planes stored back to back. If your input format differs, adjust the code accordingly. The example also assumes the OpenCV and FFmpeg development libraries are installed; depending on your FFmpeg version, minor API adjustments may be needed.
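If your frames start out as BGR cv::Mat images (for example from cv::VideoCapture) rather than a raw YUV file, one way to obtain the I420 layout expected above is OpenCV's cvtColor with COLOR_BGR2YUV_I420. A minimal sketch, assuming even frame dimensions (the function name is illustrative):
```c++
#include <opencv2/opencv.hpp>

// Convert a BGR frame into the planar I420 layout used by the encoder above:
// a CV_8UC1 Mat of size (height * 3 / 2) x width, holding Y, then U, then V.
cv::Mat bgr_to_i420(const cv::Mat& bgr)
{
    cv::Mat yuv_i420;
    cv::cvtColor(bgr, yuv_i420, cv::COLOR_BGR2YUV_I420);
    return yuv_i420;  // yuv_i420.data can be fed to the sws_scale/encode loop above
}
```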