Please provide a demo that uses the current FFmpeg API to decode H.264 into YUV
Sure. Here is a demo that uses the current FFmpeg decoding API (avcodec_send_packet / avcodec_receive_frame together with av_parser_parse2) to decode H.264 into YUV:
```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>

#define INBUF_SIZE 4096

/* Receive all frames produced by one packet (or by a NULL flush packet),
 * convert them to YUV420P and append the planes to the output file. */
static int decode_and_write(AVCodecContext *codec_ctx, AVFrame *frame,
                            const AVPacket *pkt, struct SwsContext **sws_ctx,
                            FILE *yuv_file)
{
    int ret = avcodec_send_packet(codec_ctx, pkt);
    if (ret < 0) {
        fprintf(stderr, "Error sending a packet for decoding\n");
        return ret;
    }
    while (ret >= 0) {
        ret = avcodec_receive_frame(codec_ctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;
        if (ret < 0) {
            fprintf(stderr, "Error during decoding (%d)\n", ret);
            return ret;
        }
        /* Create the conversion context lazily, once the frame size and
         * pixel format are known. */
        if (!*sws_ctx) {
            *sws_ctx = sws_getContext(frame->width, frame->height,
                                      (enum AVPixelFormat)frame->format,
                                      frame->width, frame->height,
                                      AV_PIX_FMT_YUV420P, SWS_BILINEAR,
                                      NULL, NULL, NULL);
            if (!*sws_ctx) {
                fprintf(stderr, "Could not initialize the conversion context\n");
                return AVERROR(EINVAL);
            }
        }
        uint8_t *dst_data[4] = { NULL };
        int dst_linesize[4] = { 0 };
        /* align = 1 keeps the planes tightly packed so they can be written
         * out with plain fwrite() calls. */
        ret = av_image_alloc(dst_data, dst_linesize, frame->width, frame->height,
                             AV_PIX_FMT_YUV420P, 1);
        if (ret < 0) {
            fprintf(stderr, "Could not allocate destination image\n");
            return ret;
        }
        sws_scale(*sws_ctx, (const uint8_t * const *)frame->data, frame->linesize,
                  0, frame->height, dst_data, dst_linesize);
        /* YUV420P layout: full-size Y plane, then quarter-size U and V planes. */
        fwrite(dst_data[0], 1, frame->width * frame->height, yuv_file);
        fwrite(dst_data[1], 1, frame->width * frame->height / 4, yuv_file);
        fwrite(dst_data[2], 1, frame->width * frame->height / 4, yuv_file);
        /* av_image_alloc() backs all planes with a single buffer,
         * so only dst_data[0] must be freed. */
        av_freep(&dst_data[0]);
    }
    return 0;
}

int main(int argc, char **argv)
{
    const AVCodec *codec;
    AVCodecContext *codec_ctx = NULL;
    AVCodecParserContext *parser = NULL;
    AVFrame *frame = NULL;
    AVPacket *pkt = NULL;
    struct SwsContext *sws_ctx = NULL;
    uint8_t inbuf[INBUF_SIZE + AV_INPUT_BUFFER_PADDING_SIZE];
    FILE *in_file = NULL, *yuv_file = NULL;
    int ret;

    if (argc < 3) {
        printf("Usage: %s <input file> <output file>\n", argv[0]);
        return 1;
    }
    /* Since FFmpeg 4.0, no registration calls (av_register_all,
     * avcodec_register_all) are needed; they have been removed. */
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!codec) {
        fprintf(stderr, "Codec not found\n");
        return 1;
    }
    parser = av_parser_init(codec->id);
    if (!parser) {
        fprintf(stderr, "Parser not found\n");
        return 1;
    }
    codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        fprintf(stderr, "Could not allocate video codec context\n");
        return 1;
    }
    if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
        return 1;
    }
    frame = av_frame_alloc();
    pkt = av_packet_alloc();
    if (!frame || !pkt) {
        fprintf(stderr, "Could not allocate frame or packet\n");
        return 1;
    }
    in_file = fopen(argv[1], "rb");
    if (!in_file) {
        fprintf(stderr, "Could not open %s\n", argv[1]);
        return 1;
    }
    yuv_file = fopen(argv[2], "wb");
    if (!yuv_file) {
        fprintf(stderr, "Could not open %s\n", argv[2]);
        return 1;
    }
    /* Zero the padding area so damaged streams cannot cause over-reads. */
    memset(inbuf + INBUF_SIZE, 0, AV_INPUT_BUFFER_PADDING_SIZE);
    /* Read the raw Annex B byte stream in chunks and let the parser split it
     * into complete packets for the decoder. */
    while (1) {
        size_t data_size = fread(inbuf, 1, INBUF_SIZE, in_file);
        if (data_size == 0)
            break;
        uint8_t *data = inbuf;
        while (data_size > 0) {
            ret = av_parser_parse2(parser, codec_ctx, &pkt->data, &pkt->size,
                                   data, data_size,
                                   AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
            if (ret < 0) {
                fprintf(stderr, "Error while parsing\n");
                return 1;
            }
            data += ret;
            data_size -= ret;
            if (pkt->size &&
                decode_and_write(codec_ctx, frame, pkt, &sws_ctx, yuv_file) < 0)
                return 1;
        }
    }
    /* Flush the decoder to drain any buffered frames. */
    decode_and_write(codec_ctx, frame, NULL, &sws_ctx, yuv_file);

    fclose(in_file);
    fclose(yuv_file);
    av_parser_close(parser);
    avcodec_free_context(&codec_ctx);
    av_frame_free(&frame);
    av_packet_free(&pkt);
    sws_freeContext(sws_ctx);
    return 0;
}
```
This demo uses FFmpeg's decoding API to decode H.264 into YUV and writes the output to a file. The input is expected to be a raw H.264 (Annex B) byte stream; reading from a container such as MP4 would additionally require libavformat for demuxing. Note that this is only an example, and you may need to adapt it to your specific requirements.
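If the decoder already outputs 8-bit 4:2:0 (AV_PIX_FMT_YUV420P), which is the common case for H.264, the swscale conversion can be skipped and the decoded frame's planes written out directly, as long as the per-row padding in `linesize` is respected. Below is a minimal sketch of such a helper; the name `write_yuv420p_frame` is illustrative, and it assumes the same headers as the demo above:

```c
/* Write one decoded YUV420P frame to a raw YUV file, honouring linesize padding.
 * Assumes frame->format == AV_PIX_FMT_YUV420P (8-bit 4:2:0). */
static void write_yuv420p_frame(const AVFrame *frame, FILE *out)
{
    for (int plane = 0; plane < 3; plane++) {
        /* Chroma planes (U, V) are half the width and height of the Y plane. */
        int w = plane ? frame->width  / 2 : frame->width;
        int h = plane ? frame->height / 2 : frame->height;
        for (int y = 0; y < h; y++)
            fwrite(frame->data[plane] + y * frame->linesize[plane], 1, w, out);
    }
}
```

On a system with the FFmpeg development packages installed, the demo should build with something like `gcc demo.c -o demo $(pkg-config --cflags --libs libavcodec libavutil libswscale)`, and the resulting raw output can be previewed with `ffplay -f rawvideo -pixel_format yuv420p -video_size <width>x<height> output.yuv`.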