android下使用FFMpeg的VAAPI播放视频的代码
时间: 2024-04-05 11:34:45 浏览: 121
使用FFmpeg和VAAPI播放视频需要以下步骤:
1. 初始化FFmpeg和VAAPI环境:
```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <unistd.h>
#include <fcntl.h>
#include <pthread.h>
#include <va/va.h>
#include <va/va_drm.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
// VAAPI decoder state shared by va_init() and decode_and_render().
static VADisplay va_dpy;            // display handle obtained from vaGetDisplayDRM()
static VAConfigID va_config;        // H.264 High / VLD config created in va_init()
static VAContextID va_context;      // decode context bound to va_surfaces[]
static VASurfaceID va_surfaces[10]; // render-target surfaces; capacity is the upper bound for num_surfaces
static int va_buffers[10];          // NOTE(review): shadowed by a local va_buffers[] in decode_and_render() and never read here — likely dead; confirm before removing
static int va_buffers_num;          // set in va_init(); used as a buffer-slot count by decode_and_render()
```
```c
// Initialize VAAPI for H.264 (High profile) decoding.
//
// p_codec_ctx:  opened FFmpeg codec context; only width/height are read here.
// num_surfaces: number of decode surfaces to create; must fit in the
//               file-scope va_surfaces[] array (capacity 10).
//
// Returns true on success, false on any failure. Temporary allocations are
// released on every path (the original leaked `entrypoints`/`profiles` on
// each error return).
static bool va_init(AVCodecContext *p_codec_ctx, int num_surfaces)
{
    VAStatus va_status;
    VASurfaceAttrib va_surface_attrib[2];
    VAConfigAttrib va_config_attrib;
    int major_version, minor_version;
    int num_entries;
    VAEntrypoint *entrypoints = NULL;
    VAProfile *profiles = NULL;
    int drm_fd = -1;
    bool ok = false;

    // Bound-check against the fixed-size va_surfaces[] array.
    if (num_surfaces < 1 ||
        num_surfaces > (int)(sizeof(va_surfaces) / sizeof(va_surfaces[0]))) {
        fprintf(stderr, "num_surfaces %d out of range\n", num_surfaces);
        return false;
    }

    // vaGetDisplayDRM() expects a DRM device fd; the original passed 0, which
    // is stdin. Open a render node instead. The fd must remain open for the
    // lifetime of the display, so it is only closed on the failure path.
    drm_fd = open("/dev/dri/renderD128", O_RDWR);
    if (drm_fd < 0) {
        fprintf(stderr, "cannot open /dev/dri/renderD128\n");
        return false;
    }
    va_dpy = vaGetDisplayDRM(drm_fd);
    if (!va_dpy) {
        fprintf(stderr, "vaGetDisplayDRM failed\n");
        goto out;
    }
    va_status = vaInitialize(va_dpy, &major_version, &minor_version);
    if (va_status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "vaInitialize failed: %d\n", va_status);
        goto out;
    }

    // Probe supported entrypoints/profiles (results are queried but not
    // otherwise inspected, matching the original behavior).
    num_entries = vaMaxNumEntrypoints(va_dpy);
    entrypoints = malloc(num_entries * sizeof(*entrypoints));
    if (!entrypoints) {
        fprintf(stderr, "out of memory\n");
        goto out;
    }
    vaQueryConfigEntrypoints(va_dpy, VAProfileH264High, entrypoints, &num_entries);

    num_entries = vaMaxNumProfiles(va_dpy);
    profiles = malloc(num_entries * sizeof(*profiles));
    if (!profiles) {
        fprintf(stderr, "out of memory\n");
        goto out;
    }
    vaQueryConfigProfiles(va_dpy, profiles, &num_entries);

    va_status = vaCreateConfig(va_dpy, VAProfileH264High, VAEntrypointVLD, NULL, 0, &va_config);
    if (va_status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "vaCreateConfig failed: %d\n", va_status);
        goto out;
    }

    // Request NV12 surfaces backed by DRM PRIME memory.
    va_surface_attrib[0].type = VASurfaceAttribPixelFormat;
    va_surface_attrib[0].value.type = VAGenericValueTypeInteger;
    va_surface_attrib[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
    va_surface_attrib[0].value.value.i = VA_FOURCC('N', 'V', '1', '2');
    va_surface_attrib[1].type = VASurfaceAttribMemoryType;
    va_surface_attrib[1].value.type = VAGenericValueTypeInteger;
    va_surface_attrib[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
    va_surface_attrib[1].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME;
    va_status = vaCreateSurfaces(va_dpy, VA_RT_FORMAT_YUV420,
                                 p_codec_ctx->width, p_codec_ctx->height,
                                 va_surfaces, num_surfaces, va_surface_attrib, 2);
    if (va_status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "vaCreateSurfaces failed: %d\n", va_status);
        goto out;
    }

    va_config_attrib.type = VAConfigAttribRTFormat;
    vaQueryConfigAttributes(va_dpy, va_config, &va_config_attrib, 1);
    if ((va_config_attrib.value & VA_RT_FORMAT_YUV420) == 0) {
        fprintf(stderr, "RT format not supported\n");
        goto out;
    }

    // NOTE(review): vaMaxNumBufferSlots() is not part of the public libva
    // API — confirm against the libva version actually targeted.
    va_buffers_num = vaMaxNumBufferSlots(va_dpy);

    va_status = vaCreateContext(va_dpy, va_config,
                                p_codec_ctx->width, p_codec_ctx->height,
                                VA_PROGRESSIVE, va_surfaces, num_surfaces, &va_context);
    if (va_status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "vaCreateContext failed: %d\n", va_status);
        goto out;
    }
    ok = true;

out:
    free(entrypoints);
    free(profiles);
    if (!ok && drm_fd >= 0)
        close(drm_fd);
    return ok;
}
```
2. 解码视频并将解码后的帧渲染到 VAAPI surface 上（注意：下面的示例在解码流程中混用了 VAEnc* 编码器缓冲区类型，仅作流程示意；实际的 FFmpeg VAAPI 硬解应使用 av_hwdevice_ctx_create() / hw_frames_ctx，参见文末链接的官方 hw_decode.c 示例）：
```c
// Decode the packet with FFmpeg and submit per-frame parameter buffers to the
// VAAPI context.
//
// NOTE(review): this sample submits VAEnc* (encoder) buffer types on what is
// described as a decode path; real FFmpeg VAAPI decoding should instead use
// av_hwdevice_ctx_create() / hw_frames_ctx (see FFmpeg's
// doc/examples/hw_decode.c). The rewrite keeps that structure but fixes the
// concrete memory-safety defects of the original:
//   - i_surface_index and the local va_buffers[] were read uninitialized,
//   - vaCreateBuffer() was called with the wrong signature (its VAStatus
//     result stored as a buffer id, the id out-parameter passed as NULL),
//   - vaMapBuffer() was mapped over the already-filled parameter structs,
//     discarding their contents,
//   - the search loops scanned va_buffers_num entries of a 3-element array,
//   - the surface index advanced twice per frame, so vaBeginPicture() used a
//     different surface than CurrPic,
//   - AVERROR_EOF from avcodec_receive_frame() was treated as a hard error,
//   - the frame was freed and the dangling pointer returned to the caller,
//   - created VA buffers were never destroyed.
//
// Returns 0 on success (negative on error). On success with
// *pi_frame_available == 1, *pp_frame owns the newest decoded frame and the
// caller must av_frame_free() it.
int decode_and_render(AVCodecContext *p_codec_ctx, AVPacket *p_pkt, AVFrame **pp_frame, int *pi_frame_available)
{
    // Must match the surface count passed to va_init(); equals the capacity
    // of the file-scope va_surfaces[] array. TODO(review): confirm.
    enum { NUM_SURFACES = 10, NUM_BUFFERS = 3 };
    // Persist the surface rotation across calls (was read uninitialized).
    static int i_surface_index = 0;
    int i_ret;
    AVFrame *p_frame = NULL;
    VAStatus va_status;
    VABufferID va_buffers[NUM_BUFFERS];
    VAEncPictureParameterBufferH264 va_pic_param;
    VAEncSliceParameterBufferH264 va_slice_param;
    int i;

    for (i = 0; i < NUM_BUFFERS; i++)
        va_buffers[i] = VA_INVALID_ID;
    *pi_frame_available = 0;
    *pp_frame = NULL;

    i_ret = avcodec_send_packet(p_codec_ctx, p_pkt);
    if (i_ret < 0) {
        fprintf(stderr, "avcodec_send_packet failed\n");
        return i_ret;
    }

    while (1) {
        p_frame = av_frame_alloc();
        if (!p_frame) {
            fprintf(stderr, "av_frame_alloc failed\n");
            goto fail;
        }
        i_ret = avcodec_receive_frame(p_codec_ctx, p_frame);
        if (i_ret == AVERROR(EAGAIN) || i_ret == AVERROR_EOF) {
            // Decoder drained for this packet (or flushed) — not an error.
            av_frame_free(&p_frame);
            break;
        }
        if (i_ret < 0) {
            fprintf(stderr, "avcodec_receive_frame failed\n");
            goto fail;
        }

        // Fill the picture parameter block, then create the VA buffer from it
        // (the original filled the struct and then mapped the buffer over it,
        // throwing the data away).
        memset(&va_pic_param, 0, sizeof(va_pic_param));
        va_pic_param.CurrPic.picture_id = va_surfaces[i_surface_index];
        // NOTE(review): coded_picture_number is deprecated in recent FFmpeg;
        // a real decoder derives the picture order count from the bitstream.
        va_pic_param.CurrPic.TopFieldOrderCnt = p_frame->coded_picture_number;
        va_pic_param.CurrPic.BottomFieldOrderCnt = p_frame->coded_picture_number;
        va_pic_param.ReferenceFrames[0].picture_id = VA_INVALID_SURFACE;
        va_pic_param.ReferenceFrames[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        va_pic_param.picture_width_in_mbs_minus1 = (p_codec_ctx->width + 15) / 16 - 1;
        va_pic_param.picture_height_in_mbs_minus1 = (p_codec_ctx->height + 15) / 16 - 1;
        va_pic_param.bits_per_pixel = 0x20;
        va_pic_param.num_slice_groups_minus1 = 0;
        va_pic_param.slice_group_map_type = VA_SLICE_GROUP_MAP_TYPE_INTERLEAVED;
        va_pic_param.num_ref_idx_l0_active_minus1 = 0;
        va_pic_param.num_ref_idx_l1_active_minus1 = 0;
        va_pic_param.chroma_qp_index_offset = 0;
        va_pic_param.second_chroma_qp_index_offset = 0;
        va_pic_param.pic_init_qp_minus26 = p_codec_ctx->qmin;
        va_pic_param.num_ref_frames = 1;
        va_pic_param.frame_num = p_frame->coded_picture_number;
        va_pic_param.frametype = VA_FRAME_PICTURE;
        va_status = vaCreateBuffer(va_dpy, va_context, VAEncPictureParameterBufferType,
                                   sizeof(va_pic_param), 1, &va_pic_param, &va_buffers[0]);
        if (va_status != VA_STATUS_SUCCESS) {
            fprintf(stderr, "vaCreateBuffer (pic param) failed: %d\n", va_status);
            goto fail;
        }

        // Slice parameter buffer, likewise created from the filled struct.
        memset(&va_slice_param, 0, sizeof(va_slice_param));
        va_slice_param.slice_data_size = p_frame->pkt_size;
        va_slice_param.slice_data_offset = 0;
        va_slice_param.slice_type = VA_SLICE_TYPE_I;
        va_slice_param.pic_parameter_set_id = 0;
        va_slice_param.slice_group_change_cycle = 0;
        va_slice_param.num_macroblocks = (p_codec_ctx->height / 16) * (p_codec_ctx->width / 16);
        va_slice_param.disable_deblocking_filter_idc = 0;
        va_slice_param.slice_alpha_c0_offset_div2 = 0;
        va_slice_param.slice_beta_offset_div2 = 0;
        va_status = vaCreateBuffer(va_dpy, va_context, VAEncSliceParameterBufferType,
                                   sizeof(va_slice_param), 1, &va_slice_param, &va_buffers[1]);
        if (va_status != VA_STATUS_SUCCESS) {
            fprintf(stderr, "vaCreateBuffer (slice param) failed: %d\n", va_status);
            goto fail;
        }

        // Coded-data buffer, sized from the frame's packet size.
        va_status = vaCreateBuffer(va_dpy, va_context, VAEncCodedBufferType,
                                   p_frame->pkt_size, 1, NULL, &va_buffers[2]);
        if (va_status != VA_STATUS_SUCCESS) {
            fprintf(stderr, "vaCreateBuffer (coded) failed: %d\n", va_status);
            goto fail;
        }

        // Submit all three buffers against the same surface referenced by
        // CurrPic above (the original advanced the index before this point).
        if (vaBeginPicture(va_dpy, va_context, va_surfaces[i_surface_index]) != VA_STATUS_SUCCESS) {
            fprintf(stderr, "vaBeginPicture failed\n");
            goto fail;
        }
        if (vaRenderPicture(va_dpy, va_context, va_buffers, NUM_BUFFERS) != VA_STATUS_SUCCESS) {
            fprintf(stderr, "vaRenderPicture failed\n");
            goto fail;
        }
        if (vaEndPicture(va_dpy, va_context) != VA_STATUS_SUCCESS) {
            fprintf(stderr, "vaEndPicture failed\n");
            goto fail;
        }

        // Release this frame's buffers (the original leaked them).
        for (i = 0; i < NUM_BUFFERS; i++) {
            vaDestroyBuffer(va_dpy, va_buffers[i]);
            va_buffers[i] = VA_INVALID_ID;
        }

        i_surface_index = (i_surface_index + 1) % NUM_SURFACES;

        // Transfer frame ownership to the caller (the original freed the
        // frame and then stored the dangling pointer). Keep only the newest
        // frame if the decoder produced several.
        if (*pp_frame)
            av_frame_free(pp_frame);
        *pp_frame = p_frame;
        p_frame = NULL;
        *pi_frame_available = 1;
    }
    return 0;

fail:
    for (i = 0; i < NUM_BUFFERS; i++) {
        if (va_buffers[i] != VA_INVALID_ID)
            vaDestroyBuffer(va_dpy, va_buffers[i]);
    }
    av_frame_free(&p_frame);
    if (*pp_frame)
        av_frame_free(pp_frame);
    *pi_frame_available = 0;
    return -1;
}
```
3. 使用SDL或其他图形库将VAAPI surface上的帧渲染到屏幕上。
完整的代码示例可以参考以下链接:https://github.com/FFmpeg/FFmpeg/blob/master/doc/examples/hw_decode.c
阅读全文