openglES 在android中通过C++ 将rgb图片转换成mp4视频完整代码例子
时间: 2024-02-09 14:08:47 浏览: 150
以下是一个简单的示例代码,用于将RGB图像转换为MP4视频。这个代码假设你已经熟悉了OpenGL ES和Android NDK的基础知识。
```c++
#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
// FFmpeg headers are plain C and need explicit C linkage in a C++ TU.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>  // av_image_get_linesize()
#include <libavutil/opt.h>       // av_opt_set()
#include <libswscale/swscale.h>
}
#define LOG_TAG "VideoEncoder"
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
// Fixed output parameters used by init()/render() below.
#define VIDEO_WIDTH 640
#define VIDEO_HEIGHT 480
#define VIDEO_FPS 30
#define VIDEO_BITRATE 4000000
// --- EGL / OpenGL ES state ---
ANativeWindow* window;  // NOTE(review): never assigned anywhere in this file — must be obtained (e.g. ANativeWindow_fromSurface) before init() touches it
EGLDisplay display;     // default EGL display
EGLSurface surface;     // window surface render() draws into
EGLContext context;     // GLES context made current in init()
GLuint texture;         // RGB source texture sampled by the draw in render()
// --- FFmpeg muxing / encoding state ---
AVFormatContext* formatContext;  // MP4 muxer context for "output.mp4"
AVOutputFormat* outputFormat;    // shortcut to formatContext->oformat
AVCodecContext* codecContext;    // video encoder, fed YUV420P frames
AVStream* stream;                // the single video stream of the MP4
AVPacket packet;                 // reused for every encoded packet
AVFrame* frame;                  // YUV420P frame handed to the encoder
SwsContext* swsContext;          // RGB24 -> YUV420P converter
uint8_t* buffer;                 // staging buffer filled by glReadPixels in render()
int bufferWidth, bufferHeight;   // byte width of one row / row count of `buffer` (set in init)
int frameCount = 0;              // monotonically increasing PTS counter, one tick per frame
extern "C" JNIEXPORT void JNICALL Java_com_example_videotest_NativeLibrary_init(JNIEnv* env, jobject obj, jobject surfaceObject) {
    // Bind the native side to the Java Surface, create a GLES2 context, and
    // open an MP4 encoder pipeline for VIDEO_WIDTH x VIDEO_HEIGHT RGB frames
    // at VIDEO_FPS. Must be called once before render().

    // --- EGL ---------------------------------------------------------------
    // BUG FIX: the original never assigned `window` (surfaceObject was
    // unused), so every ANativeWindow/EGL call below operated on NULL.
    window = ANativeWindow_fromSurface(env, surfaceObject);
    if (window == NULL) {
        LOGE("ANativeWindow_fromSurface failed");
        return;
    }
    EGLint major, minor;
    EGLConfig config;
    EGLint numConfigs;
    EGLint format;
    display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    eglInitialize(display, &major, &minor);
    // BUG FIX: eglChooseConfig() requires an attribute list — NULL is
    // invalid. Request an RGB888, GLES2-renderable window config.
    const EGLint configAttribs[] = {
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL_SURFACE_TYPE,    EGL_WINDOW_BIT,
        EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8,
        EGL_NONE
    };
    eglChooseConfig(display, configAttribs, &config, 1, &numConfigs);
    eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
    ANativeWindow_setBuffersGeometry(window, 0, 0, format);
    surface = eglCreateWindowSurface(display, config, window, NULL);
    // BUG FIX: without EGL_CONTEXT_CLIENT_VERSION=2, EGL creates a GLES1
    // context and every GLES2 call (shaders, glUseProgram, ...) fails.
    const EGLint contextAttribs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
    context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);
    eglMakeCurrent(display, surface, surface, context);

    // --- GLES source texture ------------------------------------------------
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, VIDEO_WIDTH, VIDEO_HEIGHT, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    // --- FFmpeg muxer + encoder --------------------------------------------
    av_register_all();  // no-op since FFmpeg 4.0; kept for older builds
    avformat_alloc_output_context2(&formatContext, NULL, NULL, "output.mp4");
    outputFormat = formatContext->oformat;
    // BUG FIX: allocate the codec context FROM the encoder. The original used
    // avcodec_alloc_context3(NULL), leaving priv_data NULL, so the
    // av_opt_set(..., "preset", ...) call below dereferenced a null pointer.
    AVCodec* codec = avcodec_find_encoder(outputFormat->video_codec);
    if (codec == NULL) {
        LOGE("no encoder found for codec id %d", outputFormat->video_codec);
        return;
    }
    codecContext = avcodec_alloc_context3(codec);
    codecContext->codec_id = outputFormat->video_codec;
    codecContext->codec_type = AVMEDIA_TYPE_VIDEO;
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
    codecContext->width = VIDEO_WIDTH;
    codecContext->height = VIDEO_HEIGHT;
    codecContext->time_base = (AVRational) {1, VIDEO_FPS};
    codecContext->bit_rate = VIDEO_BITRATE;
    codecContext->gop_size = 10;
    codecContext->max_b_frames = 1;
    // BUG FIX: MP4 requires the codec extradata (SPS/PPS) in global headers,
    // otherwise the written file is unplayable in many players.
    if (outputFormat->flags & AVFMT_GLOBALHEADER)
        codecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    av_opt_set(codecContext->priv_data, "preset", "ultrafast", 0);
    avcodec_open2(codecContext, codec, NULL);
    stream = avformat_new_stream(formatContext, NULL);
    // BUG FIX: give the stream a time base; render() rescales packet
    // timestamps from the codec time base into it before muxing.
    stream->time_base = codecContext->time_base;
    avcodec_parameters_from_context(stream->codecpar, codecContext);
    av_dump_format(formatContext, 0, "output.mp4", 1);
    avio_open(&formatContext->pb, "output.mp4", AVIO_FLAG_WRITE);
    avformat_write_header(formatContext, NULL);

    // --- conversion buffers -------------------------------------------------
    frame = av_frame_alloc();
    frame->format = codecContext->pix_fmt;
    frame->width = VIDEO_WIDTH;
    frame->height = VIDEO_HEIGHT;
    av_frame_get_buffer(frame, 0);
    swsContext = sws_getContext(VIDEO_WIDTH, VIDEO_HEIGHT, AV_PIX_FMT_RGB24,
                                codecContext->width, codecContext->height, codecContext->pix_fmt,
                                SWS_BILINEAR, NULL, NULL, NULL);
    // BUG FIX: `buffer` receives RGB24 pixels from glReadPixels, so a row is
    // width*3 bytes. The original sized it from the YUV420P plane-0 linesize
    // (= width), overflowing the allocation on every captured frame.
    bufferWidth = VIDEO_WIDTH * 3;
    bufferHeight = VIDEO_HEIGHT;
    buffer = (uint8_t*) av_malloc((size_t) bufferWidth * bufferHeight);
}
extern "C" JNIEXPORT void JNICALL Java_com_example_videotest_NativeLibrary_render(JNIEnv* env, jobject obj) {
    // Draw one textured quad, read the framebuffer back as RGB24, convert it
    // to YUV420P, and push one encoded frame into the MP4 muxer.

    // --- draw ----------------------------------------------------------------
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glViewport(0, 0, VIDEO_WIDTH, VIDEO_HEIGHT);
    // NOTE(review): shaderProgram, textureUniform, positionAttribute,
    // texCoordAttribute, vertices and texCoords are not defined in this file —
    // they are assumed to be set up elsewhere; confirm before use.
    glUseProgram(shaderProgram);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, texture);
    glUniform1i(textureUniform, 0);
    glVertexAttribPointer(positionAttribute, 2, GL_FLOAT, GL_FALSE, 0, vertices);
    glEnableVertexAttribArray(positionAttribute);
    glVertexAttribPointer(texCoordAttribute, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
    glEnableVertexAttribArray(texCoordAttribute);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    // --- read back and convert to YUV420P ------------------------------------
    // RGB24 rows are not 4-byte aligned in general; disable the default
    // pack alignment so glReadPixels writes tightly packed rows.
    glPixelStorei(GL_PACK_ALIGNMENT, 1);
    glReadPixels(0, 0, VIDEO_WIDTH, VIDEO_HEIGHT, GL_RGB, GL_UNSIGNED_BYTE, buffer);
    // BUG FIX: the original passed the YUV plane-0 linesize as the RGB source
    // stride — the RGB24 stride is width*3. glReadPixels also returns the
    // image bottom-up, so feed sws_scale the last row with a negative stride
    // to flip it right-side up during conversion.
    const int rgbStride = VIDEO_WIDTH * 3;
    const uint8_t* srcSlice[1] = { buffer + (VIDEO_HEIGHT - 1) * rgbStride };
    const int srcStride[1] = { -rgbStride };
    // Don't scribble on a frame buffer the encoder may still reference.
    av_frame_make_writable(frame);
    sws_scale(swsContext, srcSlice, srcStride, 0, VIDEO_HEIGHT, frame->data, frame->linesize);

    // --- encode and mux ------------------------------------------------------
    av_init_packet(&packet);
    packet.data = NULL;
    packet.size = 0;
    frame->pts = frameCount++;  // PTS in codecContext->time_base units (1/VIDEO_FPS)
    avcodec_send_frame(codecContext, frame);
    while (avcodec_receive_packet(codecContext, &packet) == 0) {
        // BUG FIX: packets must carry the stream index and have their
        // timestamps rescaled from the codec to the stream time base, or the
        // resulting MP4 has broken (or rejected) timing.
        av_packet_rescale_ts(&packet, codecContext->time_base, stream->time_base);
        packet.stream_index = stream->index;
        av_interleaved_write_frame(formatContext, &packet);
        av_packet_unref(&packet);
    }
}
extern "C" JNIEXPORT void JNICALL Java_com_example_videotest_NativeLibrary_release(JNIEnv* env, jobject obj) {
    // Drain the encoder, finalize the MP4, free FFmpeg state, then tear down
    // GL/EGL. Call once after the last render().

    // BUG FIX: flush the encoder before writing the trailer. With
    // max_b_frames > 0 the encoder buffers frames internally; the original
    // silently dropped all of them from the end of the file.
    avcodec_send_frame(codecContext, NULL);  // NULL frame enters drain mode
    while (avcodec_receive_packet(codecContext, &packet) == 0) {
        av_packet_rescale_ts(&packet, codecContext->time_base, stream->time_base);
        packet.stream_index = stream->index;
        av_interleaved_write_frame(formatContext, &packet);
        av_packet_unref(&packet);
    }
    av_write_trailer(formatContext);
    // BUG FIX: avcodec_free_context() pairs with avcodec_alloc_context3();
    // avcodec_close() alone leaks the context allocation.
    avcodec_free_context(&codecContext);
    avio_closep(&formatContext->pb);  // also NULLs the pb pointer
    avformat_free_context(formatContext);
    av_frame_free(&frame);
    av_free(buffer);
    sws_freeContext(swsContext);

    // --- GL / EGL -------------------------------------------------------------
    glDeleteTextures(1, &texture);
    eglMakeCurrent(display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    eglDestroySurface(display, surface);
    eglDestroyContext(display, context);
    eglTerminate(display);
    // Drop the window reference if one was acquired in init().
    if (window != NULL) {
        ANativeWindow_release(window);
        window = NULL;
    }
}
```
注意,这只是一个简单的示例代码,可能需要根据你的具体需求进行修改。尤其是编码器的参数应该根据你的要求进行调整。
阅读全文