Write a JNI OpenGL renderer for MP4
First, use the FFmpeg libraries from C++ to demux and decode the MP4 file, then hand the decoded video frames to OpenGL ES for rendering. JNI is used to bridge the C++ code into the Java side.
Below is a simple example of rendering an MP4 with JNI and OpenGL ES:
```java
public class VideoRenderer {
    static {
        // If FFmpeg is built as separate .so files, load them here before
        // native-lib (e.g. "avutil", "avcodec", "avformat", "swscale");
        // the exact names depend on how FFmpeg was built.
        System.loadLibrary("native-lib");
    }

    public native void init(String videoPath, Surface surface);
    public native void render();
    public native void release();
}
public class MainActivity extends Activity implements SurfaceHolder.Callback {
    private SurfaceView mSurfaceView;
    private SurfaceHolder mSurfaceHolder;
    private VideoRenderer mRenderer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // init() makes the EGL context current and render() blocks while it
        // decodes, so run both on the same worker thread, off the UI thread.
        new Thread(() -> {
            mRenderer = new VideoRenderer();
            mRenderer.init("/sdcard/video.mp4", holder.getSurface());
            mRenderer.render();
        }).start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mRenderer.release();
    }
}
```
native-lib.cpp:
```c++
#include "native-lib.h"
#include <jni.h>
#include <android/native_window_jni.h>
#include <android/log.h>
#include <EGL/egl.h>
#include <GLES/gl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core/mat.hpp>
#include <opencv2/imgproc/types_c.h>
#include <libavutil/imgutils.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#define TAG "OpenGLRenderer"
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
using namespace cv;
extern "C" {
// Full-screen quad drawn as a triangle strip: BL, BR, TL, TR.
static const GLfloat VERTEX_BUF[] = {
        -1.0f, -1.0f, 0.0f,
        1.0f, -1.0f, 0.0f,
        -1.0f, 1.0f, 0.0f,
        1.0f, 1.0f, 0.0f
};
// Texture V is flipped relative to the vertices so that the first row of
// the uploaded frame (the top of the video) appears at the top of the screen.
static const GLfloat TEX_COORD_BUF[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f
};
static const char *videoPath = NULL;
static AVFormatContext *pFormatCtx = NULL;
static AVCodecContext *pCodecCtx = NULL;
static const AVCodec *pCodec = NULL;
static int videoStream = -1;
static AVFrame *pFrame = NULL;
static AVFrame *pFrameRGB = NULL;
static uint8_t *buffer = NULL;
static struct SwsContext *sws_ctx = NULL;
static ANativeWindow *nativeWindow = NULL;
// Keep the EGL objects around so render() and release() can reuse them.
static EGLDisplay eglDisplay = EGL_NO_DISPLAY;
static EGLSurface eglSurface = EGL_NO_SURFACE;
static EGLContext eglContext = EGL_NO_CONTEXT;
static GLuint program = 0;
static GLuint texture = 0;
// Attribute/uniform locations are signed; -1 means "not found".
static GLint positionHandle = -1;
static GLint texCoordHandle = -1;
static GLint textureHandle = -1;
static void checkGlError(const char *op) {
    GLint error;
    for (error = glGetError(); error; error = glGetError()) {
        LOGE("after %s() glError (0x%x)\n", op, error);
    }
}
static GLuint loadShader(GLenum type, const char *shaderCode) {
    GLuint shader = glCreateShader(type);
    glShaderSource(shader, 1, &shaderCode, NULL);
    glCompileShader(shader);
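    // Check the compile status so a bad shader shows up in logcat
    // instead of silently producing a black screen.
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (!compiled) {
        char log[512];
        glGetShaderInfoLog(shader, sizeof(log), NULL, log);
        LOGE("Shader compile failed: %s\n", log);
    }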
    return shader;
}
static GLuint createProgram(const char *vertexShaderCode, const char *fragmentShaderCode) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexShaderCode);
    GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
    GLuint program = glCreateProgram();
    glAttachShader(program, vertexShader);
    glAttachShader(program, fragmentShader);
    glLinkProgram(program);
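    // Likewise verify the link status; on failure glGetAttribLocation()
    // would later return -1 and nothing would draw.
    GLint linked = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if (!linked) {
        char log[512];
        glGetProgramInfoLog(program, sizeof(log), NULL, log);
        LOGE("Program link failed: %s\n", log);
    }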
    return program;
}
static void initShader() {
    const char *vertexShaderCode =
            "attribute vec4 position;\n"
            "attribute vec2 texCoord;\n"
            "varying vec2 vTexCoord;\n"
            "void main() {\n"
            "  gl_Position = position;\n"
            "  vTexCoord = texCoord;\n"
            "}";
    const char *fragmentShaderCode =
            "precision mediump float;\n"
            "varying vec2 vTexCoord;\n"
            "uniform sampler2D texture;\n"
            "void main() {\n"
            "  gl_FragColor = texture2D(texture, vTexCoord);\n"
            "}";
    program = createProgram(vertexShaderCode, fragmentShaderCode);
    positionHandle = glGetAttribLocation(program, "position");
    glEnableVertexAttribArray((GLuint) positionHandle);
    glVertexAttribPointer((GLuint) positionHandle, 3, GL_FLOAT, GL_FALSE, 0, VERTEX_BUF);
    texCoordHandle = glGetAttribLocation(program, "texCoord");
    glEnableVertexAttribArray((GLuint) texCoordHandle);
    glVertexAttribPointer((GLuint) texCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, TEX_COORD_BUF);
    textureHandle = glGetUniformLocation(program, "texture");
}
static void initTexture() {
    glGenTextures(1, &texture);
    // Frames are uploaded with glTexImage2D, so a plain GL_TEXTURE_2D is
    // used (GL_TEXTURE_EXTERNAL_OES is only for SurfaceTexture sources).
    glBindTexture(GL_TEXTURE_2D, texture);
    // ES 2.0 only guarantees non-power-of-two textures with CLAMP_TO_EDGE
    // wrapping and no mipmaps, which suits arbitrary video dimensions.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
static void initDecoder() {
    // av_register_all() is deprecated and unnecessary since FFmpeg 4.0.
    avformat_network_init();
    pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, videoPath, NULL, NULL) != 0) {
        LOGE("Couldn't open file: %s\n", videoPath);
        return;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.\n");
        return;
    }
    // Locate the first video stream in the container.
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if (videoStream == -1) {
        LOGE("Couldn't find video stream.\n");
        return;
    }
    // Find the decoder first, then allocate a codec context for it.
    pCodec = avcodec_find_decoder(pFormatCtx->streams[videoStream]->codecpar->codec_id);
    if (pCodec == NULL) {
        LOGE("Codec not found.\n");
        return;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar);
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.\n");
        return;
    }
    pFrame = av_frame_alloc();
    pFrameRGB = av_frame_alloc();
    // One RGBA buffer, sized for the video, backs pFrameRGB.
    int frameSize = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
    buffer = (uint8_t *) av_malloc(frameSize * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
    // sws_scale converts decoded frames (typically YUV420P) to RGBA.
    sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                             pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGBA,
                             SWS_BILINEAR, NULL, NULL, NULL);
}
static void initEGL(ANativeWindow *window) {
    eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (eglDisplay == EGL_NO_DISPLAY) {
        LOGE("eglGetDisplay() failed.\n");
        return;
    }
    EGLint majorVersion, minorVersion;
    if (eglInitialize(eglDisplay, &majorVersion, &minorVersion) == EGL_FALSE) {
        LOGE("eglInitialize() failed.\n");
        return;
    }
    EGLint numConfigs;
    EGLConfig config;
    // No depth buffer is requested; 2D video composition does not need one.
    EGLint configAttribs[] = {
            EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
            EGL_BLUE_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_RED_SIZE, 8,
            EGL_NONE
    };
    if (eglChooseConfig(eglDisplay, configAttribs, &config, 1, &numConfigs) == EGL_FALSE) {
        LOGE("eglChooseConfig() failed.\n");
        return;
    }
    EGLint contextAttribs[] = {
            EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL_NONE
    };
    eglContext = eglCreateContext(eglDisplay, config, EGL_NO_CONTEXT, contextAttribs);
    if (eglContext == EGL_NO_CONTEXT) {
        LOGE("eglCreateContext() failed.\n");
        return;
    }
    // An ANativeWindow is the EGLNativeWindowType on Android.
    eglSurface = eglCreateWindowSurface(eglDisplay, config, window, NULL);
    if (eglSurface == EGL_NO_SURFACE) {
        LOGE("eglCreateWindowSurface() failed.\n");
        return;
    }
    if (eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext) == EGL_FALSE) {
        LOGE("eglMakeCurrent() failed.\n");
        return;
    }
    initShader();
    initTexture();
}
JNIEXPORT void JNICALL
Java_com_example_VideoRenderer_init(JNIEnv *env, jobject instance, jstring videoPath_, jobject surface) {
    // Copy the path, since the JNI chars must be released but videoPath is kept.
    const char *path = env->GetStringUTFChars(videoPath_, 0);
    videoPath = strdup(path);
    env->ReleaseStringUTFChars(videoPath_, path);
    nativeWindow = ANativeWindow_fromSurface(env, surface);
    initDecoder();
    initEGL(nativeWindow);
}
JNIEXPORT void JNICALL
Java_com_example_VideoRenderer_render(JNIEnv *env, jobject instance) {
    // EGL owns the window here; do not call ANativeWindow_lock(), which
    // would fight with eglSwapBuffers() for the surface.
    glViewport(0, 0, ANativeWindow_getWidth(nativeWindow), ANativeWindow_getHeight(nativeWindow));
    AVPacket packet;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStream) {
            // avcodec_decode_video2() is deprecated; use send/receive instead.
            if (avcodec_send_packet(pCodecCtx, &packet) == 0) {
                while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
                    // Convert the decoded frame (usually YUV) to RGBA.
                    sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                              pFrameRGB->data, pFrameRGB->linesize);
                    glClear(GL_COLOR_BUFFER_BIT);
                    glUseProgram(program);
                    glActiveTexture(GL_TEXTURE0);
                    glBindTexture(GL_TEXTURE_2D, texture);
                    // Upload the RGBA pixels (glTexSubImage2D after the first
                    // frame would avoid reallocating the texture every time).
                    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, pCodecCtx->width, pCodecCtx->height,
                                 0, GL_RGBA, GL_UNSIGNED_BYTE, pFrameRGB->data[0]);
                    glUniform1i(textureHandle, 0);
                    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
                    eglSwapBuffers(eglDisplay, eglSurface);
                }
            }
        }
        av_packet_unref(&packet);
    }
}
JNIEXPORT void JNICALL
Java_com_example_VideoRenderer_release(JNIEnv *env, jobject instance) {
    av_frame_free(&pFrame);
    av_frame_free(&pFrameRGB);
    av_free(buffer);
    sws_freeContext(sws_ctx);
    // avcodec_close() is deprecated; freeing the context also closes it.
    avcodec_free_context(&pCodecCtx);
    avformat_close_input(&pFormatCtx);
    avformat_network_deinit();
    free((void *) videoPath);
    // Tear down EGL before releasing the window it rendered into.
    eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    eglDestroySurface(eglDisplay, eglSurface);
    eglDestroyContext(eglDisplay, eglContext);
    eglTerminate(eglDisplay);
    ANativeWindow_release(nativeWindow);
}
}
```
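One thing the render loop above does not do is pace frames: it draws them as fast as they decode. A minimal pacing sketch, assuming a `startUs` timestamp captured with `av_gettime()` when playback begins and the video stream's `time_base` (both names are illustrative, not part of the example above):
```c++
extern "C" {
#include <libavutil/avutil.h>
#include <libavutil/mathematics.h>
#include <libavutil/time.h>
}
#include <unistd.h>

// Sleep until a frame's presentation time. 'pts' is the decoded frame's
// pts in 'timeBase' units; 'startUs' is av_gettime() at playback start.
static void waitForPts(int64_t pts, AVRational timeBase, int64_t startUs) {
    int64_t ptsUs = av_rescale_q(pts, timeBase, av_get_time_base_q()); // pts -> microseconds
    int64_t elapsedUs = av_gettime() - startUs;                        // wall-clock progress
    if (ptsUs > elapsedUs) {
        usleep((useconds_t) (ptsUs - elapsedUs));                      // wait out the difference
    }
}
```
Called with `pFrame->pts` and the stream's `time_base` just before `eglSwapBuffers()`, this would hold each frame until its presentation time.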
Note that because the frames are converted to RGBA and uploaded with glTexImage2D, the code uses an ordinary GL_TEXTURE_2D with a sampler2D in the shader. The GL_TEXTURE_EXTERNAL_OES target and the samplerExternalOES sampler type are only needed when sampling directly from a SurfaceTexture, for example when decoding with MediaCodec.
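For that external-texture path, the fragment shader would instead be declared along these lines (a sketch; `sTexture` is an illustrative name):
```c++
// Fragment shader for sampling a SurfaceTexture-backed external texture.
// Requires the GL_OES_EGL_image_external extension declaration.
static const char *extFragmentShaderCode =
        "#extension GL_OES_EGL_image_external : require\n"
        "precision mediump float;\n"
        "varying vec2 vTexCoord;\n"
        "uniform samplerExternalOES sTexture;\n"
        "void main() {\n"
        "  gl_FragColor = texture2D(sTexture, vTexCoord);\n"
        "}";
```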
This is only a simple example; a real application will need further modifications and optimizations to fit its requirements.