A complete, minimal C++ example: decoding MP4 video with MediaCodec and rendering it as RGB images via OpenGL ES
Below is a simple C++ example that uses MediaCodec to decode an MP4 video and render the frames as RGB images:
```cpp
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <ctime>
#include <jni.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#include <android/log.h>
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormat.h>
#include <media/NdkMediaExtractor.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>  // GL_TEXTURE_EXTERNAL_OES
#include <unistd.h>        // usleep
#define LOG_TAG "VideoDecoder"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
class VideoDecoder {
public:
    VideoDecoder();
    ~VideoDecoder();
    void setSurface(JNIEnv *env, jobject surface);
    void decode(const char *path);

private:
    void init(const char *path);
    void release();
    void decodeLoop();
    void drawFrame();
    void updateTexImage();

    ANativeWindow *mWindow;
    AMediaExtractor *mExtractor;
    AMediaCodec *mCodec;
    int32_t mWidth, mHeight;
    GLuint mTextureId;
    EGLDisplay mEglDisplay;
    EGLSurface mEglSurface;
    EGLContext mEglContext;
};
VideoDecoder::VideoDecoder()
    : mWindow(nullptr)
    , mExtractor(nullptr)
    , mCodec(nullptr)
    , mWidth(0)
    , mHeight(0)
    , mTextureId(0)
    , mEglDisplay(EGL_NO_DISPLAY)
    , mEglSurface(EGL_NO_SURFACE)
    , mEglContext(EGL_NO_CONTEXT)
{
}
VideoDecoder::~VideoDecoder()
{
    release();
    if (mWindow) {
        ANativeWindow_release(mWindow);  // drop our reference to the Java Surface
        mWindow = nullptr;
    }
}
void VideoDecoder::init(const char *path)
{
    mExtractor = AMediaExtractor_new();
    AMediaExtractor_setDataSource(mExtractor, path);

    // Find the first video track and remember its dimensions.
    size_t numTracks = AMediaExtractor_getTrackCount(mExtractor);
    ssize_t videoTrackIndex = -1;
    for (size_t i = 0; i < numTracks; ++i) {
        AMediaFormat *format = AMediaExtractor_getTrackFormat(mExtractor, i);
        const char *mime;
        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
        bool isVideo = strncmp(mime, "video/", 6) == 0;
        if (isVideo) {
            videoTrackIndex = i;
            AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &mWidth);
            AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &mHeight);
        }
        AMediaFormat_delete(format);  // formats returned by the extractor are owned by the caller
        if (isVideo) {
            break;
        }
    }
    if (videoTrackIndex < 0) {
        LOGD("No video track found!");
        return;
    }

    AMediaExtractor_selectTrack(mExtractor, videoTrackIndex);
    AMediaFormat *format = AMediaExtractor_getTrackFormat(mExtractor, videoTrackIndex);
    const char *mime;
    AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
    mCodec = AMediaCodec_createDecoderByType(mime);
    // Note: configuring the codec with mWindow sends decoded frames straight to that
    // window. A window can only have one producer, so in a real pipeline the codec
    // would render into a SurfaceTexture-backed window while EGL draws to the display
    // window; both are shown against the same window here for brevity.
    AMediaCodec_configure(mCodec, format, mWindow, nullptr, 0);
    AMediaFormat_delete(format);
    AMediaCodec_start(mCodec);

    // Set up EGL before making any GL calls: they require a current context.
    // (ANativeWindow_setBuffersGeometry is not needed; eglCreateWindowSurface
    // configures the window's buffers itself.)
    mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    eglInitialize(mEglDisplay, nullptr, nullptr);
    EGLint attribs[] = { EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                         EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                         EGL_BLUE_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_RED_SIZE, 8,
                         EGL_NONE };
    EGLint numConfigs;
    EGLConfig config;
    eglChooseConfig(mEglDisplay, attribs, &config, 1, &numConfigs);
    mEglSurface = eglCreateWindowSurface(mEglDisplay, config, mWindow, nullptr);
    EGLint contextAttribs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
    mEglContext = eglCreateContext(mEglDisplay, config, EGL_NO_CONTEXT, contextAttribs);
    eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext);

    // Create the external (OES) texture that receives decoded video frames.
    glGenTextures(1, &mTextureId);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureId);
    glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
void VideoDecoder::release()
{
    if (mCodec) {
        AMediaCodec_stop(mCodec);
        AMediaCodec_delete(mCodec);
        mCodec = nullptr;
    }
    if (mExtractor) {
        AMediaExtractor_delete(mExtractor);
        mExtractor = nullptr;
    }
    if (mTextureId) {
        glDeleteTextures(1, &mTextureId);
        mTextureId = 0;
    }
    if (mEglDisplay != EGL_NO_DISPLAY) {
        eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        if (mEglContext != EGL_NO_CONTEXT) {
            eglDestroyContext(mEglDisplay, mEglContext);
            mEglContext = EGL_NO_CONTEXT;
        }
        if (mEglSurface != EGL_NO_SURFACE) {
            eglDestroySurface(mEglDisplay, mEglSurface);
            mEglSurface = EGL_NO_SURFACE;
        }
        eglTerminate(mEglDisplay);
        mEglDisplay = EGL_NO_DISPLAY;
    }
}
void VideoDecoder::setSurface(JNIEnv *env, jobject surface)
{
    if (mWindow) {
        ANativeWindow_release(mWindow);
        mWindow = nullptr;
    }
    mWindow = ANativeWindow_fromSurface(env, surface);
}
void VideoDecoder::decode(const char *path)
{
    if (!mWindow) {
        LOGD("Surface not set!");
        return;
    }
    init(path);
    if (!mCodec) {
        LOGD("Failed to create codec!");
        return;
    }
    decodeLoop();
    release();
}
void VideoDecoder::decodeLoop()
{
    bool inputDone = false;
    bool outputDone = false;
    AMediaCodecBufferInfo info;
    int64_t startMs = -1;
    const int64_t timeoutUs = 10000;  // 10 ms timeout

    while (!outputDone) {
        if (!inputDone) {
            ssize_t bufIdx = AMediaCodec_dequeueInputBuffer(mCodec, timeoutUs);
            if (bufIdx >= 0) {
                size_t bufSize = 0;
                uint8_t *buf = AMediaCodec_getInputBuffer(mCodec, bufIdx, &bufSize);
                // readSampleData returns ssize_t; a negative value means end of stream.
                ssize_t sampleSize = AMediaExtractor_readSampleData(mExtractor, buf, bufSize);
                if (sampleSize < 0) {
                    inputDone = true;
                    AMediaCodec_queueInputBuffer(mCodec, bufIdx, 0, 0, 0,
                                                 AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
                } else {
                    int64_t presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
                    AMediaCodec_queueInputBuffer(mCodec, bufIdx, 0, sampleSize,
                                                 presentationTimeUs, 0);
                    AMediaExtractor_advance(mExtractor);
                }
            }
        }
        ssize_t status = AMediaCodec_dequeueOutputBuffer(mCodec, &info, timeoutUs);
        if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
            // no output available yet
        } else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
            // output buffers changed; nothing to do when rendering to a surface
        } else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
            AMediaFormat *format = AMediaCodec_getOutputFormat(mCodec);
            AMediaFormat_delete(format);
        } else if (status < 0) {
            LOGD("unexpected result from dequeueOutputBuffer: %zd", status);
        } else {
            // Pace playback against a monotonic wall clock; clock() measures
            // CPU time, not wall time, and would drift badly.
            struct timespec ts;
            clock_gettime(CLOCK_MONOTONIC, &ts);
            int64_t nowMs = ts.tv_sec * 1000LL + ts.tv_nsec / 1000000;
            if (startMs < 0) {
                startMs = nowMs - info.presentationTimeUs / 1000;
            }
            int64_t waitMs = startMs + info.presentationTimeUs / 1000 - nowMs;
            if (waitMs > 0) {
                usleep(waitMs * 1000);
            }
            // render == true hands the buffer to the codec's output surface
            AMediaCodec_releaseOutputBuffer(mCodec, status, true);
            outputDone = (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) != 0;
            drawFrame();
        }
    }
}
void VideoDecoder::drawFrame()
{
    // Assumes a shader program sampling the OES texture is already bound
    // (see the sketch after the listing); GLES2 draws nothing without one.
    updateTexImage();
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glViewport(0, 0, mWidth, mHeight);

    static const GLfloat vertices[] = { -1.0f, -1.0f, 1.0f,
                                        -1.0f,  1.0f, 1.0f,
                                         1.0f,  1.0f, 1.0f,
                                         1.0f, -1.0f, 1.0f };
    static const GLfloat texCoords[] = { 0.0f, 0.0f,
                                         0.0f, 1.0f,
                                         1.0f, 1.0f,
                                         1.0f, 0.0f };
    static const GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
    static const GLuint positionIndex = 0;
    static const GLuint texCoordIndex = 1;

    glEnableVertexAttribArray(positionIndex);
    glVertexAttribPointer(positionIndex, 3, GL_FLOAT, GL_FALSE, 0, vertices);
    glEnableVertexAttribArray(texCoordIndex);
    glVertexAttribPointer(texCoordIndex, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
    glDisableVertexAttribArray(positionIndex);
    glDisableVertexAttribArray(texCoordIndex);
    eglSwapBuffers(mEglDisplay, mEglSurface);
}
void VideoDecoder::updateTexImage()
{
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureId);
    // Latching the newest decoded frame into the OES texture is done by
    // SurfaceTexture.updateTexImage() on the Java side, or by
    // ASurfaceTexture_updateTexImage() (<android/surface_texture.h>, API 28+)
    // when the codec renders into a SurfaceTexture-backed window. There is
    // no AMediaCodec call for this, so it is left as a stub here.
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_example_videodecoder_VideoDecoder_createNativeObject(JNIEnv *env, jobject thiz)
{
return reinterpret_cast<jlong>(new VideoDecoder());
}
extern "C" JNIEXPORT void JNICALL
Java_com_example_videodecoder_VideoDecoder_setSurface(JNIEnv *env, jobject thiz, jlong nativeObject, jobject surface)
{
auto *decoder = reinterpret_cast<VideoDecoder *>(nativeObject);
decoder->setSurface(env, surface);
}
extern "C" JNIEXPORT void JNICALL
Java_com_example_videodecoder_VideoDecoder_decode(JNIEnv *env, jobject thiz, jlong nativeObject, jstring path)
{
auto *decoder = reinterpret_cast<VideoDecoder *>(nativeObject);
const char *filePath = env->GetStringUTFChars(path, nullptr);
decoder->decode(filePath);
env->ReleaseStringUTFChars(path, filePath);
}
extern "C" JNIEXPORT void JNICALL
Java_com_example_videodecoder_VideoDecoder_releaseNativeObject(JNIEnv *env, jobject thiz, jlong nativeObject)
{
auto *decoder = reinterpret_cast<VideoDecoder *>(nativeObject);
delete decoder;
}
```
In this example, AMediaExtractor and AMediaCodec do the decoding. Decoded frames are in YUV format; sampling them through a GL_TEXTURE_EXTERNAL_OES texture in OpenGL ES converts them to RGB and renders them to the Android Surface.
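The listing's drawFrame() assumes such a shader program has already been compiled and bound, a step omitted above. A minimal sketch of what that program could look like follows; the helper names (compileShader, createOesProgram) are illustrative, not part of any API. Sampling through samplerExternalOES is what performs the implicit YUV-to-RGB conversion:
```cpp
#include <GLES2/gl2.h>

// Vertex shader: pass position and texture coordinate straight through,
// matching the attribute indices 0 and 1 used in drawFrame().
static const char *kVertexShader =
    "attribute vec4 aPosition;\n"
    "attribute vec2 aTexCoord;\n"
    "varying vec2 vTexCoord;\n"
    "void main() {\n"
    "  gl_Position = aPosition;\n"
    "  vTexCoord = aTexCoord;\n"
    "}\n";

// Fragment shader: samplerExternalOES yields RGB when the bound texture
// is a GL_TEXTURE_EXTERNAL_OES video frame.
static const char *kFragmentShader =
    "#extension GL_OES_EGL_image_external : require\n"
    "precision mediump float;\n"
    "varying vec2 vTexCoord;\n"
    "uniform samplerExternalOES uTexture;\n"
    "void main() {\n"
    "  gl_FragColor = texture2D(uTexture, vTexCoord);\n"
    "}\n";

static GLuint compileShader(GLenum type, const char *src)
{
    GLuint shader = glCreateShader(type);
    glShaderSource(shader, 1, &src, nullptr);
    glCompileShader(shader);
    return shader;  // production code should check GL_COMPILE_STATUS
}

// Build the program and pin the attribute locations drawFrame() expects.
GLuint createOesProgram()
{
    GLuint program = glCreateProgram();
    glAttachShader(program, compileShader(GL_VERTEX_SHADER, kVertexShader));
    glAttachShader(program, compileShader(GL_FRAGMENT_SHADER, kFragmentShader));
    glBindAttribLocation(program, 0, "aPosition");  // positionIndex
    glBindAttribLocation(program, 1, "aTexCoord");  // texCoordIndex
    glLinkProgram(program);
    return program;  // production code should check GL_LINK_STATUS
}
```
After linking, call glUseProgram() once after init() and set the uTexture uniform to 0, the texture unit on which updateTexImage() binds the OES texture.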
This example depends on the Android NDK's media APIs (libmediandk, available from API level 21) along with EGL and OpenGL ES 2.0; link against mediandk, android, EGL, GLESv2, and log. If you want to try it, make sure these are configured correctly in your development environment.
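Note also that the sample renders frames to the screen but never copies the RGB pixels out. If you need the converted image in CPU memory, as the title suggests, one option is glReadPixels() called inside drawFrame() just before eglSwapBuffers() (after the swap, the back buffer's contents are undefined). GL_RGBA/GL_UNSIGNED_BYTE readback is always supported in OpenGL ES 2.0. The helper below is a sketch, not part of the sample above:
```cpp
#include <cstdint>
#include <vector>
#include <GLES2/gl2.h>

// Copy the current framebuffer into an RGBA byte vector. Assumes the EGL
// context is current and a frame has just been drawn (and not yet swapped).
std::vector<uint8_t> readFrameRGBA(int width, int height)
{
    std::vector<uint8_t> pixels(static_cast<size_t>(width) * height * 4);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);  // rows are tightly packed
    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
    // glReadPixels returns rows bottom-up; flip vertically for a top-down image.
    return pixels;
}
```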