OPENGL es MediaCodec例子
时间: 2024-03-05 21:52:02 浏览: 111
这里提供一个使用OpenGL ES和MediaCodec的简单例子,用于解码视频并将其渲染到屏幕上:
```java
public class VideoRenderer implements SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "VideoRenderer";

    /** Timeout (microseconds) for MediaCodec dequeue calls. */
    private static final long DEQUEUE_TIMEOUT_US = 1000;

    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;
    private int mTextureId;
    private MediaCodec mMediaCodec;
    private MediaFormat mMediaFormat;
    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;
    private final int mWidth;
    private final int mHeight;
    // volatile: stop() may be called from a different thread than the decode loop.
    private volatile boolean mIsRunning = false;
    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLSurface mEGLSurface;
    private EGLContext mEGLContext;

    // GL handles used by onFrameAvailable(). They were referenced but never
    // declared in the original example; they must be initialized by the GL
    // setup code (program compilation / buffer creation, not shown here)
    // before the first frame arrives.
    private int mProgram;
    private int mTransformMatrixHandle;
    private int mPositionHandle;
    private int mTextureHandle;
    private FloatBuffer mVertexBuffer;
    private FloatBuffer mTextureBuffer;

    /**
     * Creates a renderer that decodes into the given SurfaceTexture.
     *
     * @param surfaceTexture texture that receives decoded frames; this class
     *                       takes ownership and releases it in {@link #stop()}
     * @param width  expected video width in pixels
     * @param height expected video height in pixels
     */
    public VideoRenderer(SurfaceTexture surfaceTexture, int width, int height) {
        mSurfaceTexture = surfaceTexture;
        // BUG FIX: mSurface was never created in the original, so MediaCodec
        // was configured with a null output surface and rendered nothing.
        mSurface = new Surface(surfaceTexture);
        mWidth = width;
        mHeight = height;
    }

    /**
     * Decodes H.264 data from {@code filePath} until EOF or {@link #stop()}.
     *
     * NOTE(review): this reads raw bytes straight from the file in arbitrary
     * chunks. That only works for streams the decoder can resynchronize on;
     * real applications should use MediaExtractor to feed one access unit per
     * input buffer with proper presentation timestamps.
     *
     * @param filePath path of the video file to decode
     * @throws IOException if the file cannot be opened or read
     */
    public void start(String filePath) throws IOException {
        mMediaFormat = MediaFormat.createVideoFormat("video/avc", mWidth, mHeight);
        mMediaCodec = MediaCodec.createDecoderByType("video/avc");
        mMediaCodec.configure(mMediaFormat, mSurface, null, 0);
        mMediaCodec.start();
        // BUG FIX: getInputBuffers()/getOutputBuffers() must be called AFTER
        // start(); the original called them on a codec still in Configured
        // state, which throws IllegalStateException.
        mInputBuffers = mMediaCodec.getInputBuffers();
        mOutputBuffers = mMediaCodec.getOutputBuffers();
        // BUG FIX: mIsRunning was never set to true, so the decode loop
        // below never executed in the original.
        mIsRunning = true;
        // try-with-resources: the original leaked the stream and channel.
        try (FileInputStream inputStream = new FileInputStream(new File(filePath));
             FileChannel fileChannel = inputStream.getChannel()) {
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            boolean inputDone = false;
            while (mIsRunning) {
                if (!inputDone) {
                    int inputBufferIndex = mMediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
                    if (inputBufferIndex >= 0) {
                        ByteBuffer inputBuffer = mInputBuffers[inputBufferIndex];
                        // BUG FIX: reset position/limit before reuse; after a
                        // previous cycle the buffer is not ready for writing.
                        inputBuffer.clear();
                        int sampleSize = fileChannel.read(inputBuffer);
                        if (sampleSize < 0) {
                            mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            // BUG FIX: stop feeding after EOS; the original
                            // re-queued EOS on every iteration forever.
                            inputDone = true;
                        } else {
                            mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sampleSize, 0, 0);
                        }
                    }
                }
                int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
                while (outputBufferIndex >= 0) {
                    // render=true: the frame goes to mSurface, which triggers
                    // onFrameAvailable() on the SurfaceTexture.
                    mMediaCodec.releaseOutputBuffer(outputBufferIndex, true);
                    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        // BUG FIX: exit the loop once the decoder drains the
                        // EOS; the original looped forever after end of file.
                        mIsRunning = false;
                        break;
                    }
                    outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                }
                if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // Output buffers were re-allocated by the codec; refresh
                    // our cached array (required on API < 21).
                    mOutputBuffers = mMediaCodec.getOutputBuffers();
                }
            }
        }
    }

    /**
     * Stops decoding and releases the codec, EGL objects, SurfaceTexture and
     * Surface. Safe to call more than once.
     */
    public void stop() {
        mIsRunning = false;
        // Release MediaCodec first so nothing is still writing to mSurface.
        if (mMediaCodec != null) {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }
        // Destroy EGL surface and context before terminating the display.
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
            if (mEGLSurface != null) {
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
            }
            if (mEGLContext != null) {
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            }
            EGL14.eglTerminate(mEGLDisplay);
            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        }
        // Release the texture and its Surface last.
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }
        if (mSurface != null) {
            mSurface.release();
            mSurface = null;
        }
    }

    /**
     * Called when a new decoded frame is available on the SurfaceTexture.
     * Latches the frame into the external OES texture and draws a full-screen
     * quad with it, then swaps the EGL buffers.
     *
     * NOTE(review): updateTexImage() must run on the thread that owns the EGL
     * context; ensure this listener is registered with the GL thread's Handler.
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // Latch the newest frame into the texture bound to mTextureId.
        surfaceTexture.updateTexImage();
        // The transform matrix maps the quad's texture coordinates onto the
        // (possibly cropped/rotated) frame content.
        float[] transformMatrix = new float[16];
        surfaceTexture.getTransformMatrix(transformMatrix);
        GLES20.glClearColor(0, 0, 0, 1);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(mProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        // Frames from SurfaceTexture require the external-OES texture target.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        GLES20.glUniformMatrix4fv(mTransformMatrixHandle, 1, false, transformMatrix, 0);
        GLES20.glVertexAttribPointer(mPositionHandle, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
        GLES20.glVertexAttribPointer(mTextureHandle, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glEnableVertexAttribArray(mTextureHandle);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTextureHandle);
        // Present the rendered frame.
        EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    }
}
```
在这个示例中,`SurfaceTexture`由外部传入,我们基于它创建一个`Surface`,并将`MediaCodec`解码器配置为输出到该`Surface`。我们使用`FileInputStream`和`FileChannel`从文件中读取视频数据并送入`MediaCodec`解码;需要注意,直接按任意块大小读取裸码流只是为了演示,实际应用应使用`MediaExtractor`按访问单元(access unit)提取数据并附带正确的时间戳。解码后的视频帧最终通过OpenGL ES渲染到屏幕上。
需要注意的是,在`onFrameAvailable`回调中,我们需要使用OpenGL ES将视频帧渲染到屏幕上,并且需要在渲染完成后调用`EGL14.eglSwapBuffers`来交换前后缓冲区。此外,我们还需要在程序退出时释放所有资源,包括`MediaCodec`,EGL上下文和SurfaceTexture。
阅读全文