Example: decoding an MP4 video to YUV with MediaCodec and re-encoding the YUV frames back into an MP4, using OpenGL ES
First, pull in the MediaCodec and OpenGL ES classes the example relies on (note that the getInputBuffer/getOutputBuffer calls used below require API level 21 or higher):
```java
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.os.Environment;
import java.io.IOException;
import java.nio.ByteBuffer;
```
Next, create a class named VideoEncoder that drives the H.264 encoder and the MP4 muxer:
```java
public class VideoEncoder {
    private static final String MIME_TYPE = "video/avc";
    private static final int FRAME_RATE = 25;
    private static final int I_FRAME_INTERVAL = 10;
    private static final int TIMEOUT_US = 10000;
    private MediaCodec mEncoder;
    private MediaFormat mFormat;
    private MediaMuxer mMuxer;
    private int mTrackIndex;
    private boolean mMuxerStarted;
    private int mWidth;
    private int mHeight;
    private EGLContext mEglContext;
    private int mTextureId;

    public VideoEncoder(int width, int height, EGLContext eglContext, int textureId) {
        mWidth = width;
        mHeight = height;
        mEglContext = eglContext;
        mTextureId = textureId;
    }

    public void start(String outputPath) throws IOException {
        mFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        // Frames are fed in as YUV420 ByteBuffers below, so a YUV color format
        // is required here; COLOR_FormatSurface only applies when encoding from
        // an input Surface. COLOR_FormatYUV420Flexible is more portable across
        // devices, but then the actual plane layout must be queried from the codec.
        mFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        // Simple bitrate heuristic (~4 bits per pixel); tune for quality/size
        mFormat.setInteger(MediaFormat.KEY_BIT_RATE, mWidth * mHeight * 4);
        mFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        mFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mEncoder.start();
        // The muxer cannot start until the encoder reports its output format
        mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mTrackIndex = -1;
        mMuxerStarted = false;
    }

    public void stop() {
        if (mEncoder != null) {
            // Signal end-of-stream and flush any remaining output first
            drainEncoder(true);
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mMuxer != null) {
            if (mMuxerStarted) {
                mMuxer.stop();
            }
            mMuxer.release();
            mMuxer = null;
            mTrackIndex = -1;
            mMuxerStarted = false;
        }
    }

    public void drainEncoder(boolean endOfStream) {
        if (endOfStream) {
            // With ByteBuffer input, end-of-stream is signaled by queueing an
            // empty input buffer with the EOS flag; signalEndOfInputStream()
            // is only valid when the encoder uses an input Surface.
            int inputBufferIndex = mEncoder.dequeueInputBuffer(TIMEOUT_US);
            if (inputBufferIndex >= 0) {
                mEncoder.queueInputBuffer(inputBufferIndex, 0, 0,
                        System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            }
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_US);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream) {
                    break;
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                // The output format (including codec-specific data) is now
                // known, so the track can be added and the muxer started
                MediaFormat newFormat = mEncoder.getOutputFormat();
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                throw new RuntimeException("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else {
                ByteBuffer encodedData = mEncoder.getOutputBuffer(encoderStatus);
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // Codec config data is already carried in the track format,
                    // so don't write it to the muxer as a sample
                    bufferInfo.size = 0;
                }
                if (bufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }
                    encodedData.position(bufferInfo.offset);
                    encodedData.limit(bufferInfo.offset + bufferInfo.size);
                    mMuxer.writeSampleData(mTrackIndex, encodedData, bufferInfo);
                }
                mEncoder.releaseOutputBuffer(encoderStatus, false);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        throw new RuntimeException("reached end of stream unexpectedly");
                    }
                    break;
                }
            }
        }
    }

    public void encodeFrame() {
        // Render the external texture into an offscreen EGL surface so its
        // pixels can be read back. Creating the surface and renderer once and
        // reusing them across frames would be far more efficient; real code
        // should also save and restore the previously current EGL
        // surface/context around this block.
        OffscreenSurface surface = new OffscreenSurface(mEglContext, mWidth, mHeight);
        surface.makeCurrent();
        TextureRenderer renderer = new TextureRenderer();
        renderer.surfaceCreated();
        renderer.setExternalTexture(mTextureId);
        renderer.setRenderSize(mWidth, mHeight);
        // Draw the frame
        GLES20.glViewport(0, 0, mWidth, mHeight);
        renderer.surfaceChanged();
        renderer.drawFrame();
        // Read the pixels back; glReadPixels returns the image bottom-up, so
        // flip it here or in the shader if orientation matters
        ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(mWidth * mHeight * 4);
        GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
        // A direct ByteBuffer has no backing array, so copy into a byte[]
        // before handing it to the converter
        byte[] rgba = new byte[mWidth * mHeight * 4];
        pixelBuffer.position(0);
        pixelBuffer.get(rgba);
        // Convert the pixel data from RGBA to YUV420 semi-planar
        byte[] yuv420 = new byte[mWidth * mHeight * 3 / 2];
        NV21Converter.RGBtoYUV420SemiPlanar(rgba, mWidth, mHeight, yuv420);
        // Feed the YUV frame to the encoder
        int inputBufferIndex = mEncoder.dequeueInputBuffer(TIMEOUT_US);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufferIndex);
            inputBuffer.clear();
            inputBuffer.put(yuv420);
            mEncoder.queueInputBuffer(inputBufferIndex, 0, yuv420.length,
                    System.nanoTime() / 1000, 0);
        }
        // Pull any pending encoded output and write it to the muxer
        drainEncoder(false);
        // Release the per-frame EGL resources
        surface.release();
    }
}
```
Here, the OffscreenSurface class creates the EGL context and surface used for offscreen rendering, and the NV21Converter class converts the RGBA pixel data read back from OpenGL into YUV420 semi-planar data; minimal sketches of both are given below.
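Neither helper is defined in the snippets above, so here is a minimal sketch of what OffscreenSurface might look like, assuming it wraps an EGL pbuffer surface that shares the given EGLContext (the constructor, makeCurrent(), and release() signatures match how the encoder calls it; the rest is one reasonable implementation, not the only one):
```java
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;

public class OffscreenSurface {
    private EGLDisplay mDisplay;
    private EGLContext mContext;
    private EGLSurface mSurface;

    public OffscreenSurface(EGLContext sharedContext, int width, int height) {
        mDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        int[] version = new int[2];
        EGL14.eglInitialize(mDisplay, version, 0, version, 1);
        // Choose an RGBA8888 pbuffer-capable ES2 config
        int[] configAttribs = {
                EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8, EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
                EGL14.EGL_NONE };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        EGL14.eglChooseConfig(mDisplay, configAttribs, 0, configs, 0, 1, numConfigs, 0);
        // Share the GLSurfaceView's context so the external texture is visible here
        int[] contextAttribs = { EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE };
        mContext = EGL14.eglCreateContext(mDisplay, configs[0], sharedContext, contextAttribs, 0);
        int[] surfaceAttribs = { EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE };
        mSurface = EGL14.eglCreatePbufferSurface(mDisplay, configs[0], surfaceAttribs, 0);
    }

    public void makeCurrent() {
        EGL14.eglMakeCurrent(mDisplay, mSurface, mSurface, mContext);
    }

    public void release() {
        EGL14.eglMakeCurrent(mDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        EGL14.eglDestroySurface(mDisplay, mSurface);
        EGL14.eglDestroyContext(mDisplay, mContext);
    }
}
```
Likewise, a plain-Java sketch of the RGBA-to-YUV420 semi-planar conversion that NV21Converter.RGBtoYUV420SemiPlanar is expected to perform, using the common BT.601 integer approximations; in practice this per-pixel loop is usually moved to native code or a shader for speed:
```java
public class NV21Converter {
    // Converts interleaved RGBA (as read back by glReadPixels) to YUV420
    // semi-planar: a full-resolution Y plane followed by interleaved UV at
    // quarter resolution. COLOR_FormatYUV420SemiPlanar expects the U byte
    // first in each pair (NV12 order), despite this class's name.
    // Assumes even width and height.
    public static void RGBtoYUV420SemiPlanar(byte[] rgba, int width, int height, byte[] yuv) {
        int yIndex = 0;
        int uvIndex = width * height;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                int p = (j * width + i) * 4;
                int r = rgba[p] & 0xFF;
                int g = rgba[p + 1] & 0xFF;
                int b = rgba[p + 2] & 0xFF;
                int y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
                yuv[yIndex++] = (byte) Math.max(0, Math.min(255, y));
                // One UV pair per 2x2 block of pixels
                if (j % 2 == 0 && i % 2 == 0) {
                    int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
                    int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
                    yuv[uvIndex++] = (byte) Math.max(0, Math.min(255, u));
                    yuv[uvIndex++] = (byte) Math.max(0, Math.min(255, v));
                }
            }
        }
    }
}
```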
Finally, the VideoEncoder can be driven from a GLSurfaceView:
```java
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Environment;
import androidx.appcompat.app.AppCompatActivity;
import java.io.IOException;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class MainActivity extends AppCompatActivity {
    private GLSurfaceView mGLSurfaceView;
    private VideoEncoder mVideoEncoder;
    private int mTextureId;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mGLSurfaceView = findViewById(R.id.glsurfaceview);
        mGLSurfaceView.setEGLContextClientVersion(2);
        mGLSurfaceView.setRenderer(new Renderer());
        mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
        // Texture and encoder creation happen in onSurfaceCreated: both need
        // the GL/EGL context, which only exists on the render thread
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (mVideoEncoder != null) {
            mVideoEncoder.stop();
        }
        // GL objects must be deleted on the render thread, where the context is current
        mGLSurfaceView.queueEvent(new Runnable() {
            @Override
            public void run() {
                GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
            }
        });
    }

    private class Renderer implements GLSurfaceView.Renderer {
        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
            // EGL14.eglGetCurrentContext() only returns a valid context on the
            // render thread, so the texture and encoder are created here
            // rather than in onCreate
            mTextureId = TextureHelper.createTexture();
            mVideoEncoder = new VideoEncoder(640, 480, EGL14.eglGetCurrentContext(), mTextureId);
            try {
                String outputPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/output.mp4";
                mVideoEncoder.start(outputPath);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            GLES20.glViewport(0, 0, width, height);
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            // Render the texture to the screen (TextureRenderer is assumed to
            // expose a static full-screen-quad helper here)
            GLES20.glUseProgram(TextureRenderer.DEFAULT_VERTEX_SHADER_PROGRAM);
            TextureRenderer.renderTexture(mTextureId);
            // Read back, convert, and encode the current frame
            mVideoEncoder.encodeFrame();
        }
    }
}
```
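TextureHelper.createTexture() is not defined above either. A minimal sketch, assuming it creates the GL_TEXTURE_EXTERNAL_OES texture that TextureRenderer.setExternalTexture() suggests (a plain GL_TEXTURE_2D would work the same way for ordinary 2D content):
```java
import android.opengl.GLES11Ext;
import android.opengl.GLES20;

public class TextureHelper {
    // Creates an external OES texture (the kind SurfaceTexture and video
    // decoders render into). Must be called on a thread with a current GL context.
    public static int createTexture() {
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        return textures[0];
    }
}
```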
In the GLSurfaceView's onDrawFrame callback, the TextureRenderer first renders the texture to the screen; VideoEncoder.encodeFrame() then reads the current frame's pixels back, encodes them as an H.264 video frame, and writes it into the MP4 file through the MediaMuxer.
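The decoding half named in the title (MP4 to YUV) is not shown above. A minimal sketch of the usual approach, with MediaExtractor feeding a MediaCodec decoder; the VideoDecoder and FrameCallback names are illustrative, and the exact YUV plane layout of the output depends on the decoder's reported color format:
```java
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import java.io.IOException;
import java.nio.ByteBuffer;

public class VideoDecoder {
    private static final int TIMEOUT_US = 10000;

    public interface FrameCallback {
        void onYuvFrame(byte[] yuv, long presentationTimeUs);
    }

    // Decodes every frame of an MP4 file and hands the raw YUV bytes to the
    // caller. Real code should inspect decoder.getOutputFormat() to learn the
    // actual color format and stride of the decoded frames.
    public static void decodeToYuv(String inputPath, FrameCallback callback) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(inputPath);
        // Find the first video track
        int trackIndex = -1;
        MediaFormat format = null;
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat f = extractor.getTrackFormat(i);
            if (f.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
                trackIndex = i;
                format = f;
                break;
            }
        }
        if (trackIndex < 0) {
            throw new IOException("no video track in " + inputPath);
        }
        extractor.selectTrack(trackIndex);
        MediaCodec decoder = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
        // A null Surface makes the decoder deliver YUV ByteBuffers
        decoder.configure(format, null, null, 0);
        decoder.start();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean inputDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (!inputDone) {
                int inIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
                if (inIndex >= 0) {
                    ByteBuffer inBuf = decoder.getInputBuffer(inIndex);
                    int size = extractor.readSampleData(inBuf, 0);
                    if (size < 0) {
                        decoder.queueInputBuffer(inIndex, 0, 0, 0,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, size, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }
            int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
            if (outIndex >= 0) {
                ByteBuffer outBuf = decoder.getOutputBuffer(outIndex);
                if (info.size > 0) {
                    byte[] yuv = new byte[info.size];
                    outBuf.position(info.offset);
                    outBuf.limit(info.offset + info.size);
                    outBuf.get(yuv);
                    callback.onYuvFrame(yuv, info.presentationTimeUs);
                }
                decoder.releaseOutputBuffer(outIndex, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    outputDone = true;
                }
            }
        }
        decoder.stop();
        decoder.release();
        extractor.release();
    }
}
```
The YUV frames delivered to the callback can then be handed to the VideoEncoder above (or uploaded as textures and rendered) to complete the decode-and-re-encode round trip.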