Android如何在前台服务中使用glsurfaceView编码录制视频?并且不会因为界面退到后台而停止录制,请给出实例代码
时间: 2024-02-13 13:01:06 浏览: 135
Android使用GlSurfaceView和OpenGL绘制三角形 Demo
5星 · 资源好评率100%
好的,下面是一个使用 GLSurfaceView 进行视频编码和录制的示例代码,可以在前台服务中运行,并且不会因为界面退到后台而停止录制。
首先,需要在前台服务中创建一个 GLSurfaceView,并设置 Renderer。在 Renderer 的 onDrawFrame() 方法中,将 Surface 和纹理传递给编码器进行编码。同时,在 SurfaceTexture 上设置 OnFrameAvailableListener,当有新的帧可用时,会回调这个监听器,将最新的帧传递给编码器进行编码。编码器的回调函数中,将编码后的数据写入 Muxer。
以下是示例代码:
```
public class VideoEncoderService extends Service implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener, MediaEncoder.MediaEncoderListener {
private static final String TAG = "VideoEncoderService";
private static final int FRAME_RATE = 30;
private static final int I_FRAME_INTERVAL = 1;
private static final int VIDEO_BITRATE = 2000000;
private static final int AUDIO_BITRATE = 128000;
private GLSurfaceView mGLSurfaceView;
private SurfaceTexture mSurfaceTexture;
private MediaCodec mVideoEncoder;
private MediaMuxer mMuxer;
private int mVideoTrackIndex;
private boolean mMuxerStarted;
private MediaEncoder mAudioEncoder;
private MediaEncoder mVideoEncoderWrapper;
private final Object mSyncObject = new Object();
private boolean mFrameAvailable;
private boolean mIsRecording;
@Override
public void onCreate() {
super.onCreate();
mGLSurfaceView = new GLSurfaceView(this);
mGLSurfaceView.setEGLContextClientVersion(2);
mGLSurfaceView.setRenderer(this);
mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
WindowManager.LayoutParams params = new WindowManager.LayoutParams(
WindowManager.LayoutParams.MATCH_PARENT,
WindowManager.LayoutParams.MATCH_PARENT,
WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY,
WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE |
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE,
PixelFormat.TRANSLUCENT);
WindowManager wm = (WindowManager) getSystemService(WINDOW_SERVICE);
wm.addView(mGLSurfaceView, params);
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
startRecording();
return START_STICKY;
}
@Override
public void onDestroy() {
stopRecording();
super.onDestroy();
}
private void startRecording() {
synchronized (mSyncObject) {
if (mIsRecording) {
return;
}
try {
String outputFile = getExternalFilesDir(null).getAbsolutePath() + "/output.mp4";
mMuxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mVideoEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
MediaFormat videoFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 720, 1280);
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
mVideoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface surface = mVideoEncoder.createInputSurface();
mVideoEncoder.start();
mVideoEncoderWrapper = new MediaEncoder(mVideoEncoder, this);
mAudioEncoder = new AudioEncoder(AUDIO_BITRATE);
mAudioEncoder.setMediaEncoderListener(this);
mSurfaceTexture = new SurfaceTexture(1);
mSurfaceTexture.setOnFrameAvailableListener(this);
mGLSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
mSurfaceTexture.attachToGLContext(TEXTURE_ID);
}
});
mIsRecording = true;
} catch (IOException e) {
Log.e(TAG, "Failed to start recording", e);
stopRecording();
}
}
}
private void stopRecording() {
synchronized (mSyncObject) {
if (!mIsRecording) {
return;
}
mIsRecording = false;
mAudioEncoder.stop();
mAudioEncoder = null;
mGLSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
mSurfaceTexture.detachFromGLContext();
mSurfaceTexture.release();
}
});
mVideoEncoderWrapper.stop();
mVideoEncoderWrapper = null;
mVideoEncoder.stop();
mVideoEncoder.release();
mVideoEncoder = null;
if (mMuxerStarted) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
WindowManager wm = (WindowManager) getSystemService(WINDOW_SERVICE);
wm.removeView(mGLSurfaceView);
}
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
synchronized (mSyncObject) {
if (mFrameAvailable) {
mFrameAvailable = false;
mSurfaceTexture.updateTexImage();
float[] mSTMatrix = new float[16];
mSurfaceTexture.getTransformMatrix(mSTMatrix);
mVideoEncoderWrapper.frameAvailableSoon(mSTMatrix);
}
}
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (mSyncObject) {
mFrameAvailable = true;
mGLSurfaceView.requestRender();
}
}
@Override
public void onMediaEncoderInitialized(MediaEncoder encoder) {
synchronized (mSyncObject) {
if (encoder instanceof AudioEncoder) {
mAudioEncoder.start();
} else if (encoder instanceof VideoEncoder) {
mVideoTrackIndex = mMuxer.addTrack(encoder.getMediaFormat());
mMuxer.start();
mMuxerStarted = true;
}
}
}
@Override
public void onMediaEncoderFrameEncoded(byte[] data, boolean isKeyFrame, long presentationTimeUs) {
synchronized (mSyncObject) {
if (mMuxerStarted) {
ByteBuffer buffer = ByteBuffer.wrap(data);
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
bufferInfo.size = data.length;
bufferInfo.presentationTimeUs = presentationTimeUs;
bufferInfo.flags = isKeyFrame ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
mMuxer.writeSampleData(mVideoTrackIndex, buffer, bufferInfo);
}
}
}
@Override
public void onMediaEncoderStopped(MediaEncoder encoder) {
synchronized (mSyncObject) {
if (encoder instanceof AudioEncoder) {
mAudioEncoder = null;
} else if (encoder instanceof VideoEncoder) {
mVideoTrackIndex = -1;
mMuxerStarted = false;
}
}
}
}
```
上面的代码中,我们在 onCreate() 方法中创建 GLSurfaceView 并添加到 WindowManager 中,然后在 onStartCommand() 方法中调用 startRecording() 方法开始录制视频。在 startRecording() 方法中,我们创建了一个 MediaCodec 编码器和一个 MediaMuxer Muxer,并将其关联起来。同时,我们还创建了一个 AudioEncoder 对象用于编码音频数据。我们还创建了一个 SurfaceTexture,并将其与 GLSurfaceView 关联起来。然后,我们在 onDrawFrame() 方法中获取最新的帧,并将其传递给编码器进行编码。在编码器的回调函数中,我们将编码后的数据写入 Muxer。最后,在 stopRecording() 方法中释放所有资源。
需要注意的是,由于涉及到多个线程之间的同步和通信(GL 渲染线程、主线程、编码器回调线程),因此需要仔细处理线程安全问题。同时,为了避免被系统杀死,必须在服务的 onCreate() 中调用 startForeground() 并传入一个 Notification(Android 8.0 及以上还需先通过 NotificationManager 创建 NotificationChannel);仅仅声明服务是不够的。此外,向 WindowManager 添加 TYPE_APPLICATION_OVERLAY 类型的窗口需要用户授予 SYSTEM_ALERT_WINDOW(悬浮窗)权限。
阅读全文