Developing a Unity Screen Recorder with FFmpeg.AutoGen and the FFmpeg Libraries
To build a screen-recording tool in Unity that encodes video through the FFmpeg libraries, you can use the FFmpeg.AutoGen bindings. Below is a simple example:
```csharp
using System.Collections;
using FFmpeg.AutoGen;
using UnityEngine;

// Pointer-typed fields require "Allow 'unsafe' code" to be enabled in Player Settings.
public unsafe class ScreenRecorder : MonoBehaviour
{
    private const int FPS = 30;
    private const int BIT_RATE = 4000000;
    private const string OUTPUT_FILE = "output.mp4";
    private int frameCount = 0;
    private AVCodecContext* codecContext;
    private AVFormatContext* formatContext;
    private AVStream* stream;
    private AVFrame* frame;            // reusable YUV frame
    private SwsContext* swsContext;    // RGB24 -> YUV420P converter
    private Texture2D captureTexture;  // CPU-side copy of the back buffer
    private void Start()
    {
        int width = Screen.width;
        int height = Screen.height;

        // x264 private options; in CRF mode the crf value, not bit_rate, controls quality.
        AVDictionary* options = null;
        ffmpeg.av_dict_set(&options, "preset", "ultrafast", 0);
        ffmpeg.av_dict_set(&options, "tune", "zerolatency", 0);
        ffmpeg.av_dict_set(&options, "crf", "25", 0);

        AVCodec* codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
        if (codec == null)
        {
            Debug.LogError("Failed to find H.264 codec!");
            return;
        }

        codecContext = ffmpeg.avcodec_alloc_context3(codec);
        codecContext->width = width;
        codecContext->height = height;
        codecContext->time_base = new AVRational { num = 1, den = FPS };
        codecContext->framerate = new AVRational { num = FPS, den = 1 };
        codecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
        codecContext->bit_rate = BIT_RATE;
        // MP4 stores codec extradata in the container, so the encoder must emit global headers.
        codecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;

        int ret = ffmpeg.avcodec_open2(codecContext, codec, &options);
        ffmpeg.av_dict_free(&options);
        if (ret < 0)
        {
            Debug.LogError($"Failed to open codec! Error code: {ret}");
            return;
        }

        formatContext = ffmpeg.avformat_alloc_context();
        formatContext->oformat = ffmpeg.av_guess_format(null, OUTPUT_FILE, null);
        if (formatContext->oformat == null)
        {
            Debug.LogError("Failed to guess output format!");
            return;
        }

        ret = ffmpeg.avio_open(&formatContext->pb, OUTPUT_FILE, ffmpeg.AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            Debug.LogError($"Failed to open file '{OUTPUT_FILE}'! Error code: {ret}");
            return;
        }

        stream = ffmpeg.avformat_new_stream(formatContext, null);
        stream->time_base = codecContext->time_base;
        ret = ffmpeg.avcodec_parameters_from_context(stream->codecpar, codecContext);
        if (ret < 0)
        {
            Debug.LogError($"Failed to copy codec parameters! Error code: {ret}");
            return;
        }

        ret = ffmpeg.avformat_write_header(formatContext, null);
        if (ret < 0)
        {
            Debug.LogError($"Failed to write format header! Error code: {ret}");
            return;
        }

        // Allocate one reusable YUV frame with its own buffer.
        frame = ffmpeg.av_frame_alloc();
        frame->format = (int)codecContext->pix_fmt;
        frame->width = width;
        frame->height = height;
        ffmpeg.av_frame_get_buffer(frame, 0);

        // Converter from Unity's RGB24 capture to the encoder's YUV420P.
        swsContext = ffmpeg.sws_getContext(width, height, AVPixelFormat.AV_PIX_FMT_RGB24,
            width, height, AVPixelFormat.AV_PIX_FMT_YUV420P,
            ffmpeg.SWS_BILINEAR, null, null, null);

        captureTexture = new Texture2D(width, height, TextureFormat.RGB24, false);

        // ReadPixels is only valid after rendering finishes, so capture in a coroutine.
        StartCoroutine(CaptureLoop());
    }
    private void OnDestroy()
    {
        if (codecContext != null && formatContext != null)
        {
            // Drain the encoder: a null frame signals end of stream.
            ffmpeg.avcodec_send_frame(codecContext, null);
            WritePendingPackets();
            ffmpeg.av_write_trailer(formatContext);
        }
        if (formatContext != null)
        {
            if ((formatContext->oformat->flags & ffmpeg.AVFMT_NOFILE) == 0 && formatContext->pb != null)
            {
                ffmpeg.avio_closep(&formatContext->pb);
            }
            ffmpeg.avformat_free_context(formatContext);
            formatContext = null;
        }
        if (codecContext != null)
        {
            ffmpeg.avcodec_free_context(&codecContext);
        }
        if (frame != null)
        {
            ffmpeg.av_frame_free(&frame);
        }
        if (swsContext != null)
        {
            ffmpeg.sws_freeContext(swsContext);
            swsContext = null;
        }
    }
    private IEnumerator CaptureLoop()
    {
        while (true)
        {
            yield return new WaitForEndOfFrame();
            EncodeFrame();
        }
    }

    private void EncodeFrame()
    {
        // Grab the back buffer into the RGB24 texture's CPU-side data.
        captureTexture.ReadPixels(new Rect(0, 0, Screen.width, Screen.height), 0, 0);
        byte[] rgb = captureTexture.GetRawTextureData();

        // The encoder may still hold references to the previous frame's buffer.
        ffmpeg.av_frame_make_writable(frame);

        int rowBytes = codecContext->width * 3;
        fixed (byte* rgbPtr = rgb)
        {
            // Unity's pixel data is bottom-up; start at the last row with a
            // negative stride so sws_scale also flips the image vertically.
            byte*[] srcData = { rgbPtr + (codecContext->height - 1) * rowBytes, null, null, null };
            int[] srcLinesize = { -rowBytes, 0, 0, 0 };
            byte*[] dstData = { frame->data[0], frame->data[1], frame->data[2], null };
            int[] dstLinesize = { frame->linesize[0], frame->linesize[1], frame->linesize[2], 0 };
            ffmpeg.sws_scale(swsContext, srcData, srcLinesize, 0, codecContext->height, dstData, dstLinesize);
        }

        frame->pts = frameCount++;
        if (ffmpeg.avcodec_send_frame(codecContext, frame) < 0)
        {
            Debug.LogError("Failed to send frame to encoder!");
            return;
        }
        WritePendingPackets();
    }

    private void WritePendingPackets()
    {
        AVPacket* packet = ffmpeg.av_packet_alloc();
        // receive_packet returns AVERROR(EAGAIN) once the encoder needs more input.
        while (ffmpeg.avcodec_receive_packet(codecContext, packet) == 0)
        {
            packet->stream_index = stream->index;
            // The encoder stamps pts/dts in the codec time base; convert to the stream's.
            ffmpeg.av_packet_rescale_ts(packet, codecContext->time_base, stream->time_base);
            ffmpeg.av_interleaved_write_frame(formatContext, packet);
            ffmpeg.av_packet_unref(packet);
        }
        ffmpeg.av_packet_free(&packet);
    }
}
```
In the code above, we first define a few constants such as the frame rate and the output file name. In Start() we open the H.264 encoder (passing the x264 options through an AVDictionary), create an AVFormatContext, open the output file, add the video stream, and write the container header; we also allocate a reusable YUV frame plus an sws_scale converter and start a coroutine. After each WaitForEndOfFrame, the coroutine reads the back buffer with Texture2D.ReadPixels, converts the RGB24 pixels to YUV420P (flipping vertically, since Unity's pixel data is bottom-up), and sends the frame to the encoder, writing out any packets it produces. Finally, OnDestroy() flushes the encoder, writes the trailer, and frees all resources.
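The example logs raw FFmpeg error codes, which are hard to read. A small helper around av_strerror can turn them into messages; here is a minimal sketch (the FFmpegError class name is my own):

```csharp
using System;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;

public static unsafe class FFmpegError
{
    // Converts an FFmpeg error code into a human-readable message via av_strerror.
    public static string Describe(int error)
    {
        const int bufferSize = 1024;
        byte* buffer = stackalloc byte[bufferSize];
        ffmpeg.av_strerror(error, buffer, bufferSize);
        return Marshal.PtrToStringAnsi((IntPtr)buffer);
    }
}
```

The log calls can then use, for example, `Debug.LogError($"Failed to open codec: {FFmpegError.Describe(ret)}");`.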
Note that the code above targets the Windows platform. The native FFmpeg DLLs must be copied into the Unity project, FFmpeg.AutoGen must be told where to find them, and "Allow 'unsafe' code" must be enabled in Player Settings. To ensure the encoder is released even when the application exits abnormally, it is also advisable to add appropriate exception handling around the encoding and teardown code.
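FFmpeg.AutoGen resolves the native libraries from ffmpeg.RootPath. A minimal setup sketch, assuming the DLLs are placed under StreamingAssets/ffmpeg (the folder layout and the FFmpegSetup class name are my own choices):

```csharp
using System.IO;
using FFmpeg.AutoGen;
using UnityEngine;

public static class FFmpegSetup
{
    // Runs before any scene loads so the bindings can resolve the
    // avcodec/avformat/swscale DLLs before the first FFmpeg call.
    [RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.BeforeSceneLoad)]
    private static void Initialize()
    {
        // Assumed layout: the native FFmpeg DLLs live in StreamingAssets/ffmpeg.
        ffmpeg.RootPath = Path.Combine(Application.streamingAssetsPath, "ffmpeg");
        Debug.Log($"FFmpeg version: {ffmpeg.av_version_info()}");
    }
}
```

Logging av_version_info() on startup is a quick way to confirm the libraries were found before the recorder runs.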