sleep c++示例
时间: 2024-06-04 12:04:19 浏览: 104
在C++中,`sleep()`是一种暂停程序执行的函数,它接受一个整数参数,表示要暂停的秒数。在调用`sleep()`函数后,程序将会阻塞等待指定的时间,直到时间到达后再继续执行下一条语句。下面是一个简单的示例:
```
#include <iostream>
#include <chrono>
#include <thread>

// Minimal demonstration of pausing the current thread with the C++11
// <thread>/<chrono> facilities: print, block for five seconds, print again.
int main() {
    std::cout << "Starting..." << std::endl;
    // sleep_for() blocks only the calling thread for (at least) the
    // requested duration.
    const auto pause = std::chrono::seconds(5);
    std::this_thread::sleep_for(pause);
    std::cout << "Done!" << std::endl;
    return 0;
}
```
在上面的示例中,我们使用了C++11标准库中的`std::this_thread::sleep_for()`函数来实现暂停功能。这个函数接受一个`std::chrono::duration`类型的参数,表示要暂停的时间长度。在本例中,我们暂停了5秒钟后再输出"Done!"。
相关问题
ffmpeg 录屏推流 C++ 示例
这里提供一个使用 ffmpeg 库进行屏幕录制并推流的 C++ 示例代码:
```c++
#include <iostream>
#include <thread>
#include <chrono>
#include <cstring>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>   // av_image_alloc
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
}
#ifdef _WIN32
#include <Windows.h>
#include <d3d9.h>
#pragma comment(lib, "d3d9.lib")
#endif

// Encoding / streaming parameters.
#define WIDTH 1280
#define HEIGHT 720
#define FPS 30
#define BITRATE 2000000
#define STREAM_URL "rtmp://localhost/live/stream"

// Captures the screen (Windows / Direct3D9), encodes it as H.264 and pushes
// the result to an RTMP server inside an FLV container.
//
// Fixes relative to the widely-copied original of this sample:
//  * sws_scale() now receives ALL destination planes; the original passed
//    only data[0], so the YUV420P chroma planes were never written.
//  * frame->pts is a monotonically increasing frame counter in the codec
//    time base (1/FPS); the original stored raw av_gettime() microseconds.
//  * Packets are rescaled to the stream time base and get stream_index set
//    before muxing.
//  * The AVPacket is heap-allocated with av_packet_alloc(); the original
//    handed an uninitialized stack packet to avcodec_receive_packet().
//  * avformat_network_init() is called (needed for RTMP) and
//    AV_CODEC_FLAG_GLOBAL_HEADER is set when the muxer requires it (FLV).
//  * Cleanup closes the AVIO handle and frees the av_image_alloc() buffer.
int main() {
#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
    // Registration calls only exist (and are only needed) before FFmpeg 4.0.
    av_register_all();
    avcodec_register_all();
#endif
    avformat_network_init();

    AVFormatContext *format_ctx = nullptr;
    AVCodecContext *codec_ctx = nullptr;
    AVStream *stream = nullptr;
    AVFrame *frame = nullptr;
    AVPacket *packet = nullptr;
    SwsContext *sws_ctx = nullptr;

    // Output context: FLV container sent to the RTMP endpoint.
    avformat_alloc_output_context2(&format_ctx, nullptr, "flv", STREAM_URL);
    if (!format_ctx) {
        std::cerr << "Could not create output context" << std::endl;
        return -1;
    }
    // H.264 software encoder (libx264 when FFmpeg was built with it).
    const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec) {
        std::cerr << "Could not find encoder" << std::endl;
        return -1;
    }
    // Single video stream; the real parameters are copied from the codec
    // context after the encoder is opened.
    stream = avformat_new_stream(format_ctx, nullptr);
    if (!stream) {
        std::cerr << "Could not create stream" << std::endl;
        return -1;
    }
    codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        std::cerr << "Could not allocate codec context" << std::endl;
        return -1;
    }
    // Encoder configuration.
    codec_ctx->codec_id = codec->id;
    codec_ctx->width = WIDTH;
    codec_ctx->height = HEIGHT;
    codec_ctx->bit_rate = BITRATE;
    codec_ctx->time_base = AVRational{1, FPS};
    codec_ctx->framerate = AVRational{FPS, 1};
    codec_ctx->gop_size = 10;
    codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
    // FLV needs the SPS/PPS in the stream extradata, not inline.
    if (format_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    // x264 private options: lowest-latency settings for live streaming.
    av_opt_set(codec_ctx->priv_data, "preset", "ultrafast", 0);
    av_opt_set(codec_ctx->priv_data, "tune", "zerolatency", 0);
    if (avcodec_open2(codec_ctx, codec, nullptr) < 0) {
        std::cerr << "Could not open codec" << std::endl;
        return -1;
    }
    // Publish the opened encoder's parameters on the stream.
    avcodec_parameters_from_context(stream->codecpar, codec_ctx);
    stream->time_base = codec_ctx->time_base;
    // Open the network output.
    if (avio_open(&format_ctx->pb, STREAM_URL, AVIO_FLAG_WRITE) < 0) {
        std::cerr << "Could not open output stream" << std::endl;
        return -1;
    }
    avformat_write_header(format_ctx, nullptr);
#ifdef _WIN32
    // Direct3D9 is used to grab the front buffer (the visible desktop).
    IDirect3D9* d3d = Direct3DCreate9(D3D_SDK_VERSION);
    if (!d3d) {
        std::cerr << "Could not create Direct3D object" << std::endl;
        return -1;
    }
    D3DPRESENT_PARAMETERS d3dpp = {0};
    d3dpp.Windowed = TRUE;
    d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
    d3dpp.BackBufferFormat = D3DFMT_UNKNOWN;
    d3dpp.hDeviceWindow = GetDesktopWindow();
    IDirect3DDevice9* device = nullptr;
    if (FAILED(d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, d3dpp.hDeviceWindow,
                                 D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &device))) {
        std::cerr << "Could not create Direct3D device" << std::endl;
        return -1;
    }
    // System-memory surface that receives the captured pixels.
    // NOTE(review): GetFrontBufferData() delivers the frame at the desktop
    // resolution; this code assumes the desktop is WIDTH x HEIGHT — confirm,
    // or size the surface from the adapter's current display mode.
    IDirect3DSurface9* surface = nullptr;
    if (FAILED(device->CreateOffscreenPlainSurface(WIDTH, HEIGHT, D3DFMT_X8R8G8B8,
                                                   D3DPOOL_SYSTEMMEM, &surface, nullptr))) {
        std::cerr << "Could not create surface object" << std::endl;
        return -1;
    }
#endif
    // Destination frame (YUV420P) for the converter/encoder.
    frame = av_frame_alloc();
    frame->format = codec_ctx->pix_fmt;
    frame->width = codec_ctx->width;
    frame->height = codec_ctx->height;
    av_image_alloc(frame->data, frame->linesize, codec_ctx->width, codec_ctx->height,
                   codec_ctx->pix_fmt, 32);
    packet = av_packet_alloc();
    // BGRA (capture format) -> YUV420P (encoder format) converter.
    sws_ctx = sws_getContext(codec_ctx->width, codec_ctx->height, AV_PIX_FMT_BGRA,
                             codec_ctx->width, codec_ctx->height, codec_ctx->pix_fmt,
                             SWS_BICUBIC, nullptr, nullptr, nullptr);
    int64_t frame_index = 0;  // PTS counter in codec time base (1 tick = 1 frame)
    while (true) {
#ifdef _WIN32
        if (FAILED(device->GetFrontBufferData(0, surface))) {
            std::cerr << "Could not get front buffer data" << std::endl;
            continue;
        }
        D3DLOCKED_RECT rect;
        if (FAILED(surface->LockRect(&rect, nullptr, D3DLOCK_READONLY))) {
            std::cerr << "Could not lock surface rect" << std::endl;
            continue;
        }
        // Color-convert the captured BGRA pixels into the YUV420P frame.
        // All destination planes must be passed: YUV420P stores luma and
        // the two chroma planes separately.
        const uint8_t *src_data[1] = {static_cast<uint8_t*>(rect.pBits)};
        int src_linesize[1] = {rect.Pitch};
        sws_scale(sws_ctx, src_data, src_linesize, 0, codec_ctx->height,
                  frame->data, frame->linesize);
        surface->UnlockRect();
#else
        // TODO: capture code for Linux/macOS (e.g. X11/XShm, PipeWire, AVFoundation)
#endif
        frame->pts = frame_index++;
        int ret = avcodec_send_frame(codec_ctx, frame);
        if (ret < 0) {
            std::cerr << "Error sending frame" << std::endl;
            continue;
        }
        // Drain every packet the encoder has ready for this frame.
        while (ret >= 0) {
            ret = avcodec_receive_packet(codec_ctx, packet);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                break;
            } else if (ret < 0) {
                std::cerr << "Error receiving packet" << std::endl;
                break;
            }
            // Convert timestamps from codec to stream time base and tag the
            // packet with its stream before muxing.
            av_packet_rescale_ts(packet, codec_ctx->time_base, stream->time_base);
            packet->stream_index = stream->index;
            av_interleaved_write_frame(format_ctx, packet);
            av_packet_unref(packet);
        }
        // Crude frame pacing; a production capturer should schedule on a clock.
        std::this_thread::sleep_for(std::chrono::milliseconds(1000 / FPS));
    }
    // Unreachable with the endless loop above; kept so the teardown order is
    // documented: trailer, converter, frame buffer, codec, I/O, container.
    av_write_trailer(format_ctx);
    sws_freeContext(sws_ctx);
    av_freep(&frame->data[0]);   // buffer came from av_image_alloc()
    av_frame_free(&frame);
    av_packet_free(&packet);
    avcodec_free_context(&codec_ctx);
    avio_closep(&format_ctx->pb);
    avformat_free_context(format_ctx);
    return 0;
}
```
这个示例代码使用了 Direct3D 技术获取屏幕像素数据,因此需要在 Windows 操作系统上运行。如果需要在其他平台上运行,需要修改获取像素数据的代码。
ffmpeg 6.0 录屏推流 C++ 示例
以下是在 Windows 平台上,使用 FFmpeg 6.0 实现录屏推流的 C++ 示例代码:
```c++
#include <iostream>
#include <Windows.h>
#include <d3d9.h>
// FFmpeg is a C library: without extern "C" the C++ compiler applies C++
// name mangling to the declarations and linking fails.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
}
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")

#define SCREEN_WIDTH 1920
#define SCREEN_HEIGHT 1080
#define FPS 30
#define BIT_RATE 4000000
#define STREAM_URL "rtmp://localhost/live/stream"

// Screen capture (Direct3D9Ex) -> H.264 -> RTMP push, written against the
// FFmpeg 6.0 API. Fixes relative to the original sample, which could not
// compile against FFmpeg 6 despite its claim:
//  * av_register_all()/avcodec_register_all() were removed in FFmpeg 5.
//  * AVStream::codec was removed — a standalone AVCodecContext is allocated
//    and its parameters copied via avcodec_parameters_from_context().
//  * av_frame_set_pts() and av_init_packet() are gone — pts is assigned
//    directly and the packet comes from av_packet_alloc().
//  * The original memcpy'd 4-byte BGRA rows straight into a YUV420P buffer
//    (overflowing it, with no color conversion) — sws_scale() now performs
//    the BGRA -> YUV420P conversion.
//  * D3DX (deprecated; only used for an identity matrix nothing read) is
//    dropped, and packets are rescaled to the stream time base.
int main()
{
    // --- Direct3D9Ex initialisation --------------------------------------
    IDirect3D9Ex* pD3D = nullptr;
    if (FAILED(Direct3DCreate9Ex(D3D_SDK_VERSION, &pD3D)))
    {
        std::cerr << "Failed to create IDirect3D9Ex object" << std::endl;
        return -1;
    }
    if (pD3D->GetAdapterCount() == 0)
    {
        std::cerr << "No display adapter found" << std::endl;
        return -1;
    }
    // Current display mode of the first adapter (sanity check only; capture
    // below assumes the desktop matches SCREEN_WIDTH x SCREEN_HEIGHT).
    D3DDISPLAYMODE displayMode;
    if (FAILED(pD3D->EnumAdapterModes(0, D3DFMT_X8R8G8B8, 0, &displayMode)))
    {
        std::cerr << "Failed to enumerate display adapter modes" << std::endl;
        return -1;
    }
    D3DPRESENT_PARAMETERS d3dpp = {};
    d3dpp.Windowed = TRUE;
    d3dpp.BackBufferFormat = D3DFMT_X8R8G8B8;
    d3dpp.BackBufferWidth = SCREEN_WIDTH;
    d3dpp.BackBufferHeight = SCREEN_HEIGHT;
    d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
    IDirect3DDevice9Ex* pD3DDevice = nullptr;
    if (FAILED(pD3D->CreateDeviceEx(
        D3DADAPTER_DEFAULT,
        D3DDEVTYPE_HAL,
        GetDesktopWindow(),
        D3DCREATE_HARDWARE_VERTEXPROCESSING,
        &d3dpp,
        nullptr,
        &pD3DDevice)))
    {
        std::cerr << "Failed to create IDirect3DDevice9Ex object" << std::endl;
        return -1;
    }
    // --- FFmpeg setup ----------------------------------------------------
    avformat_network_init();   // required before using the RTMP protocol
    AVFormatContext* pFormatCtx = nullptr;
    if (avformat_alloc_output_context2(&pFormatCtx, nullptr, "flv", STREAM_URL) < 0)
    {
        std::cerr << "Failed to allocate output context" << std::endl;
        return -1;
    }
    const AVCodec* pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!pCodec)
    {
        std::cerr << "Failed to find video encoder" << std::endl;
        return -1;
    }
    AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
    if (!pCodecCtx)
    {
        std::cerr << "Failed to allocate codec context" << std::endl;
        return -1;
    }
    // Encoder configuration.
    pCodecCtx->codec_id = AV_CODEC_ID_H264;
    pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    pCodecCtx->width = SCREEN_WIDTH;
    pCodecCtx->height = SCREEN_HEIGHT;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    pCodecCtx->bit_rate = BIT_RATE;
    pCodecCtx->gop_size = FPS;                 // one keyframe per second
    pCodecCtx->time_base = { 1, FPS };
    pCodecCtx->framerate = { FPS, 1 };
    pCodecCtx->level = 31;
    pCodecCtx->profile = FF_PROFILE_H264_MAIN;
    // FLV requires the SPS/PPS in the stream extradata, not inline.
    if (pFormatCtx->oformat->flags & AVFMT_GLOBALHEADER)
        pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    // x264 private options: lowest-latency live-streaming settings.
    av_opt_set(pCodecCtx->priv_data, "preset", "ultrafast", 0);
    av_opt_set(pCodecCtx->priv_data, "tune", "zerolatency", 0);
    if (avcodec_open2(pCodecCtx, pCodec, nullptr) < 0)
    {
        std::cerr << "Failed to open video encoder" << std::endl;
        return -1;
    }
    // Create the output stream and publish the opened encoder's parameters.
    AVStream* pStream = avformat_new_stream(pFormatCtx, nullptr);
    if (!pStream)
    {
        std::cerr << "Failed to allocate video stream" << std::endl;
        return -1;
    }
    avcodec_parameters_from_context(pStream->codecpar, pCodecCtx);
    pStream->time_base = pCodecCtx->time_base;
    if (avio_open(&pFormatCtx->pb, STREAM_URL, AVIO_FLAG_WRITE) < 0)
    {
        std::cerr << "Failed to open output stream" << std::endl;
        return -1;
    }
    if (avformat_write_header(pFormatCtx, nullptr) < 0)
    {
        std::cerr << "Failed to write file header" << std::endl;
        return -1;
    }
    // Destination frame: FFmpeg-owned YUV420P planes, 32-byte aligned.
    AVFrame* pFrame = av_frame_alloc();
    if (!pFrame)
    {
        std::cerr << "Failed to allocate video frame" << std::endl;
        return -1;
    }
    pFrame->format = pCodecCtx->pix_fmt;
    pFrame->width = SCREEN_WIDTH;
    pFrame->height = SCREEN_HEIGHT;
    if (av_frame_get_buffer(pFrame, 32) < 0)
    {
        std::cerr << "Failed to allocate frame buffer" << std::endl;
        return -1;
    }
    AVPacket* pPacket = av_packet_alloc();
    // Converter: captured BGRA desktop pixels -> encoder's YUV420P.
    SwsContext* pSws = sws_getContext(SCREEN_WIDTH, SCREEN_HEIGHT, AV_PIX_FMT_BGRA,
                                      SCREEN_WIDTH, SCREEN_HEIGHT, pCodecCtx->pix_fmt,
                                      SWS_BILINEAR, nullptr, nullptr, nullptr);
    // System-memory surface that receives the front-buffer copy.
    IDirect3DSurface9* pSurface = nullptr;
    D3DLOCKED_RECT lockedRect;
    pD3DDevice->CreateOffscreenPlainSurface(SCREEN_WIDTH, SCREEN_HEIGHT, D3DFMT_A8R8G8B8,
                                            D3DPOOL_SYSTEMMEM, &pSurface, nullptr);
    // --- Capture / encode / push loop ------------------------------------
    int64_t frameIndex = 0;   // pts counter in codec time base (1 tick = 1 frame)
    bool running = true;
    while (running)
    {
        if (SUCCEEDED(pD3DDevice->GetFrontBufferData(0, pSurface)))
        {
            if (SUCCEEDED(pSurface->LockRect(&lockedRect, nullptr, D3DLOCK_READONLY)))
            {
                // Color-convert BGRA -> YUV420P; Pitch is the surface's real
                // per-row byte stride, which may exceed width * 4.
                const uint8_t* srcData[1] = { static_cast<uint8_t*>(lockedRect.pBits) };
                int srcStride[1] = { lockedRect.Pitch };
                sws_scale(pSws, srcData, srcStride, 0, SCREEN_HEIGHT,
                          pFrame->data, pFrame->linesize);
                pSurface->UnlockRect();
                pFrame->pts = frameIndex++;
                int result = avcodec_send_frame(pCodecCtx, pFrame);
                if (result < 0)
                {
                    std::cerr << "Failed to send video frame for encoding" << std::endl;
                    running = false;
                }
                // Drain every packet the encoder has ready.
                while (result >= 0)
                {
                    result = avcodec_receive_packet(pCodecCtx, pPacket);
                    if (result == AVERROR(EAGAIN) || result == AVERROR_EOF)
                    {
                        break;
                    }
                    if (result < 0)
                    {
                        std::cerr << "Failed to encode video frame" << std::endl;
                        running = false;
                        break;
                    }
                    // Rescale codec-time-base timestamps to the stream's
                    // time base and tag the packet before muxing.
                    av_packet_rescale_ts(pPacket, pCodecCtx->time_base, pStream->time_base);
                    pPacket->stream_index = pStream->index;
                    av_interleaved_write_frame(pFormatCtx, pPacket);
                    av_packet_unref(pPacket);
                }
            }
        }
        // Crude frame pacing; a production capturer should schedule on a clock.
        Sleep(1000 / FPS);
    }
    // --- Teardown ---------------------------------------------------------
    av_write_trailer(pFormatCtx);
    if (pSurface) pSurface->Release();
    if (pD3DDevice) pD3DDevice->Release();
    if (pD3D) pD3D->Release();
    sws_freeContext(pSws);
    av_packet_free(&pPacket);
    av_frame_free(&pFrame);
    avcodec_free_context(&pCodecCtx);
    avio_closep(&pFormatCtx->pb);
    avformat_free_context(pFormatCtx);
    avformat_network_deinit();
    return 0;
}
```
在这个示例中,我们使用了 Windows 平台上的 DirectX 9 API 来捕获屏幕数据,然后使用 FFmpeg 6.0 的编码器将数据编码为 H.264 格式,并通过 RTMP 协议推流到本地的流服务器。您需要根据具体的情况修改 STREAM_URL、SCREEN_WIDTH、SCREEN_HEIGHT、FPS 和 BIT_RATE 等参数。
阅读全文