tag.argc = 0;
时间: 2023-10-23 16:11:05 浏览: 30
这是一个C/C++语言中的语句,它的作用是将`tag`对象(或结构体变量)的`argc`成员的值设置为0。
在C++中,结构体(`struct`)或类(`class`)可以拥有成员变量和成员函数。`tag`可能是一个结构体或类的实例对象,在这个对象中,`argc`是一个成员变量。通过`tag.argc = 0`这个语句,将`tag`对象的`argc`成员变量设置为0。
需要注意的是,若`tag`是类(`class`)且`argc`被声明为私有(private)或受保护(protected),则在类外这样直接访问会导致编译错误,只能在该类的成员函数或友元中访问;而`struct`的成员默认是公有的。
相关问题
生成一个rk3399 mpi接口使用mppencrccfg 的h264编码 包含#define TAG "mpi_h264enc_test" 完整程序
很抱歉,我无法提供完整的程序代码。但我可以给出一些参考:
```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <pthread.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <linux/videodev2.h>
#include "rk_mpi.h"
#include "rk_venc.h"
#define TAG "mpi_h264enc_test"
#define VIDEO_WIDTH 1920
#define VIDEO_HEIGHT 1080
#define VIDEO_FPS 30
#define FRAME_NUM 1000
/* Handle for one V4L2 capture device.
 * NOTE(review): holds exactly ONE mmap'ed streaming buffer — start/length
 * describe a single mapping, so only a single-buffer queue can be tracked
 * correctly; confirm against camera_open()'s buffer-request count. */
typedef struct
{
int fd;        /* file descriptor of the opened /dev/videoN node */
void *start;   /* address returned by mmap() for the streaming buffer */
size_t length; /* size in bytes of the mapped buffer */
} camera_t;
/*
 * Open and configure V4L2 capture device /dev/video<id>.
 *
 * Sets a VIDEO_WIDTH x VIDEO_HEIGHT YUYV capture format, maps one streaming
 * buffer, queues it, and starts streaming. Returns a heap-allocated camera_t
 * that the caller releases with camera_close(), or NULL on any failure
 * (all partially-acquired resources are released).
 *
 * FIX(review): the original requested 4 buffers but camera_t can only track a
 * single mapping, so three mmap()s leaked and camera->start kept only the
 * last buffer. Requesting exactly one buffer matches the struct's design.
 * Also: snprintf instead of sprintf, and the VIDIOC_STREAMON failure path now
 * unmaps the buffer (the original leaked it).
 */
static camera_t *camera_open(int id)
{
    camera_t *camera = malloc(sizeof *camera);
    if (camera == NULL)
    {
        printf("[%s] malloc camera_t failed\n", TAG);
        return NULL;
    }
    char dev_name[32];
    snprintf(dev_name, sizeof dev_name, "/dev/video%d", id);
    camera->fd = open(dev_name, O_RDWR);
    if (camera->fd < 0)
    {
        printf("[%s] open %s failed\n", TAG, dev_name);
        goto err_free;
    }
    struct v4l2_capability cap;
    if (ioctl(camera->fd, VIDIOC_QUERYCAP, &cap) < 0)
    {
        printf("[%s] VIDIOC_QUERYCAP failed\n", TAG);
        goto err_close;
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        printf("[%s] %s is not a video capture device\n", TAG, dev_name);
        goto err_close;
    }
    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        printf("[%s] %s does not support streaming i/o\n", TAG, dev_name);
        goto err_close;
    }
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = VIDEO_WIDTH;
    fmt.fmt.pix.height = VIDEO_HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;
    if (ioctl(camera->fd, VIDIOC_S_FMT, &fmt) < 0)
    {
        printf("[%s] VIDIOC_S_FMT failed\n", TAG);
        goto err_close;
    }
    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count = 1; /* exactly one buffer: camera_t stores a single mapping */
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(camera->fd, VIDIOC_REQBUFS, &req) < 0 || req.count < 1)
    {
        printf("[%s] VIDIOC_REQBUFS failed\n", TAG);
        goto err_close;
    }
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = 0;
    if (ioctl(camera->fd, VIDIOC_QUERYBUF, &buf) < 0)
    {
        printf("[%s] VIDIOC_QUERYBUF failed\n", TAG);
        goto err_close;
    }
    camera->start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, camera->fd, buf.m.offset);
    if (camera->start == MAP_FAILED)
    {
        printf("[%s] mmap failed\n", TAG);
        goto err_close;
    }
    camera->length = buf.length;
    if (ioctl(camera->fd, VIDIOC_QBUF, &buf) < 0)
    {
        printf("[%s] VIDIOC_QBUF failed\n", TAG);
        goto err_unmap;
    }
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(camera->fd, VIDIOC_STREAMON, &type) < 0)
    {
        printf("[%s] VIDIOC_STREAMON failed\n", TAG);
        goto err_unmap;
    }
    return camera;

err_unmap:
    munmap(camera->start, camera->length);
err_close:
    close(camera->fd);
err_free:
    free(camera);
    return NULL;
}
/*
 * Stop streaming and release everything camera_open() acquired.
 * Safe to call with NULL (no-op).
 *
 * FIX(review): the original called munmap(camera->start, camera->length)
 * four times in a loop on the SAME pointer — only the first call can succeed,
 * the rest fail with EINVAL. camera_t tracks exactly one mapping, so it is
 * unmapped exactly once.
 */
static void camera_close(camera_t *camera)
{
    if (camera == NULL)
    {
        return;
    }
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(camera->fd, VIDIOC_STREAMOFF, &type); /* best effort on teardown */
    munmap(camera->start, camera->length);
    close(camera->fd);
    free(camera);
}
/*
 * Grab one frame: dequeue a filled buffer, copy its payload into `buffer`,
 * report the byte count through `length`, then re-queue the buffer.
 * Returns 0 on success, -1 if either ioctl fails.
 *
 * NOTE(review): the copy always reads from camera->start and ignores
 * frame.index — correct only while a single streaming buffer is mapped;
 * confirm the buffer count used in camera_open().
 */
static int camera_capture(camera_t *camera, unsigned char *buffer, int *length)
{
    struct v4l2_buffer frame;
    memset(&frame, 0, sizeof(frame));
    frame.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    frame.memory = V4L2_MEMORY_MMAP;

    if (ioctl(camera->fd, VIDIOC_DQBUF, &frame) < 0)
        return -1;

    memcpy(buffer, camera->start, frame.bytesused);
    *length = frame.bytesused;

    return (ioctl(camera->fd, VIDIOC_QBUF, &frame) < 0) ? -1 : 0;
}
/*
 * Capture thread body: polls frames from the camera forever.
 * `arg` is the camera_t * returned by camera_open(); never returns normally
 * (the loop has no exit condition), so the trailing cleanup is unreachable
 * and kept only for shape.
 */
static void *camera_thread(void *arg)
{
    camera_t *cam = arg;
    unsigned char *frame_buf = malloc(cam->length);
    if (frame_buf == NULL)
    {
        printf("[%s] malloc buffer failed\n", TAG);
        return NULL;
    }
    int frame_len = 0;
    for (;;)
    {
        if (camera_capture(cam, frame_buf, &frame_len) == 0)
        {
            /* do something with captured frame */
        }
        usleep(1000); /* ~1 ms pause between polls */
    }
    /* not reached */
    free(frame_buf);
    return NULL;
}
int main(int argc, char **argv)
{
RK_MPI_SYS_Init();
RK_U32 u32Width = VIDEO_WIDTH;
RK_U32 u32Height = VIDEO_HEIGHT;
RK_U32 u32Fps = VIDEO_FPS;
RK_U32 u32BitRate = u32Width * u32Height * 3 / 2 * u32Fps;
RK_U32 u32KeyFrameInterval = u32Fps;
RK_U32 u32Profile = 66; // H264 PROFILE_HIGH
MPP_CHN_S stChnAttr;
stChnAttr.mChnId = 0;
stChnAttr.mModId = RK_ID_VENC;
stChnAttr.mDevId = 0;
stChnAttr.mWidth = u32Width;
stChnAttr.mHeight = u32Height;
stChnAttr.mFps = u32Fps;
stChnAttr.mBitRate = u32BitRate;
stChnAttr.mProfile = u32Profile;
stChnAttr.mLevel = 41; // H264 LEVEL4_1
stChnAttr.mPixelFormat = RK_FMT_YUV420SP;
stChnAttr.mRotation = 0;
stChnAttr.mMirror = 0;
stChnAttr.mFlip = 0;
stChnAttr.mDrmMode = 0;
stChnAttr.mDrmFd = -1;
if (RK_MPI_VENC_CreateChn(0, &stChnAttr) != RK_SUCCESS)
{
printf("[%s] create venc chn failed\n", TAG);
return -1;
}
if (RK_MPI_VENC_RegisterChn(0, 0, 0) != RK_SUCCESS)
{
printf("[%s] register venc chn failed\n", TAG);
return -1;
}
MPP_CHN_S stSrcChn;
stSrcChn.mModId = RK_ID_VI;
stSrcChn.mDevId = 0;
stSrcChn.mChnId = 0;
MPP_CHN_S stDestChn;
stDestChn.mModId = RK_ID_VENC;
stDestChn.mDevId = 0;
stDestChn.mChnId = 0;
RK_MPI_SYS_Bind(&stSrcChn, &stDestChn);
camera_t *camera = camera_open(0);
if (camera != NULL)
{
pthread_t tid;
pthread_create(&tid, NULL, camera_thread, camera);
}
RK_S32 s32Ret = RK_SUCCESS;
MPP_FRAME_S stFrame;
memset(&stFrame, 0, sizeof(stFrame));
stFrame.mModId = RK_ID_VENC;
stFrame.mChannelId = 0;
stFrame.mWidth = u32Width;
stFrame.mHeight = u32Height;
stFrame.mField = RK_FIELD_NONE;
stFrame.mFrameType = RK_CODEC_FRAME_SPS_PPS_I;
stFrame.mCompressMode = COMPRESS_MODE_NONE;
stFrame.mBitWidth = 10;
stFrame.mColor = MPP_FMT_YUV420SP;
for (int i = 0; i < FRAME_NUM; i++)
{
s32Ret = RK_MPI_VENC_GetFrm(0, &stFrame, RK_TRUE);
if (s32Ret != RK_SUCCESS)
{
printf("[%s] venc get frame failed\n", TAG);
goto done;
}
unsigned char *y = (unsigned char *)stFrame.mVirAddr[0];
unsigned char *uv = (unsigned char *)stFrame.mVirAddr[1];
int y_len = stFrame.mWidth * stFrame.mHeight;
int uv_len = stFrame.mWidth * stFrame.mHeight / 2;
RK_MPI_VENC_RcCfg rc_cfg;
memset(&rc_cfg, 0, sizeof(rc_cfg));
rc_cfg.mRcMode = VENC_RC_MODE_H264CBR;
rc_cfg.mBitRate = u32BitRate;
rc_cfg.mFrmRate = u32Fps;
rc_cfg.mGop = u32KeyFrameInterval;
rc_cfg.mQpMin = 30;
rc_cfg.mQpMax = 51;
rc_cfg.mQpInit = 35;
rc_cfg.mMaxReEncodeTimes = 5;
rc_cfg.mMaxQPDelta = 10;
rc_cfg.mMaxBitRateTolerance = 1000;
RK_MPI_VENC_SetRcCfg(0, &rc_cfg);
RK_MPI_VENC_H264Cfg h264_cfg;
memset(&h264_cfg, 0, sizeof(h264_cfg));
h264_cfg.mProfile = u32Profile;
h264_cfg.mLevel = 41;
h264_cfg.mEntropyMode = VENC_ENTROPY_MODE_CABAC;
h264_cfg.mCabacInitIdc = 0;
h264_cfg.mSliceNum = 2;
h264_cfg.mSliceMode = VENC_H264_SLICEMODE_SINGLE;
RK_MPI_VENC_SetH264Cfg(0, &h264_cfg);
RK_MPI_VENC_H264Vui h264_vui;
memset(&h264_vui, 0, sizeof(h264_vui));
h264_vui.mAspectRatioIdc = 0;
h264_vui.mOverScanInfo = 0;
h264_vui.mBitstreamRestriction = 0;
RK_MPI_VENC_SetH264Vui(0, &h264_vui);
RK_MPI_VENC_H264Sei h264_sei;
memset(&h264_sei, 0, sizeof(h264_sei));
h264_sei.mRecoveryPoint = 0;
h264_sei.mRecoveryPointInfoPresent = 0;
h264_sei.mBufferingPeriod = 0;
h264_sei.mPictureTiming = 0;
RK_MPI_VENC_SetH264Sei(0, &h264_sei);
MPP_ENC_CFG_S stMppEncCfg;
memset(&stMppEncCfg, 0, sizeof(stMppEncCfg));
stMppEncCfg.mRcCfg = &rc_cfg;
stMppEncCfg.mGopCfg = NULL;
stMppEncCfg.mH264Cfg = &h264_cfg;
stMppEncCfg.mH264VuiCfg = &h264_vui;
stMppEncCfg.mH264SeiCfg = &h264_sei;
RK_MPI_VENC_SetMppCfg(0, &stMppEncCfg);
MPP_ENC_SEI_S stEncSei;
memset(&stEncSei, 0, sizeof(stEncSei));
stEncSei.mEncSeiMode = MPP_ENC_SEI_MODE_ALL;
RK_MPI_VENC_SetExtCfg(0, ENC_CFG_SEI, &stEncSei);
RK_MPI_VENC_SendFrame(0, y, uv, y_len, uv_len);
RK_MPI_VENC_ReleaseFrm(0, &stFrame);
usleep(1000 * 1000 / u32Fps);
}
done:
if (RK_MPI_VENC_UnRegisterChn(0, 0, 0) != RK_SUCCESS)
{
printf("[%s] unregister venc chn failed\n", TAG);
}
if (RK_MPI_VENC_DestroyChn(0) != RK_SUCCESS)
{
printf("[%s] destroy venc chn failed\n", TAG);
}
RK_MPI_SYS_UnBind(&stSrcChn, &stDestChn);
if (camera != NULL)
{
camera_close(camera);
}
RK_MPI_SYS_Exit();
return 0;
}
```
这是一个基于 Rockchip RK3399 平台的 H.264 编码程序,程序中包含了使用 MPP 接口对 H.264 编码器进行配置的相关代码,可以根据需要进行修改。注意,本程序只是一个示例程序,不保证可以直接编译通过并运行。
讲解 napi_value NapiAsyKeyGenerator::CreateJsAsyKeyGenerator(napi_env env, napi_callback_info info) { LOGI("enter ..."); size_t expectedArgc = PARAMS_NUM_ONE; size_t argc = expectedArgc; napi_value argv[PARAMS_NUM_ONE] = { nullptr }; napi_get_cb_info(env, info, &argc, argv, nullptr, nullptr); if (argc != expectedArgc) { LOGE("The input args num is invalid."); return NapiGetNull(env); } napi_value instance; napi_value constructor = nullptr; napi_get_reference_value(env, classRef_, &constructor); napi_new_instance(env, constructor, argc, argv, &instance); std::string algName; if (!GetStringFromJSParams(env, argv[0], algName)) { LOGE("failed to get algoName."); return NapiGetNull(env); } HcfAsyKeyGenerator *generator = nullptr; int32_t res = HcfAsyKeyGeneratorCreate(algName.c_str(), &generator); if (res != HCF_SUCCESS) { LOGE("create c generator fail."); return NapiGetNull(env); } NapiAsyKeyGenerator *napiAsyKeyGenerator = new NapiAsyKeyGenerator(generator); napi_wrap( env, instance, napiAsyKeyGenerator, [](napi_env env, void *data, void *hint) { NapiAsyKeyGenerator *napiAsyKeyGenerator = static_cast<NapiAsyKeyGenerator *>(data); delete napiAsyKeyGenerator; return; }, nullptr, nullptr); napi_value napiAlgName = nullptr; napi_create_string_utf8(env, algName.c_str(), NAPI_AUTO_LENGTH, &napiAlgName); napi_set_named_property(env, instance, CRYPTO_TAG_ALG_NAME.c_str(), napiAlgName); LOGI("out ..."); return instance; }
这段代码是一个 N-API 模块中的函数,用于创建一个 JavaScript 对象,该对象包含一个 C++ 对象的实例。函数名为 `CreateJsAsyKeyGenerator`,它接受两个参数:`napi_env` 类型的 `env` 和 `napi_callback_info` 类型的 `info`。
在函数体内,首先获取函数调用时传入的参数数量和参数值。如果参数数量不符合预期,则返回 null。
接下来,使用 `napi_new_instance` 函数创建一个 JavaScript 对象,并将其赋值给变量 `instance`。这个 JavaScript 对象是通过调用一个 C++ 类的构造函数创建的。具体来说,先使用 `napi_get_reference_value` 函数获取一个类的引用值,然后再将这个引用值传递给 `napi_new_instance` 函数,由它来创建 JavaScript 对象。
然后,从 JavaScript 参数中获取一个算法名称,这个名称是一个字符串。如果获取失败,则返回 null。
接下来,调用 C++ 函数 `HcfAsyKeyGeneratorCreate` 来创建一个 C++ 对象的实例。如果创建失败,则返回 null。
接着,将这个 C++ 对象的实例和 JavaScript 对象进行绑定,使得它们可以相互访问。具体来说,使用 `napi_wrap` 函数将它们绑定在一起。在这个过程中,还传递了一个析构函数,以确保 C++ 对象的实例在 JavaScript 对象被垃圾回收时也能被正确地释放掉。
最后,将算法名称作为一个属性设置在 JavaScript 对象中,并返回这个 JavaScript 对象。
相关推荐
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![ini-development](https://img-home.csdnimg.cn/images/20210720083646.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)