This page collects typical usage examples of the C++ method NV_ENCODE_API_FUNCTION_LIST::nvEncLockBitstream. If you are wondering how NV_ENCODE_API_FUNCTION_LIST::nvEncLockBitstream is used in practice, or what a concrete call looks like, the selected code examples below may help. You can also explore further usage examples of the containing struct NV_ENCODE_API_FUNCTION_LIST, the table of NVENC API function pointers that exposes this method.
The section below shows 2 code examples of NV_ENCODE_API_FUNCTION_LIST::nvEncLockBitstream, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
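Both examples share the same core sequence: fill an NV_ENC_LOCK_BITSTREAM structure, call nvEncLockBitstream to block until the encoded picture is available, copy bitstreamBufferPtr / bitstreamSizeInBytes out of the locked buffer, then release it with nvEncUnlockBitstream. The following is only a minimal sketch of that pattern, not code taken from either example; the read_bitstream helper and the api, encoder, and output_buffer names are hypothetical stand-ins for whatever handles the surrounding encoder setup provides.

#include <cstdint>
#include <vector>
#include "nvEncodeAPI.h" // NVIDIA Video Codec SDK header

// Copy one encoded picture out of an NVENC output buffer (sketch only).
static bool read_bitstream(NV_ENCODE_API_FUNCTION_LIST &api, void *encoder,
                           NV_ENC_OUTPUT_PTR output_buffer,
                           std::vector<uint8_t> &dst)
{
    NV_ENC_LOCK_BITSTREAM lock = { 0 };
    lock.version         = NV_ENC_LOCK_BITSTREAM_VER;
    lock.outputBitstream = output_buffer;

    // Blocks until the encoder has finished writing this output buffer
    // (doNotWait is left at its default of 0).
    if (api.nvEncLockBitstream(encoder, &lock) != NV_ENC_SUCCESS)
        return false;

    // While the buffer is locked, bitstreamBufferPtr/bitstreamSizeInBytes
    // describe the encoded picture; copy it out before unlocking.
    const uint8_t *src = static_cast<const uint8_t *>(lock.bitstreamBufferPtr);
    dst.assign(src, src + lock.bitstreamSizeInBytes);

    return api.nvEncUnlockBitstream(encoder, output_buffer) == NV_ENC_SUCCESS;
}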
Example 1: nvenc_get_frame
static int nvenc_get_frame(AVCodecContext *avctx, AVPacket *pkt)
{
    NVENCContext *ctx               = avctx->priv_data;
    NV_ENCODE_API_FUNCTION_LIST *nv = &ctx->nvel.nvenc_funcs;
    NV_ENC_LOCK_BITSTREAM params    = { 0 };
    NVENCOutputSurface *out         = NULL;
    int ret;

    ret = nvenc_dequeue_surface(ctx->pending, &out);
    if (ret)
        return ret;

    /* Lock the output buffer; on return bitstreamBufferPtr and
     * bitstreamSizeInBytes describe the encoded picture. */
    params.version         = NV_ENC_LOCK_BITSTREAM_VER;
    params.outputBitstream = out->out;

    ret = nv->nvEncLockBitstream(ctx->nvenc_ctx, &params);
    if (ret < 0)
        return AVERROR_UNKNOWN;

    ret = ff_alloc_packet(pkt, params.bitstreamSizeInBytes);
    if (ret < 0)
        return ret;

    memcpy(pkt->data, params.bitstreamBufferPtr, pkt->size);

    ret = nv->nvEncUnlockBitstream(ctx->nvenc_ctx, out->out);
    if (ret < 0)
        return AVERROR_UNKNOWN;

    /* Mark the surface pair as reusable for the next frame. */
    out->busy = out->in->locked = 0;

    ret = nvenc_set_timestamp(ctx, &params, pkt);
    if (ret < 0)
        return ret;

    switch (params.pictureType) {
    case NV_ENC_PIC_TYPE_IDR:
        pkt->flags |= AV_PKT_FLAG_KEY;
#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
        /* Deliberate fall-through: an IDR picture is also an I picture. */
    case NV_ENC_PIC_TYPE_INTRA_REFRESH:
    case NV_ENC_PIC_TYPE_I:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
        break;
    case NV_ENC_PIC_TYPE_P:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
        break;
    case NV_ENC_PIC_TYPE_B:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
        break;
    case NV_ENC_PIC_TYPE_BI:
        avctx->coded_frame->pict_type = AV_PICTURE_TYPE_BI;
        break;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    }

    return 0;
}
Example 2: memcpy
bool fcH264EncoderNVIDIA::encode(fcH264Frame& dst, const void *image, fcPixelFormat fmt, fcTime timestamp, bool force_keyframe)
{
    if (!isValid()) { return false; }

    dst.timestamp = timestamp;

    // convert image to NV12
    AnyToNV12(m_nv12_image, m_rgba_image, image, fmt, m_conf.width, m_conf.height);
    NV12Data data = m_nv12_image.data();

    NVENCSTATUS stat;

    // upload image to input buffer
    {
        NV_ENC_LOCK_INPUT_BUFFER lock_params = { 0 };
        lock_params.version = NV_ENC_LOCK_INPUT_BUFFER_VER;
        lock_params.inputBuffer = m_input.inputBuffer;
        stat = nvenc.nvEncLockInputBuffer(m_encoder, &lock_params);
        memcpy(lock_params.bufferDataPtr, data.y, m_nv12_image.size());
        stat = nvenc.nvEncUnlockInputBuffer(m_encoder, m_input.inputBuffer);
    }

    // describe the picture to encode
    NV_ENC_PIC_PARAMS params = { 0 };
    params.version = NV_ENC_PIC_PARAMS_VER;
    params.inputBuffer = m_input.inputBuffer;
    params.outputBitstream = m_output.bitstreamBuffer;
    params.bufferFmt = NV_ENC_BUFFER_FORMAT_NV12;
    params.inputWidth = m_conf.width;
    params.inputHeight = m_conf.height;
    params.completionEvent = 0;
    params.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
    params.encodePicFlags = 0;
    if (force_keyframe) {
        params.encodePicFlags |= NV_ENC_PIC_FLAG_FORCEINTRA;
    }
    params.inputTimeStamp = to_usec(timestamp);
    params.inputDuration = to_usec(1.0 / m_conf.target_framerate);

    // encode!
    stat = nvenc.nvEncEncodePicture(m_encoder, &params);

    // retrieve encoded data
    {
        NV_ENC_LOCK_BITSTREAM lock_params = { 0 };
        lock_params.version = NV_ENC_LOCK_BITSTREAM_VER;
        lock_params.outputBitstream = m_output.bitstreamBuffer;
        stat = nvenc.nvEncLockBitstream(m_encoder, &lock_params);
        dst.data.append((char*)lock_params.bitstreamBufferPtr, lock_params.bitstreamSizeInBytes);
        dst.gatherNALInformation();
        stat = nvenc.nvEncUnlockBitstream(m_encoder, m_output.bitstreamBuffer);
    }

    return true;
}