本文整理汇总了C++中NV_ENCODE_API_FUNCTION_LIST::nvEncEncodePicture方法的典型用法代码示例。如果您正苦于以下问题:C++ NV_ENCODE_API_FUNCTION_LIST::nvEncEncodePicture方法的具体用法?C++ NV_ENCODE_API_FUNCTION_LIST::nvEncEncodePicture怎么用?C++ NV_ENCODE_API_FUNCTION_LIST::nvEncEncodePicture使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类NV_ENCODE_API_FUNCTION_LIST的用法示例。
在下文中一共展示了NV_ENCODE_API_FUNCTION_LIST::nvEncEncodePicture方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: memcpy
// Encode one frame to H.264 via the NVIDIA NVENC hardware encoder.
//
// dst:            receives the encoded bitstream and its timestamp.
// image:          source pixels in format `fmt`.
// fmt:            pixel format of `image`; converted to NV12 before upload.
// timestamp:      presentation time of this frame (seconds, per fcTime usage here).
// force_keyframe: when true, requests an intra (IDR-style) picture via
//                 NV_ENC_PIC_FLAG_FORCEINTRA.
//
// Returns true on success, false if the encoder is invalid or any NVENC call fails.
//
// Fixes vs. original: restored `&params` (was HTML-mangled to `¶ms`) and
// every NVENCSTATUS is now checked — the original ignored `stat`, so a failed
// nvEncLockInputBuffer would memcpy through an invalid pointer and the
// function always reported success.
bool fcH264EncoderNVIDIA::encode(fcH264Frame& dst, const void *image, fcPixelFormat fmt, fcTime timestamp, bool force_keyframe)
{
    if (!isValid()) { return false; }
    dst.timestamp = timestamp;

    // Convert the source image to NV12, the input layout this path feeds to NVENC.
    AnyToNV12(m_nv12_image, m_rgba_image, image, fmt, m_conf.width, m_conf.height);
    NV12Data data = m_nv12_image.data();

    NVENCSTATUS stat;

    // Upload the NV12 image into the encoder's input buffer.
    {
        NV_ENC_LOCK_INPUT_BUFFER lock_params = { 0 };
        lock_params.version = NV_ENC_LOCK_INPUT_BUFFER_VER;
        lock_params.inputBuffer = m_input.inputBuffer;
        stat = nvenc.nvEncLockInputBuffer(m_encoder, &lock_params);
        if (stat != NV_ENC_SUCCESS) { return false; } // never memcpy into an unlocked buffer
        // NOTE(review): copies assuming the locked buffer pitch equals the NV12
        // row size — true for the configs this encoder creates; confirm if
        // lock_params.pitch can differ.
        memcpy(lock_params.bufferDataPtr, data.y, m_nv12_image.size());
        stat = nvenc.nvEncUnlockInputBuffer(m_encoder, m_input.inputBuffer);
        if (stat != NV_ENC_SUCCESS) { return false; }
    }

    // Describe the picture to encode.
    NV_ENC_PIC_PARAMS params = { 0 };
    params.version = NV_ENC_PIC_PARAMS_VER;
    params.inputBuffer = m_input.inputBuffer;
    params.outputBitstream = m_output.bitstreamBuffer;
    params.bufferFmt = NV_ENC_BUFFER_FORMAT_NV12;
    params.inputWidth = m_conf.width;
    params.inputHeight = m_conf.height;
    params.completionEvent = 0;            // synchronous mode: no completion event
    params.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
    params.encodePicFlags = 0;
    if (force_keyframe) {
        params.encodePicFlags |= NV_ENC_PIC_FLAG_FORCEINTRA;
    }
    params.inputTimeStamp = to_usec(timestamp);
    params.inputDuration = to_usec(1.0 / m_conf.target_framerate);

    // Encode!
    stat = nvenc.nvEncEncodePicture(m_encoder, &params);
    if (stat != NV_ENC_SUCCESS) { return false; }

    // Retrieve the encoded bitstream.
    {
        NV_ENC_LOCK_BITSTREAM lock_params = { 0 };
        lock_params.version = NV_ENC_LOCK_BITSTREAM_VER;
        lock_params.outputBitstream = m_output.bitstreamBuffer;
        stat = nvenc.nvEncLockBitstream(m_encoder, &lock_params);
        if (stat != NV_ENC_SUCCESS) { return false; }
        dst.data.append((char*)lock_params.bitstreamBufferPtr, lock_params.bitstreamSizeInBytes);
        dst.gatherNALInformation();
        stat = nvenc.nvEncUnlockBitstream(m_encoder, m_output.bitstreamBuffer);
        if (stat != NV_ENC_SUCCESS) { return false; }
    }
    return true;
}
示例2: ff_nvenc_encode_frame
/**
 * Submit one frame to NVENC and, when output is ready, fetch one packet.
 *
 * @param avctx      codec context; priv_data holds the NVENCContext.
 * @param pkt        receives the encoded packet when *got_packet is set.
 * @param frame      frame to encode, or NULL to flush (sends NV_ENC_PIC_FLAG_EOS).
 * @param got_packet set to 1 if pkt was filled, 0 otherwise.
 * @return 0 on success, a negative AVERROR on failure.
 *
 * Fix vs. original: `¶ms` (HTML-mangled) restored to `&params` in the
 * nvenc_codec_specific_pic_params() and nvEncEncodePicture() calls.
 */
int ff_nvenc_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                          const AVFrame *frame, int *got_packet)
{
    NVENCContext *ctx = avctx->priv_data;
    NV_ENCODE_API_FUNCTION_LIST *nv = &ctx->nvel.nvenc_funcs;
    NV_ENC_PIC_PARAMS params = { 0 };
    NVENCInputSurface *in = NULL;
    NVENCOutputSurface *out = NULL;
    int ret;

    params.version = NV_ENC_PIC_PARAMS_VER;

    if (frame) {
        // Upload the frame into an input surface and pair it with an output surface.
        ret = nvenc_enqueue_frame(avctx, frame, &in);
        if (ret < 0)
            return ret;
        out = get_output_surface(ctx);
        if (!out)
            return AVERROR_BUG;
        out->in = in;

        params.inputBuffer = in->in;
        params.bufferFmt = in->format;
        params.inputWidth = frame->width;
        params.inputHeight = frame->height;
        params.outputBitstream = out->out;
        params.inputTimeStamp = frame->pts;

        // Interlaced encoding: pick field order from the frame's field flag.
        if (avctx->flags & CODEC_FLAG_INTERLACED_DCT) {
            if (frame->top_field_first)
                params.pictureStruct = NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;
            else
                params.pictureStruct = NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
        } else {
            params.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
        }

        nvenc_codec_specific_pic_params(avctx, &params);

        // Remember the pts so it can be reattached to the output packet later.
        ret = nvenc_enqueue_timestamp(ctx->timestamps, frame->pts);
        if (ret < 0)
            return ret;
    } else {
        // NULL frame means flush: signal end-of-stream to drain the encoder.
        params.encodePicFlags = NV_ENC_PIC_FLAG_EOS;
    }

    // NV_ENC_ERR_NEED_MORE_INPUT is not an error: the encoder is buffering
    // (e.g. waiting for B-frame lookahead) and will emit output later.
    ret = nv->nvEncEncodePicture(ctx->nvenc_ctx, &params);
    if (ret != NV_ENC_SUCCESS &&
        ret != NV_ENC_ERR_NEED_MORE_INPUT)
        return AVERROR_UNKNOWN;

    if (out) {
        // Track the in-flight surface so its bitstream can be collected in order.
        ret = nvenc_enqueue_surface(ctx->pending, out);
        if (ret < 0)
            return ret;
    }

    // Only pull a packet once the encoder has stopped asking for more input
    // and there is at least one pending surface to drain.
    if (ret != NV_ENC_ERR_NEED_MORE_INPUT &&
        av_fifo_size(ctx->pending)) {
        ret = nvenc_get_frame(avctx, pkt);
        if (ret < 0)
            return ret;
        *got_packet = 1;
    } else {
        *got_packet = 0;
    }

    return 0;
}