本文整理汇总了C++中PicturePtr类的典型用法代码示例。如果您正苦于以下问题:C++ PicturePtr类的具体用法?C++ PicturePtr怎么用?C++ PicturePtr使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了PicturePtr类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: locker
Encode_Status VaapiEncoderH264::encodePicture(const PicturePtr& picture)
{
    // Encode one picture: allocate a reconstructed surface, fill all
    // parameter buffers under the parameter lock, then submit the frame
    // and update the reference list. Any failure yields ENCODE_FAIL.
    SurfacePtr reconstruct = createSurface();
    if (!reconstruct)
        return ENCODE_FAIL;
    {
        // Parameter buffers must be built while holding m_paramLock.
        AutoLock locker(m_paramLock);
#ifdef __BUILD_GET_MV__
        uint32_t size;
        void *buffer = NULL;
        getMVBufferSize(&size);
        if (!picture->editMVBuffer(buffer, &size))
            return ENCODE_FAIL;
#endif
        // Short-circuit keeps the original ordering: SPS, misc params,
        // PPS/picture params, then slices.
        if (!ensureSequence(picture)
            || !ensureMiscParams(picture.get())
            || !ensurePicture(picture, reconstruct)
            || !ensureSlices(picture))
            return ENCODE_FAIL;
    }
    if (!picture->encode())
        return ENCODE_FAIL;
    if (!referenceListUpdate(picture, reconstruct))
        return ENCODE_FAIL;
    return ENCODE_SUCCESS;
}
示例2: while
void FFMPEGMovie::_consume()
{
    // Consumer-thread loop: wait for a new target timestamp, seek if
    // required, then drain decoded frames from the queue until the frame
    // matching the requested presentation time is reached. The resulting
    // frame (or an error) is delivered through _promise.
    while( !_stopConsuming )
    {
        {
            std::unique_lock<std::mutex> lock( _targetMutex );
            // Re-check the flag in a loop to guard against spurious wakeups.
            while( !_targetChangedSent )
                _targetChanged.wait( lock );
            _targetChangedSent = false;
            if( _stopConsuming )
                return;
            if( _seekTo( _targetTimestamp ))
                _ptsPosition = UNDEFINED_PTS; // Reset position after seeking
        }
        PicturePtr frame;
        // Discard frames while the current pts is still more than one frame
        // duration away from the target (stop early at EOF or on shutdown).
        while( !_stopConsuming && _getPtsDelta() >= getFrameDuration() && !isAtEOF( ))
        {
            frame = _queue.dequeue();
            _ptsPosition = _videoStream->getPositionInSec( frame->getTimestamp( ));
        }
        if( !frame )
        {
            // No frame was dequeued at all: unblock the waiting caller with
            // an exception instead of leaving the promise unfulfilled.
            auto exception = std::runtime_error( "Frame unavailable error" );
            _promise.set_exception( std::make_exception_ptr( exception ));
            return;
        }
        _promise.set_value( frame );
    }
}
示例3: FromResource
PicturePtr Picture::FromResource(HWND hwnd,
                                 HINSTANCE instance,
                                 const char *type,
                                 const char *name)
{
    // Load an IPicture from an embedded resource and wrap it in a Picture
    // sized for the given window's DC. Returns an empty pointer on failure.
    PicturePtr result;
    IStream *stream = StreamFromResource(instance, type, name);
    if (!stream)
        return result;
    IPicture *pic = PictureFromStream(stream);
    stream->Release(); // stream is no longer needed once the picture exists
    if (!pic)
        return result;
    if (HDC hdc = GetDC(hwnd)) {
        const int width = MakeWidth(hdc, pic);
        const int height = MakeHeight(hdc, pic);
        ReleaseDC(hwnd, hdc);
        if (width && height)
            result.reset(new Picture(pic, width, height));
    }
    // Picture takes ownership on success; otherwise drop our reference.
    if (!result)
        pic->Release();
    return result;
}
示例4: createPicture
PicturePtr VaapiDecoderBase::createPicture(int64_t timeStamp /* , VaapiPictureStructure structure = VAAPI_PICTURE_STRUCTURE_FRAME */)
{
    // Acquire one free surface from m_surfacePool (via the base decoder)
    // and wrap it in a new decode picture. Returns a null PicturePtr when
    // no surface is available.
    SurfacePtr surface = createSurface();
    if (!surface) {
        ERROR("create surface failed");
        return PicturePtr();
    }
    return PicturePtr(new VaapiDecPicture(m_context, surface, timeStamp));
}
示例5: setH264PictureReference
static void
setH264PictureReference(VaapiDecPictureH264* picture,
                        uint32_t referenceFlags, bool otherField)
{
    // Replace this field's reference flags with the requested ones.
    VAAPI_PICTURE_FLAG_UNSET(picture, VAAPI_PICTURE_FLAGS_REFERENCE);
    VAAPI_PICTURE_FLAG_SET(picture, referenceFlags);
    if (!otherField)
        return;
    // Mirror the flags onto the complementary field, if it is still alive.
    PicturePtr complementary = picture->m_otherField.lock();
    if (!complementary)
        return;
    VAAPI_PICTURE_FLAG_UNSET(complementary.get(), VAAPI_PICTURE_FLAGS_REFERENCE);
    VAAPI_PICTURE_FLAG_SET(complementary.get(), referenceFlags);
}
示例6: ERROR
bool VaapiEncoderH264::ensureSequence(const PicturePtr& picture)
{
    // Build the sequence parameter buffer (SPS) for this picture; IDR
    // pictures additionally carry a packed sequence header.
    VAEncSequenceParameterBufferH264* seqParam;
    if (!picture->editSequence(seqParam) || !fill(seqParam)) {
        ERROR("failed to create sequence parameter buffer (SPS)");
        return false;
    }
    if (!picture->isIdr())
        return true;
    if (ensureSequenceHeader(picture, seqParam))
        return true;
    ERROR ("failed to create packed sequence header buffer");
    return false;
}
示例7: if
bool VaapiDecoderH265::fillIqMatrix(const PicturePtr& picture, const H265SliceHdr* const slice)
{
    // Choose the active scaling list with PPS data taking precedence over
    // SPS data; when the SPS enables scaling lists but neither carries
    // explicit data, the parser-populated PPS list is used.
    H265PPS* pps = slice->pps;
    H265SPS* sps = pps->sps;
    H265ScalingList* scalingList = NULL;
    if (pps->scaling_list_data_present_flag) {
        scalingList = &pps->scaling_list;
    } else if (sps->scaling_list_enabled_flag) {
        scalingList = sps->scaling_list_data_present_flag
            ? &sps->scaling_list
            : &pps->scaling_list;
    } else {
        // No scaling list signalled: nothing to fill, keep defaults.
        return true;
    }
    VAIQMatrixBufferHEVC* iqMatrix;
    if (!picture->editIqMatrix(iqMatrix))
        return false;
    fillScalingList4x4(iqMatrix, scalingList);
    fillScalingList8x8(iqMatrix, scalingList);
    fillScalingList16x16(iqMatrix, scalingList);
    fillScalingList32x32(iqMatrix, scalingList);
    fillScalingListDc16x16(iqMatrix, scalingList);
    fillScalingListDc32x32(iqMatrix, scalingList);
    return true;
}
示例8: execRefPicMarking
bool VaapiDPBManager::execRefPicMarking(const PicturePtr& pic,
bool * hasMMCO5)
{
*hasMMCO5 = false;
if (!VAAPI_PICTURE_IS_REFERENCE(pic)) {
return true;
}
if (!VAAPI_H264_PICTURE_IS_IDR(pic)) {
H264SliceHdr* header = pic->getLastSliceHeader();
H264DecRefPicMarking *const decRefPicMarking =
&header->dec_ref_pic_marking;
if (decRefPicMarking->adaptive_ref_pic_marking_mode_flag) {
if (!execRefPicMarkingAdaptive(pic, decRefPicMarking, hasMMCO5))
return false;
} else {
if (!execRefPicMarkingSlidingWindow(pic))
return false;
}
}
return true;
}
示例9: outputPicture
YamiStatus VaapiDecoderBase::outputPicture(const PicturePtr& picture)
{
    //TODO: reorder poc
    // Hand the picture's surface (with its timestamp) to the surface pool
    // for output; map the boolean result onto a YamiStatus.
    const bool queued = m_surfacePool->output(picture->getSurface(),
                                              picture->m_timeStamp);
    return queued ? YAMI_SUCCESS : YAMI_FAIL;
}
示例10:
/* fill quant parameter buffers functions*/

// Clamp a VP8 quantizer index into the valid range [0, 127].
static int32_t clampQuantIndex(int32_t qi)
{
    const int32_t MAX_QI_INDEX = 127;
    if (qi < 0)
        return 0;
    return qi > MAX_QI_INDEX ? MAX_QI_INDEX : qi;
}

// Fill the VAIQMatrixBufferVP8 for pic from the parsed frame header.
// For each of the four segments the base index comes from the segmentation
// data (absolute, or delta added to y_ac_qi) or, when segmentation is
// disabled, directly from y_ac_qi; the remaining five components apply the
// per-plane deltas. Every index is clamped to [0, 127].
// Fixes: stray double semicolon removed; the six duplicated clamp
// expressions are factored into clampQuantIndex().
bool VaapiDecoderVP8::ensureQuantMatrix(const PicturePtr& pic)
{
    Vp8Segmentation *seg = &m_parser.segmentation;
    VAIQMatrixBufferVP8 *iqMatrix;
    if (!pic->editIqMatrix(iqMatrix))
        return false;
    for (int32_t i = 0; i < 4; i++) {
        int32_t baseQI;
        if (seg->segmentation_enabled) {
            baseQI = seg->quantizer_update_value[i];
            if (!seg->segment_feature_mode) // 0 means delta update
                baseQI += m_frameHdr.quant_indices.y_ac_qi;
        } else {
            baseQI = m_frameHdr.quant_indices.y_ac_qi;
        }
        // the first component is y_ac_qi
        iqMatrix->quantization_index[i][0] = clampQuantIndex(baseQI);
        iqMatrix->quantization_index[i][1] =
            clampQuantIndex(baseQI + m_frameHdr.quant_indices.y_dc_delta);
        iqMatrix->quantization_index[i][2] =
            clampQuantIndex(baseQI + m_frameHdr.quant_indices.y2_dc_delta);
        iqMatrix->quantization_index[i][3] =
            clampQuantIndex(baseQI + m_frameHdr.quant_indices.y2_ac_delta);
        iqMatrix->quantization_index[i][4] =
            clampQuantIndex(baseQI + m_frameHdr.quant_indices.uv_dc_delta);
        iqMatrix->quantization_index[i][5] =
            clampQuantIndex(baseQI + m_frameHdr.quant_indices.uv_ac_delta);
    }
    return true;
}
示例11: assert
/* Adds slice headers to picture */
// Splits the frame's macroblocks into m_numSlices slices and creates one
// VAEncSliceParameterBufferH264 per slice on the picture. Returns false if
// a slice buffer cannot be allocated.
bool VaapiEncoderH264::addSliceHeaders (const PicturePtr& picture) const
{
    VAEncSliceParameterBufferH264 *sliceParam;
    uint32_t sliceOfMbs, sliceModMbs, curSliceMbs;
    uint32_t mbSize;
    uint32_t lastMbIndex;
    assert (picture);
    if (picture->m_type != VAAPI_PICTURE_TYPE_I) {
        /* have one reference frame at least */
        assert(m_refList0.size() > 0);
    }
    // Even distribution: the first (mbSize % m_numSlices) slices get one
    // extra macroblock so the total covers the whole frame exactly.
    mbSize = m_mbWidth * m_mbHeight;
    assert (m_numSlices && m_numSlices < mbSize);
    sliceOfMbs = mbSize / m_numSlices;
    sliceModMbs = mbSize % m_numSlices;
    lastMbIndex = 0;
    for (uint32_t i = 0; i < m_numSlices; ++i) {
        curSliceMbs = sliceOfMbs;
        if (sliceModMbs) {
            ++curSliceMbs;
            --sliceModMbs;
        }
        if (!picture->newSlice(sliceParam))
            return false;
        sliceParam->macroblock_address = lastMbIndex;
        sliceParam->num_macroblocks = curSliceMbs;
        sliceParam->macroblock_info = VA_INVALID_ID;
        sliceParam->slice_type = h264_get_slice_type (picture->m_type);
        assert (sliceParam->slice_type != -1);
        sliceParam->idr_pic_id = m_idrNum;
        sliceParam->pic_order_cnt_lsb = picture->m_poc;
        // Explicitly override the active reference counts from our lists.
        sliceParam->num_ref_idx_active_override_flag = 1;
        if (picture->m_type != VAAPI_PICTURE_TYPE_I && m_refList0.size() > 0)
            sliceParam->num_ref_idx_l0_active_minus1 = m_refList0.size() - 1;
        if (picture->m_type == VAAPI_PICTURE_TYPE_B && m_refList1.size() > 0)
            sliceParam->num_ref_idx_l1_active_minus1 = m_refList1.size() - 1;
        fillReferenceList(sliceParam);
        // NOTE(review): QP delta is capped at 4 — looks encoder-tuned;
        // confirm against the target driver's expectations.
        sliceParam->slice_qp_delta = initQP() - minQP();
        if (sliceParam->slice_qp_delta > 4)
            sliceParam->slice_qp_delta = 4;
        sliceParam->slice_alpha_c0_offset_div2 = 2;
        sliceParam->slice_beta_offset_div2 = 2;
        /* set calculation for next slice */
        lastMbIndex += curSliceMbs;
    }
    assert (lastMbIndex == mbSize);
    return true;
}
示例12: getSliceDataByteOffset
// Fills one VASliceParameterBufferHEVC on the picture from the parsed HEVC
// slice header and NAL unit. Returns false if the slice buffer cannot be
// created or reference/weight tables cannot be filled.
bool VaapiDecoderH265::fillSlice(const PicturePtr& picture,
    const H265SliceHdr* const theSlice, const H265NalUnit* const nalu)
{
    const H265SliceHdr* slice = theSlice;
    VASliceParameterBufferHEVC* sliceParam;
    if (!picture->newSlice(sliceParam, nalu->data + nalu->offset, nalu->size))
        return false;
    sliceParam->slice_data_byte_offset =
        getSliceDataByteOffset(slice, nalu->header_bytes);
    sliceParam->slice_segment_address = slice->segment_address;
// Helper macros copy a slice-header field into the LongSliceFlags bitfield,
// with or without the "slice_" prefix on the VA side.
#define FILL_LONG(f) sliceParam->LongSliceFlags.fields.f = slice->f
#define FILL_LONG_SLICE(f) sliceParam->LongSliceFlags.fields.slice_##f = slice->f
    //how to fill this
    //LastSliceOfPic
    FILL_LONG(dependent_slice_segment_flag);
    //follow spec
    // Dependent slice segments inherit most header fields from the previous
    // independent slice, so switch to it for the remaining copies.
    if (slice->dependent_slice_segment_flag) {
        slice = m_prevSlice.get();
    }
    if (!fillReferenceIndex(sliceParam, slice))
        return false;
    FILL_LONG_SLICE(type);
    sliceParam->LongSliceFlags.fields.color_plane_id = slice->colour_plane_id;
    FILL_LONG_SLICE(sao_luma_flag);
    FILL_LONG_SLICE(sao_chroma_flag);
    FILL_LONG(mvd_l1_zero_flag);
    FILL_LONG(cabac_init_flag);
    FILL_LONG_SLICE(temporal_mvp_enabled_flag);
    // Deblocking state comes from the slice when overridden, else the PPS.
    if (slice->deblocking_filter_override_flag)
        FILL_LONG_SLICE(deblocking_filter_disabled_flag);
    else
        sliceParam->LongSliceFlags.fields.slice_deblocking_filter_disabled_flag=
            slice->pps->deblocking_filter_disabled_flag;
    FILL_LONG(collocated_from_l0_flag);
    FILL_LONG_SLICE(loop_filter_across_slices_enabled_flag);
// Same idea for plain (non-bitfield) members of the VA slice parameter.
#define FILL(f) sliceParam->f = slice->f
#define FILL_SLICE(f) sliceParam->slice_##f = slice->f
    FILL(collocated_ref_idx);
    /* following fields fill in fillReference
       num_ref_idx_l0_active_minus1
       num_ref_idx_l1_active_minus1*/
    FILL_SLICE(qp_delta);
    FILL_SLICE(cb_qp_offset);
    FILL_SLICE(cr_qp_offset);
    FILL_SLICE(beta_offset_div2);
    FILL_SLICE(tc_offset_div2);
    if (!fillPredWeightTable(sliceParam, slice))
        return false;
    FILL(five_minus_max_num_merge_cand);
    return true;
}
示例13: getOutput
// Copies the next encoded picture's coded data into outBuffer.
// Returns checkEmpty()'s status when there is no pending output, the
// picture's status if extracting its output fails, ENCODE_SUCCESS otherwise.
// Fix: isEmpty was read uninitialized if checkEmpty() ever returned without
// writing it (undefined behavior); it is now explicitly initialized.
Encode_Status VaapiEncoderBase::getOutput(VideoEncOutputBuffer * outBuffer, bool withWait)
{
    bool isEmpty = false;
    PicturePtr picture;
    Encode_Status ret;
    FUNC_ENTER();
    ret = checkEmpty(outBuffer, &isEmpty);
    if (isEmpty)
        return ret;
    getPicture(picture);
    ret = picture->getOutput(outBuffer);
    if (ret != ENCODE_SUCCESS)
        return ret;
    checkCodecData(outBuffer);
    return ENCODE_SUCCESS;
}
示例14: removeShortReference
void VaapiDPBManager::removeShortReference(const PicturePtr& picture)
{
VaapiDecPictureH264 *refPicture;
uint32_t i;
uint32_t frameNum = picture->m_frameNum;
PicturePtr strong = picture->m_otherField.lock();
VaapiDecPictureH264* other = strong.get();
for (i = 0; i < DPBLayer->shortRefCount; ++i) {
if (DPBLayer->shortRef[i]->m_frameNum == frameNum) {
refPicture = DPBLayer->shortRef[i];
if (refPicture != other) {
setH264PictureReference(refPicture, 0, false);
ARRAY_REMOVE_INDEX(DPBLayer->shortRef, i);
}
return;
}
}
}
示例15: memcpy
/* fill the coefficient probability table buffer */
bool VaapiDecoderVP8::ensureProbabilityTable(const PicturePtr& pic)
{
    // XXX, create/render VAProbabilityDataBufferVP8 in base class
    VAProbabilityDataBufferVP8* probTable = NULL;
    if (!pic->editProbTable(probTable))
        return false;
    // Copy the parsed token probabilities straight into the VA buffer.
    memcpy(probTable->dct_coeff_probs,
           m_frameHdr.token_probs.prob,
           sizeof(m_frameHdr.token_probs.prob));
    return true;
}