This article collects typical usage examples of the C++ SEIMessages class. If you are unsure what SEIMessages is for or how it is used, the hand-picked class code examples below may help. Eleven SEIMessages code examples are shown, ordered by popularity by default.
Example 1: deleteSEIs
Void deleteSEIs (SEIMessages &seiList)
{
  for (SEIMessages::iterator it=seiList.begin(); it!=seiList.end(); it++)
  {
    delete (*it);
  }
  seiList.clear();
}
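The iteration pattern above implies that SEIMessages is a sequence container of owning SEI pointers (begin/end/push_back/erase are used throughout these examples, and deleteSEIs frees each element). The following self-contained sketch models that ownership convention with std::list and a hypothetical minimal SEI stand-in; the real HM classes carry far more state, so treat this only as an illustration of how deleteSEIs is intended to be used.

#include <list>

// Hypothetical stand-in for the HM SEI base class (assumption: the real class
// has a virtual destructor; only that matters for this ownership sketch).
struct SEI
{
  virtual ~SEI() {}
};

typedef std::list<SEI*> SEIMessages;   // assumption: a list of owning pointers

// Same pattern as Example 1: delete every owned message, then empty the list.
void deleteSEIs(SEIMessages &seiList)
{
  for (SEIMessages::iterator it = seiList.begin(); it != seiList.end(); it++)
  {
    delete (*it);
  }
  seiList.clear();
}

int main()
{
  SEIMessages seiList;
  seiList.push_back(new SEI());   // the list owns the messages it stores
  seiList.push_back(new SEI());

  deleteSEIs(seiList);            // frees both messages and leaves the list empty
  return 0;
}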
Example 2: SEIWriter::writeSEImessages
/**
 * marshal all SEI messages in provided list into one bitstream bs
 */
Void SEIWriter::writeSEImessages(TComBitIf& bs, const SEIMessages &seiList, const TComSPS *sps, Bool isNested)
{
#if ENC_DEC_TRACE
  if (g_HLSTraceEnable)
    xTraceSEIHeader();
#endif

  TComBitCounter bs_count;

  for (SEIMessages::const_iterator sei=seiList.begin(); sei!=seiList.end(); sei++)
  {
    // calculate how large the payload data is
    // TODO: this would be far nicer if it used vectored buffers
    bs_count.resetBits();
    setBitstream(&bs_count);

#if ENC_DEC_TRACE
    Bool traceEnable = g_HLSTraceEnable;
    g_HLSTraceEnable = false;
#endif
    xWriteSEIpayloadData(bs_count, **sei, sps);
#if ENC_DEC_TRACE
    g_HLSTraceEnable = traceEnable;
#endif

    UInt payload_data_num_bits = bs_count.getNumberOfWrittenBits();
    assert(0 == payload_data_num_bits % 8);

    setBitstream(&bs);
    UInt payloadType = (*sei)->payloadType();
    for (; payloadType >= 0xff; payloadType -= 0xff)
    {
      WRITE_CODE(0xff, 8, "payload_type");
    }
    WRITE_CODE(payloadType, 8, "payload_type");

    UInt payloadSize = payload_data_num_bits/8;
    for (; payloadSize >= 0xff; payloadSize -= 0xff)
    {
      WRITE_CODE(0xff, 8, "payload_size");
    }
    WRITE_CODE(payloadSize, 8, "payload_size");

    /* payloadData */
#if ENC_DEC_TRACE
    if (g_HLSTraceEnable)
      xTraceSEIMessageType((*sei)->payloadType());
#endif
    xWriteSEIpayloadData(bs, **sei, sps);
  }
  if (!isNested)
  {
    xWriteRbspTrailingBits();
  }
}
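The two for loops above write payload_type and payload_size using the SEI extension-byte scheme: each full 255 contained in the value is emitted as an 0xFF byte, and the remainder goes into the final byte. The sketch below reproduces that scheme over a plain byte vector (the helper names are illustrative, not part of HM), together with the matching read loop that appears later in Example 7.

#include <cstdio>
#include <vector>

// Write one extendible SEI header value (payload_type or payload_size):
// emit an 0xFF byte for each full 255 in the value, then the remainder.
static void writeSeiValue(std::vector<unsigned char> &bytes, unsigned value)
{
  for (; value >= 0xff; value -= 0xff)
  {
    bytes.push_back(0xff);
  }
  bytes.push_back((unsigned char)value);
}

// Read side: keep accumulating while 0xFF bytes arrive (this mirrors the
// do/while loops at the top of SEIReader::xReadSEImessage in Example 7).
static unsigned readSeiValue(const std::vector<unsigned char> &bytes, size_t &pos)
{
  unsigned value = 0;
  unsigned char byte;
  do
  {
    byte = bytes[pos++];
    value += byte;
  } while (byte == 0xff);
  return value;
}

int main()
{
  std::vector<unsigned char> bytes;
  writeSeiValue(bytes, 300);   // coded as 0xFF 0x2D (255 + 45)

  size_t pos = 0;
  std::printf("decoded %u from %u bytes\n", readSeiValue(bytes, pos), (unsigned)bytes.size());
  return 0;
}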
Example 3: getSeisByType
SEIMessages getSeisByType(SEIMessages &seiList, SEI::PayloadType seiType)
{
  SEIMessages result;

  for (SEIMessages::iterator it=seiList.begin(); it!=seiList.end(); it++)
  {
    if ((*it)->payloadType() == seiType)
    {
      result.push_back(*it);
    }
  }
  return result;
}
Example 4: extractSeisByType
SEIMessages extractSeisByType(SEIMessages &seiList, SEI::PayloadType seiType)
{
  SEIMessages result;

  SEIMessages::iterator it=seiList.begin();
  while ( it!=seiList.end() )
  {
    if ((*it)->payloadType() == seiType)
    {
      result.push_back(*it);
      it = seiList.erase(it);
    }
    else
    {
      it++;
    }
  }
  return result;
}
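Examples 3 and 4 differ only in ownership: getSeisByType returns pointers that are still held by the source list, while extractSeisByType erases them from the source, so responsibility for deleting them moves with the returned list. The self-contained sketch below uses a hypothetical minimal SEI stand-in to make that difference explicit; only the two filter functions mirror the code above.

#include <cstdio>
#include <list>

// Hypothetical minimal stand-ins (assumption: payloadType() is what
// distinguishes message kinds, as in the real SEI class hierarchy).
struct SEI
{
  explicit SEI(int type) : m_type(type) {}
  virtual ~SEI() {}
  int payloadType() const { return m_type; }
  int m_type;
};
typedef std::list<SEI*> SEIMessages;

// Non-destructive filter (Example 3): the source list keeps its pointers.
SEIMessages getSeisByType(SEIMessages &seiList, int seiType)
{
  SEIMessages result;
  for (SEIMessages::iterator it = seiList.begin(); it != seiList.end(); it++)
  {
    if ((*it)->payloadType() == seiType)
    {
      result.push_back(*it);
    }
  }
  return result;
}

// Destructive filter (Example 4): matching messages move out of the source.
SEIMessages extractSeisByType(SEIMessages &seiList, int seiType)
{
  SEIMessages result;
  SEIMessages::iterator it = seiList.begin();
  while (it != seiList.end())
  {
    if ((*it)->payloadType() == seiType)
    {
      result.push_back(*it);
      it = seiList.erase(it);
    }
    else
    {
      it++;
    }
  }
  return result;
}

int main()
{
  SEIMessages seiList;
  seiList.push_back(new SEI(0));
  seiList.push_back(new SEI(1));
  seiList.push_back(new SEI(0));

  SEIMessages shared = getSeisByType(seiList, 0);     // seiList still holds all 3
  SEIMessages owned  = extractSeisByType(seiList, 0); // seiList now holds only 1

  std::printf("shared=%u owned=%u remaining=%u\n",
              (unsigned)shared.size(), (unsigned)owned.size(), (unsigned)seiList.size());

  // Free each message exactly once: 'owned' now owns the two type-0 messages and
  // 'seiList' still owns the type-1 message; 'shared' aliases the same pointers
  // as 'owned' and must not be deleted as well.
  for (SEIMessages::iterator it = owned.begin(); it != owned.end(); it++)
  {
    delete *it;
  }
  for (SEIMessages::iterator it = seiList.begin(); it != seiList.end(); it++)
  {
    delete *it;
  }
  return 0;
}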
Example 5: SEIEncoder::initSEIScalableNesting
//! initialize scalable nesting SEI message.
//! Note: The SEI message structures input into this function will become part of the scalable nesting SEI and will be
//!       automatically freed when the nesting SEI is disposed.
Void SEIEncoder::initSEIScalableNesting(SEIScalableNesting *scalableNestingSEI, SEIMessages &nestedSEIs)
{
  assert (m_isInitialized);
  assert (scalableNestingSEI != NULL);

  scalableNestingSEI->m_bitStreamSubsetFlag           = 1;     // If the nested SEI messages are picture buffering SEI messages, picture timing SEI messages or sub-picture timing SEI messages, bitstream_subset_flag shall be equal to 1
  scalableNestingSEI->m_nestingOpFlag                 = 0;
  scalableNestingSEI->m_nestingNumOpsMinus1           = 0;     // nesting_num_ops_minus1
  scalableNestingSEI->m_allLayersFlag                 = 0;
  scalableNestingSEI->m_nestingNoOpMaxTemporalIdPlus1 = 6 + 1; // nesting_no_op_max_temporal_id_plus1
  scalableNestingSEI->m_nestingNumLayersMinus1        = 1 - 1; // nesting_num_layers_minus1
  scalableNestingSEI->m_nestingLayerId[0]             = 0;

  scalableNestingSEI->m_nestedSEIs.clear();
  for (SEIMessages::iterator it=nestedSEIs.begin(); it!=nestedSEIs.end(); it++)
  {
    scalableNestingSEI->m_nestedSEIs.push_back((*it));
  }
}
Example 6: TDecTop::xDecodeSEI
Void TDecTop::xDecodeSEI( TComInputBitstream* bs, const NalUnitType nalUnitType )
{
  if(nalUnitType == NAL_UNIT_SUFFIX_SEI)
  {
    m_seiReader.parseSEImessage( bs, m_pcPic->getSEIs(), nalUnitType, m_parameterSetManager.getActiveSPS(), m_pDecodedSEIOutputStream );
  }
  else
  {
    m_seiReader.parseSEImessage( bs, m_SEIs, nalUnitType, m_parameterSetManager.getActiveSPS(), m_pDecodedSEIOutputStream );
    SEIMessages activeParamSets = getSeisByType(m_SEIs, SEI::ACTIVE_PARAMETER_SETS);
    if (activeParamSets.size()>0)
    {
      SEIActiveParameterSets *seiAps = (SEIActiveParameterSets*)(*activeParamSets.begin());
      assert(seiAps->activeSeqParameterSetId.size()>0);
      if (! m_parameterSetManager.activateSPSWithSEI(seiAps->activeSeqParameterSetId[0] ))
      {
        printf ("Warning SPS activation with Active parameter set SEI failed");
      }
    }
  }
}
Example 7: SEIReader::xReadSEImessage
Void SEIReader::xReadSEImessage(SEIMessages& seis, const NalUnitType nalUnitType, TComSPS *sps)
{
#if ENC_DEC_TRACE
xTraceSEIHeader();
#endif
Int payloadType = 0;
UInt val = 0;
do
{
READ_CODE (8, val, "payload_type");
payloadType += val;
} while (val==0xFF);
UInt payloadSize = 0;
do
{
READ_CODE (8, val, "payload_size");
payloadSize += val;
} while (val==0xFF);
#if ENC_DEC_TRACE
xTraceSEIMessageType((SEI::PayloadType)payloadType);
#endif
/* extract the payload for this single SEI message.
* This allows greater safety in erroneous parsing of an SEI message
* from affecting subsequent messages.
* After parsing the payload, bs needs to be restored as the primary
* bitstream.
*/
TComInputBitstream *bs = getBitstream();
setBitstream(bs->extractSubstream(payloadSize * 8));
SEI *sei = NULL;
if(nalUnitType == NAL_UNIT_SEI)
{
switch (payloadType)
{
case SEI::USER_DATA_UNREGISTERED:
sei = new SEIuserDataUnregistered;
xParseSEIuserDataUnregistered((SEIuserDataUnregistered&) *sei, payloadSize);
break;
case SEI::ACTIVE_PARAMETER_SETS:
sei = new SEIActiveParameterSets;
xParseSEIActiveParameterSets((SEIActiveParameterSets&) *sei, payloadSize);
break;
case SEI::DECODING_UNIT_INFO:
if (!sps)
{
printf ("Warning: Found Decoding unit SEI message, but no active SPS is available. Ignoring.");
}
else
{
sei = new SEIDecodingUnitInfo;
xParseSEIDecodingUnitInfo((SEIDecodingUnitInfo&) *sei, payloadSize, sps);
}
break;
case SEI::BUFFERING_PERIOD:
if (!sps)
{
printf ("Warning: Found Buffering period SEI message, but no active SPS is available. Ignoring.");
}
else
{
sei = new SEIBufferingPeriod;
xParseSEIBufferingPeriod((SEIBufferingPeriod&) *sei, payloadSize, sps);
}
break;
case SEI::PICTURE_TIMING:
if (!sps)
{
printf ("Warning: Found Picture timing SEI message, but no active SPS is available. Ignoring.");
}
else
{
sei = new SEIPictureTiming;
xParseSEIPictureTiming((SEIPictureTiming&)*sei, payloadSize, sps);
}
break;
case SEI::RECOVERY_POINT:
sei = new SEIRecoveryPoint;
xParseSEIRecoveryPoint((SEIRecoveryPoint&) *sei, payloadSize);
break;
case SEI::FRAME_PACKING:
sei = new SEIFramePacking;
xParseSEIFramePacking((SEIFramePacking&) *sei, payloadSize);
break;
case SEI::DISPLAY_ORIENTATION:
sei = new SEIDisplayOrientation;
xParseSEIDisplayOrientation((SEIDisplayOrientation&) *sei, payloadSize);
break;
case SEI::TEMPORAL_LEVEL0_INDEX:
sei = new SEITemporalLevel0Index;
xParseSEITemporalLevel0Index((SEITemporalLevel0Index&) *sei, payloadSize);
break;
case SEI::REGION_REFRESH_INFO:
sei = new SEIGradualDecodingRefreshInfo;
xParseSEIGradualDecodingRefreshInfo((SEIGradualDecodingRefreshInfo&) *sei, payloadSize);
//......... part of the code omitted here .........
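The comment in Example 7 explains why the payload is parsed from a substream: by carving out exactly payloadSize bytes, a malformed SEI message cannot desynchronize the parsing of subsequent messages. The toy sketch below illustrates that isolation idea with a simple byte reader; ByteReader and extractSubstream here are simplified stand-ins for TComInputBitstream and its substream extraction, not the real HM API.

#include <cstdio>
#include <vector>

// Hypothetical byte reader standing in for TComInputBitstream: it only tracks
// a read position over a shared buffer.
struct ByteReader
{
  const std::vector<unsigned char> *data;
  size_t pos;
};

// Simplified analogue of bs->extractSubstream(payloadSize * 8): carve out
// exactly payloadSize bytes for one SEI payload and advance the outer reader
// past it, so later messages start at a known offset no matter how the
// payload itself is parsed.
static std::vector<unsigned char> extractSubstream(ByteReader &bs, size_t payloadSize)
{
  std::vector<unsigned char> payload(bs.data->begin() + bs.pos,
                                     bs.data->begin() + bs.pos + payloadSize);
  bs.pos += payloadSize;
  return payload;
}

int main()
{
  // Two back-to-back "payloads": 3 bytes, then 2 bytes.
  const unsigned char raw[] = { 1, 2, 3, 0xAA, 0xBB };
  std::vector<unsigned char> buffer(raw, raw + sizeof(raw));
  ByteReader bs = { &buffer, 0 };

  std::vector<unsigned char> first  = extractSubstream(bs, 3);
  std::vector<unsigned char> second = extractSubstream(bs, 2);

  // Even if parsing of 'first' stops early or tries to read too far, 'second'
  // still begins at the correct offset -- that isolation is the point.
  std::printf("first=%u bytes, second starts with 0x%02X\n",
              (unsigned)first.size(), (unsigned)second[0]);
  return 0;
}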
Example 8: TDecGop::filterPicture
Void TDecGop::filterPicture(TComPic*& rpcPic)
{
TComSlice* pcSlice = rpcPic->getSlice(rpcPic->getCurrSliceIdx());
//-- For time output for each slice
long iBeforeTime = clock();
// deblocking filter
Bool bLFCrossTileBoundary = pcSlice->getPPS()->getLoopFilterAcrossTilesEnabledFlag();
m_pcLoopFilter->setCfg(bLFCrossTileBoundary);
m_pcLoopFilter->loopFilterPic( rpcPic );
if(pcSlice->getSPS()->getUseSAO())
{
m_sliceStartCUAddress.push_back(rpcPic->getNumCUsInFrame()* rpcPic->getNumPartInCU());
rpcPic->createNonDBFilterInfo(m_sliceStartCUAddress, 0, &m_LFCrossSliceBoundaryFlag, rpcPic->getPicSym()->getNumTiles(), bLFCrossTileBoundary);
}
if( pcSlice->getSPS()->getUseSAO() )
{
{
SAOParam *saoParam = rpcPic->getPicSym()->getSaoParam();
saoParam->bSaoFlag[0] = pcSlice->getSaoEnabledFlag();
saoParam->bSaoFlag[1] = pcSlice->getSaoEnabledFlagChroma();
m_pcSAO->setSaoLcuBasedOptimization(1);
m_pcSAO->createPicSaoInfo(rpcPic);
m_pcSAO->SAOProcess(saoParam);
m_pcSAO->PCMLFDisableProcess(rpcPic);
m_pcSAO->destroyPicSaoInfo();
}
}
if(pcSlice->getSPS()->getUseSAO())
{
rpcPic->destroyNonDBFilterInfo();
}
#if H_3D
rpcPic->compressMotion(2);
#endif
#if !H_3D
rpcPic->compressMotion();
#endif
Char c = (pcSlice->isIntra() ? 'I' : pcSlice->isInterP() ? 'P' : 'B');
if (!pcSlice->isReferenced()) c += 32;
//-- For time output for each slice
#if H_MV
printf("\nLayer %2d POC %4d TId: %1d ( %c-SLICE, QP%3d ) ", pcSlice->getLayerId(),
pcSlice->getPOC(),
pcSlice->getTLayer(),
c,
pcSlice->getSliceQp() );
#else
printf("\nPOC %4d TId: %1d ( %c-SLICE, QP%3d ) ", pcSlice->getPOC(),
pcSlice->getTLayer(),
c,
pcSlice->getSliceQp() );
#endif
m_dDecTime += (Double)(clock()-iBeforeTime) / CLOCKS_PER_SEC;
printf ("[DT %6.3f] ", m_dDecTime );
m_dDecTime = 0;
for (Int iRefList = 0; iRefList < 2; iRefList++)
{
printf ("[L%d ", iRefList);
for (Int iRefIndex = 0; iRefIndex < pcSlice->getNumRefIdx(RefPicList(iRefList)); iRefIndex++)
{
#if H_MV
if( pcSlice->getLayerId() != pcSlice->getRefLayerId( RefPicList(iRefList), iRefIndex ) )
{
printf( "V%d ", pcSlice->getRefLayerId( RefPicList(iRefList), iRefIndex ) );
}
else
{
#endif
printf ("%d ", pcSlice->getRefPOC(RefPicList(iRefList), iRefIndex));
#if H_MV
}
#endif
}
printf ("] ");
}
if (m_decodedPictureHashSEIEnabled)
{
SEIMessages pictureHashes = getSeisByType(rpcPic->getSEIs(), SEI::DECODED_PICTURE_HASH );
const SEIDecodedPictureHash *hash = ( pictureHashes.size() > 0 ) ? (SEIDecodedPictureHash*) *(pictureHashes.begin()) : NULL;
if (pictureHashes.size() > 1)
{
printf ("Warning: Got multiple decoded picture hash SEI messages. Using first.");
}
calcAndPrintHashStatus(*rpcPic->getPicYuvRec(), hash);
}
rpcPic->setOutputMark(true);
rpcPic->setReconMark(true);
m_sliceStartCUAddress.clear();
m_LFCrossSliceBoundaryFlag.clear();
}
Example 9: TAppDecTop::xWriteOutput
/** \param pcListPic list of pictures to be written to file
\todo DYN_REF_FREE should be revised
*/
Void TAppDecTop::xWriteOutput( TComList<TComPic*>* pcListPic, UInt tId )
{
if (pcListPic->empty())
{
return;
}
TComList<TComPic*>::iterator iterPic = pcListPic->begin();
Int numPicsNotYetDisplayed = 0;
Int dpbFullness = 0;
TComSPS* activeSPS = m_cTDecTop.getActiveSPS();
UInt numReorderPicsHighestTid;
UInt maxDecPicBufferingHighestTid;
UInt maxNrSublayers = activeSPS->getMaxTLayers();
if(m_iMaxTemporalLayer == -1 || m_iMaxTemporalLayer >= maxNrSublayers)
{
numReorderPicsHighestTid = activeSPS->getNumReorderPics(maxNrSublayers-1);
maxDecPicBufferingHighestTid = activeSPS->getMaxDecPicBuffering(maxNrSublayers-1);
}
else
{
numReorderPicsHighestTid = activeSPS->getNumReorderPics(m_iMaxTemporalLayer);
maxDecPicBufferingHighestTid = activeSPS->getMaxDecPicBuffering(m_iMaxTemporalLayer);
}
while (iterPic != pcListPic->end())
{
TComPic* pcPic = *(iterPic);
if(pcPic->getOutputMark() && pcPic->getPOC() > m_iPOCLastDisplay)
{
numPicsNotYetDisplayed++;
dpbFullness++;
}
else if(pcPic->getSlice( 0 )->isReferenced())
{
dpbFullness++;
}
iterPic++;
}
iterPic = pcListPic->begin();
if (numPicsNotYetDisplayed>2)
{
iterPic++;
}
TComPic* pcPic = *(iterPic);
if (numPicsNotYetDisplayed>2 && pcPic->isField()) //Field Decoding
{
TComList<TComPic*>::iterator endPic = pcListPic->end();
endPic--;
iterPic = pcListPic->begin();
while (iterPic != endPic)
{
TComPic* pcPicTop = *(iterPic);
iterPic++;
TComPic* pcPicBottom = *(iterPic);
if ( pcPicTop->getOutputMark() && pcPicBottom->getOutputMark() &&
(numPicsNotYetDisplayed > numReorderPicsHighestTid || dpbFullness > maxDecPicBufferingHighestTid) &&
(!(pcPicTop->getPOC()%2) && pcPicBottom->getPOC() == pcPicTop->getPOC()+1) &&
(pcPicTop->getPOC() == m_iPOCLastDisplay+1 || m_iPOCLastDisplay < 0))
{
// write to file
numPicsNotYetDisplayed = numPicsNotYetDisplayed-2;
if ( m_pchReconFile )
{
const Window &conf = pcPicTop->getConformanceWindow();
const Window &defDisp = m_respectDefDispWindow ? pcPicTop->getDefDisplayWindow() : Window();
const Bool isTff = pcPicTop->isTopField();
Bool display = true;
if( m_decodedNoDisplaySEIEnabled )
{
SEIMessages noDisplay = getSeisByType(pcPic->getSEIs(), SEI::NO_DISPLAY );
const SEINoDisplay *nd = ( noDisplay.size() > 0 ) ? (SEINoDisplay*) *(noDisplay.begin()) : NULL;
if( (nd != NULL) && nd->m_noDisplay )
{
display = false;
}
}
if (display)
{
m_cTVideoIOYuvReconFile.write( pcPicTop->getPicYuvRec(), pcPicBottom->getPicYuvRec(),
m_outputColourSpaceConvert,
conf.getWindowLeftOffset() + defDisp.getWindowLeftOffset(),
conf.getWindowRightOffset() + defDisp.getWindowRightOffset(),
conf.getWindowTopOffset() + defDisp.getWindowTopOffset(),
conf.getWindowBottomOffset() + defDisp.getWindowBottomOffset(), NUM_CHROMA_FORMAT, isTff );
}
}
// update POC of display order
m_iPOCLastDisplay = pcPicBottom->getPOC();
//......... part of the code omitted here .........
Example 10: TDecTop::xActivateParameterSets
Void TDecTop::xActivateParameterSets()
{
if (m_bFirstSliceInPicture)
{
const TComPPS *pps = m_parameterSetManager.getPPS(m_apcSlicePilot->getPPSId()); // this is a temporary PPS object. Do not store this value
assert (pps != 0);
const TComSPS *sps = m_parameterSetManager.getSPS(pps->getSPSId()); // this is a temporary SPS object. Do not store this value
assert (sps != 0);
m_parameterSetManager.clearSPSChangedFlag(sps->getSPSId());
m_parameterSetManager.clearPPSChangedFlag(pps->getPPSId());
if (false == m_parameterSetManager.activatePPS(m_apcSlicePilot->getPPSId(),m_apcSlicePilot->isIRAP()))
{
printf ("Parameter set activation failed!");
assert (0);
}
// TODO: remove the use of the following globals:
for (UInt channel = 0; channel < MAX_NUM_CHANNEL_TYPE; channel++)
{
g_bitDepth[channel] = sps->getBitDepth(ChannelType(channel));
g_maxTrDynamicRange[channel] = (sps->getUseExtendedPrecision()) ? std::max<Int>(15, (g_bitDepth[channel] + 6)) : 15;
}
g_uiMaxCUWidth = sps->getMaxCUWidth();
g_uiMaxCUHeight = sps->getMaxCUHeight();
g_uiMaxCUDepth = sps->getMaxCUDepth();
g_uiAddCUDepth = max (0, sps->getLog2MinCodingBlockSize() - (Int)sps->getQuadtreeTULog2MinSize() + (Int)getMaxCUDepthOffset(sps->getChromaFormatIdc(), sps->getQuadtreeTULog2MinSize()));
// Get a new picture buffer. This will also set up m_pcPic, and therefore give us a SPS and PPS pointer that we can use.
xGetNewPicBuffer (*(sps), *(pps), m_pcPic, m_apcSlicePilot->getTLayer());
m_apcSlicePilot->applyReferencePictureSet(m_cListPic, m_apcSlicePilot->getRPS());
// make the slice-pilot a real slice, and set up the slice-pilot for the next slice
assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);
// we now have a real slice:
TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx);
// Update the PPS and SPS pointers with the ones of the picture.
pps=pSlice->getPPS();
sps=pSlice->getSPS();
// Initialise the various objects for the new set of settings
m_cSAO.create( sps->getPicWidthInLumaSamples(), sps->getPicHeightInLumaSamples(), sps->getChromaFormatIdc(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getMaxCUDepth(), pps->getSaoOffsetBitShift(CHANNEL_TYPE_LUMA), pps->getSaoOffsetBitShift(CHANNEL_TYPE_CHROMA) );
m_cLoopFilter.create( sps->getMaxCUDepth() );
m_cPrediction.initTempBuff(sps->getChromaFormatIdc());
Bool isField = false;
Bool isTopField = false;
if(!m_SEIs.empty())
{
// Check if any new Picture Timing SEI has arrived
SEIMessages pictureTimingSEIs = extractSeisByType (m_SEIs, SEI::PICTURE_TIMING);
if (pictureTimingSEIs.size()>0)
{
SEIPictureTiming* pictureTiming = (SEIPictureTiming*) *(pictureTimingSEIs.begin());
isField = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 2) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 10) || (pictureTiming->m_picStruct == 11) || (pictureTiming->m_picStruct == 12);
isTopField = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 11);
}
}
//Set Field/Frame coding mode
m_pcPic->setField(isField);
m_pcPic->setTopField(isTopField);
// transfer any SEI messages that have been received to the picture
m_pcPic->setSEIs(m_SEIs);
m_SEIs.clear();
// Recursive structure
m_cCuDecoder.create ( sps->getMaxCUDepth(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getChromaFormatIdc() );
m_cCuDecoder.init ( &m_cEntropyDecoder, &m_cTrQuant, &m_cPrediction );
m_cTrQuant.init ( sps->getMaxTrSize() );
m_cSliceDecoder.create();
}
else
{
// make the slice-pilot a real slice, and set up the slice-pilot for the next slice
m_pcPic->allocateNewSlice();
assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);
TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx); // we now have a real slice.
const TComSPS *sps = pSlice->getSPS();
const TComPPS *pps = pSlice->getPPS();
// check that the current active PPS has not changed...
if (m_parameterSetManager.getSPSChangedFlag(sps->getSPSId()) )
{
printf("Error - a new SPS has been decoded while processing a picture\n");
exit(1);
}
if (m_parameterSetManager.getPPSChangedFlag(pps->getPPSId()) )
//......... part of the code omitted here .........
Example 11: TDecTop::xActivateParameterSets (alternative version)
Void TDecTop::xActivateParameterSets()
{
if (m_bFirstSliceInPicture)
{
const TComPPS *pps = m_parameterSetManager.getPPS(m_apcSlicePilot->getPPSId()); // this is a temporary PPS object. Do not store this value
assert (pps != 0);
const TComSPS *sps = m_parameterSetManager.getSPS(pps->getSPSId()); // this is a temporary SPS object. Do not store this value
assert (sps != 0);
m_parameterSetManager.clearSPSChangedFlag(sps->getSPSId());
m_parameterSetManager.clearPPSChangedFlag(pps->getPPSId());
if (false == m_parameterSetManager.activatePPS(m_apcSlicePilot->getPPSId(),m_apcSlicePilot->isIRAP()))
{
printf ("Parameter set activation failed!");
assert (0);
}
xParsePrefixSEImessages();
#if RExt__HIGH_BIT_DEPTH_SUPPORT==0
if (sps->getSpsRangeExtension().getExtendedPrecisionProcessingFlag() || sps->getBitDepth(CHANNEL_TYPE_LUMA)>12 || sps->getBitDepth(CHANNEL_TYPE_CHROMA)>12 )
{
printf("High bit depth support must be enabled at compile-time in order to decode this bitstream\n");
assert (0);
exit(1);
}
#endif
// NOTE: globals were set up here originally. You can now use:
// g_uiMaxCUDepth = sps->getMaxTotalCUDepth();
// g_uiAddCUDepth = sps->getMaxTotalCUDepth() - sps->getLog2DiffMaxMinCodingBlockSize()
// Get a new picture buffer. This will also set up m_pcPic, and therefore give us a SPS and PPS pointer that we can use.
xGetNewPicBuffer (*(sps), *(pps), m_pcPic, m_apcSlicePilot->getTLayer());
m_apcSlicePilot->applyReferencePictureSet(m_cListPic, m_apcSlicePilot->getRPS());
// make the slice-pilot a real slice, and set up the slice-pilot for the next slice
assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);
// we now have a real slice:
TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx);
// Update the PPS and SPS pointers with the ones of the picture.
pps=pSlice->getPPS();
sps=pSlice->getSPS();
// Initialise the various objects for the new set of settings
m_cSAO.create( sps->getPicWidthInLumaSamples(), sps->getPicHeightInLumaSamples(), sps->getChromaFormatIdc(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getMaxTotalCUDepth(), pps->getPpsRangeExtension().getLog2SaoOffsetScale(CHANNEL_TYPE_LUMA), pps->getPpsRangeExtension().getLog2SaoOffsetScale(CHANNEL_TYPE_CHROMA) );
m_cLoopFilter.create( sps->getMaxTotalCUDepth() );
m_cPrediction.initTempBuff(sps->getChromaFormatIdc());
Bool isField = false;
Bool isTopField = false;
if(!m_SEIs.empty())
{
// Check if any new Picture Timing SEI has arrived
SEIMessages pictureTimingSEIs = getSeisByType(m_SEIs, SEI::PICTURE_TIMING);
if (pictureTimingSEIs.size()>0)
{
SEIPictureTiming* pictureTiming = (SEIPictureTiming*) *(pictureTimingSEIs.begin());
isField = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 2) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 10) || (pictureTiming->m_picStruct == 11) || (pictureTiming->m_picStruct == 12);
isTopField = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 11);
}
}
//Set Field/Frame coding mode
m_pcPic->setField(isField);
m_pcPic->setTopField(isTopField);
// transfer any SEI messages that have been received to the picture
m_pcPic->setSEIs(m_SEIs);
m_SEIs.clear();
// Recursive structure
m_cCuDecoder.create ( sps->getMaxTotalCUDepth(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getChromaFormatIdc() );
m_cCuDecoder.init ( &m_cEntropyDecoder, &m_cTrQuant, &m_cPrediction );
m_cTrQuant.init ( sps->getMaxTrSize() );
m_cSliceDecoder.create();
}
else
{
// make the slice-pilot a real slice, and set up the slice-pilot for the next slice
m_pcPic->allocateNewSlice();
assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);
TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx); // we now have a real slice.
const TComSPS *sps = pSlice->getSPS();
const TComPPS *pps = pSlice->getPPS();
// check that the current active PPS has not changed...
if (m_parameterSetManager.getSPSChangedFlag(sps->getSPSId()) )
{
//......... part of the code omitted here .........
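Examples 10 and 11 classify pictures as field- or frame-coded from the pic_struct value of a picture timing SEI message using two long boolean expressions. The small sketch below factors those expressions into helpers; the value groupings are taken directly from the code above, so this is only a readability rewrite, not an independent statement of the pic_struct semantics.

#include <cstdio>

// Field classification used in Examples 10 and 11; the groupings below are
// copied from the boolean expressions in xActivateParameterSets.
static bool isFieldPicStruct(int picStruct)
{
  return picStruct == 1 || picStruct == 2 ||
         (picStruct >= 9 && picStruct <= 12);
}

static bool isTopFieldPicStruct(int picStruct)
{
  return picStruct == 1 || picStruct == 9 || picStruct == 11;
}

int main()
{
  for (int ps = 0; ps <= 12; ps++)
  {
    std::printf("pic_struct %2d: field=%d topField=%d\n",
                ps, (int)isFieldPicStruct(ps), (int)isTopFieldPicStruct(ps));
  }
  return 0;
}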