C++ CH264Nalu::ReadNext Method Code Examples

This article collects typical usage examples of the C++ CH264Nalu::ReadNext method. If you are wondering what CH264Nalu::ReadNext does, how to call it, or how it is used in practice, the selected code examples below may help. You can also explore further usage examples of the CH264Nalu class to which the method belongs.


The following presents 8 code examples of the CH264Nalu::ReadNext method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
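
All of the examples below share one pattern: a CH264Nalu instance is bound to a raw bitstream with SetBuffer() (the third argument appears to be the NAL length-prefix size used by AVC1/MP4-style streams, with 0 selecting Annex-B start-code parsing), and ReadNext() is then called in a loop until it returns false, while GetType(), GetDataBuffer() and GetDataLength() describe the current NAL unit. The sketch below is a minimal outline of that pattern, based only on the member functions that appear in the examples; it assumes the CH264Nalu class and NALU_TYPE_* constants from mpc-hc/LAVFilters are in scope, and the function name EnumerateNalus is hypothetical.

// Minimal sketch: iterate over all NAL units in a buffer and handle slices.
// Assumes the CH264Nalu helper class (mpc-hc / LAVFilters) is available.
void EnumerateNalus(const BYTE* pBuffer, size_t nSize, int nNALLength)
{
    CH264Nalu Nalu;
    // nNALLength == 0 selects Annex-B start-code parsing; a non-zero value
    // is the length-prefix size used by AVC1/MP4-style streams.
    Nalu.SetBuffer(pBuffer, nSize, nNALLength);

    while (Nalu.ReadNext()) {            // false once the buffer is exhausted
        switch (Nalu.GetType()) {
            case NALU_TYPE_SLICE:
            case NALU_TYPE_IDR:
                // Payload of the current NAL unit:
                //   Nalu.GetDataBuffer() -> pointer to the NAL data
                //   Nalu.GetDataLength() -> its size in bytes
                break;
            default:
                break;
        }
    }
}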

Example 1: while

void CDXVADecoderH264::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSize)
{
    CH264Nalu Nalu;
    int nDummy;
    int nSlices = 0;
    UINT m_nSize = nSize;
    int slice_step = 1;
    int nDxvaNalLength;

    while (!nSlices && slice_step <= 2) {
        Nalu.SetBuffer(pBuffer, m_nSize, slice_step == 1 ? m_nNALLength : 0);
        nSize = 0;
        while (Nalu.ReadNext()) {
            switch (Nalu.GetType()) {
                case NALU_TYPE_SLICE:
                case NALU_TYPE_IDR:
                    // Skip the NALU if the data length is below 0
                    if ((int)Nalu.GetDataLength() < 0) {
                        break;
                    }

                    // For AVC1, put startcode 0x000001
                    pDXVABuffer[0] = pDXVABuffer[1] = 0;
                    pDXVABuffer[2] = 1;

                    // Copy NALU
                    __try {
                        memcpy(pDXVABuffer + 3, Nalu.GetDataBuffer(), Nalu.GetDataLength());
                    } __except (EXCEPTION_EXECUTE_HANDLER) {
                        break;
                    }

                    // Update slice control buffer
                    nDxvaNalLength = (int)Nalu.GetDataLength() + 3;
                    m_pSliceShort[nSlices].BSNALunitDataLocation = nSize;
                    m_pSliceShort[nSlices].SliceBytesInBuffer = nDxvaNalLength;

                    nSize += nDxvaNalLength;
                    pDXVABuffer += nDxvaNalLength;
                    nSlices++;
                    break;
            }
        }
        slice_step++;
    }

    // Complete with zero padding (buffer size should be a multiple of 128)
    nDummy = 128 - (nSize % 128);

    memset(pDXVABuffer, 0, nDummy);
    m_pSliceShort[nSlices - 1].SliceBytesInBuffer += nDummy;
    nSize += nDummy;
}
Developer ID: GenomeXP, Project: mpc-hc, Lines of code: 53, Source: DXVADecoderH264.cpp

Example 2:

HRESULT CDXVADecoderH264_DXVA1::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSize, UINT nDXVASize/* = UINT_MAX*/)
{
	CH264Nalu	Nalu;
	UINT		m_nSize		= nSize;
	int			nDxvaNalLength;

	m_nSlices = 0;

	Nalu.SetBuffer(pBuffer, m_nSize, m_nNALLength);
	nSize = 0;
	while (Nalu.ReadNext()) {
		switch (Nalu.GetType()) {
			case NALU_TYPE_SLICE:
			case NALU_TYPE_IDR:
				// Skip the NALU if the data length is below 0
				if ((int)Nalu.GetDataLength() < 0) {
					break;
				}

				// For AVC1, put startcode 0x000001
				pDXVABuffer[0] = pDXVABuffer[1] = 0; pDXVABuffer[2] = 1;

				// Copy NALU
				memcpy_sse(pDXVABuffer + 3, Nalu.GetDataBuffer(), Nalu.GetDataLength());

				// Update slice control buffer
				nDxvaNalLength									= Nalu.GetDataLength() + 3;
				m_pSliceShort[m_nSlices].BSNALunitDataLocation	= nSize;
				m_pSliceShort[m_nSlices].SliceBytesInBuffer		= nDxvaNalLength;

				nSize											+= nDxvaNalLength;
				pDXVABuffer										+= nDxvaNalLength;
				m_nSlices++;
				break;
		}
	}

	// Complete bitstream buffer with zero padding (buffer size should be a multiple of 128)
	if (nSize % 128) {
		int nDummy = 128 - (nSize % 128);

		memset(pDXVABuffer, 0, nDummy);
		m_pSliceShort[m_nSlices-1].SliceBytesInBuffer	+= nDummy;
		nSize											+= nDummy;
	}

	return S_OK;
}
Developer ID: Tphive, Project: mpc-be, Lines of code: 48, Source: DXVADecoderH264_DXVA1.cpp

Example 3: while

HRESULT CH264SequenceParser::ParseNALs(const BYTE *buffer, size_t buflen, int nal_size)
{
  CH264Nalu nalu;
  nalu.SetBuffer(buffer, buflen, nal_size);

  while (nalu.ReadNext())  {
    const BYTE *data = nalu.GetDataBuffer() + 1;
    const size_t len = nalu.GetDataLength() - 1;
    if (nalu.GetType() == NALU_TYPE_SPS) {
      ParseSPS(data, len);
      break;
    }
  }

  return S_OK;
}
Developer ID: Brijen, Project: LAVFilters, Lines of code: 16, Source: H264SequenceParser.cpp

Example 4: avc_parse_annexb

size_t avc_parse_annexb(BYTE *extra, int extrasize, BYTE *dst)
{
  size_t dstSize = 0;

  CH264Nalu Nalu;
  Nalu.SetBuffer(extra, extrasize, 0);
  while (Nalu.ReadNext()) {
    if (Nalu.GetType() == NALU_TYPE_SPS || Nalu.GetType() == NALU_TYPE_PPS) {
      size_t len = Nalu.GetDataLength();
      AV_WB16(dst+dstSize, (uint16_t)len);
      dstSize += 2;
      memcpy(dst+dstSize, Nalu.GetDataBuffer(), Nalu.GetDataLength());
      dstSize += Nalu.GetDataLength();
    }
  }
  return dstSize;
}
Developer ID: hiplayer, Project: mpc_hc, Lines of code: 17, Source: LAVFVideoHelper.cpp

Example 5:

void CDXVADecoderH264::CopyBitstream(BYTE* pDXVABuffer, BYTE* pBuffer, UINT& nSize)
{
	CH264Nalu		Nalu;
	int				nDummy;
	int				nSlices = 0;
	int				nDxvaNalLength;

	Nalu.SetBuffer (pBuffer, nSize, m_nNALLength);
	nSize = 0;

	{
		while (Nalu.ReadNext())
		{
			switch (Nalu.GetType())
			{
			case NALU_TYPE_SLICE:
			case NALU_TYPE_IDR:
				// For AVC1, put startcode 0x000001
				pDXVABuffer[0] = pDXVABuffer[1] = 0; pDXVABuffer[2] = 1;
				
				// Copy NALU
				memcpy (pDXVABuffer+3, Nalu.GetDataBuffer(), Nalu.GetDataLength());
				
				// Update slice control buffer
				nDxvaNalLength									= Nalu.GetDataLength()+3;
				m_pSliceShort[nSlices].BSNALunitDataLocation	= nSize;
				m_pSliceShort[nSlices].SliceBytesInBuffer		= nDxvaNalLength;

				nSize										   += nDxvaNalLength;
				pDXVABuffer									   += nDxvaNalLength;
				nSlices++;
				break;
			}
		}

		// Complete with zero padding (buffer size should be a multiple of 128)
		nDummy  = 128 - (nSize %128);

		memset (pDXVABuffer, 0, nDummy);
		m_pSliceShort[nSlices-1].SliceBytesInBuffer		+= nDummy;
		nSize											+= nDummy;
	}
}
Developer ID: sd-eblana, Project: bawx, Lines of code: 43, Source: DXVADecoderH264.cpp

Example 6: min

HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{

	HRESULT				hr				= S_FALSE;

	CH264Nalu			Nalu;
	UINT				nSlices			= 0;
	int					nSurfaceIndex;
	int					nFieldType;
	int					nSliceType;
	int					nFramePOC;
	IDirect3DSurface9*	pSampleToDeliver;
	int					nDXIndex		= 0;
	UINT				nNalOffset		= 0;
	int					nOutPOC;
	REFERENCE_TIME		rtOutStart;

	if (pDataIn == NULL || nSize == 0)
		return S_FALSE;

	Nalu.SetBuffer (pDataIn, nSize, m_nNALLength);
	FFH264DecodeBuffer (m_pFilter->GetAVCtx(), pDataIn, nSize, &nFramePOC, &nOutPOC, &rtOutStart);

	//CLog::Log(LOGDEBUG, "nFramePOC = %d nOutPOC %d rtOutStart%d", nFramePOC, nOutPOC, rtOutStart);

	while (Nalu.ReadNext())
	{
		switch (Nalu.GetType())
		{
		case NALU_TYPE_SLICE:
		case NALU_TYPE_IDR:
				if(m_bUseLongSlice) 
				{
					m_pSliceLong[nSlices].BSNALunitDataLocation	= nNalOffset;
					m_pSliceLong[nSlices].SliceBytesInBuffer	= Nalu.GetDataLength()+3; //.GetRoundedDataLength();
					m_pSliceLong[nSlices].slice_id				= nSlices;
					FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());

					if (nSlices>0)
						m_pSliceLong[nSlices-1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices-1].first_mb_in_slice;
				}
				nSlices++; 
				nNalOffset += (UINT)(Nalu.GetDataLength() + 3);
				if (nSlices > MAX_SLICES) break;
				break;
		}
	}
	if (nSlices == 0) return S_FALSE;

	m_nMaxWaiting	= min (max (m_DXVAPicParams.num_ref_frames, 3), 8);

	// If parsing fails (probably no PPS/SPS), continue anyway; they may arrive later (happens on truncated streams)
	if (FAILED (FFH264BuildPicParams (&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor())))
		return S_FALSE;

	// Wait for an I-frame after a flush
	if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag)
		return S_FALSE;
	
	CHECK_HR (GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));

	FFH264SetCurrentPicture (nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

	CHECK_HR (BeginFrame(pSampleToDeliver));
	
	m_DXVAPicParams.StatusReportFeedbackNumber++;

//	TRACE("CDXVADecoderH264 : Decode frame %u\n", m_DXVAPicParams.StatusReportFeedbackNumber);

	// Send picture parameters
	CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
	CHECK_HR (Execute());

	// Add bitstream, slice control and quantization matrix
	CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

	if (m_bUseLongSlice)
	{
		CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType,  sizeof(DXVA_Slice_H264_Long)*nSlices, m_pSliceLong));
	}
	else
	{
		CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_Slice_H264_Short)*nSlices, m_pSliceShort));
	}

	CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof (DXVA_Qmatrix_H264), (void*)&m_DXVAScalingMatrix));

	// Decode bitstream
	CHECK_HR (Execute());

	CHECK_HR (EndFrame(nSurfaceIndex));

#ifdef _DEBUG
	//DisplayStatus();
#endif

	bool bAdded		= AddToStore (nSurfaceIndex, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
								  m_DXVAPicParams.field_pic_flag, (FF_FIELD_TYPE)nFieldType, 
								  (FF_SLICE_TYPE)nSliceType, nFramePOC);

//......... remainder of code omitted .........
Developer ID: sd-eblana, Project: bawx, Lines of code: 101, Source: DXVADecoderH264.cpp

Example 7: InitPacket

HRESULT CStreamParser::ParseH264AnnexB(Packet *pPacket)
{
  if (!m_pPacketBuffer) {
    m_pPacketBuffer = InitPacket(pPacket);
  }

  m_pPacketBuffer->Append(pPacket);

  BYTE *start = m_pPacketBuffer->GetData();
  BYTE *end = start + m_pPacketBuffer->GetDataSize();

  MOVE_TO_H264_START_CODE(start, end);

  while(start <= end-4) {
    BYTE *next = start + 1;

    MOVE_TO_H264_START_CODE(next, end);

    // End of buffer reached
    if(next >= end-4) {
      break;
    }

    size_t size = next - start;

    CH264Nalu Nalu;
    Nalu.SetBuffer(start, (int)size, 0);

    Packet *p2 = NULL;

    while (Nalu.ReadNext()) {
      Packet *p3 = new Packet();
      p3->SetDataSize(Nalu.GetDataLength() + 4);

      // Write size of the NALU (Big Endian)
      AV_WB32(p3->GetData(), (uint32_t)Nalu.GetDataLength());
      memcpy(p3->GetData() + 4, Nalu.GetDataBuffer(), Nalu.GetDataLength());

      if (!p2) {
        p2 = p3;
      } else {
        p2->Append(p3);
        SAFE_DELETE(p3);
      }
    }

    if (!p2)
      break;

    p2->StreamId = m_pPacketBuffer->StreamId;
    p2->bDiscontinuity = m_pPacketBuffer->bDiscontinuity;
    m_pPacketBuffer->bDiscontinuity = FALSE;

    p2->bSyncPoint = m_pPacketBuffer->bSyncPoint;
    m_pPacketBuffer->bSyncPoint = FALSE;

    p2->rtStart = m_pPacketBuffer->rtStart;
    m_pPacketBuffer->rtStart = Packet::INVALID_TIME;
    p2->rtStop = m_pPacketBuffer->rtStop;
    m_pPacketBuffer->rtStop = Packet::INVALID_TIME;

    p2->pmt = m_pPacketBuffer->pmt;
    m_pPacketBuffer->pmt = NULL;

    m_queue.Queue(p2);

    if(pPacket->rtStart != Packet::INVALID_TIME) {
      m_pPacketBuffer->rtStart = pPacket->rtStart;
      m_pPacketBuffer->rtStop = pPacket->rtStop;
      pPacket->rtStart = Packet::INVALID_TIME;
    }
    if(pPacket->bDiscontinuity) {
      m_pPacketBuffer->bDiscontinuity = pPacket->bDiscontinuity;
      pPacket->bDiscontinuity = FALSE;
    }
    if(pPacket->bSyncPoint) {
      m_pPacketBuffer->bSyncPoint = pPacket->bSyncPoint;
      pPacket->bSyncPoint = FALSE;
    }
    if(m_pPacketBuffer->pmt) {
      DeleteMediaType(m_pPacketBuffer->pmt);
    }

    m_pPacketBuffer->pmt = pPacket->pmt;
    pPacket->pmt = NULL;

    start = next;
  }

  if(start > m_pPacketBuffer->GetData()) {
    m_pPacketBuffer->RemoveHead(start - m_pPacketBuffer->GetData());
  }

  SAFE_DELETE(pPacket);

  do {
    pPacket = NULL;

    REFERENCE_TIME rtStart = Packet::INVALID_TIME, rtStop = rtStart = Packet::INVALID_TIME;

//......... remainder of code omitted .........
Developer ID: cynics, Project: LAVFilters, Lines of code: 101, Source: StreamParser.cpp

Example 8: Decode

STDMETHODIMP CDecMSDKMVC::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, BOOL bSyncPoint, BOOL bDiscontinuity)
{
  if (!m_mfxSession)
    return E_UNEXPECTED;

  HRESULT hr = S_OK;
  CBitstreamBuffer bsBuffer(&m_buff);
  mfxStatus sts = MFX_ERR_NONE;
  mfxBitstream bs = { 0 };
  BOOL bFlush = (buffer == nullptr);

  if (rtStart >= -TIMESTAMP_OFFSET && rtStart != AV_NOPTS_VALUE)
    bs.TimeStamp = rtStart + TIMESTAMP_OFFSET;
  else
    bs.TimeStamp = MFX_TIMESTAMP_UNKNOWN;

  bs.DecodeTimeStamp = MFX_TIMESTAMP_UNKNOWN;

  if (!bFlush) {
    if (m_pAnnexBConverter) {
      BYTE *pOutBuffer = nullptr;
      int pOutSize = 0;
      hr = m_pAnnexBConverter->Convert(&pOutBuffer, &pOutSize, buffer, buflen);
      if (FAILED(hr))
        return hr;

      bsBuffer.SetBuffer(pOutBuffer, pOutSize, true);
    }
    else {
      bsBuffer.SetBuffer((BYTE *)buffer, buflen, false);
    }

    // Check the buffer for SEI NALU, and some unwanted NALUs that need filtering
    // MSDK's SEI reading functionality is slightly buggy
    CH264Nalu nalu;
    nalu.SetBuffer(bsBuffer.GetBuffer(), bsBuffer.GetBufferSize(), 0);
    BOOL bNeedFilter = FALSE;
    while (nalu.ReadNext()) {
      if (nalu.GetType() == NALU_TYPE_SEI) {
        ParseSEI(nalu.GetDataBuffer() + 1, nalu.GetDataLength() - 1, bs.TimeStamp);
      }
      else if (nalu.GetType() == NALU_TYPE_EOSEQ) {
        bsBuffer.EnsureWriteable();
        // This is rather ugly, and relies on the bitstream being AnnexB, so simply overwriting the EOS NAL with zero works.
        // In the future a more elaborate bitstream filter might be advised
        memset(bsBuffer.GetBuffer() + nalu.GetNALPos(), 0, 4);
      }
    }

    bs.Data = bsBuffer.GetBuffer();
    bs.DataLength = mfxU32(bsBuffer.GetBufferSize());
    bs.MaxLength = bs.DataLength;

    AddFrameToGOP(bs.TimeStamp);
  }

  if (!m_bDecodeReady) {
    sts = MFXVideoDECODE_DecodeHeader(m_mfxSession, &bs, &m_mfxVideoParams);
    if (sts == MFX_ERR_NOT_ENOUGH_BUFFER) {
      hr = AllocateMVCExtBuffers();
      if (FAILED(hr))
        return hr;

      sts = MFXVideoDECODE_DecodeHeader(m_mfxSession, &bs, &m_mfxVideoParams);
    }

    if (sts == MFX_ERR_NONE) {
      m_mfxVideoParams.IOPattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
      m_mfxVideoParams.AsyncDepth = ASYNC_DEPTH;

      sts = MFXVideoDECODE_Init(m_mfxSession, &m_mfxVideoParams);
      if (sts != MFX_ERR_NONE) {
        DbgLog((LOG_TRACE, 10, L"CDevMSDKMVC::Decode(): Error initializing the MSDK decoder (%d)", sts));
        return E_FAIL;
      }

      if (m_mfxExtMVCSeq.NumView != 2) {
        DbgLog((LOG_TRACE, 10, L"CDevMSDKMVC::Decode(): Only MVC with two views is supported"));
        return E_FAIL;
      }

      DbgLog((LOG_TRACE, 10, L"CDevMSDKMVC::Decode(): Initialized MVC with View Ids %d, %d", m_mfxExtMVCSeq.View[0].ViewId, m_mfxExtMVCSeq.View[1].ViewId));

      m_bDecodeReady = TRUE;
    }
  }

  if (!m_bDecodeReady)
    return S_FALSE;

  mfxSyncPoint sync = nullptr;

  // Loop over the decoder to ensure all data is being consumed
  while (1) {
    MVCBuffer *pInputBuffer = GetBuffer();
    if (pInputBuffer == nullptr)
      return E_OUTOFMEMORY;

    mfxFrameSurface1 *outsurf = nullptr;
    sts = MFXVideoDECODE_DecodeFrameAsync(m_mfxSession, bFlush ? nullptr : &bs, &pInputBuffer->surface, &outsurf, &sync);
//......... remainder of code omitted .........
Developer ID: BlueSplash, Project: LAVFilters, Lines of code: 101, Source: msdk_mvc.cpp


Note: The CH264Nalu::ReadNext method examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by various developers; the copyright of the source code belongs to the original authors, and distribution and use should follow the corresponding project's license. Do not reproduce without permission.