

C++ VideoFrame::SetTimestamp Method Code Examples

This article compiles typical usage examples of the C++ VideoFrame::SetTimestamp method. If you are wondering how VideoFrame::SetTimestamp is used in practice, or looking for concrete examples of calls to it, the hand-picked code samples below should help. You can also explore further usage examples of the VideoFrame class to which the method belongs.


Five code examples of the VideoFrame::SetTimestamp method are shown below, sorted by popularity by default. Each example is taken from an open-source project; the source project and file are noted after the code.
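Before the examples, here is a minimal sketch of the pattern they all share: encode a raw picture, stamp the resulting VideoFrame with a timestamp in milliseconds (usually the time elapsed since the first captured frame, or the delta from the previous frame), and then hand it to the sending layer. The VideoEncoder, RTPSmoother, BYTE and DWORD names and the ElapsedMs helper below are assumptions modelled on the examples that follow (ElapsedMs stands in for the projects' getDifTime()); treat this as an illustrative sketch rather than a definitive API.

#include <sys/time.h>

// Hypothetical helper, mirroring getDifTime() in the examples below:
// milliseconds elapsed since `start`.
static long long ElapsedMs(const timeval &start)
{
	timeval now;
	gettimeofday(&now, NULL);
	return (now.tv_sec - start.tv_sec) * 1000LL
	     + (now.tv_usec - start.tv_usec) / 1000LL;
}

// Sketch of the common encode -> SetTimestamp -> send sequence.
// VideoEncoder, VideoFrame and RTPSmoother mirror the types used in the examples.
void EncodeStampAndSend(VideoEncoder *encoder, RTPSmoother &smoother,
                        BYTE *picture, DWORD numPixels,
                        const timeval &firstFrameTime, DWORD frameTime)
{
	//Encode the raw picture
	VideoFrame *videoFrame = encoder->EncodeFrame(picture, numPixels);
	//Check encoding succeeded
	if (!videoFrame)
		//Skip this frame
		return;

	//Stamp the frame with the milliseconds elapsed since capture started
	videoFrame->SetTimestamp(ElapsedMs(firstFrameTime));

	//Hand the frame to the pacing layer, spread over frameTime milliseconds
	smoother.SendFrame(videoFrame, frameTime);
}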

Example 1: SendVideo


//......... part of the code is omitted here .........
	if (!rtpVideo.SetSendingCodec(rtpVideoCodec))
		//Error
		return Error("Peer do not support [%d,%s]\n",rtpVideoCodec,VideoCodec::GetNameFor(rtpVideoCodec));

	//While sending video
	while (sendingVideo)
	{
		//Wait for next video
		if (!videoFrames.Wait(0))
			//Check again
			continue;

		//Get video frame
		RTMPVideoFrame* video = videoFrames.Pop();
		//check
		if (!video)
			//Again
			continue;

		//Get time difference
		DWORD diff = 0;
		//Get timestamp
		QWORD ts = video->GetTimestamp();
		//If it is not the first frame
		if (lastVideoTs)
			//Calculate it
			diff = ts - lastVideoTs;
		//Set the last video timestamp
		lastVideoTs = ts;

		//Check
		if (video->GetVideoCodec()!=RTMPVideoFrame::FLV1)
			//Error
			continue;

		//Decode frame
		if (!decoder->Decode(video->GetMediaData(),video->GetMediaSize()))
		{
			Error("decode packet error");
			//Next
			continue;
		}

		//Check size
		if (decoder->GetWidth()!=width || decoder->GetHeight()!=height)
		{
			//Get dimension
			width = decoder->GetWidth();
			height = decoder->GetHeight();

			//Set size
			numpixels = width*height*3/2;

			//Set also frame rate and bps
			encoder->SetFrameRate(25,300,500);

			//Set them in the encoder
			encoder->SetSize(width,height);
		}
		//Check size
		if (!numpixels)
		{
			Error("numpixels equals 0");
			//Next
			continue;
		}
		//Check fpu
		if (sendFPU)
		{
			//Send it
			encoder->FastPictureUpdate();
			//Reset
			sendFPU = false;
		}

		//Encode it
		VideoFrame *videoFrame = encoder->EncodeFrame(decoder->GetFrame(),numpixels);

		//If encoding failed
		if (!videoFrame)
		{
			Log("No video frame\n");
			//Next
			continue;
		}

		//Set frame time
		videoFrame->SetTimestamp(diff);

		//Send it smoothly
		smoother.SendFrame(videoFrame,diff);

		//Delete video frame
		delete(video);
	}

	Log("<SendVideo\n");

	return 1;
}
Developer: crubia, Project: wt, Lines of code: 101, Source: mediabridgesession.cpp

Example 2: EncodeVideo


//......... part of the code is omitted here .........
		if (!encoded)
			break;

		//Check size
		if (frame.GetMaxMediaSize()<encoded->GetLength())
		{
			//Not enough space
			Error("Not enough space to copy FLV encoded frame [frame:%d,encoded:%d]",frame.GetMaxMediaSize(),encoded->GetLength());
			//Next
			continue;
		}

		//Check
		if (frameTime)
		{
			timespec ts;
			//Lock
			pthread_mutex_lock(&mutex);
			//Calculate timeout
			calcAbsTimeout(&ts,&prev,frameTime);
			//Wait next or stopped
			int canceled  = !pthread_cond_timedwait(&cond,&mutex,&ts);
			//Unlock
			pthread_mutex_unlock(&mutex);
			//Check if we have been canceled
			if (canceled)
				//Exit
				break;
		}
		//Set sending time of previous frame
		getUpdDifTime(&prev);

		//Set timestamp
		encoded->SetTimestamp(getDifTime(&first)/1000);

		//Set next one
		frameTime = 1000/fps;

		//Set duration
		encoded->SetDuration(frameTime);
		
		//Get full frame
		frame.SetVideoFrame(encoded->GetData(),encoded->GetLength());

		//Set buffer size
		frame.SetMediaSize(encoded->GetLength());

		//Check type
		if (encoded->IsIntra())
			//Set type
			frame.SetFrameType(RTMPVideoFrame::INTRA);
		else
			//Set type
			frame.SetFrameType(RTMPVideoFrame::INTER);

	
		//If we need desc but yet not have it
		if (!frameDesc && encoded->IsIntra() && videoCodec==VideoCodec::H264)
		{
			//Create new description
			AVCDescriptor desc;
			//Set values
			desc.SetConfigurationVersion(1);
			desc.SetAVCProfileIndication(0x42);
			desc.SetProfileCompatibility(0x80);
			desc.SetAVCLevelIndication(0x0C);
Developer: tidehc, Project: media-server-1, Lines of code: 67, Source: flvencoder.cpp

Example 3: envir

void H264FrameSource::doGetNextFrame()
{  
        // Compute the wait time from the fps
        double delay = 1000.0 / videoFPS;
        int to_delay = delay * 1000;    // us
  
        if(!m_videoInput)
		return;

	BYTE *pic = m_videoInput->GrabFrame();

	//Check picture
	if (!pic) {
		fFrameSize = 0;
		m_started = 0; 
		return;
	}

	//Check if we need to send intra
	if (sendFPU)
	{
		videoEncoder->FastPictureUpdate();
	}

	//if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
      		// This is the first frame, so use the current time:
      		
	//} else {
		// Increment by the play time of the previous data:
	//	unsigned uSeconds	= fPresentationTime.tv_usec + fLastPlayTime;
	//	fPresentationTime.tv_sec += uSeconds/1000000;
	//	fPresentationTime.tv_usec = uSeconds%1000000;
	//}
	
	// Remember the play time of this data:
	//fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
	//fDurationInMicroseconds = fLastPlayTime;
	//fDurationInMicroseconds = 1000.0 / videoFPS;

	VideoFrame *videoFrame = videoEncoder->EncodeFrame(pic,m_videoInput->GetBufferSize());
	
	//If encoding failed
	if (!videoFrame){
		//Next
		fFrameSize = 0;
		m_started = 0;
		Log("-----Error encoding video\n");
        	double delay = 1000.0 / videoFPS;
        	int to_delay = delay * 1000;    // us  
        	nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay,
                (TaskFunc*)FramedSource::afterGetting, this); 
		return;
	}
	
	if(sendFPU)
		sendFPU = false;

	//Set frame timestamp
	videoFrame->SetTimestamp(getDifTime(&first)/1000);

	//Set sending time of previous frame
	//getUpdDifTime(&prev);

	//gettimeofday(&fPresentationTime, 0);

	fFrameSize = videoFrame->GetLength();

	memmove(fTo, videoFrame->GetData(), fFrameSize);

	if (fFrameSize > fMaxSize) {
		fNumTruncatedBytes = fFrameSize - fMaxSize;
		fFrameSize = fMaxSize;
	}
	else {
		fNumTruncatedBytes = 0;
	}
	
	gettimeofday(&fPresentationTime, NULL);

	//to_delay = ((1000 / videoFPS) * fFrameSize / RTPPAYLOADSIZE) * 1000;    // us  

        nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay,
				(TaskFunc*)FramedSource::afterGetting, this);
	
}
Developer: chenxiuheng, Project: mcumediaserver, Lines of code: 85, Source: H264FrameSource.cpp

Example 4: SendVideo


//......... part of the code is omitted here .........
			QWORD sleep = frameTime;
			//Remove extra sleep from prev
			if (overslept<sleep)
				//Remove it
				sleep -= overslept;
			else
				//Do not overflow
				sleep = 1;

			//Calculate timeout
			calcAbsTimeoutNS(&ts,&prev,sleep);
			//Wait next or stopped
			int canceled  = !pthread_cond_timedwait(&cond,&mutex,&ts);
			//Unlock
			pthread_mutex_unlock(&mutex);
			//Check if we have been canceled
			if (canceled)
				//Exit
				break;
			//Get difference
			QWORD diff = getDifTime(&prev);
			//If it is bigger
			if (diff>frameTime)
				//Get what we have slept more
				overslept = diff-frameTime;
			else
				//No oversleep (shouldn't be possible)
				overslept = 0;
		}

		//Increase frame counter
		fpsAcu.Update(getTime()/1000,1);
		
		//If first
		if (!frameTime)
		{
			//Set frame time, slower
			frameTime = 5*1000000/videoFPS;
			//Restore bitrate
			videoEncoder->SetFrameRate(videoFPS,current,videoIntraPeriod);
		} else {
			//Set frame time
			frameTime = 1000000/videoFPS;
		}
		
		//Add frame size in bits to bitrate calculator
		bitrateAcu.Update(getDifTime(&ini)/1000,videoFrame->GetLength()*8);

		//Set frame timestamp
		videoFrame->SetTimestamp(getDifTime(&ini)/1000);

		//Check if we have mediaListener
		if (mediaListener)
			//Call it
			mediaListener->onMediaFrame(*videoFrame);

		//Set sending time of previous frame
		getUpdDifTime(&prev);

		//Calculate sending times based on bitrate
		DWORD sendingTime = videoFrame->GetLength()*8/current;

		//Adjust to maximum time
		if (sendingTime>frameTime/1000)
			//Cap it
			sendingTime = frameTime/1000;

		//If it was a I frame
		if (videoFrame->IsIntra())
			//Clean rtp rtx buffer
			rtp.FlushRTXPackets();

		//Send it smoothly
		smoother.SendFrame(videoFrame,sendingTime);

		//Dump statistics
		if (num && ((num%(videoFPS*10))==0))
		{
			Debug("-Send bitrate target=%d current=%d avg=%llf rate=[%llf,%llf] fps=[%llf,%llf] limit=%d\n",target,current,bitrateAcu.GetInstantAvg()/1000,bitrateAcu.GetMinAvg()/1000,bitrateAcu.GetMaxAvg()/1000,fpsAcu.GetMinAvg(),fpsAcu.GetMaxAvg(),videoBitrateLimit);
			bitrateAcu.ResetMinMax();
			fpsAcu.ResetMinMax();
		}
		num++;
	}

	Log("-SendVideo out of loop\n");

	//Stop capturing
	videoInput->StopVideoCapture();

	//Check
	if (videoEncoder)
		//Delete the encoder
		delete videoEncoder;

	//Exit
	Log("<SendVideo [%d]\n",sendingVideo);

	return 0;
}
Developer: tidehc, Project: media-server-1, Lines of code: 101, Source: videostream.cpp

Example 5: GetNextFrameTime

QWORD MP4RtpTrack::Read(Listener *listener)
{
	int last = 0;
	uint8_t* data;
	bool isSyncSample;

	// If it's the first packet of a frame
	if (!numHintSamples)
	{
		// Get number of rtp packets for this sample
		if (!MP4ReadRtpHint(mp4, hint, sampleId, &numHintSamples))
		{
			//Print error
			Error("Error reading hintt");
			//Exit
			return MP4_INVALID_TIMESTAMP;
		}

		// Get the duration of this sample (in track timescale units)
		frameSamples = MP4GetSampleDuration(mp4, hint, sampleId);

		// Get size of sample
		frameSize = MP4GetSampleSize(mp4, hint, sampleId);

		// Get sample timestamp
		frameTime = MP4GetSampleTime(mp4, hint, sampleId);
		//Convert to milliseconds
		frameTime = MP4ConvertFromTrackTimestamp(mp4, hint, frameTime, 1000);

		// Check if it is H264 and it is a Sync frame
		if (codec==VideoCodec::H264 && MP4GetSampleSync(mp4,track,sampleId))
			// Send SEI info
			SendH263SEI(listener);

		//Get max data length
		BYTE *data = NULL;
		DWORD dataLen = 0;
		MP4Timestamp	startTime;
		MP4Duration	duration;
		MP4Duration	renderingOffset;

		//Get values
		data	= frame->GetData();
		dataLen = frame->GetMaxMediaLength();
		
		// Read the sample data
		if (!MP4ReadSample(
			mp4,				// MP4FileHandle hFile
			track,				// MP4TrackId hintTrackId
			sampleId,			// MP4SampleId sampleId,
			(u_int8_t **) &data,		// u_int8_t** ppBytes
			(u_int32_t *) &dataLen,		// u_int32_t* pNumBytes
			&startTime,			// MP4Timestamp* pStartTime
			&duration,			// MP4Duration* pDuration
			&renderingOffset,		// MP4Duration* pRenderingOffset
			&isSyncSample			// bool* pIsSyncSample
			))
		{
			Error("Error reading sample");
			//Last
			return MP4_INVALID_TIMESTAMP;
		}

		//Check type
		if (media == MediaFrame::Video)
		{
			//Get video frame
			VideoFrame *video = (VideoFrame*)frame;
			//Set length
			video->SetLength(dataLen);
			//Timestamp: convert from the track timescale to the 90 kHz RTP video clock
			video->SetTimestamp(startTime*90000/timeScale);
			//Set intra
			video->SetIntra(isSyncSample);
		} else {
			//Get Audio frame
			AudioFrame *audio = (AudioFrame*)frame;
			//Set length
			audio->SetLength(dataLen);
			//Timestamp: convert from the track timescale to the 8 kHz audio clock
			audio->SetTimestamp(startTime*8000/timeScale);
		}

		//Check listener
		if (listener)
			//Frame callback
			listener->onMediaFrame(*frame);
	}

	// if it's the last
	if (packetIndex + 1 == numHintSamples)
		//Set last mark
		last = 1;
	
	// Set mark bit
	rtp.SetMark(last);

	// Get data pointer
	data = rtp.GetMediaData();
	//Get max data length
//......... part of the code is omitted here .........
Developer: chenxiuheng, Project: mcumediaserver, Lines of code: 101, Source: mp4streamer.cpp


Note: The VideoFrame::SetTimestamp examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The code snippets are taken from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please refer to each project's License for distribution and use; do not reproduce without permission.