This page collects typical usage examples of the C++ method VideoFrame::GetData. If you are wondering what VideoFrame::GetData does, how to call it, or what real-world uses look like, the hand-picked code samples below should help; you can also explore the enclosing VideoFrame
class for more context.
Four code examples of VideoFrame::GetData are shown below, ordered by popularity by default.
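As a primer before the full examples, here is a minimal sketch of the pattern they all share (the variable names and the caller-owned out buffer are hypothetical; only GetData()/GetLength() and the BYTE/DWORD typedefs come from the examples below): GetData() returns a raw pointer into the frame's internal buffer and GetLength() the number of valid bytes, so callers copy that range out rather than taking ownership of it.
#include <cstring> // memcpy
// Minimal sketch, assuming "encoded" is a VideoFrame* produced elsewhere and
// "out" is a caller-owned buffer of at least encoded->GetLength() bytes:
BYTE *data = encoded->GetData();   // raw pointer into the frame's buffer
DWORD size = encoded->GetLength(); // number of valid bytes
memcpy(out, data, size);           // the frame retains ownership of its memory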
Example 1: EncodeVideo
//......... part of the code omitted here .........
continue;
}
//If we have a frame period to enforce
if (frameTime)
{
timespec ts;
//Lock
pthread_mutex_lock(&mutex);
//Calculate timeout
calcAbsTimeout(&ts,&prev,frameTime);
//Wait until the next frame is due or we are signalled to stop
//(pthread_cond_timedwait returns 0 when signalled, ETIMEDOUT otherwise)
int canceled = !pthread_cond_timedwait(&cond,&mutex,&ts);
//Unlock
pthread_mutex_unlock(&mutex);
//Check if we have been canceled
if (canceled)
//Exit
break;
}
//Set sending time of previous frame
getUpdDifTime(&prev);
//Set timestamp
encoded->SetTimestamp(getDifTime(&first)/1000);
//Set the period until the next frame
frameTime = 1000/fps;
//Set duration
encoded->SetDuration(frameTime);
//Get full frame
frame.SetVideoFrame(encoded->GetData(),encoded->GetLength());
//Set buffer size
frame.SetMediaSize(encoded->GetLength());
//Check type
if (encoded->IsIntra())
//Set type
frame.SetFrameType(RTMPVideoFrame::INTRA);
else
//Set type
frame.SetFrameType(RTMPVideoFrame::INTER);
//If we need the descriptor but don't have it yet
if (!frameDesc && encoded->IsIntra() && videoCodec==VideoCodec::H264)
{
//Create new description
AVCDescriptor desc;
//Set values
desc.SetConfigurationVersion(1);
desc.SetAVCProfileIndication(0x42);
desc.SetProfileCompatibility(0x80);
desc.SetAVCLevelIndication(0x0C);
desc.SetNALUnitLength(3);
//Get encoded data
BYTE *data = encoded->GetData();
//Get size
DWORD size = encoded->GetLength();
//Extract the parameter sets from the frame
desc.AddParametersFromFrame(data,size);
//Create desc frame
frameDesc = new RTMPVideoFrame(getDifTime(&first)/1000,desc);
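The excerpt is truncated here, but it already shows GetData() feeding two consumers: the encoded payload is handed to the outgoing RTMPVideoFrame via SetVideoFrame()/SetMediaSize(), and on the first H.264 intra frame the same buffer is parsed by AVCDescriptor::AddParametersFromFrame() to build the AVC sequence header that RTMP players need before any video frames.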
Example 2: SmoothFrame
int RTPMultiplexerSmoother::SmoothFrame(const MediaFrame* frame,DWORD duration)
{
//Check that the frame carries packetization info
if (!frame || !frame->HasRtpPacketizationInfo())
//Error
return Error("Frame does not have packetization info");
//Get info
const MediaFrame::RtpPacketizationInfo& info = frame->GetRtpPacketizationInfo();
DWORD codec = 0;
BYTE *frameData = NULL;
DWORD frameSize = 0;
//Depending on the type
switch(frame->GetType())
{
case MediaFrame::Audio:
{
//get audio frame
AudioFrame * audio = (AudioFrame*)frame;
//Get codec
codec = audio->GetCodec();
//Get data
frameData = audio->GetData();
//Get size
frameSize = audio->GetLength();
}
break;
case MediaFrame::Video:
{
//get Video frame
VideoFrame * video = (VideoFrame*)frame;
//Get codec
codec = video->GetCodec();
//Get data
frameData = video->GetData();
//Get size
frameSize = video->GetLength();
}
break;
default:
return Error("No smoother for this frame type");
}
DWORD frameLength = 0;
//Calculate the total payload length of the frame
for (size_t i=0;i<info.size();i++)
//Accumulate each packet's length
frameLength += info[i]->GetTotalLength();
//Running byte count, used to pace the packets across the frame duration
DWORD current = 0;
//For each one
for (size_t i=0;i<info.size();i++)
{
//Get packet
MediaFrame::RtpPacketization* rtp = info[i];
//Create rtp packet
RTPPacketSched *packet = new RTPPacketSched(frame->GetType(),codec);
//Make sure the payload fits in the packet
if (rtp->GetPrefixLen()+rtp->GetSize()>packet->GetMaxMediaLength())
{
//Too big: free the packet we just allocated and skip this entry
delete packet;
continue;
}
//Get pointer to media data
BYTE* out = packet->GetMediaData();
//Copy prefix
memcpy(out,rtp->GetPrefixData(),rtp->GetPrefixLen());
//Copy data
memcpy(out+rtp->GetPrefixLen(),frameData+rtp->GetPos(),rtp->GetSize());
//Calculate total length
DWORD len = rtp->GetPrefixLen()+rtp->GetSize();
//Set length
packet->SetMediaLength(len);
switch(packet->GetMedia())
{
case MediaFrame::Video:
//Set timestamp (90 kHz RTP video clock)
packet->SetTimestamp(frame->GetTimeStamp()*90);
break;
case MediaFrame::Audio:
//Set timestamp (8 kHz RTP audio clock)
packet->SetTimestamp(frame->GetTimeStamp()*8);
break;
default:
//Set timestamp
packet->SetTimestamp(frame->GetTimeStamp());
}
//Set the RTP marker bit on the last packet of the frame
if (i+1==info.size())
//Last one
packet->SetMark(true);
else
//Not the last
packet->SetMark(false);
//Calculate partial length
//......... part of the code omitted here .........
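The omitted tail presumably paces the packets across the frame's duration using the current byte counter accumulated above. A plausible sketch of that step, assuming a SetSendingTime() setter on RTPPacketSched and an outgoing packet queue (both hypothetical, not shown in the excerpt):
// Hypothetical continuation of the loop body:
current += len;                                           // bytes emitted so far
packet->SetSendingTime(current * duration / frameLength); // spread sends over the frame duration
queue.Add(packet);                                        // hand off to the sender loop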
Example 3: AddBroadcastReceiver
bool MultiConf::AddBroadcastReceiver(RTMPStream *receiver)
{
broadcast.AddReceiver(receiver);
Participants::iterator itBroadcaster = participants.find(m_CurrentBroadCaster);
if(itBroadcaster != participants.end())
{
RTPParticipant *broadCaster = (RTPParticipant*)itBroadcaster->second;
Log("Send idr packet to newly broadcast reciever\n");
IDRPacketSize idrPacketSize = broadCaster->GetIdrPacketSize();
IDRPacket idrPacket = broadCaster->GetIdrPacket();
DWORD currentTimeStamp = broadCaster->GetCurrentTimestamp();
size_t packetSize = idrPacket.size();
//Create desc frame
RTMPVideoFrame frameDesc(0,2048);
//Set timestamp
frameDesc.SetTimestamp(currentTimeStamp);
//Set codec
frameDesc.SetVideoCodec(RTMPVideoFrame::AVC);
//Set type
frameDesc.SetFrameType(RTMPVideoFrame::INTRA);
//Set NALU type
frameDesc.SetAVCType(0);
//Set no delay
frameDesc.SetAVCTS(0);
//Create description
AVCDescriptor desc;
//Set values
desc.SetConfigurationVersion(1);
//desc.SetAVCProfileIndication(0x42);
//desc.SetProfileCompatibility(0x80);
//desc.SetAVCLevelIndication(0x14);
//desc.SetAVCProfileIndication(idrPacket[0][1]);
//desc.SetProfileCompatibility(idrPacket[0][2]);
//desc.SetAVCLevelIndication(idrPacket[0][3]);
desc.SetAVCProfileIndication(0x64);
desc.SetProfileCompatibility(0x00);
desc.SetAVCLevelIndication(0x28);
desc.SetNALUnitLength(3);
//Assumes the first cached unit is the SPS and the second the PPS
desc.AddSequenceParameterSet(idrPacket[0],idrPacketSize[0]);
desc.AddPictureParameterSet(idrPacket[1],idrPacketSize[1]);
//Serialize
DWORD len = desc.Serialize(frameDesc.GetMediaData(),frameDesc.GetMaxMediaSize());
//Set size
frameDesc.SetMediaSize(len);
//broadcast.OnPublishedFrame(0, &frameDesc);
receiver->PlayMediaFrame(&frameDesc);
frameDesc.Dump();
RTMPVideoFrame frame(0,65535);
//Set codec
frame.SetVideoCodec(RTMPVideoFrame::AVC);
//Set NALU type
frame.SetAVCType(1);
//Set no delay
frame.SetAVCTS(0);
frame.SetTimestamp(currentTimeStamp);
frame.SetFrameType(RTMPVideoFrame::INTRA);
//Will hold the reassembled frame (NULL until the depacketizer completes one)
VideoFrame *videoFrame = NULL;
RTPDepacketizer *depacketizer = RTPDepacketizer::Create( MediaFrame::Video, VideoCodec::H264);
for(size_t i = 0; i < packetSize; i++) {
BYTE *packet = idrPacket[i];
int packet_size = idrPacketSize[i];
videoFrame = (VideoFrame *)depacketizer->AddPayload(packet,packet_size);
}
//Only wrap and send if the depacketizer produced a complete frame
if (videoFrame)
{
frame.SetVideoFrame(videoFrame->GetData(), videoFrame->GetLength());
receiver->PlayMediaFrame(&frame);
frame.Dump();
}
delete depacketizer;
}
return true;
}
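This example reverses the RTP path: each cached IDR payload is pushed back through RTPDepacketizer::AddPayload(), and once a complete frame has been reassembled its GetData()/GetLength() pair is wrapped into an RTMP frame, mirroring the packetization direction of Example 2.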
Example 4: envir
void H264FrameSource::doGetNextFrame()
{
// Compute the inter-frame wait time from the fps
double delay = 1000.0 / videoFPS;
int to_delay = delay * 1000; // us
if(!m_videoInput)
return;
BYTE *pic = m_videoInput->GrabFrame();
//Check picture
if (!pic) {
fFrameSize = 0;
m_started = 0;
return;
}
//Check if we need to send intra
if (sendFPU)
{
videoEncoder->FastPictureUpdate();
}
//if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
// This is the first frame, so use the current time:
//} else {
// Increment by the play time of the previous data:
// unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
// fPresentationTime.tv_sec += uSeconds/1000000;
// fPresentationTime.tv_usec = uSeconds%1000000;
//}
// Remember the play time of this data:
//fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
//fDurationInMicroseconds = fLastPlayTime;
//fDurationInMicroseconds = 1000.0 / videoFPS;
VideoFrame *videoFrame = videoEncoder->EncodeFrame(pic,m_videoInput->GetBufferSize());
//If encoding failed
if (!videoFrame){
//Next
fFrameSize = 0;
m_started = 0;
Log("-----Error encoding video\n");
double delay = 1000.0 / videoFPS;
int to_delay = delay * 1000; // us
nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay,
(TaskFunc*)FramedSource::afterGetting, this);
return;
}
if(sendFPU)
sendFPU = false;
//Set frame timestamp
videoFrame->SetTimestamp(getDifTime(&first)/1000);
//Set sending time of previous frame
//getUpdDifTime(&prev);
//gettimeofday(&fPresentationTime, 0);
fFrameSize = videoFrame->GetLength();
//Clamp to the sink's buffer size BEFORE copying; otherwise the memmove
//below would overrun fTo whenever the encoded frame exceeds fMaxSize
if (fFrameSize > fMaxSize) {
fNumTruncatedBytes = fFrameSize - fMaxSize;
fFrameSize = fMaxSize;
}
else {
fNumTruncatedBytes = 0;
}
memmove(fTo, videoFrame->GetData(), fFrameSize);
gettimeofday(&fPresentationTime, NULL);
//to_delay = ((1000 / videoFPS) * fFrameSize / RTPPAYLOADSIZE) * 1000; // us
nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay,
(TaskFunc*)FramedSource::afterGetting, this);
}
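A note on the live555 contract this example follows: doGetNextFrame() may copy at most fMaxSize bytes into fTo and must report any overflow via fNumTruncatedBytes, which is why the snippet clamps fFrameSize before the memmove from videoFrame->GetData().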