This page collects typical usage examples of the C++ IDeckLinkMutableVideoFrame class. If you are wondering what IDeckLinkMutableVideoFrame is for and how to use it, the curated class examples below may help.
15 code examples of the IDeckLinkMutableVideoFrame class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
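Before the examples: IDeckLinkMutableVideoFrame is the writable frame type of the Blackmagic DeckLink SDK. The common lifecycle is to create a frame via IDeckLinkOutput::CreateVideoFrame, fill the buffer returned by GetBytes, hand the frame to the output (scheduled or synchronous), then Release it. A minimal sketch, assuming a configured IDeckLinkOutput* with 1080p UYVY video output already enabled (the function and variable names are illustrative, not taken from the examples below):

#include "DeckLinkAPI.h"

bool ShowBlackFrameSync(IDeckLinkOutput* output)
{
    IDeckLinkMutableVideoFrame* frame = NULL;
    // 8-bit UYVY packs 2 bytes per pixel, so row bytes = width * 2
    if (output->CreateVideoFrame(1920, 1080, 1920 * 2, bmdFormat8BitYUV,
                                 bmdFrameFlagDefault, &frame) != S_OK)
        return false;

    void* buffer = NULL;
    frame->GetBytes(&buffer);
    unsigned char* p = (unsigned char*)buffer;
    long size = frame->GetRowBytes() * frame->GetHeight();
    for (long i = 0; i < size; i += 2) {
        p[i]     = 0x80;  // U/V: neutral chroma
        p[i + 1] = 0x10;  // Y: black luma
    }

    HRESULT hr = output->DisplayVideoFrameSync(frame);  // show immediately
    frame->Release();  // frames are COM objects; drop our reference
    return hr == S_OK;
}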
Example 1: Start
bool OpenGLComposite::Start()
{
mTotalPlayoutFrames = 0;
// Preroll frames
for (unsigned i = 0; i < 5; i++)
{
// Take each video frame from the front of the queue and move it to the back
IDeckLinkMutableVideoFrame* outputVideoFrame = mDLOutputVideoFrameQueue.front();
mDLOutputVideoFrameQueue.push_back(outputVideoFrame);
mDLOutputVideoFrameQueue.pop_front();
// Start with a black frame for playout
void* pFrame;
outputVideoFrame->GetBytes((void**)&pFrame);
memset(pFrame, 0, outputVideoFrame->GetRowBytes() * mFrameHeight); // 0 is black in RGBA format
if (mDLOutput->ScheduleVideoFrame(outputVideoFrame, (mTotalPlayoutFrames * mFrameDuration), mFrameDuration, mFrameTimescale) != S_OK)
return false;
mTotalPlayoutFrames++;
}
mDLInput->StartStreams();
mDLOutput->StartScheduledPlayback(0, mFrameTimescale, 1.0);
return true;
}
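After preroll, playout is normally kept alive from the SDK's frame-completion callback; Example 11 below shows the full handler from this same class. A stripped-down sketch of just the rescheduling core (member names mirror Example 1, but the class skeleton is an assumption):

class PlayoutCallback : public IDeckLinkVideoOutputCallback
{
public:
    // Called by the driver each time a scheduled frame has gone out;
    // refill or redraw completedFrame here, then schedule it again.
    virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(
        IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result)
    {
        mDLOutput->ScheduleVideoFrame(completedFrame,
            mTotalPlayoutFrames * mFrameDuration, mFrameDuration, mFrameTimescale);
        mTotalPlayoutFrames++;
        return S_OK;
    }
    virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped() { return S_OK; }
    // Minimal COM plumbing for a callback object owned elsewhere
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID, LPVOID*) { return E_NOINTERFACE; }
    virtual ULONG STDMETHODCALLTYPE AddRef() { return 1; }
    virtual ULONG STDMETHODCALLTYPE Release() { return 1; }
private:
    IDeckLinkOutput* mDLOutput;
    unsigned int mTotalPlayoutFrames;
    BMDTimeValue mFrameDuration;
    BMDTimeScale mFrameTimescale;
};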
Example 2: CreateFrame
HRESULT TestPattern::CreateFrame(IDeckLinkVideoFrame** frame, void (*fillFunc)(IDeckLinkVideoFrame*))
{
HRESULT result;
int bytesPerPixel = GetBytesPerPixel(m_config->m_pixelFormat);
IDeckLinkMutableVideoFrame* newFrame = NULL;
IDeckLinkMutableVideoFrame* referenceFrame = NULL;
IDeckLinkVideoConversion* frameConverter = NULL;
*frame = NULL;
result = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth * bytesPerPixel, m_config->m_pixelFormat, bmdFrameFlagDefault, &newFrame);
if (result != S_OK)
{
fprintf(stderr, "Failed to create video frame\n");
goto bail;
}
if (m_config->m_pixelFormat == bmdFormat8BitYUV)
{
fillFunc(newFrame);
}
else
{
// Create a black frame in 8 bit YUV and convert to desired format
result = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth * 2, bmdFormat8BitYUV, bmdFrameFlagDefault, &referenceFrame);
if (result != S_OK)
{
fprintf(stderr, "Failed to create reference video frame\n");
goto bail;
}
fillFunc(referenceFrame);
frameConverter = CreateVideoConversionInstance();
result = frameConverter->ConvertFrame(referenceFrame, newFrame);
if (result != S_OK)
{
fprintf(stderr, "Failed to convert frame\n");
goto bail;
}
}
*frame = newFrame;
newFrame = NULL;
bail:
if (referenceFrame != NULL)
referenceFrame->Release();
if (frameConverter != NULL)
frameConverter->Release();
if (newFrame != NULL)
newFrame->Release();
return result;
}
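The fillFunc callback passed to CreateFrame is not shown on this page. A hypothetical example matching its signature, painting the frame black in 8-bit UYVY (each 32-bit word 0x10801080 is, in little-endian byte order, U=0x80 Y=0x10 V=0x80 Y=0x10):

static void FillBlackUYVY(IDeckLinkVideoFrame* theFrame)
{
    void* bytes = NULL;
    theFrame->GetBytes(&bytes);
    unsigned int* nextWord = (unsigned int*)bytes;
    long wordsRemaining = (theFrame->GetRowBytes() * theFrame->GetHeight()) / 4;
    while (wordsRemaining-- > 0)
        *(nextWord++) = 0x10801080;  // two black UYVY pixels per word
}

// Usage (assuming a constructed TestPattern instance):
//   IDeckLinkVideoFrame* frame = NULL;
//   if (testPattern->CreateFrame(&frame, FillBlackUYVY) == S_OK)
//   {
//       // ... schedule or display the frame ...
//       frame->Release();
//   }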
Example 3: CreateBlackFrame
SignalGenerator3DVideoFrame* CSignalGeneratorDlg::CreateBlackFrame ()
{
IDeckLinkMutableVideoFrame* referenceBlack = NULL;
IDeckLinkMutableVideoFrame* scheduleBlack = NULL;
HRESULT hr;
BMDPixelFormat pixelFormat;
int bytesPerPixel;
IDeckLinkVideoConversion* frameConverter = NULL;
SignalGenerator3DVideoFrame* ret = NULL;
pixelFormat = (BMDPixelFormat)m_pixelFormatCombo.GetItemData(m_pixelFormatCombo.GetCurSel());
bytesPerPixel = GetBytesPerPixel(pixelFormat);
hr = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth*bytesPerPixel, pixelFormat, bmdFrameFlagDefault, &scheduleBlack);
if (hr != S_OK)
goto bail;
if (pixelFormat == bmdFormat8BitYUV)
{
FillBlack(scheduleBlack);
}
else
{
hr = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth*2, bmdFormat8BitYUV, bmdFrameFlagDefault, &referenceBlack);
if (hr != S_OK)
goto bail;
FillBlack(referenceBlack);
hr = CoCreateInstance(CLSID_CDeckLinkVideoConversion, NULL, CLSCTX_ALL, IID_IDeckLinkVideoConversion, (void**)&frameConverter);
if (hr != S_OK)
goto bail;
hr = frameConverter->ConvertFrame(referenceBlack, scheduleBlack);
if (hr != S_OK)
goto bail;
}
ret = new SignalGenerator3DVideoFrame(scheduleBlack);
bail:
if (referenceBlack)
referenceBlack->Release();
if (scheduleBlack)
scheduleBlack->Release();
if (frameConverter)
frameConverter->Release();
return ret;
}
Example 4: ScheduleNextFrame
void Player::ScheduleNextFrame(bool prerolling)
{
AVPacket pkt;
AVPicture picture;
if (serial_fd > 0 && packet_queue_get(&dataqueue, &pkt, 0)) {
if (pkt.data[0] != ' '){
fprintf(stderr,"written %.*s \n", pkt.size, pkt.data);
write(serial_fd, pkt.data, pkt.size);
}
av_free_packet(&pkt);
}
if (packet_queue_get(&videoqueue, &pkt, 1) < 0)
return;
IDeckLinkMutableVideoFrame *videoFrame;
m_deckLinkOutput->CreateVideoFrame(m_frameWidth,
m_frameHeight,
m_frameWidth * 2,
pix,
bmdFrameFlagDefault,
&videoFrame);
void *frame;
int got_picture;
videoFrame->GetBytes(&frame);
avcodec_decode_video2(video_st->codec, avframe, &got_picture, &pkt);
if (got_picture) {
avpicture_fill(&picture, (uint8_t *)frame, pix_fmt,
m_frameWidth, m_frameHeight);
sws_scale(sws, avframe->data, avframe->linesize, 0, avframe->height,
picture.data, picture.linesize);
if (m_deckLinkOutput->ScheduleVideoFrame(videoFrame,
pkt.pts *
video_st->time_base.num,
pkt.duration *
video_st->time_base.num,
video_st->time_base.den) !=
S_OK)
fprintf(stderr, "Error scheduling frame\n");
}
videoFrame->Release();
av_free_packet(&pkt);
}
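The ScheduleVideoFrame call above maps FFmpeg's rational stream time base onto DeckLink's (displayTime, duration, timescale) triple. Spelled out as a standalone helper (hypothetical; the struct and function names are not from the example):

#include <cstdint>

struct DLTiming { int64_t displayTime; int64_t duration; int64_t timescale; };

// With time_base = num/den, a pts of P counts P*num ticks of 1/den seconds,
// which is exactly the unit ScheduleVideoFrame expects when timescale = den.
static DLTiming TimingFromPacket(int64_t pts, int64_t pkt_duration,
                                 int tb_num, int tb_den)
{
    DLTiming t;
    t.displayTime = pts * tb_num;
    t.duration    = pkt_duration * tb_num;
    t.timescale   = tb_den;
    return t;
}
// e.g. 25 fps with time_base 1/25: pts 100 -> displayTime 100, duration 1,
// timescale 25, i.e. frame 100 plays for 1/25 s.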
Example 5: scheduleNextFrame
void SignalGenerator::scheduleNextFrame(bool prerolling)
{
IDeckLinkMutableVideoFrame *currentFrame;
if (prerolling == false)
{
// If not prerolling, make sure that playback is still active
if (running == false)
return;
}
if (outputSignal == kOutputSignalPip)
{
if ((totalFramesScheduled % framesPerSecond) == 0)
currentFrame = videoFrameBars;
else
currentFrame = videoFrameBlack;
}
else
{
if ((totalFramesScheduled % framesPerSecond) == 0)
currentFrame = videoFrameBlack;
else
currentFrame = videoFrameBars;
}
printf("frames: %d\n", timeCode->frames());
currentFrame->SetTimecodeFromComponents(timeCodeFormat,
timeCode->hours(),
timeCode->minutes(),
timeCode->seconds(),
timeCode->frames(),
bmdTimecodeFlagDefault);
if (deckLinkOutput->ScheduleVideoFrame(currentFrame, (totalFramesScheduled * frameDuration), frameDuration, frameTimescale) != S_OK)
goto out;
totalFramesScheduled += 1;
out:
timeCode->update();
}
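SetTimecodeFromComponents stores the timecode on the frame itself, and it can be read back through IDeckLinkVideoFrame::GetTimecode. A small verification sketch (assumes the same timecode format as above; component types follow the Linux/macOS SDK header):

#include <cstdio>
#include <cstdint>

void PrintFrameTimecode(IDeckLinkVideoFrame* frame, BMDTimecodeFormat format)
{
    IDeckLinkTimecode* timecode = NULL;
    if (frame->GetTimecode(format, &timecode) == S_OK && timecode)
    {
        uint8_t hours, minutes, seconds, frames;
        timecode->GetComponents(&hours, &minutes, &seconds, &frames);
        printf("%02u:%02u:%02u:%02u\n", (unsigned)hours, (unsigned)minutes,
               (unsigned)seconds, (unsigned)frames);
        timecode->Release();  // GetTimecode returns an owned reference
    }
}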
Example 6: preroll_video_frames
void preroll_video_frames(unsigned int n_frames) {
IDeckLinkMutableVideoFrame *frame;
IDeckLinkVideoFrameAncillary *anc;
for (unsigned int i = 0; i < n_frames; i++) {
if (deckLinkOutput->CreateVideoFrame(norms[norm].w,
norms[norm].h, 2*norms[norm].w, bpf,
bmdFrameFlagDefault, &frame) != S_OK) {
throw std::runtime_error("Failed to create frame");
}
if (deckLinkOutput->CreateAncillaryData(bpf, &anc) != S_OK) {
throw std::runtime_error("failed to set frame ancillary data");
}
if (frame->SetAncillaryData(anc) != S_OK) {
throw std::runtime_error("failed to set frame ancillary data");
}
schedule_frame(frame);
}
}
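schedule_frame() is not shown here. A plausible counterpart given the globals this example already uses (the frame counter, frame duration, and time base names below are assumptions):

static void schedule_frame(IDeckLinkMutableVideoFrame* frame)
{
    if (deckLinkOutput->ScheduleVideoFrame(frame,
            frames_scheduled * frame_duration, frame_duration,
            time_base) != S_OK)
        throw std::runtime_error("Failed to schedule frame");
    frames_scheduled++;
    // The driver keeps its own reference while the frame is queued,
    // so ours can be dropped right away.
    frame->Release();
}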
Example 7: stop
void stop()
{
// Stop the audio and video output streams immediately
if ( m_deckLinkOutput )
{
m_deckLinkOutput->StopScheduledPlayback( 0, 0, 0 );
m_deckLinkOutput->DisableAudioOutput();
m_deckLinkOutput->DisableVideoOutput();
}
while ( mlt_deque_count( m_videoFrameQ ) )
{
m_videoFrame = (IDeckLinkMutableVideoFrame*) mlt_deque_pop_back( m_videoFrameQ );
m_videoFrame->Release();
}
m_videoFrame = 0;
if ( m_fifo ) sample_fifo_close( m_fifo );
}
Example 8: gst_decklink_video_sink_prepare
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
GstVideoFrame vframe;
IDeckLinkMutableVideoFrame *frame;
guint8 *outdata, *indata;
GstFlowReturn flow_ret;
HRESULT ret;
GstClockTime timestamp, duration;
GstClockTime running_time, running_time_duration;
GstClockTime latency, render_delay;
GstClockTimeDiff ts_offset;
gint i;
GstDecklinkVideoFormat caps_format;
BMDPixelFormat format;
gint bpp;
GstVideoTimeCodeMeta *tc_meta;
GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);
// FIXME: Handle no timestamps
if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
return GST_FLOW_ERROR;
}
caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
format = gst_decklink_pixel_format_from_type (caps_format);
bpp = gst_decklink_bpp_from_type (caps_format);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
if (duration == GST_CLOCK_TIME_NONE) {
duration =
gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
self->info.fps_n);
}
running_time =
gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
GST_FORMAT_TIME, timestamp);
running_time_duration =
gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
GST_FORMAT_TIME, timestamp + duration) - running_time;
/* See gst_base_sink_adjust_time() */
latency = gst_base_sink_get_latency (bsink);
render_delay = gst_base_sink_get_render_delay (bsink);
ts_offset = gst_base_sink_get_ts_offset (bsink);
running_time += latency;
if (ts_offset < 0) {
ts_offset = -ts_offset;
if ((GstClockTime) ts_offset < running_time)
running_time -= ts_offset;
else
running_time = 0;
} else {
running_time += ts_offset;
}
if (running_time > render_delay)
running_time -= render_delay;
else
running_time = 0;
ret = self->output->output->CreateVideoFrame (self->info.width,
self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
&frame);
if (ret != S_OK) {
GST_ELEMENT_ERROR (self, STREAM, FAILED,
(NULL), ("Failed to create video frame: 0x%08x", ret));
return GST_FLOW_ERROR;
}
if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Failed to map video frame");
flow_ret = GST_FLOW_ERROR;
goto out;
}
frame->GetBytes ((void **) &outdata);
indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
for (i = 0; i < self->info.height; i++) {
memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
outdata += frame->GetRowBytes ();
}
gst_video_frame_unmap (&vframe);
tc_meta = gst_buffer_get_video_time_code_meta (buffer);
if (tc_meta) {
BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
gchar *tc_str;
if (((GstVideoTimeCodeFlags) (tc_meta->tc.
config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
else
bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
//......... remainder of the code omitted .........
Example 9: gst_decklink_sink_videosink_chain
static GstFlowReturn
gst_decklink_sink_videosink_chain (GstPad * pad, GstObject * parent,
GstBuffer * buffer)
{
GstDecklinkSink *decklinksink;
IDeckLinkMutableVideoFrame *frame;
void *data;
GstFlowReturn ret;
const GstDecklinkMode *mode;
decklinksink = GST_DECKLINK_SINK (parent);
#if 0
if (!decklinksink->video_enabled) {
HRESULT ret;
ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode,
bmdVideoOutputFlagDefault);
if (ret != S_OK) {
GST_WARNING ("failed to enable video output");
//return FALSE;
}
decklinksink->video_enabled = TRUE;
}
#endif
mode = gst_decklink_get_mode (decklinksink->mode);
decklinksink->output->CreateVideoFrame (mode->width,
mode->height, mode->width * 2, decklinksink->pixel_format,
bmdFrameFlagDefault, &frame);
frame->GetBytes (&data);
gst_buffer_extract (buffer, 0, data, gst_buffer_get_size (buffer));
gst_buffer_unref (buffer);
g_mutex_lock (&decklinksink->mutex);
while (decklinksink->queued_frames > 2 && !decklinksink->stop) {
g_cond_wait (&decklinksink->cond, &decklinksink->mutex);
}
if (!decklinksink->stop) {
decklinksink->queued_frames++;
}
g_mutex_unlock (&decklinksink->mutex);
if (!decklinksink->stop) {
decklinksink->output->ScheduleVideoFrame (frame,
decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n);
decklinksink->num_frames++;
if (!decklinksink->sched_started) {
decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0);
decklinksink->sched_started = TRUE;
}
ret = GST_FLOW_OK;
} else {
ret = GST_FLOW_FLUSHING;
}
frame->Release ();
return ret;
}
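The queued_frames counter that gates the loop above is presumably decremented from the output's frame-completion callback, which also signals the condition variable so that chain() can submit the next frame. A sketch in the same style (the callback class is not shown on this page, so its name is an assumption):

HRESULT Output::ScheduledFrameCompleted (IDeckLinkVideoFrame * frame,
    BMDOutputFrameCompletionResult result)
{
  g_mutex_lock (&decklinksink->mutex);
  decklinksink->queued_frames--;
  g_cond_signal (&decklinksink->cond);   // wake a producer blocked in chain()
  g_mutex_unlock (&decklinksink->mutex);
  return S_OK;
}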
Example 10: DisplayVideo
static void DisplayVideo(vout_display_t *vd, picture_t *picture, subpicture_t *)
{
vout_display_sys_t *sys = vd->sys;
struct decklink_sys_t *decklink_sys = GetDLSys(VLC_OBJECT(vd));
mtime_t now = mdate();
if (!picture)
return;
picture_t *orig_picture = picture;
if (now - picture->date > sys->nosignal_delay * CLOCK_FREQ) {
msg_Dbg(vd, "no signal");
if (sys->pic_nosignal) {
picture = sys->pic_nosignal;
} else {
if (sys->tenbits) { // I422_10L
plane_t *y = &picture->p[0];
memset(y->p_pixels, 0x0, y->i_lines * y->i_pitch);
for (int i = 1; i < picture->i_planes; i++) {
plane_t *p = &picture->p[i];
size_t len = p->i_lines * p->i_pitch / 2;
int16_t *data = (int16_t*)p->p_pixels;
for (size_t j = 0; j < len; j++) // XXX: SIMD
data[j] = 0x200;
}
} else { // UYVY
size_t len = picture->p[0].i_lines * picture->p[0].i_pitch;
for (size_t i = 0; i < len; i+= 2) { // XXX: SIMD
picture->p[0].p_pixels[i+0] = 0x80;
picture->p[0].p_pixels[i+1] = 0;
}
}
}
picture->date = now;
}
HRESULT result;
int w, h, stride, length;
w = decklink_sys->i_width;
h = decklink_sys->i_height;
IDeckLinkMutableVideoFrame *pDLVideoFrame;
result = decklink_sys->p_output->CreateVideoFrame(w, h, w*3,
sys->tenbits ? bmdFormat10BitYUV : bmdFormat8BitYUV,
bmdFrameFlagDefault, &pDLVideoFrame);
if (result != S_OK) {
msg_Err(vd, "Failed to create video frame: 0x%X", result);
pDLVideoFrame = NULL;
goto end;
}
void *frame_bytes;
pDLVideoFrame->GetBytes((void**)&frame_bytes);
stride = pDLVideoFrame->GetRowBytes();
if (sys->tenbits)
v210_convert(frame_bytes, picture, stride);
else for(int y = 0; y < h; ++y) {
uint8_t *dst = (uint8_t *)frame_bytes + stride * y;
const uint8_t *src = (const uint8_t *)picture->p[0].p_pixels +
picture->p[0].i_pitch * y;
memcpy(dst, src, w * 2 /* bpp */);
}
// compute frame duration in CLOCK_FREQ units
length = (decklink_sys->frameduration * CLOCK_FREQ) / decklink_sys->timescale;
picture->date -= decklink_sys->offset;
result = decklink_sys->p_output->ScheduleVideoFrame(pDLVideoFrame,
picture->date, length, CLOCK_FREQ);
if (result != S_OK) {
msg_Err(vd, "Dropped Video frame %"PRId64 ": 0x%x",
picture->date, result);
goto end;
}
now = mdate() - decklink_sys->offset;
BMDTimeValue decklink_now;
double speed;
decklink_sys->p_output->GetScheduledStreamTime (CLOCK_FREQ, &decklink_now, &speed);
if ((now - decklink_now) > 400000) {
/* XXX: workaround card clock drift */
decklink_sys->offset += 50000;
msg_Err(vd, "Delaying: offset now %"PRId64"", decklink_sys->offset);
}
end:
if (pDLVideoFrame)
pDLVideoFrame->Release();
picture_Release(orig_picture);
}
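The w*3 row-byte request above is a conservative upper bound that covers both UYVY (2 bytes per pixel) and v210 (16 bytes per 6 pixels). For reference, the canonical v210 row size pads to 48-pixel groups (sketch):

static inline int V210RowBytes(int width)
{
    // v210 packs 6 pixels into four 32-bit words (16 bytes),
    // and rows are padded to 48-pixel / 128-byte boundaries.
    return ((width + 47) / 48) * 128;
}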
Example 11: PlayoutFrameCompleted
// Draw the captured video frame texture onto a box, rendering to the off-screen frame buffer.
// Read the rendered scene back from the frame buffer and schedule it for playout.
void OpenGLComposite::PlayoutFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult completionResult)
{
EnterCriticalSection(&pMutex);
// Get the first frame from the queue
IDeckLinkMutableVideoFrame* outputVideoFrame = mDLOutputVideoFrameQueue.front();
mDLOutputVideoFrameQueue.push_back(outputVideoFrame);
mDLOutputVideoFrameQueue.pop_front();
void* pFrame;
outputVideoFrame->GetBytes(&pFrame);
long rowbytes = outputVideoFrame->GetRowBytes();
long height = outputVideoFrame->GetHeight();
long memSize = rowbytes * height;
// make GL context current in this thread
wglMakeCurrent( hGLDC, hGLRC );
// Draw OpenGL scene to the off-screen frame buffer
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mIdFrameBuf);
// Setup view and projection
GLfloat aspectRatio = (GLfloat)mFrameWidth / (GLfloat)mFrameHeight;
glViewport (0, 0, mFrameWidth, mFrameHeight);
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
gluPerspective( 45.0f, aspectRatio, 0.1f, 100.0f );
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glScalef( aspectRatio, 1.0f, 1.0f ); // Scale x for correct aspect ratio
glTranslatef( 0.0f, 0.0f, -4.0f ); // Move into screen
glRotatef( mRotateAngle, 1.0f, 1.0f, 1.0f ); // Rotate model around a vector
mRotateAngle -= mRotateAngleRate; // update the rotation angle for next iteration
glFinish(); // Ensure changes to GL state are complete
// Draw a colourful frame around the front face of the box
// (provides a pleasing nesting effect when you connect the playout output to the capture input)
glBegin(GL_QUAD_STRIP);
glColor3f( 1.0f, 0.0f, 0.0f );
glVertex3f( 1.2f, 1.2f, 1.0f);
glVertex3f( 1.0f, 1.0f, 1.0f);
glColor3f( 0.0f, 0.0f, 1.0f );
glVertex3f( 1.2f, -1.2f, 1.0f);
glVertex3f( 1.0f, -1.0f, 1.0f);
glColor3f( 0.0f, 1.0f, 0.0f );
glVertex3f(-1.2f, -1.2f, 1.0f);
glVertex3f(-1.0f, -1.0f, 1.0f);
glColor3f( 1.0f, 1.0f, 0.0f );
glVertex3f(-1.2f, 1.2f, 1.0f);
glVertex3f(-1.0f, 1.0f, 1.0f);
glColor3f( 1.0f, 0.0f, 0.0f );
glVertex3f( 1.2f, 1.2f, 1.0f);
glVertex3f( 1.0f, 1.0f, 1.0f);
glEnd();
if (mHasNoInputSource)
{
// Draw a big X when no input is available on capture
glBegin( GL_QUADS );
glColor3f( 1.0f, 0.0f, 1.0f );
glVertex3f( 0.8f, 0.9f, 1.0f );
glVertex3f( 0.9f, 0.8f, 1.0f );
glColor3f( 1.0f, 1.0f, 0.0f );
glVertex3f( -0.8f, -0.9f, 1.0f );
glVertex3f( -0.9f, -0.8f, 1.0f );
glColor3f( 1.0f, 0.0f, 1.0f );
glVertex3f( -0.8f, 0.9f, 1.0f );
glVertex3f( -0.9f, 0.8f, 1.0f );
glColor3f( 1.0f, 1.0f, 0.0f );
glVertex3f( 0.8f, -0.9f, 1.0f );
glVertex3f( 0.9f, -0.8f, 1.0f );
glEnd();
}
else
{
if (mFastTransferExtensionAvailable)
{
// Signal that we're about to draw using mCaptureTexture onto mFBOTexture
mCaptureAllocator->beginTextureInUse();
}
// Pass texture unit 0 to the fragment shader as a uniform variable
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, mCaptureTexture);
glUseProgram(mProgram);
GLint locUYVYtex = glGetUniformLocation(mProgram, "UYVYtex");
glUniform1i(locUYVYtex, 0); // Bind texture unit 0
// Draw front and back faces of box applying video texture to each face
glBegin(GL_QUADS);
glTexCoord2f(1.0f, 0.0f); glVertex3f( 1.0f, 1.0f, 1.0f ); // Top right of front side
glTexCoord2f(0.0f, 0.0f); glVertex3f( -1.0f, 1.0f, 1.0f ); // Top left of front side
glTexCoord2f(0.0f, 1.0f); glVertex3f( -1.0f, -1.0f, 1.0f ); // Bottom left of front side
glTexCoord2f(1.0f, 1.0f); glVertex3f( 1.0f, -1.0f, 1.0f ); // Bottom right of front side
//......... remainder of the code omitted .........
Example 12: gst_decklink_video_sink_prepare
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
GstVideoFrame vframe;
IDeckLinkMutableVideoFrame *frame;
guint8 *outdata, *indata;
GstFlowReturn flow_ret;
HRESULT ret;
GstClockTime timestamp, duration;
GstClockTime running_time, running_time_duration;
gint i;
GstClock *clock;
GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);
// FIXME: Handle no timestamps
if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
return GST_FLOW_ERROR;
}
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
if (duration == GST_CLOCK_TIME_NONE) {
duration =
gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
self->info.fps_n);
}
running_time =
gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
GST_FORMAT_TIME, timestamp);
running_time_duration =
gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
GST_FORMAT_TIME, timestamp + duration) - running_time;
// FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916
// We need to drop late buffers here immediately instead of
// potentially overflowing the internal queue of the hardware
clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
if (clock) {
GstClockTime clock_running_time, base_time, clock_time, latency,
max_lateness;
base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
clock_time = gst_clock_get_time (clock);
if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
clock_running_time = clock_time - base_time;
latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));
max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));
if (clock_running_time >
running_time + running_time_duration + latency + max_lateness) {
GST_DEBUG_OBJECT (self,
"Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
GST_TIME_ARGS (clock_running_time),
GST_TIME_ARGS (running_time + running_time_duration));
if (self->last_render_time == GST_CLOCK_TIME_NONE
|| (self->last_render_time < clock_running_time
&& clock_running_time - self->last_render_time >= GST_SECOND)) {
GST_DEBUG_OBJECT (self,
"Rendering frame nonetheless because we had none for more than 1s");
running_time = clock_running_time;
running_time_duration = 0;
} else {
GST_WARNING_OBJECT (self, "Dropping frame");
gst_object_unref (clock);
return GST_FLOW_OK;
}
}
}
gst_object_unref (clock);
}
self->last_render_time = running_time;
ret = self->output->output->CreateVideoFrame (self->info.width,
self->info.height, self->info.stride[0], bmdFormat8BitYUV,
bmdFrameFlagDefault, &frame);
if (ret != S_OK) {
GST_ELEMENT_ERROR (self, STREAM, FAILED,
(NULL), ("Failed to create video frame: 0x%08x", ret));
return GST_FLOW_ERROR;
}
if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Failed to map video frame");
flow_ret = GST_FLOW_ERROR;
goto out;
}
frame->GetBytes ((void **) &outdata);
indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
for (i = 0; i < self->info.height; i++) {
memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);
indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
outdata += frame->GetRowBytes ();
}
gst_video_frame_unmap (&vframe);
//......... remainder of the code omitted .........
Example 13: gst_decklink_sink_videosink_chain
static GstFlowReturn
gst_decklink_sink_videosink_chain (GstPad * pad, GstBuffer * buffer)
{
GstDecklinkSink *decklinksink;
IDeckLinkMutableVideoFrame *frame;
void *data;
GstFlowReturn ret;
const GstDecklinkMode *mode;
decklinksink = GST_DECKLINK_SINK (gst_pad_get_parent (pad));
GST_DEBUG_OBJECT (decklinksink, "chain");
#if 0
if (!decklinksink->video_enabled) {
HRESULT ret;
ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode,
bmdVideoOutputFlagDefault);
if (ret != S_OK) {
GST_ERROR ("failed to enable video output");
//return FALSE;
}
decklinksink->video_enabled = TRUE;
}
#endif
mode = gst_decklink_get_mode (decklinksink->mode);
decklinksink->output->CreateVideoFrame (mode->width,
mode->height, mode->width * 2, bmdFormat8BitYUV,
bmdFrameFlagDefault, &frame);
frame->GetBytes (&data);
memcpy (data, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
gst_buffer_unref (buffer);
g_mutex_lock (decklinksink->mutex);
while (decklinksink->queued_frames > 2 && !decklinksink->stop) {
g_cond_wait (decklinksink->cond, decklinksink->mutex);
}
if (!decklinksink->stop) {
decklinksink->queued_frames++;
}
g_mutex_unlock (decklinksink->mutex);
if (!decklinksink->stop) {
decklinksink->output->ScheduleVideoFrame (frame,
decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n);
decklinksink->num_frames++;
if (!decklinksink->sched_started) {
decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0);
decklinksink->sched_started = TRUE;
}
ret = GST_FLOW_OK;
} else {
ret = GST_FLOW_WRONG_STATE;
}
frame->Release ();
gst_object_unref (decklinksink);
return ret;
}
Example 14: render
HRESULT render( mlt_frame frame )
{
HRESULT result = S_OK;
// Get the audio
double speed = mlt_properties_get_double( MLT_FRAME_PROPERTIES(frame), "_speed" );
if ( speed == 1.0 )
{
mlt_audio_format format = mlt_audio_s16;
int frequency = bmdAudioSampleRate48kHz;
int samples = mlt_sample_calculator( m_fps, frequency, m_count );
int16_t *pcm = 0;
if ( !mlt_frame_get_audio( frame, (void**) &pcm, &format, &frequency, &m_channels, &samples ) )
{
int count = samples;
if ( !m_isPrerolling )
{
uint32_t audioCount = 0;
uint32_t videoCount = 0;
// Check for resync
m_deckLinkOutput->GetBufferedAudioSampleFrameCount( &audioCount );
m_deckLinkOutput->GetBufferedVideoFrameCount( &videoCount );
// Underflow typically occurs during non-normal speed playback.
if ( audioCount < 1 || videoCount < 1 )
{
// Upon switching to normal playback, buffer some frames faster than realtime.
mlt_log_info( &m_consumer, "buffer underrun: audio buf %u video buf %u frames\n", audioCount, videoCount );
m_prerollCounter = 0;
}
// While rebuffering
if ( isBuffering() )
{
// Only append audio to reach the ideal level and not overbuffer.
int ideal = ( m_preroll - 1 ) * bmdAudioSampleRate48kHz / m_fps;
int actual = m_fifo->used / m_channels + audioCount;
int diff = ideal / 2 - actual;
count = diff < 0 ? 0 : diff < count ? diff : count;
}
}
if ( count > 0 )
sample_fifo_append( m_fifo, pcm, count * m_channels );
}
}
// Create video frames while pre-rolling
if ( m_isPrerolling )
{
createFrame();
if ( !m_videoFrame )
{
mlt_log_error( &m_consumer, "failed to create video frame\n" );
return S_FALSE;
}
}
// Get the video
if ( mlt_properties_get_int( MLT_FRAME_PROPERTIES( frame ), "rendered") )
{
mlt_image_format format = mlt_image_yuv422;
uint8_t* image = 0;
uint8_t* buffer = 0;
if ( !mlt_frame_get_image( frame, &image, &format, &m_width, &m_height, 0 ) )
{
m_videoFrame = (IDeckLinkMutableVideoFrame*) mlt_deque_pop_back( m_videoFrameQ );
m_videoFrame->GetBytes( (void**) &buffer );
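// swab() copies while swapping each pair of adjacent bytes, which doubles as
// the YUYV -> UYVY reordering DeckLink's bmdFormat8BitYUV expects (assumed:
// MLT's mlt_image_yuv422 buffers are YUYV-packed).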
if ( m_displayMode->GetFieldDominance() == bmdUpperFieldFirst )
// convert lower field first to top field first
swab( image, buffer + m_width * 2, m_width * ( m_height - 1 ) * 2 );
else
swab( image, buffer, m_width * m_height * 2 );
m_deckLinkOutput->ScheduleVideoFrame( m_videoFrame, m_count * m_duration, m_duration, m_timescale );
mlt_deque_push_front( m_videoFrameQ, m_videoFrame );
}
}
else
{
mlt_log_verbose( &m_consumer, "dropped video frame\n" );
}
++m_count;
// Check for end of pre-roll
if ( ++m_prerollCounter > m_preroll && m_isPrerolling )
{
// Start audio and video output
m_deckLinkOutput->EndAudioPreroll();
m_deckLinkOutput->StartScheduledPlayback( 0, m_timescale, 1.0 );
m_isPrerolling = false;
}
return result;
}
Example 15: createFrame
void createFrame()
{
m_videoFrame = 0;
// Generate a DeckLink video frame
if ( S_OK != m_deckLinkOutput->CreateVideoFrame( m_width, m_height,
m_width * 2, bmdFormat8BitYUV, bmdFrameFlagDefault, &m_videoFrame ) )
{
mlt_log_verbose( &m_consumer, "Failed to create video frame\n" );
stop();
return;
}
// Make the first line black for field order correction.
uint8_t *buffer = 0;
if ( S_OK == m_videoFrame->GetBytes( (void**) &buffer ) && buffer )
{
for ( int i = 0; i < m_width; i++ )
{
*buffer++ = 128;
*buffer++ = 16;
}
}
mlt_log_debug( &m_consumer, "created video frame\n" );
mlt_deque_push_back( m_videoFrameQ, m_videoFrame );
}