This article collects typical usage examples of the C++ method IDeckLinkMutableVideoFrame::GetRowBytes. If you have been wondering what IDeckLinkMutableVideoFrame::GetRowBytes does, how to call it, or what real-world uses of it look like, the curated code examples below should help. You can also explore further usage examples of the containing class, IDeckLinkMutableVideoFrame.
The following presents 5 code examples of the IDeckLinkMutableVideoFrame::GetRowBytes method, sorted by popularity by default.
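Before the full examples, here is a minimal sketch of the pattern they all share: create a mutable frame, ask the frame itself for its row pitch via GetRowBytes(), and size every memset/memcpy by that pitch rather than by width times bytes-per-pixel. The ClearFrame helper, the `output` parameter, and the UYVY pixel-format choice below are illustrative assumptions for this sketch, not taken from any specific example.

#include <cstring>
#include "DeckLinkAPI.h"

// Sketch: allocate an 8-bit YUV (UYVY) frame and zero its pixel buffer.
// Assumes `output` is a valid IDeckLinkOutput* obtained elsewhere.
bool ClearFrame(IDeckLinkOutput* output, long width, long height)
{
    IDeckLinkMutableVideoFrame* frame = nullptr;

    // The row-bytes argument to CreateVideoFrame is only the requested pitch;
    // the driver may allocate a larger one, so GetRowBytes() is queried below.
    if (output->CreateVideoFrame(width, height, width * 2 /* UYVY */,
                                 bmdFormat8BitYUV, bmdFrameFlagDefault,
                                 &frame) != S_OK)
        return false;

    void* bytes = nullptr;
    frame->GetBytes(&bytes);

    // GetRowBytes() returns the actual pitch in bytes, so the buffer size
    // is pitch * height, never width * bpp * height.
    std::memset(bytes, 0, frame->GetRowBytes() * frame->GetHeight());

    frame->Release();
    return true;
}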
Example 1: Start
bool OpenGLComposite::Start()
{
    mTotalPlayoutFrames = 0;

    // Preroll frames
    for (unsigned i = 0; i < 5; i++)
    {
        // Take each video frame from the front of the queue and move it to the back
        IDeckLinkMutableVideoFrame* outputVideoFrame = mDLOutputVideoFrameQueue.front();
        mDLOutputVideoFrameQueue.push_back(outputVideoFrame);
        mDLOutputVideoFrameQueue.pop_front();

        // Start with a black frame for playout
        void* pFrame;
        outputVideoFrame->GetBytes((void**)&pFrame);
        memset(pFrame, 0, outputVideoFrame->GetRowBytes() * mFrameHeight);   // 0 is black in RGBA format

        if (mDLOutput->ScheduleVideoFrame(outputVideoFrame, (mTotalPlayoutFrames * mFrameDuration), mFrameDuration, mFrameTimescale) != S_OK)
            return false;

        mTotalPlayoutFrames++;
    }

    mDLInput->StartStreams();
    mDLOutput->StartScheduledPlayback(0, mFrameTimescale, 1.0);

    return true;
}
Example 2: memcpy
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) (tc_meta->tc.
                config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
//......... (the remainder of the code is omitted here) .........
Example 3: DisplayVideo
static void DisplayVideo(vout_display_t *vd, picture_t *picture, subpicture_t *)
{
    vout_display_sys_t *sys = vd->sys;
    struct decklink_sys_t *decklink_sys = GetDLSys(VLC_OBJECT(vd));
    mtime_t now = mdate();

    if (!picture)
        return;

    picture_t *orig_picture = picture;

    if (now - picture->date > sys->nosignal_delay * CLOCK_FREQ) {
        msg_Dbg(vd, "no signal");
        if (sys->pic_nosignal) {
            picture = sys->pic_nosignal;
        } else {
            if (sys->tenbits) { // I422_10L
                plane_t *y = &picture->p[0];
                memset(y->p_pixels, 0x0, y->i_lines * y->i_pitch);
                for (int i = 1; i < picture->i_planes; i++) {
                    plane_t *p = &picture->p[i];
                    size_t len = p->i_lines * p->i_pitch / 2;
                    int16_t *data = (int16_t*)p->p_pixels;
                    for (size_t j = 0; j < len; j++) // XXX: SIMD
                        data[j] = 0x200;
                }
            } else { // UYVY
                size_t len = picture->p[0].i_lines * picture->p[0].i_pitch;
                for (size_t i = 0; i < len; i += 2) { // XXX: SIMD
                    picture->p[0].p_pixels[i+0] = 0x80;
                    picture->p[0].p_pixels[i+1] = 0;
                }
            }
        }
        picture->date = now;
    }

    HRESULT result;
    int w, h, stride, length;
    w = decklink_sys->i_width;
    h = decklink_sys->i_height;

    IDeckLinkMutableVideoFrame *pDLVideoFrame;
    result = decklink_sys->p_output->CreateVideoFrame(w, h, w*3,
        sys->tenbits ? bmdFormat10BitYUV : bmdFormat8BitYUV,
        bmdFrameFlagDefault, &pDLVideoFrame);
    if (result != S_OK) {
        msg_Err(vd, "Failed to create video frame: 0x%X", result);
        pDLVideoFrame = NULL;
        goto end;
    }

    void *frame_bytes;
    pDLVideoFrame->GetBytes((void**)&frame_bytes);
    stride = pDLVideoFrame->GetRowBytes();

    if (sys->tenbits)
        v210_convert(frame_bytes, picture, stride);
    else for(int y = 0; y < h; ++y) {
        uint8_t *dst = (uint8_t *)frame_bytes + stride * y;
        const uint8_t *src = (const uint8_t *)picture->p[0].p_pixels +
            picture->p[0].i_pitch * y;
        memcpy(dst, src, w * 2 /* bpp */);
    }

    // compute frame duration in CLOCK_FREQ units
    length = (decklink_sys->frameduration * CLOCK_FREQ) / decklink_sys->timescale;

    picture->date -= decklink_sys->offset;
    result = decklink_sys->p_output->ScheduleVideoFrame(pDLVideoFrame,
        picture->date, length, CLOCK_FREQ);
    if (result != S_OK) {
        msg_Err(vd, "Dropped Video frame %"PRId64 ": 0x%x",
            picture->date, result);
        goto end;
    }

    now = mdate() - decklink_sys->offset;

    BMDTimeValue decklink_now;
    double speed;
    decklink_sys->p_output->GetScheduledStreamTime (CLOCK_FREQ, &decklink_now, &speed);

    if ((now - decklink_now) > 400000) {
        /* XXX: workaround card clock drift */
        decklink_sys->offset += 50000;
        msg_Err(vd, "Delaying: offset now %"PRId64"", decklink_sys->offset);
    }

end:
    if (pDLVideoFrame)
        pDLVideoFrame->Release();
    picture_Release(orig_picture);
}
Example 4: PlayoutFrameCompleted
// Draw the captured video frame texture onto a box, rendering to the off-screen frame buffer.
// Read the rendered scene back from the frame buffer and schedule it for playout.
void OpenGLComposite::PlayoutFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult completionResult)
{
    EnterCriticalSection(&pMutex);

    // Get the first frame from the queue
    IDeckLinkMutableVideoFrame* outputVideoFrame = mDLOutputVideoFrameQueue.front();
    mDLOutputVideoFrameQueue.push_back(outputVideoFrame);
    mDLOutputVideoFrameQueue.pop_front();

    void* pFrame;
    outputVideoFrame->GetBytes(&pFrame);
    long rowbytes = outputVideoFrame->GetRowBytes();
    long height = outputVideoFrame->GetHeight();
    long memSize = rowbytes * height;

    // make GL context current in this thread
    wglMakeCurrent( hGLDC, hGLRC );

    // Draw OpenGL scene to the off-screen frame buffer
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mIdFrameBuf);

    // Setup view and projection
    GLfloat aspectRatio = (GLfloat)mFrameWidth / (GLfloat)mFrameHeight;
    glViewport (0, 0, mFrameWidth, mFrameHeight);
    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    gluPerspective( 45.0f, aspectRatio, 0.1f, 100.0f );
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();

    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    glScalef( aspectRatio, 1.0f, 1.0f );          // Scale x for correct aspect ratio
    glTranslatef( 0.0f, 0.0f, -4.0f );            // Move into screen
    glRotatef( mRotateAngle, 1.0f, 1.0f, 1.0f );  // Rotate model around a vector
    mRotateAngle -= mRotateAngleRate;             // update the rotation angle for next iteration
    glFinish();                                   // Ensure changes to GL state are complete

    // Draw a colourful frame around the front face of the box
    // (provides a pleasing nesting effect when you connect the playout output to the capture input)
    glBegin(GL_QUAD_STRIP);
    glColor3f( 1.0f, 0.0f, 0.0f );
    glVertex3f( 1.2f, 1.2f, 1.0f);
    glVertex3f( 1.0f, 1.0f, 1.0f);
    glColor3f( 0.0f, 0.0f, 1.0f );
    glVertex3f( 1.2f, -1.2f, 1.0f);
    glVertex3f( 1.0f, -1.0f, 1.0f);
    glColor3f( 0.0f, 1.0f, 0.0f );
    glVertex3f(-1.2f, -1.2f, 1.0f);
    glVertex3f(-1.0f, -1.0f, 1.0f);
    glColor3f( 1.0f, 1.0f, 0.0f );
    glVertex3f(-1.2f, 1.2f, 1.0f);
    glVertex3f(-1.0f, 1.0f, 1.0f);
    glColor3f( 1.0f, 0.0f, 0.0f );
    glVertex3f( 1.2f, 1.2f, 1.0f);
    glVertex3f( 1.0f, 1.0f, 1.0f);
    glEnd();

    if (mHasNoInputSource)
    {
        // Draw a big X when no input is available on capture
        glBegin( GL_QUADS );
        glColor3f( 1.0f, 0.0f, 1.0f );
        glVertex3f( 0.8f, 0.9f, 1.0f );
        glVertex3f( 0.9f, 0.8f, 1.0f );
        glColor3f( 1.0f, 1.0f, 0.0f );
        glVertex3f( -0.8f, -0.9f, 1.0f );
        glVertex3f( -0.9f, -0.8f, 1.0f );
        glColor3f( 1.0f, 0.0f, 1.0f );
        glVertex3f( -0.8f, 0.9f, 1.0f );
        glVertex3f( -0.9f, 0.8f, 1.0f );
        glColor3f( 1.0f, 1.0f, 0.0f );
        glVertex3f( 0.8f, -0.9f, 1.0f );
        glVertex3f( 0.9f, -0.8f, 1.0f );
        glEnd();
    }
    else
    {
        if (mFastTransferExtensionAvailable)
        {
            // Signal that we're about to draw using mCaptureTexture onto mFBOTexture
            mCaptureAllocator->beginTextureInUse();
        }

        // Pass texture unit 0 to the fragment shader as a uniform variable
        glEnable(GL_TEXTURE_2D);
        glBindTexture(GL_TEXTURE_2D, mCaptureTexture);
        glUseProgram(mProgram);
        GLint locUYVYtex = glGetUniformLocation(mProgram, "UYVYtex");
        glUniform1i(locUYVYtex, 0);   // Bind texture unit 0

        // Draw front and back faces of box applying video texture to each face
        glBegin(GL_QUADS);
        glTexCoord2f(1.0f, 0.0f); glVertex3f( 1.0f, 1.0f, 1.0f );    // Top right of front side
        glTexCoord2f(0.0f, 0.0f); glVertex3f( -1.0f, 1.0f, 1.0f );   // Top left of front side
        glTexCoord2f(0.0f, 1.0f); glVertex3f( -1.0f, -1.0f, 1.0f );  // Bottom left of front side
        glTexCoord2f(1.0f, 1.0f); glVertex3f( 1.0f, -1.0f, 1.0f );   // Bottom right of front side
//......... (the remainder of the code is omitted here) .........
Example 5: memcpy
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  gint i;
  GstClock *clock;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  // FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916
  // We need to drop late buffers here immediately instead of
  // potentially overflowing the internal queue of the hardware
  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    GstClockTime clock_running_time, base_time, clock_time, latency,
        max_lateness;

    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    clock_time = gst_clock_get_time (clock);
    if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
      clock_running_time = clock_time - base_time;
      latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));
      max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));

      if (clock_running_time >
          running_time + running_time_duration + latency + max_lateness) {
        GST_DEBUG_OBJECT (self,
            "Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_running_time),
            GST_TIME_ARGS (running_time + running_time_duration));

        if (self->last_render_time == GST_CLOCK_TIME_NONE
            || (self->last_render_time < clock_running_time
                && clock_running_time - self->last_render_time >= GST_SECOND)) {
          GST_DEBUG_OBJECT (self,
              "Rendering frame nonetheless because we had none for more than 1s");
          running_time = clock_running_time;
          running_time_duration = 0;
        } else {
          GST_WARNING_OBJECT (self, "Dropping frame");
          gst_object_unref (clock);
          return GST_FLOW_OK;
        }
      }
    }

    gst_object_unref (clock);
  }
  self->last_render_time = running_time;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], bmdFormat8BitYUV,
      bmdFrameFlagDefault, &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);
//......... (the remainder of the code is omitted here) .........