This article collects typical usage examples of the C++ GST_VIDEO_FRAME_COMP_DATA function. If you are wondering what GST_VIDEO_FRAME_COMP_DATA does, how to call it, or what real-world uses look like, the curated examples here should help.
Below are 15 code examples of GST_VIDEO_FRAME_COMP_DATA, drawn from real projects and ordered by popularity.
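All of the examples follow the same basic pattern: map a GstBuffer into a GstVideoFrame, read per-component data pointers and strides with the GST_VIDEO_FRAME_COMP_* macros, and unmap when done. As a minimal sketch of that pattern (invert_luma and its arguments are made up for illustration and assume an 8-bit video format; this is not taken from any project below):

#include <gst/video/video.h>

static void
invert_luma (GstVideoInfo * info, GstBuffer * buf)
{
  GstVideoFrame frame;
  guint8 *data;
  gint x, y, w, h, stride, pstride;

  /* Map the buffer; this fills in per-plane pointers and strides */
  if (!gst_video_frame_map (&frame, info, buf, GST_MAP_READWRITE))
    return;

  /* Component 0 is Y for YUV formats (R for RGB formats) */
  data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);   /* bytes per row */
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (&frame, 0); /* bytes per sample */
  w = GST_VIDEO_FRAME_COMP_WIDTH (&frame, 0);
  h = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, 0);

  for (y = 0; y < h; y++) {
    guint8 *row = data + y * stride;
    for (x = 0; x < w; x++)
      row[x * pstride] = 255 - row[x * pstride];
  }

  gst_video_frame_unmap (&frame);
}

Note that GST_VIDEO_FRAME_COMP_DATA addresses a logical component (Y, U, V or R, G, B), not a memory plane: for packed formats several components share one plane, which is why the per-sample pixel stride matters in the loop above.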
Example 1: gst_pixbufscale_transform_frame
static GstFlowReturn
gst_pixbufscale_transform_frame (GstVideoFilter * filter,
GstVideoFrame * in, GstVideoFrame * out)
{
GstPixbufScale *scale;
GdkPixbuf *src_pixbuf, *dest_pixbuf;
scale = GST_PIXBUFSCALE (filter);
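/* Wrap the mapped input and output frame data in GdkPixbufs without copying;
 * component 0 of a packed RGB frame is the base of the whole pixel array. */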
src_pixbuf =
gdk_pixbuf_new_from_data (GST_VIDEO_FRAME_COMP_DATA (in, 0),
GDK_COLORSPACE_RGB, FALSE, 8, GST_VIDEO_FRAME_WIDTH (in),
GST_VIDEO_FRAME_HEIGHT (in),
GST_VIDEO_FRAME_COMP_STRIDE (in, 0), NULL, NULL);
dest_pixbuf =
gdk_pixbuf_new_from_data (GST_VIDEO_FRAME_COMP_DATA (out, 0),
GDK_COLORSPACE_RGB, FALSE, 8, GST_VIDEO_FRAME_WIDTH (out),
GST_VIDEO_FRAME_HEIGHT (out),
GST_VIDEO_FRAME_COMP_STRIDE (out, 0), NULL, NULL);
gdk_pixbuf_scale (src_pixbuf, dest_pixbuf, 0, 0,
GST_VIDEO_FRAME_WIDTH (out),
GST_VIDEO_FRAME_HEIGHT (out), 0, 0,
(double) GST_VIDEO_FRAME_WIDTH (out) / GST_VIDEO_FRAME_WIDTH (in),
(double) GST_VIDEO_FRAME_HEIGHT (out) / GST_VIDEO_FRAME_HEIGHT (in),
scale->gdk_method);
g_object_unref (src_pixbuf);
g_object_unref (dest_pixbuf);
return GST_FLOW_OK;
}
Example 2: NS_ASSERTION
void GStreamerReader::ImageDataFromVideoFrame(GstVideoFrame *aFrame,
PlanarYCbCrImage::Data *aData)
{
NS_ASSERTION(GST_VIDEO_INFO_IS_YUV(&mVideoInfo),
"Non-YUV video frame formats not supported");
NS_ASSERTION(GST_VIDEO_FRAME_N_COMPONENTS(aFrame) == 3,
"Unsupported number of components in video frame");
aData->mPicX = aData->mPicY = 0;
aData->mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
aData->mStereoMode = StereoMode::MONO;
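// Components 0/1/2 of a planar YUV frame are the Y/Cb/Cr planes;
// PSTRIDE - 1 is the number of bytes to skip between successive samples.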
aData->mYChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 0);
aData->mYStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 0);
aData->mYSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 0),
GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 0));
aData->mYSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 0) - 1;
aData->mCbCrStride = GST_VIDEO_FRAME_COMP_STRIDE(aFrame, 1);
aData->mCbCrSize = gfx::IntSize(GST_VIDEO_FRAME_COMP_WIDTH(aFrame, 1),
GST_VIDEO_FRAME_COMP_HEIGHT(aFrame, 1));
aData->mCbChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 1);
aData->mCrChannel = GST_VIDEO_FRAME_COMP_DATA(aFrame, 2);
aData->mCbSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 1) - 1;
aData->mCrSkip = GST_VIDEO_FRAME_COMP_PSTRIDE(aFrame, 2) - 1;
}
Example 3: gst_video_balance_packed_yuv
static void
gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
GstVideoFrame * frame)
{
gint x, y, stride;
guint8 *ydata, *udata, *vdata;
gint yoff, uoff, voff;
gint width, height;
gint width2, height2;
guint8 *tabley = videobalance->tabley;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
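/* Packed YUV: all components share plane 0, so every row uses the plane
 * stride while COMP_PSTRIDE gives the byte step between samples. */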
ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
for (y = 0; y < height; y++) {
guint8 *yptr;
yptr = ydata + y * stride;
for (x = 0; x < width; x++) {
*yptr = tabley[*yptr];
yptr += yoff;
}
}
width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
for (y = 0; y < height2; y++) {
guint8 *uptr, *vptr;
guint8 u1, v1;
uptr = udata + y * stride;
vptr = vdata + y * stride;
for (x = 0; x < width2; x++) {
u1 = *uptr;
v1 = *vptr;
*uptr = tableu[u1][v1];
*vptr = tablev[u1][v1];
uptr += uoff;
vptr += voff;
}
}
}
Example 4: gst_smpte_blend_i420
static void
gst_smpte_blend_i420 (GstVideoFrame * frame1, GstVideoFrame * frame2,
GstVideoFrame * oframe, GstMask * mask, gint border, gint pos)
{
guint32 *maskp;
gint value;
gint i, j;
gint min, max;
guint8 *in1, *in2, *out, *in1u, *in1v, *in2u, *in2v, *outu, *outv;
gint width, height;
if (border == 0)
border++;
min = pos - border;
max = pos;
width = GST_VIDEO_FRAME_WIDTH (frame1);
height = GST_VIDEO_FRAME_HEIGHT (frame1);
in1 = GST_VIDEO_FRAME_COMP_DATA (frame1, 0);
in2 = GST_VIDEO_FRAME_COMP_DATA (frame2, 0);
out = GST_VIDEO_FRAME_COMP_DATA (oframe, 0);
in1u = GST_VIDEO_FRAME_COMP_DATA (frame1, 1);
in1v = GST_VIDEO_FRAME_COMP_DATA (frame1, 2);
in2u = GST_VIDEO_FRAME_COMP_DATA (frame2, 1);
in2v = GST_VIDEO_FRAME_COMP_DATA (frame2, 2);
outu = GST_VIDEO_FRAME_COMP_DATA (oframe, 1);
outv = GST_VIDEO_FRAME_COMP_DATA (oframe, 2);
maskp = mask->data;
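/* Blend luma per pixel; I420 chroma is subsampled 2x2, so the U/V planes
 * are only updated on even rows and columns. */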
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
value = *maskp++;
value = ((CLAMP (value, min, max) - min) << 8) / border;
out[j] = ((in1[j] * value) + (in2[j] * (256 - value))) >> 8;
if (!(i & 1) && !(j & 1)) {
outu[j / 2] =
((in1u[j / 2] * value) + (in2u[j / 2] * (256 - value))) >> 8;
outv[j / 2] =
((in1v[j / 2] * value) + (in2v[j / 2] * (256 - value))) >> 8;
}
}
in1 += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 0);
in2 += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 0);
out += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 0);
if (!(i & 1)) {
in1u += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 1);
in2u += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 1);
in1v += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 2);
in2v += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 2);
outu += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 1);
outv += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 2);
}
}
}
Example 5: gst_av1_enc_fill_image
static void
gst_av1_enc_fill_image (GstAV1Enc * enc, GstVideoFrame * frame,
aom_image_t * image)
{
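/* Point the aom_image_t planes straight at the mapped frame: zero-copy,
 * so the frame must stay mapped while the encoder reads from it. */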
image->planes[AOM_PLANE_Y] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
image->planes[AOM_PLANE_U] = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
image->planes[AOM_PLANE_V] = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
image->stride[AOM_PLANE_Y] = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
image->stride[AOM_PLANE_U] = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
image->stride[AOM_PLANE_V] = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2);
}
Example 6: gst_gaussianblur_transform_frame
static GstFlowReturn
gst_gaussianblur_transform_frame (GstVideoFilter * vfilter,
GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
GstGaussianBlur *filter = GST_GAUSSIANBLUR (vfilter);
GstClockTime timestamp;
gint64 stream_time;
gfloat sigma;
guint8 *src, *dest;
/* GstController: update the properties */
timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time);
GST_OBJECT_LOCK (filter);
sigma = filter->sigma;
GST_OBJECT_UNLOCK (filter);
if (filter->cur_sigma != sigma) {
g_free (filter->kernel);
filter->kernel = NULL;
g_free (filter->kernel_sum);
filter->kernel_sum = NULL;
filter->cur_sigma = sigma;
}
if (filter->kernel == NULL &&
!make_gaussian_kernel (filter, filter->cur_sigma)) {
GST_ELEMENT_ERROR (filter, RESOURCE, NO_SPACE_LEFT, ("Out of memory"),
("Failed to allocate gaussian kernel"));
return GST_FLOW_ERROR;
}
/*
* Perform gaussian smoothing on the image using the input standard
* deviation.
*/
src = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
dest = GST_VIDEO_FRAME_COMP_DATA (out_frame, 0);
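/* Copy the whole input frame first so data the blur does not write is
 * preserved, then smooth from src into dest. */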
gst_video_frame_copy (out_frame, in_frame);
gaussian_smooth (filter, src, dest);
return GST_FLOW_OK;
}
Example 7: yadif_filter
void
yadif_filter (GstYadif * yadif, int parity, int tff)
{
int y, i;
const GstVideoInfo *vi = &yadif->video_info;
const GstVideoFormatInfo *vfi = vi->finfo;
for (i = 0; i < GST_VIDEO_FORMAT_INFO_N_COMPONENTS (vfi); i++) {
int w = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vfi, i, vi->width);
int h = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vfi, i, vi->height);
int refs = GST_VIDEO_INFO_COMP_STRIDE (vi, i);
int df = GST_VIDEO_INFO_COMP_PSTRIDE (vi, i);
guint8 *prev_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->prev_frame, i);
guint8 *cur_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->cur_frame, i);
guint8 *next_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->next_frame, i);
guint8 *dest_data = GST_VIDEO_FRAME_COMP_DATA (&yadif->dest_frame, i);
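/* Deinterlace only the field selected by parity; rows of the other field
 * are copied through unchanged in the else branch below. */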
for (y = 0; y < h; y++) {
if ((y ^ parity) & 1) {
guint8 *prev = prev_data + y * refs;
guint8 *cur = cur_data + y * refs;
guint8 *next = next_data + y * refs;
guint8 *dst = dest_data + y * refs;
int mode = ((y == 1) || (y + 2 == h)) ? 2 : yadif->mode;
#if HAVE_CPU_X86_64
if (0) {
filter_line_c (dst, prev, cur, next, w,
y + 1 < h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
} else {
filter_line_x86_64 (dst, prev, cur, next, w,
y + 1 < h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
}
#else
filter_line_c (dst, prev, cur, next, w,
y + 1 < h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
#endif
} else {
guint8 *dst = dest_data + y * refs;
guint8 *cur = cur_data + y * refs;
memcpy (dst, cur, w * df);
}
}
}
#if 0
emms_c ();
#endif
}
Example 8: fill_image_planar8_3
static void
fill_image_planar8_3 (opj_image_t * image, GstVideoFrame * frame)
{
gint c, x, y, w, h;
const guint8 *data_in, *tmp;
gint *data_out;
gint sstride;
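/* OpenJPEG stores each component as a packed array of gint, so widen the
 * 8-bit samples one by one. */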
for (c = 0; c < 3; c++) {
w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
data_in = GST_VIDEO_FRAME_COMP_DATA (frame, c);
sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c);
data_out = image->comps[c].data;
for (y = 0; y < h; y++) {
tmp = data_in;
for (x = 0; x < w; x++) {
*data_out = *tmp;
data_out++;
tmp++;
}
data_in += sstride;
}
}
}
Example 9: gst_gdk_pixbuf_sink_get_pixbuf_from_buffer
static GdkPixbuf *
gst_gdk_pixbuf_sink_get_pixbuf_from_buffer (GstGdkPixbufSink * sink,
GstBuffer * buf)
{
GdkPixbuf *pix = NULL;
GstVideoFrame *frame;
gint minsize, bytes_per_pixel;
g_return_val_if_fail (GST_VIDEO_SINK_WIDTH (sink) > 0, NULL);
g_return_val_if_fail (GST_VIDEO_SINK_HEIGHT (sink) > 0, NULL);
frame = g_slice_new0 (GstVideoFrame);
gst_video_frame_map (frame, &sink->info, buf, GST_MAP_READ);
bytes_per_pixel = (sink->has_alpha) ? 4 : 3;
/* last row needn't have row padding */
minsize = (GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) *
(GST_VIDEO_SINK_HEIGHT (sink) - 1)) +
(bytes_per_pixel * GST_VIDEO_SINK_WIDTH (sink));
g_return_val_if_fail (gst_buffer_get_size (buf) >= minsize, NULL);
gst_buffer_ref (buf);
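/* The pixbuf borrows the mapped data; the destroy notify is responsible
 * for unmapping the frame and dropping the extra buffer ref. */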
pix = gdk_pixbuf_new_from_data (GST_VIDEO_FRAME_COMP_DATA (frame, 0),
GDK_COLORSPACE_RGB, sink->has_alpha, 8, GST_VIDEO_SINK_WIDTH (sink),
GST_VIDEO_SINK_HEIGHT (sink), GST_VIDEO_FRAME_COMP_STRIDE (frame, 0),
(GdkPixbufDestroyNotify) gst_gdk_pixbuf_sink_pixbuf_destroy_notify,
frame);
return pix;
}
Example 10: user_endrow_callback
static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
png_uint_32 row_num, int pass)
{
GstPngDec *pngdec = NULL;
pngdec = GST_PNGDEC (png_get_io_ptr (png_ptr));
/* FIXME: implement interlaced pictures */
/* If buffer_out doesn't exist, it means buffer_alloc failed, which
* will already have set the return code */
if (GST_IS_BUFFER (pngdec->current_frame->output_buffer)) {
GstVideoFrame frame;
GstBuffer *buffer = pngdec->current_frame->output_buffer;
size_t offset;
gint width;
guint8 *data;
if (!gst_video_frame_map (&frame, &pngdec->output_state->info, buffer,
GST_MAP_WRITE)) {
pngdec->ret = GST_FLOW_ERROR;
return;
}
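/* Component 0 of a packed RGB frame is the base of the pixel data; the
 * component stride is the distance in bytes between rows. */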
data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
offset = row_num * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
GST_LOG ("got row %u, copying in buffer %p at offset %" G_GSIZE_FORMAT,
(guint) row_num, pngdec->current_frame->output_buffer, offset);
width = GST_ROUND_UP_4 (png_get_rowbytes (pngdec->png, pngdec->info));
memcpy (data + offset, new_row, width);
gst_video_frame_unmap (&frame);
pngdec->ret = GST_FLOW_OK;
}
}
Example 11: theora_enc_init_buffer
static void
theora_enc_init_buffer (th_ycbcr_buffer buf, GstVideoFrame * frame)
{
GstVideoInfo vinfo;
guint i;
/* According to Theora developer Timothy Terriberry, the Theora
* encoder will not use memory outside of pic_width/height, even when
* the frame size is bigger. The values outside this region will be encoded
* to default values.
* Due to this, setting the frame's width/height as the buffer width/height
* is perfectly ok, even though it does not strictly look ok.
*/
gst_video_info_init (&vinfo);
gst_video_info_set_format (&vinfo, GST_VIDEO_FRAME_FORMAT (frame),
GST_ROUND_UP_16 (GST_VIDEO_FRAME_WIDTH (frame)),
GST_ROUND_UP_16 (GST_VIDEO_FRAME_HEIGHT (frame)));
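/* The padded info is only used for the component sizes; the data pointers
 * and strides still come from the actually mapped frame. */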
for (i = 0; i < 3; i++) {
buf[i].width = GST_VIDEO_INFO_COMP_WIDTH (&vinfo, i);
buf[i].height = GST_VIDEO_INFO_COMP_HEIGHT (&vinfo, i);
buf[i].data = GST_VIDEO_FRAME_COMP_DATA (frame, i);
buf[i].stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
}
}
Example 12: fill_frame_planar16_3
static void
fill_frame_planar16_3 (GstVideoFrame * frame, opj_image_t * image)
{
gint c, x, y, w, h;
guint16 *data_out, *tmp;
const gint *data_in;
gint dstride;
gint shift;
for (c = 0; c < 3; c++) {
w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c) / 2;
data_out = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, c);
data_in = image->comps[c].data;
shift = 16 - image->comps[c].prec;
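/* Left-shift so samples of any precision fill the full 16-bit range. */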
for (y = 0; y < h; y++) {
tmp = data_out;
for (x = 0; x < w; x++) {
*tmp = *data_in << shift;
tmp++;
data_in++;
}
data_out += dstride;
}
}
}
Example 13: user_endrow_callback
static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
png_uint_32 row_num, int pass)
{
GstPngDec *pngdec = NULL;
pngdec = GST_PNGDEC (png_get_io_ptr (png_ptr));
/* If buffer_out doesn't exist, it means buffer_alloc failed, which
* will already have set the return code */
if (new_row && GST_IS_BUFFER (pngdec->current_frame->output_buffer)) {
GstVideoFrame frame;
GstBuffer *buffer = pngdec->current_frame->output_buffer;
size_t offset;
guint8 *data;
if (!gst_video_frame_map (&frame, &pngdec->output_state->info, buffer,
GST_MAP_WRITE)) {
pngdec->ret = GST_FLOW_ERROR;
return;
}
data = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
offset = row_num * GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
GST_LOG ("got row %u at pass %d, copying in buffer %p at offset %"
G_GSIZE_FORMAT, (guint) row_num, pass,
pngdec->current_frame->output_buffer, offset);
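/* Let libpng merge the (possibly interlaced) row into the mapped output
 * at the right byte offset. */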
png_progressive_combine_row (pngdec->png, data + offset, new_row);
gst_video_frame_unmap (&frame);
pngdec->ret = GST_FLOW_OK;
} else
pngdec->ret = GST_FLOW_OK;
}
Example 14: gst_jpeg_dec_decode_rgb
static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
{
guchar *r_rows[16], *g_rows[16], *b_rows[16];
guchar **scanarray[3] = { r_rows, g_rows, b_rows };
gint i, j, k;
gint lines;
guint8 *base[3];
guint pstride, rstride;
gint width, height;
GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
for (i = 0; i < 3; i++)
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
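/* jpeg_read_raw_data() fills per-component row buffers; interleave them
 * back into packed RGB using the pixel stride. */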
i = 0;
while (i < height) {
lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
if (G_LIKELY (lines > 0)) {
for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
gint p;
p = 0;
for (k = 0; k < width; k++) {
base[0][p] = r_rows[j][k];
base[1][p] = g_rows[j][k];
base[2][p] = b_rows[j][k];
p += pstride;
}
base[0] += rstride;
base[1] += rstride;
base[2] += rstride;
}
} else {
GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
}
}
}
Example 15: daala_handle_image
/* Allocate buffer and copy image data into Y444 format */
static GstFlowReturn
daala_handle_image (GstDaalaDec * dec, od_img * img, GstVideoCodecFrame * frame)
{
GstVideoDecoder *decoder = GST_VIDEO_DECODER (dec);
gint width, height, stride;
GstFlowReturn result;
gint i, comp;
guint8 *dest, *src;
GstVideoFrame vframe;
result = gst_video_decoder_allocate_output_frame (decoder, frame);
if (G_UNLIKELY (result != GST_FLOW_OK)) {
GST_DEBUG_OBJECT (dec, "could not get buffer, reason: %s",
gst_flow_get_name (result));
return result;
}
/* if only libdaala would allow us to give it a destination frame */
GST_CAT_TRACE_OBJECT (GST_CAT_PERFORMANCE, dec,
"doing unavoidable video frame copy");
if (G_UNLIKELY (!gst_video_frame_map (&vframe, &dec->output_state->info,
frame->output_buffer, GST_MAP_WRITE)))
goto invalid_frame;
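/* libdaala decodes into its own buffers, so copy each component row by
 * row into the mapped output frame. */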
for (comp = 0; comp < 3; comp++) {
width = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, comp);
height = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, comp);
stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, comp);
dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, comp);
src = img->planes[comp].data;
for (i = 0; i < height; i++) {
memcpy (dest, src, width);
dest += stride;
src += img->planes[comp].ystride;
}
}
gst_video_frame_unmap (&vframe);
return GST_FLOW_OK;
invalid_frame:
{
GST_DEBUG_OBJECT (dec, "could not map video frame");
return GST_FLOW_ERROR;
}
}