This article collects typical usage examples of GST_VIDEO_INFO_FORMAT in C/C++. If you have been wondering how GST_VIDEO_INFO_FORMAT is actually used in practice, the hand-picked examples below should help. (GST_VIDEO_INFO_FORMAT is a macro from GStreamer's video library that returns the GstVideoFormat of a GstVideoInfo.)
The following shows 15 code examples of GST_VIDEO_INFO_FORMAT, sorted by popularity by default.
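Before the examples, here is a minimal sketch of the most common pattern, using only the public GstVideo API (the helper name inspect_caps_format is made up for illustration): fill a GstVideoInfo from negotiated caps with gst_video_info_from_caps() and read the pixel format back with GST_VIDEO_INFO_FORMAT().
#include <gst/video/video.h>
static gboolean
inspect_caps_format (GstCaps * caps)
{
  GstVideoInfo info;
  /* parse the caps into a GstVideoInfo; fails for non-raw or malformed caps */
  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;
  /* GST_VIDEO_INFO_FORMAT() yields the GstVideoFormat enum stored in the info */
  switch (GST_VIDEO_INFO_FORMAT (&info)) {
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_I420:
      GST_DEBUG ("planar YUV input: %s",
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&info)));
      break;
    default:
      GST_DEBUG ("other format: %s",
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&info)));
      break;
  }
  return TRUE;
}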
Example 1: gst_yuv_to_rgb_set_info
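/* GstVideoFilter set_info vfunc: accept the new caps only when width, height,
 * framerate, pixel-aspect-ratio and interlace mode are identical on both pads,
 * then log the negotiated input/output formats. */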
static gboolean
gst_yuv_to_rgb_set_info (GstVideoFilter * filter,
GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
GstVideoInfo * out_info)
{
GstYuvToRgb *yuvtorgb = GST_YUVTORGB_CAST (filter);
if (in_info->width != out_info->width || in_info->height != out_info->height
|| in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)
goto format_mismatch;
/* if present, these must match too */
if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)
goto format_mismatch;
/* if present, these must match too */
if (in_info->interlace_mode != out_info->interlace_mode)
goto format_mismatch;
GST_DEBUG ("reconfigured %d %d", GST_VIDEO_INFO_FORMAT (in_info),
GST_VIDEO_INFO_FORMAT (out_info));
return TRUE;
/* ERRORS */
format_mismatch:
{
GST_ERROR_OBJECT (yuvtorgb, "input and output formats do not match");
return FALSE;
}
}
Example 2: _RGB_to_GRAY
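/* Configures a single-texture shader that reorders an RGB(A/x) input into a
 * GRAY8 output: the input format determines the pixel swizzle, and
 * GST_VIDEO_INFO_FORMAT of the output info selects the fragment program. */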
static void
_RGB_to_GRAY (GstGLColorConvert * convert)
{
struct ConvertInfo *info = &convert->priv->convert_info;
GstVideoFormat in_format = GST_VIDEO_INFO_FORMAT (&convert->in_info);
const gchar *in_format_str = gst_video_format_to_string (in_format);
gchar *pixel_order = _RGB_pixel_order (in_format_str, "rgba");
gchar *alpha = NULL;
info->in_n_textures = 1;
info->out_n_textures = 1;
info->shader_tex_names[0] = "tex";
if (_is_RGBx (in_format))
alpha = g_strdup_printf ("t.%c = 1.0;", pixel_order[3]);
switch (GST_VIDEO_INFO_FORMAT (&convert->out_info)) {
case GST_VIDEO_FORMAT_GRAY8:
info->frag_prog = g_strdup_printf (frag_REORDER, alpha ? alpha : "",
pixel_order[0], pixel_order[0], pixel_order[0], pixel_order[3]);
break;
default:
break;
}
g_free (alpha);
g_free (pixel_order);
}
Example 3: _gst_gl_download_perform_with_data_unlocked
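/* Downloads a GL texture into the caller-provided plane buffers, after checking
 * that the configured video format is neither UNKNOWN nor ENCODED and that
 * every plane pointer is set. */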
static gboolean
_gst_gl_download_perform_with_data_unlocked (GstGLDownload * download,
GLuint texture_id, gpointer data[GST_VIDEO_MAX_PLANES])
{
guint i;
g_return_val_if_fail (download != NULL, FALSE);
g_return_val_if_fail (texture_id > 0, FALSE);
g_return_val_if_fail (GST_VIDEO_INFO_FORMAT (&download->info) !=
GST_VIDEO_FORMAT_UNKNOWN
&& GST_VIDEO_INFO_FORMAT (&download->info) != GST_VIDEO_FORMAT_ENCODED,
FALSE);
for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&download->info); i++) {
g_return_val_if_fail (data[i] != NULL, FALSE);
}
if (!download->priv->in_tex[0])
download->priv->in_tex[0] =
gst_gl_memory_wrapped_texture (download->context, texture_id,
GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&download->info),
GST_VIDEO_INFO_HEIGHT (&download->info), NULL, NULL);
download->priv->in_tex[0]->tex_id = texture_id;
return _do_download (download, texture_id, data);
}
Example 4: gst_mfxpostproc_update_src_caps
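/* Updates the source-pad info from the new caps, then sets the FORMAT flag when
 * src and sink formats differ, the SIZE flag when a requested width/height
 * differs from the sink, and the FRC flag when a target framerate is set and
 * the src and sink framerates differ. */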
static gboolean
gst_mfxpostproc_update_src_caps (GstMfxPostproc * vpp, GstCaps * caps,
gboolean * caps_changed_ptr)
{
GST_INFO_OBJECT (vpp, "new src caps = %" GST_PTR_FORMAT, caps);
if (!video_info_update (caps, &vpp->srcpad_info, caps_changed_ptr))
return FALSE;
if (GST_VIDEO_INFO_FORMAT (&vpp->sinkpad_info) !=
GST_VIDEO_INFO_FORMAT (&vpp->srcpad_info))
vpp->flags |= GST_MFX_POSTPROC_FLAG_FORMAT;
if ((vpp->width || vpp->height) &&
vpp->width != GST_VIDEO_INFO_WIDTH (&vpp->sinkpad_info) &&
vpp->height != GST_VIDEO_INFO_HEIGHT (&vpp->sinkpad_info))
vpp->flags |= GST_MFX_POSTPROC_FLAG_SIZE;
if (vpp->fps_n && gst_util_fraction_compare(
GST_VIDEO_INFO_FPS_N (&vpp->srcpad_info),
GST_VIDEO_INFO_FPS_D (&vpp->srcpad_info),
GST_VIDEO_INFO_FPS_N (&vpp->sinkpad_info),
GST_VIDEO_INFO_FPS_D (&vpp->sinkpad_info)))
vpp->flags |= GST_MFX_POSTPROC_FLAG_FRC;
return TRUE;
}
Example 5: _RGB_to_RGB
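/* Builds the RGB-to-RGB reorder shader: the pixel order is derived from the
 * input and output format strings, and an RGBx input gets its undefined alpha
 * channel forced to 1.0. */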
static void
_RGB_to_RGB (GstGLColorConvert * convert)
{
struct ConvertInfo *info = &convert->priv->convert_info;
GstVideoFormat in_format = GST_VIDEO_INFO_FORMAT (&convert->in_info);
const gchar *in_format_str = gst_video_format_to_string (in_format);
GstVideoFormat out_format = GST_VIDEO_INFO_FORMAT (&convert->out_info);
const gchar *out_format_str = gst_video_format_to_string (out_format);
gchar *pixel_order = _RGB_pixel_order (in_format_str, out_format_str);
gchar *alpha = NULL;
info->in_n_textures = 1;
info->out_n_textures = 1;
if (_is_RGBx (in_format)) {
int i;
char input_alpha_channel = 'a';
for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
if (in_format_str[i] == 'X' || in_format_str[i] == 'x') {
input_alpha_channel = _index_to_shader_swizzle (i);
break;
}
}
alpha = g_strdup_printf ("t.%c = 1.0;", input_alpha_channel);
}
info->frag_prog = g_strdup_printf (frag_REORDER, alpha ? alpha : "",
pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
info->shader_tex_names[0] = "tex";
g_free (alpha);
g_free (pixel_order);
}
Example 6: _gst_gl_color_convert_set_format_unlocked
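/* Stores the new input/output GstVideoInfo on the converter, rejecting UNKNOWN
 * or ENCODED formats and resetting the converter only when the info actually
 * changed. */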
static void
_gst_gl_color_convert_set_format_unlocked (GstGLColorConvert * convert,
GstVideoInfo * in_info, GstVideoInfo * out_info)
{
g_return_if_fail (convert != NULL);
g_return_if_fail (in_info);
g_return_if_fail (out_info);
g_return_if_fail (GST_VIDEO_INFO_FORMAT (in_info) !=
GST_VIDEO_FORMAT_UNKNOWN);
g_return_if_fail (GST_VIDEO_INFO_FORMAT (in_info) !=
GST_VIDEO_FORMAT_ENCODED);
g_return_if_fail (GST_VIDEO_INFO_FORMAT (out_info) !=
GST_VIDEO_FORMAT_UNKNOWN);
g_return_if_fail (GST_VIDEO_INFO_FORMAT (out_info) !=
GST_VIDEO_FORMAT_ENCODED);
if (gst_video_info_is_equal (&convert->in_info, in_info) &&
gst_video_info_is_equal (&convert->out_info, out_info))
return;
gst_gl_color_convert_reset (convert);
convert->in_info = *in_info;
convert->out_info = *out_info;
convert->initted = FALSE;
}
Example 7: gst_msdkvpp_set_passthrough
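/* Enables base-transform passthrough only when no filter flag is set, no forced
 * VPP is required, and width, height and format match between sink and src. */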
static void
gst_msdkvpp_set_passthrough (GstMsdkVPP * thiz)
{
gboolean passthrough = TRUE;
/* no passthrough if any of the filter algorithms is enabled */
if (thiz->flags)
passthrough = FALSE;
/* vpp may still be needed in some specific circumstances, e.g. the
 * input surface is a dmabuf and the output must be video memory. So far
 * the underlying iHD driver doesn't seem to support dmabuf mapping, so
 * we can explicitly ask msdkvpp to provide non-dmabuf video-memory
 * surfaces as output through capsfilters */
if (thiz->need_vpp)
passthrough = FALSE;
/* no passthrough if the output width, height or format changes */
if (GST_VIDEO_INFO_WIDTH (&thiz->sinkpad_info) !=
GST_VIDEO_INFO_WIDTH (&thiz->srcpad_info)
|| GST_VIDEO_INFO_HEIGHT (&thiz->sinkpad_info) !=
GST_VIDEO_INFO_HEIGHT (&thiz->srcpad_info)
|| GST_VIDEO_INFO_FORMAT (&thiz->sinkpad_info) !=
GST_VIDEO_INFO_FORMAT (&thiz->srcpad_info))
passthrough = FALSE;
gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (thiz), passthrough);
}
Example 8: gst_imx_pxp_video_transform_are_video_infos_equal
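/* Two GstVideoInfo are considered equal for the PxP transform when width,
 * height and pixel format all match. */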
gboolean gst_imx_pxp_video_transform_are_video_infos_equal(G_GNUC_UNUSED GstImxBlitterVideoTransform *blitter_video_transform, GstVideoInfo const *in_info, GstVideoInfo const *out_info)
{
return
(GST_VIDEO_INFO_WIDTH(in_info) == GST_VIDEO_INFO_WIDTH(out_info)) &&
(GST_VIDEO_INFO_HEIGHT(in_info) == GST_VIDEO_INFO_HEIGHT(out_info)) &&
(GST_VIDEO_INFO_FORMAT(in_info) == GST_VIDEO_INFO_FORMAT(out_info))
;
}
Example 9: gst_video_balance_semiplanar_yuv
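/* Applies the balance lookup tables to a semi-planar YUV frame; the U/V byte
 * order inside the interleaved chroma plane depends on whether the format is
 * NV12 or NV21. */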
static void
gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
GstVideoFrame * frame)
{
gint x, y;
guint8 *ydata;
guint8 *uvdata;
gint ystride, uvstride;
gint width, height;
gint width2, height2;
guint8 *tabley = videobalance->tabley;
guint8 **tableu = videobalance->tableu;
guint8 **tablev = videobalance->tablev;
gint upos, vpos;
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
for (y = 0; y < height; y++) {
guint8 *yptr;
yptr = ydata + y * ystride;
for (x = 0; x < width; x++) {
*yptr = tabley[*yptr];
yptr++;
}
}
width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;
for (y = 0; y < height2; y++) {
guint8 *uvptr;
guint8 u1, v1;
uvptr = uvdata + y * uvstride;
for (x = 0; x < width2; x++) {
u1 = uvptr[upos];
v1 = uvptr[vpos];
uvptr[upos] = tableu[u1][v1];
uvptr[vpos] = tablev[u1][v1];
uvptr += 2;
}
}
}
Example 10: video_info_changed
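/* Returns TRUE when the format, width or height differs between the old and
 * new video info. */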
static gboolean
video_info_changed (GstVideoInfo * old_vip, GstVideoInfo * new_vip)
{
if (GST_VIDEO_INFO_FORMAT (old_vip) != GST_VIDEO_INFO_FORMAT (new_vip))
return TRUE;
if (GST_VIDEO_INFO_WIDTH (old_vip) != GST_VIDEO_INFO_WIDTH (new_vip))
return TRUE;
if (GST_VIDEO_INFO_HEIGHT (old_vip) != GST_VIDEO_INFO_HEIGHT (new_vip))
return TRUE;
return FALSE;
}
Example 11: gst_video_convert_set_info
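/* videoconvert set_info vfunc: drop any previous converter, require matching
 * geometry, framerate, PAR and interlace mode, then create a GstVideoConverter
 * configured with the chosen dither method and quantization. */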
static gboolean
gst_video_convert_set_info (GstVideoFilter * filter,
GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
GstVideoInfo * out_info)
{
GstVideoConvert *space;
space = GST_VIDEO_CONVERT_CAST (filter);
if (space->convert) {
gst_video_converter_free (space->convert);
space->convert = NULL;
}
/* these must match */
if (in_info->width != out_info->width || in_info->height != out_info->height
|| in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)
goto format_mismatch;
/* if present, these must match too */
if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)
goto format_mismatch;
/* if present, these must match too */
if (in_info->interlace_mode != out_info->interlace_mode)
goto format_mismatch;
space->convert = gst_video_converter_new (in_info, out_info,
gst_structure_new ("GstVideoConvertConfig",
GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD,
space->dither,
GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, G_TYPE_UINT,
space->dither_quantization, NULL));
if (space->convert == NULL)
goto no_convert;
GST_DEBUG ("reconfigured %d %d", GST_VIDEO_INFO_FORMAT (in_info),
GST_VIDEO_INFO_FORMAT (out_info));
return TRUE;
/* ERRORS */
format_mismatch:
{
GST_ERROR_OBJECT (space, "input and output formats do not match");
return FALSE;
}
no_convert:
{
GST_ERROR_OBJECT (space, "could not create converter");
return FALSE;
}
}
Example 12: gst_pngenc_set_format
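/* Maps the negotiated video format to the matching libpng color type and bit
 * depth, keeps a reference to the input state and fixes the output caps to
 * image/png. */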
static gboolean
gst_pngenc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
GstPngEnc *pngenc;
gboolean ret = TRUE;
GstVideoInfo *info;
GstVideoCodecState *output_state;
pngenc = GST_PNGENC (encoder);
info = &state->info;
switch (GST_VIDEO_INFO_FORMAT (info)) {
case GST_VIDEO_FORMAT_RGBA:
pngenc->png_color_type = PNG_COLOR_TYPE_RGBA;
break;
case GST_VIDEO_FORMAT_RGB:
pngenc->png_color_type = PNG_COLOR_TYPE_RGB;
break;
case GST_VIDEO_FORMAT_GRAY8:
case GST_VIDEO_FORMAT_GRAY16_BE:
pngenc->png_color_type = PNG_COLOR_TYPE_GRAY;
break;
default:
ret = FALSE;
goto done;
}
switch (GST_VIDEO_INFO_FORMAT (info)) {
case GST_VIDEO_FORMAT_GRAY16_BE:
pngenc->depth = 16;
break;
default: /* GST_VIDEO_FORMAT_RGBA, GST_VIDEO_FORMAT_RGB and GST_VIDEO_FORMAT_GRAY8 */
pngenc->depth = 8;
break;
}
if (pngenc->input_state)
gst_video_codec_state_unref (pngenc->input_state);
pngenc->input_state = gst_video_codec_state_ref (state);
output_state =
gst_video_encoder_set_output_state (encoder,
gst_caps_new_empty_simple ("image/png"), state);
gst_video_codec_state_unref (output_state);
done:
return ret;
}
Example 13: gst_video_info_is_equal
/**
* gst_video_info_is_equal:
* @info: a #GstVideoInfo
* @other: a #GstVideoInfo
*
* Compares two #GstVideoInfo and returns whether they are equal or not
*
* Returns: %TRUE if @info and @other are equal, else %FALSE.
*/
gboolean
gst_video_info_is_equal (const GstVideoInfo * info, const GstVideoInfo * other)
{
gint i;
if (GST_VIDEO_INFO_FORMAT (info) != GST_VIDEO_INFO_FORMAT (other))
return FALSE;
if (GST_VIDEO_INFO_INTERLACE_MODE (info) !=
GST_VIDEO_INFO_INTERLACE_MODE (other))
return FALSE;
if (GST_VIDEO_INFO_FLAGS (info) != GST_VIDEO_INFO_FLAGS (other))
return FALSE;
if (GST_VIDEO_INFO_WIDTH (info) != GST_VIDEO_INFO_WIDTH (other))
return FALSE;
if (GST_VIDEO_INFO_HEIGHT (info) != GST_VIDEO_INFO_HEIGHT (other))
return FALSE;
if (GST_VIDEO_INFO_SIZE (info) != GST_VIDEO_INFO_SIZE (other))
return FALSE;
if (GST_VIDEO_INFO_PAR_N (info) != GST_VIDEO_INFO_PAR_N (other))
return FALSE;
if (GST_VIDEO_INFO_PAR_D (info) != GST_VIDEO_INFO_PAR_D (other))
return FALSE;
if (GST_VIDEO_INFO_FPS_N (info) != GST_VIDEO_INFO_FPS_N (other))
return FALSE;
if (GST_VIDEO_INFO_FPS_D (info) != GST_VIDEO_INFO_FPS_D (other))
return FALSE;
if (!gst_video_colorimetry_is_equal (&GST_VIDEO_INFO_COLORIMETRY (info),
&GST_VIDEO_INFO_COLORIMETRY (other)))
return FALSE;
if (GST_VIDEO_INFO_CHROMA_SITE (info) != GST_VIDEO_INFO_CHROMA_SITE (other))
return FALSE;
if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) !=
GST_VIDEO_INFO_MULTIVIEW_MODE (other))
return FALSE;
if (GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) !=
GST_VIDEO_INFO_MULTIVIEW_FLAGS (other))
return FALSE;
if (GST_VIDEO_INFO_VIEWS (info) != GST_VIDEO_INFO_VIEWS (other))
return FALSE;
for (i = 0; i < info->finfo->n_planes; i++) {
if (info->stride[i] != other->stride[i])
return FALSE;
if (info->offset[i] != other->offset[i])
return FALSE;
}
return TRUE;
}
Example 14: gst_wl_shm_memory_construct_wl_buffer
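/* Wraps the GstMemory backing an SHM file descriptor into a wl_buffer whose
 * size, stride and wl_shm format are taken from the GstVideoInfo. */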
struct wl_buffer *
gst_wl_shm_memory_construct_wl_buffer (GstMemory * mem, GstWlDisplay * display,
const GstVideoInfo * info)
{
GstWlShmMemory *shm_mem = (GstWlShmMemory *) mem;
gint width, height, stride;
gsize size;
enum wl_shm_format format;
struct wl_shm_pool *wl_pool;
struct wl_buffer *wbuffer;
width = GST_VIDEO_INFO_WIDTH (info);
height = GST_VIDEO_INFO_HEIGHT (info);
stride = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
size = GST_VIDEO_INFO_SIZE (info);
format = gst_video_format_to_wl_shm_format (GST_VIDEO_INFO_FORMAT (info));
g_return_val_if_fail (gst_is_wl_shm_memory (mem), NULL);
g_return_val_if_fail (size <= mem->size, NULL);
g_return_val_if_fail (shm_mem->fd != -1, NULL);
GST_DEBUG_OBJECT (mem->allocator, "Creating wl_buffer of size %"
G_GSSIZE_FORMAT " (%d x %d, stride %d), format %s", size, width, height,
stride, gst_wl_shm_format_to_string (format));
wl_pool = wl_shm_create_pool (display->shm, shm_mem->fd, mem->size);
wbuffer = wl_shm_pool_create_buffer (wl_pool, 0, width, height, stride,
format);
close (shm_mem->fd);
shm_mem->fd = -1;
wl_shm_pool_destroy (wl_pool);
return wbuffer;
}
Example 15: gst_wayland_buffer_pool_alloc
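/* Buffer-pool alloc vfunc: carves the next chunk out of the wl_shm pool,
 * creates a wl_buffer for it, attaches a GstWlMeta, registers the buffer for
 * wl_buffer.release handling and wraps the mapped data in a GstMemory. */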
static GstFlowReturn
gst_wayland_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
GstBufferPoolAcquireParams * params)
{
GstWaylandBufferPool *self = GST_WAYLAND_BUFFER_POOL_CAST (pool);
gint width, height, stride;
gsize size;
enum wl_shm_format format;
gint offset;
void *data;
GstWlMeta *meta;
width = GST_VIDEO_INFO_WIDTH (&self->info);
height = GST_VIDEO_INFO_HEIGHT (&self->info);
stride = GST_VIDEO_INFO_PLANE_STRIDE (&self->info, 0);
size = GST_VIDEO_INFO_SIZE (&self->info);
format =
gst_video_format_to_wayland_format (GST_VIDEO_INFO_FORMAT (&self->info));
GST_DEBUG_OBJECT (self, "Allocating buffer of size %" G_GSSIZE_FORMAT
" (%d x %d, stride %d), format %s", size, width, height, stride,
gst_wayland_format_to_string (format));
/* try to reserve another memory block from the shm pool */
if (self->used + size > self->size)
goto no_buffer;
offset = self->used;
self->used += size;
data = ((gchar *) self->data) + offset;
/* create buffer and its metadata object */
*buffer = gst_buffer_new ();
meta = (GstWlMeta *) gst_buffer_add_meta (*buffer, GST_WL_META_INFO, NULL);
meta->pool = self;
meta->wbuffer = wl_shm_pool_create_buffer (self->wl_pool, offset,
width, height, stride, format);
meta->used_by_compositor = FALSE;
/* configure listening to wl_buffer.release */
g_mutex_lock (&self->buffers_map_mutex);
g_hash_table_insert (self->buffers_map, meta->wbuffer, *buffer);
g_mutex_unlock (&self->buffers_map_mutex);
wl_buffer_add_listener (meta->wbuffer, &buffer_listener, self);
/* add the allocated memory on the GstBuffer */
gst_buffer_append_memory (*buffer,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
size, 0, size, NULL, NULL));
return GST_FLOW_OK;
/* ERROR */
no_buffer:
{
GST_WARNING_OBJECT (pool, "can't create buffer");
return GST_FLOW_ERROR;
}
}