

C++ GST_VIDEO_INFO_WIDTH Function Code Examples

This article collects typical usage examples of the GST_VIDEO_INFO_WIDTH function (strictly speaking, an accessor macro from GStreamer's video library) in C++ code. If you are wondering what GST_VIDEO_INFO_WIDTH does, how to call it, or where to find real-world uses of it, the hand-picked code samples below should help.


The sections below present 15 code examples of GST_VIDEO_INFO_WIDTH, ordered by popularity.
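
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the typical pattern: fill a GstVideoInfo from negotiated caps, then read the frame dimensions through the GST_VIDEO_INFO_WIDTH and GST_VIDEO_INFO_HEIGHT accessor macros. The helper name print_frame_size_from_caps is made up for illustration.

#include <gst/video/video.h>

static void
print_frame_size_from_caps (GstCaps * caps)
{
  GstVideoInfo info;

  /* parse caps such as "video/x-raw,format=I420,width=1280,height=720"
   * into a GstVideoInfo structure */
  if (!gst_video_info_from_caps (&info, caps)) {
    g_printerr ("could not parse video caps\n");
    return;
  }

  /* GST_VIDEO_INFO_WIDTH / GST_VIDEO_INFO_HEIGHT simply read the
   * width/height fields of the GstVideoInfo */
  g_print ("frame size: %dx%d\n",
      GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info));
}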

Example 1: _display_size_to_stream_size

static void
_display_size_to_stream_size (GtkGstBaseWidget * base_widget, gdouble x,
    gdouble y, gdouble * stream_x, gdouble * stream_y)
{
  gdouble stream_width, stream_height;
  GtkAllocation allocation;
  GstVideoRectangle result;

  gtk_widget_get_allocation (GTK_WIDGET (base_widget), &allocation);
  _fit_stream_to_allocated_size (base_widget, &allocation, &result);

  stream_width = (gdouble) GST_VIDEO_INFO_WIDTH (&base_widget->v_info);
  stream_height = (gdouble) GST_VIDEO_INFO_HEIGHT (&base_widget->v_info);

  /* from display coordinates to stream coordinates */
  if (result.w > 0)
    *stream_x = (x - result.x) / result.w * stream_width;
  else
    *stream_x = 0.;

  /* clip to stream size */
  if (*stream_x < 0.)
    *stream_x = 0.;
  if (*stream_x > GST_VIDEO_INFO_WIDTH (&base_widget->v_info))
    *stream_x = GST_VIDEO_INFO_WIDTH (&base_widget->v_info);

  /* same for y-axis */
  if (result.h > 0)
    *stream_y = (y - result.y) / result.h * stream_height;
  else
    *stream_y = 0.;

  if (*stream_y < 0.)
    *stream_y = 0.;
  if (*stream_y > GST_VIDEO_INFO_HEIGHT (&base_widget->v_info))
    *stream_y = GST_VIDEO_INFO_HEIGHT (&base_widget->v_info);

  GST_TRACE ("transform %fx%f into %fx%f", x, y, *stream_x, *stream_y);
}
Developer ID: CogentEmbedded, Project: gst-plugins-bad, Lines of code: 39, Source file: gtkgstbasewidget.c

Example 2: gst_video_crop_meta_transform

static gboolean
gst_video_crop_meta_transform (GstBuffer * dest, GstMeta * meta,
    GstBuffer * buffer, GQuark type, gpointer data)
{
  GstVideoCropMeta *dmeta, *smeta;

  if (GST_META_TRANSFORM_IS_COPY (type)) {
    smeta = (GstVideoCropMeta *) meta;
    dmeta = gst_buffer_add_video_crop_meta (dest);

    GST_DEBUG ("copy crop metadata");
    dmeta->x = smeta->x;
    dmeta->y = smeta->y;
    dmeta->width = smeta->width;
    dmeta->height = smeta->height;
  } else if (GST_VIDEO_META_TRANSFORM_IS_SCALE (type)) {
    GstVideoMetaTransform *trans = data;
    gint ow, oh, nw, nh;

    smeta = (GstVideoCropMeta *) meta;
    dmeta = gst_buffer_add_video_crop_meta (dest);

    ow = GST_VIDEO_INFO_WIDTH (trans->in_info);
    nw = GST_VIDEO_INFO_WIDTH (trans->out_info);
    oh = GST_VIDEO_INFO_HEIGHT (trans->in_info);
    nh = GST_VIDEO_INFO_HEIGHT (trans->out_info);

    GST_DEBUG ("scaling crop metadata %dx%d -> %dx%d", ow, oh, nw, nh);
    dmeta->x = (smeta->x * nw) / ow;
    dmeta->y = (smeta->y * nh) / oh;
    dmeta->width = (smeta->width * nw) / ow;
    dmeta->height = (smeta->height * nh) / oh;
    GST_DEBUG ("crop offset %dx%d -> %dx%d", smeta->x, smeta->y, dmeta->x,
        dmeta->y);
    GST_DEBUG ("crop size   %dx%d -> %dx%d", smeta->width, smeta->height,
        dmeta->width, dmeta->height);
  }
  return TRUE;
}
Developer ID: elisescu, Project: gst-plugins-base, Lines of code: 39, Source file: gstvideometa.c

Example 3: gst_mfxpostproc_create

static gboolean
gst_mfxpostproc_create (GstMfxPostproc * vpp)
{
  if (!gst_mfxpostproc_ensure_filter (vpp))
    return FALSE;

  gst_mfx_filter_set_frame_info_from_gst_video_info (vpp->filter,
      &vpp->sinkpad_info);

  if (vpp->async_depth)
    gst_mfx_filter_set_async_depth (vpp->filter, vpp->async_depth);

  gst_mfx_filter_set_size (vpp->filter,
    GST_VIDEO_INFO_WIDTH (&vpp->srcpad_info),
    GST_VIDEO_INFO_HEIGHT (&vpp->srcpad_info));

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_FORMAT)
    gst_mfx_filter_set_format (vpp->filter,
      gst_video_format_to_mfx_fourcc (vpp->format));

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_DENOISE)
    gst_mfx_filter_set_denoising_level (vpp->filter, vpp->denoise_level);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_DETAIL)
    gst_mfx_filter_set_detail_level (vpp->filter, vpp->detail_level);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_HUE)
    gst_mfx_filter_set_hue (vpp->filter, vpp->hue);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_SATURATION)
    gst_mfx_filter_set_saturation (vpp->filter, vpp->saturation);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_BRIGHTNESS)
    gst_mfx_filter_set_brightness (vpp->filter, vpp->brightness);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_CONTRAST)
    gst_mfx_filter_set_contrast (vpp->filter, vpp->contrast);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_ROTATION)
    gst_mfx_filter_set_rotation (vpp->filter, vpp->angle);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_DEINTERLACING)
    gst_mfx_filter_set_deinterlace_mode (vpp->filter, vpp->deinterlace_mode);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_FRC) {
    gst_mfx_filter_set_frc_algorithm (vpp->filter, vpp->alg);
    gst_mfx_filter_set_framerate (vpp->filter, vpp->fps_n, vpp->fps_d);
  }

  return  gst_mfx_filter_prepare (vpp->filter);
}
Developer ID: 01org, Project: gstreamer-media-SDK, Lines of code: 51, Source file: gstmfxpostproc.c

Example 4: gst_imx_egl_viv_sink_egl_platform_set_video_info

void gst_imx_egl_viv_sink_egl_platform_set_video_info(GstImxEglVivSinkEGLPlatform *platform, GstVideoInfo *video_info)
{
	Window x11_window;

	EGL_PLATFORM_LOCK(platform);
	if (platform->native_window == 0)
	{
		GST_LOG("window not open - cannot set video info");
		EGL_PLATFORM_UNLOCK(platform);
		return;
	}

	x11_window = (Window)(platform->native_window);

	platform->video_width = GST_VIDEO_INFO_WIDTH(video_info);
	platform->video_height = GST_VIDEO_INFO_HEIGHT(video_info);

	if (platform->fullscreen || (platform->fixed_window_width != 0) || (platform->fixed_window_height != 0) || (platform->parent_window != 0))
	{
		/* even though the window itself might not have been resized, the callback
		 * still needs to be invoked, because it depends on both the window and the
		 * video frame sizes */
		if (platform->window_resized_event_cb != NULL)
		{
			// do not call the resize callback here directly; instead, notify the main loop about this change
			// because here, the EGL context is not and cannot be set
			gst_imx_egl_viv_sink_egl_platform_send_cmd(platform, GSTIMX_EGLX11_CMD_CALL_RESIZE_CB);
		}
	}
	else
	{
		/* not calling the resize callback here, since the XResizeWindow() call
		 * creates a resize event that will be handled in the main loop */
		XResizeWindow((Display *)(platform->native_display), x11_window, GST_VIDEO_INFO_WIDTH(video_info), GST_VIDEO_INFO_HEIGHT(video_info));
	}

	EGL_PLATFORM_UNLOCK(platform);
}
Developer ID: Au-Zone, Project: gstreamer-imx, Lines of code: 38, Source file: egl_platform_x11.c

Example 5: _init_upload

/* Called in the gl thread */
static gboolean
_init_upload (GstGLUpload * upload)
{
    GstGLFuncs *gl;
    GstVideoFormat v_format;
    GstVideoInfo out_info;

    gl = upload->context->gl_vtable;

    v_format = GST_VIDEO_INFO_FORMAT (&upload->in_info);

    GST_INFO ("Initializing texture upload for format:%s",
              gst_video_format_to_string (v_format));

    if (!gl->CreateProgramObject && !gl->CreateProgram) {
        gst_gl_context_set_error (upload->context,
                                  "Cannot upload YUV formats without OpenGL shaders");
        goto error;
    }

    gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA,
                               GST_VIDEO_INFO_WIDTH (&upload->in_info),
                               GST_VIDEO_INFO_HEIGHT (&upload->in_info));

    gst_gl_color_convert_set_format (upload->convert, &upload->in_info,
                                     &out_info);

    upload->out_tex = gst_gl_memory_wrapped_texture (upload->context, 0,
                      GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&upload->in_info),
                      GST_VIDEO_INFO_HEIGHT (&upload->in_info), NULL, NULL);

    upload->initted = TRUE;

    return TRUE;

error:
    return FALSE;
}
Developer ID: MathieuDuponchelle, Project: gst-plugins-bad, Lines of code: 39, Source file: gstglupload.c

Example 6: gst_gl_transformation_set_caps

static gboolean
gst_gl_transformation_set_caps (GstGLFilter * filter, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstGLTransformation *transformation = GST_GL_TRANSFORMATION (filter);

  transformation->aspect =
      (gdouble) GST_VIDEO_INFO_WIDTH (&filter->out_info) /
      (gdouble) GST_VIDEO_INFO_HEIGHT (&filter->out_info);

  gst_gl_transformation_build_mvp (transformation);

  return TRUE;
}
Developer ID: Lachann, Project: gst-plugins-bad, Lines of code: 14, Source file: gstgltransformation.c

Example 7: gst_kms_sink_calculate_display_ratio

static gboolean
gst_kms_sink_calculate_display_ratio (GstKMSSink * self, GstVideoInfo * vinfo)
{
  guint dar_n, dar_d;
  guint video_width, video_height;
  guint video_par_n, video_par_d;
  guint dpy_par_n, dpy_par_d;

  video_width = GST_VIDEO_INFO_WIDTH (vinfo);
  video_height = GST_VIDEO_INFO_HEIGHT (vinfo);
  video_par_n = GST_VIDEO_INFO_PAR_N (vinfo);
  video_par_d = GST_VIDEO_INFO_PAR_D (vinfo);

  gst_video_calculate_device_ratio (self->hdisplay, self->vdisplay,
      self->mm_width, self->mm_height, &dpy_par_n, &dpy_par_d);

  if (!gst_video_calculate_display_ratio (&dar_n, &dar_d, video_width,
          video_height, video_par_n, video_par_d, dpy_par_n, dpy_par_d))
    return FALSE;

  GST_DEBUG_OBJECT (self, "video calculated display ratio: %d/%d", dar_n,
      dar_d);

  /* now find a width x height that respects this display ratio.
   * prefer those that have one of w/h the same as the incoming video
   * using wd / hd = dar_n / dar_d */

  /* start with same height, because of interlaced video */
  /* check hd / dar_d is an integer scale factor, and scale wd with the PAR */
  if (video_height % dar_d == 0) {
    GST_DEBUG_OBJECT (self, "keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, dar_n, dar_d);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  } else if (video_width % dar_n == 0) {
    GST_DEBUG_OBJECT (self, "keeping video width");
    GST_VIDEO_SINK_WIDTH (self) = video_width;
    GST_VIDEO_SINK_HEIGHT (self) = (guint)
        gst_util_uint64_scale_int (video_width, dar_d, dar_n);
  } else {
    GST_DEBUG_OBJECT (self, "approximating while keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, dar_n, dar_d);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  }
  GST_DEBUG_OBJECT (self, "scaling to %dx%d", GST_VIDEO_SINK_WIDTH (self),
      GST_VIDEO_SINK_HEIGHT (self));

  return TRUE;
}
Developer ID: justinjoy, Project: gst-plugins-bad, Lines of code: 50, Source file: gstkmssink.c

Example 8: plugin_bind_dma_to_vaapi_buffer

static gboolean
plugin_bind_dma_to_vaapi_buffer (GstVaapiPluginBase * plugin,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstVideoInfo *const vip = &plugin->sinkpad_info;
  GstVaapiVideoMeta *meta;
  GstVaapiSurface *surface;
  GstVaapiSurfaceProxy *proxy;
  gint fd;

  fd = gst_dmabuf_memory_get_fd (gst_buffer_peek_memory (inbuf, 0));
  if (fd < 0)
    return FALSE;

  if (!plugin_update_sinkpad_info_from_buffer (plugin, inbuf))
    goto error_update_sinkpad_info;

  meta = gst_buffer_get_vaapi_video_meta (outbuf);
  g_return_val_if_fail (meta != NULL, FALSE);

  surface = gst_vaapi_surface_new_with_dma_buf_handle (plugin->display, fd,
      GST_VIDEO_INFO_SIZE (vip), GST_VIDEO_INFO_FORMAT (vip),
      GST_VIDEO_INFO_WIDTH (vip), GST_VIDEO_INFO_HEIGHT (vip),
      vip->offset, vip->stride);
  if (!surface)
    goto error_create_surface;

  proxy = gst_vaapi_surface_proxy_new (surface);
  gst_vaapi_object_unref (surface);
  if (!proxy)
    goto error_create_proxy;

  gst_vaapi_surface_proxy_set_destroy_notify (proxy,
      (GDestroyNotify) gst_buffer_unref, (gpointer) gst_buffer_ref (inbuf));
  gst_vaapi_video_meta_set_surface_proxy (meta, proxy);
  gst_vaapi_surface_proxy_unref (proxy);
  return TRUE;

  /* ERRORS */
error_update_sinkpad_info:
  GST_ERROR ("failed to update sink pad video info from video meta");
  return FALSE;
error_create_surface:
  GST_ERROR ("failed to create VA surface from dma_buf handle");
  return FALSE;
error_create_proxy:
  GST_ERROR ("failed to create VA surface proxy from wrapped VA surface");
  return FALSE;
}
Developer ID: gbeauchesne, Project: gstreamer-vaapi, Lines of code: 49, Source file: gstvaapipluginbase.c

Example 9: gst_glimage_sink_show_frame

static GstFlowReturn
gst_glimage_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  GstGLImageSink *glimage_sink;
  GstBuffer *stored_buffer;

  GST_TRACE ("rendering buffer:%p", buf);

  glimage_sink = GST_GLIMAGE_SINK (vsink);

  GST_TRACE ("redisplay texture:%u of size:%ux%u, window size:%ux%u",
      glimage_sink->next_tex, GST_VIDEO_INFO_WIDTH (&glimage_sink->info),
      GST_VIDEO_INFO_HEIGHT (&glimage_sink->info),
      GST_VIDEO_SINK_WIDTH (glimage_sink),
      GST_VIDEO_SINK_HEIGHT (glimage_sink));

  /* Avoid to release the texture while drawing */
  GST_GLIMAGE_SINK_LOCK (glimage_sink);
  glimage_sink->redisplay_texture = glimage_sink->next_tex;
  stored_buffer = glimage_sink->stored_buffer;
  glimage_sink->stored_buffer = gst_buffer_ref (buf);
  GST_GLIMAGE_SINK_UNLOCK (glimage_sink);
  if (stored_buffer)
    gst_buffer_unref (stored_buffer);

  /* Ask the underlying window to redraw its content */
  if (!gst_glimage_sink_redisplay (glimage_sink))
    goto redisplay_failed;

  GST_TRACE ("post redisplay");

  if (g_atomic_int_get (&glimage_sink->to_quit) != 0) {
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    gst_gl_upload_release_buffer (glimage_sink->upload);
    return GST_FLOW_ERROR;
  }

  return GST_FLOW_OK;

/* ERRORS */
redisplay_failed:
  {
    gst_gl_upload_release_buffer (glimage_sink->upload);
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    return GST_FLOW_ERROR;
  }
}
Developer ID: asdlei00, Project: gst-plugins-bad, Lines of code: 49, Source file: gstglimagesink.c

Example 10: generate_sampling_factors

/* based on upstream gst-plugins-good jpegencoder */
static void
generate_sampling_factors (GstVaapiEncoderJpeg * encoder)
{
  GstVideoInfo *vinfo;
  gint i;

  vinfo = GST_VAAPI_ENCODER_VIDEO_INFO (encoder);

  if (GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_ENCODED) {
    /* Use native I420 format */
    encoder->n_components = 3;
    for (i = 0; i < encoder->n_components; ++i) {
      if (i == 0)
        encoder->h_samp[i] = encoder->v_samp[i] = 2;
      else
        encoder->h_samp[i] = encoder->v_samp[i] = 1;
      GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
          encoder->v_samp[i]);
    }
    return;
  }

  encoder->n_components = GST_VIDEO_INFO_N_COMPONENTS (vinfo);

  encoder->h_max_samp = 0;
  encoder->v_max_samp = 0;
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->cwidth[i] = GST_VIDEO_INFO_COMP_WIDTH (vinfo, i);
    encoder->cheight[i] = GST_VIDEO_INFO_COMP_HEIGHT (vinfo, i);
    encoder->h_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (vinfo)) / encoder->cwidth[i];
    encoder->h_max_samp = MAX (encoder->h_max_samp, encoder->h_samp[i]);
    encoder->v_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_HEIGHT (vinfo)) / encoder->cheight[i];
    encoder->v_max_samp = MAX (encoder->v_max_samp, encoder->v_samp[i]);
  }
  /* samp should only be 1, 2 or 4 */
  g_assert (encoder->h_max_samp <= 4);
  g_assert (encoder->v_max_samp <= 4);

  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->h_samp[i] = encoder->h_max_samp / encoder->h_samp[i];
    encoder->v_samp[i] = encoder->v_max_samp / encoder->v_samp[i];
    GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
        encoder->v_samp[i]);
  }
}
Developer ID: GStreamer, Project: gstreamer-vaapi, Lines of code: 50, Source file: gstvaapiencoder_jpeg.c

Example 11: new_surface

static GstVaapiSurface *
new_surface (GstVaapiDisplay * display, const GstVideoInfo * vip)
{
  GstVaapiSurface *surface;
  GstVaapiChromaType chroma_type;

  /* Try with explicit format first */
  if (!USE_NATIVE_FORMATS &&
      GST_VIDEO_INFO_FORMAT (vip) != GST_VIDEO_FORMAT_ENCODED) {
    surface = gst_vaapi_surface_new_with_format (display,
        GST_VIDEO_INFO_FORMAT (vip), GST_VIDEO_INFO_WIDTH (vip),
        GST_VIDEO_INFO_HEIGHT (vip));
    if (surface)
      return surface;
  }

  /* Try to pick something compatible, i.e. with same chroma type */
  chroma_type =
      gst_vaapi_video_format_get_chroma_type (GST_VIDEO_INFO_FORMAT (vip));
  if (!chroma_type)
    return NULL;
  return gst_vaapi_surface_new (display, chroma_type,
      GST_VIDEO_INFO_WIDTH (vip), GST_VIDEO_INFO_HEIGHT (vip));
}
Developer ID: DarkLighters, Project: gstreamer-vaapi, Lines of code: 24, Source file: gstvaapivideomemory.c

Example 12: gst_msdkdec_set_format

static gboolean
gst_msdkdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstMsdkDec *thiz = GST_MSDKDEC (decoder);

  if (thiz->input_state) {
    /* mark for re-negotiation if display resolution changes */
    if ((GST_VIDEO_INFO_WIDTH (&thiz->input_state->info) !=
            GST_VIDEO_INFO_WIDTH (&state->info)) ||
        GST_VIDEO_INFO_HEIGHT (&thiz->input_state->info) !=
        GST_VIDEO_INFO_HEIGHT (&state->info))
      thiz->do_renego = TRUE;
    gst_video_codec_state_unref (thiz->input_state);
  }
  thiz->input_state = gst_video_codec_state_ref (state);

  /* we don't set output state here to avoid caching of mismatched
   * video information if there is dynamic resolution change in the stream.
   * All negotiation code is consolidated in gst_msdkdec_negotiate() and
   * this will be invoked from handle_frame() */

  gst_msdkdec_set_latency (thiz);
  return TRUE;
}
Developer ID: MaZderMind, Project: gst-plugins-bad, Lines of code: 24, Source file: gstmsdkdec.c

Example 13: _mixer_pad_get_output_size

static void
_mixer_pad_get_output_size (GstGLVideoMixer * mix,
    GstGLVideoMixerPad * mix_pad, gint * width, gint * height)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);
  GstVideoAggregatorPad *vagg_pad = GST_VIDEO_AGGREGATOR_PAD (mix_pad);
  gint pad_width, pad_height;
  guint dar_n, dar_d;

  /* FIXME: Anything better we can do here? */
  if (!vagg_pad->info.finfo
      || vagg_pad->info.finfo->format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_DEBUG_OBJECT (mix_pad, "Have no caps yet");
    *width = 0;
    *height = 0;
    return;
  }

  pad_width =
      mix_pad->width <=
      0 ? GST_VIDEO_INFO_WIDTH (&vagg_pad->info) : mix_pad->width;
  pad_height =
      mix_pad->height <=
      0 ? GST_VIDEO_INFO_HEIGHT (&vagg_pad->info) : mix_pad->height;

  gst_video_calculate_display_ratio (&dar_n, &dar_d, pad_width, pad_height,
      GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_D (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_N (&vagg->info), GST_VIDEO_INFO_PAR_D (&vagg->info)
      );
  GST_LOG_OBJECT (mix_pad, "scaling %ux%u by %u/%u (%u/%u / %u/%u)", pad_width,
      pad_height, dar_n, dar_d, GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_D (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_N (&vagg->info), GST_VIDEO_INFO_PAR_D (&vagg->info));

  if (pad_height % dar_n == 0) {
    pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
  } else if (pad_width % dar_d == 0) {
    pad_height = gst_util_uint64_scale_int (pad_width, dar_d, dar_n);
  } else {
    pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
  }

  if (width)
    *width = pad_width;
  if (height)
    *height = pad_height;
}
Developer ID: ndufresne, Project: gst-plugins-bad, Lines of code: 48, Source file: gstglvideomixer.c

Example 14: _upload_memory

static gboolean
_upload_memory (GstGLUpload * upload)
{
    guint in_width, in_height;
    guint in_texture[GST_VIDEO_MAX_PLANES];
    GstBuffer *inbuf;
    GstVideoFrame out_frame;
    GstVideoInfo out_info;
    gint i;

    in_width = GST_VIDEO_INFO_WIDTH (&upload->in_info);
    in_height = GST_VIDEO_INFO_HEIGHT (&upload->in_info);

    if (!upload->initted) {
        if (!_init_upload (upload)) {
            return FALSE;
        }
    }

    inbuf = gst_buffer_new ();
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&upload->in_info); i++) {
        in_texture[i] = upload->in_tex[i]->tex_id;
        gst_buffer_append_memory (inbuf,
                                  gst_memory_ref ((GstMemory *) upload->in_tex[i]));
    }

    GST_TRACE ("uploading with textures:%u,%u,%u dimensions:%ux%u",
               in_texture[0], in_texture[1], in_texture[2], in_width, in_height);

    upload->priv->outbuf = gst_gl_color_convert_perform (upload->convert, inbuf);
    gst_buffer_unref (inbuf);

    gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA, in_width,
                               in_height);
    if (!gst_video_frame_map (&out_frame, &out_info, upload->priv->outbuf,
                              GST_MAP_READ | GST_MAP_GL)) {
        gst_buffer_unref (upload->priv->outbuf);
        upload->priv->outbuf = NULL;
        return FALSE;
    }

    upload->out_tex->tex_id = *(guint *) out_frame.data[0];

    gst_video_frame_unmap (&out_frame);
    upload->priv->released = FALSE;

    return TRUE;
}
Developer ID: MathieuDuponchelle, Project: gst-plugins-bad, Lines of code: 48, Source file: gstglupload.c

Example 15: gst_gl_video_mixer_process_textures

static gboolean
gst_gl_video_mixer_process_textures (GstGLMixer * mix, GPtrArray * frames,
    guint out_tex)
{
  GstGLVideoMixer *video_mixer = GST_GL_VIDEO_MIXER (mix);

  video_mixer->input_frames = frames;

  gst_gl_context_use_fbo_v2 (GST_GL_BASE_MIXER (mix)->context,
      GST_VIDEO_INFO_WIDTH (&GST_VIDEO_AGGREGATOR (mix)->info),
      GST_VIDEO_INFO_HEIGHT (&GST_VIDEO_AGGREGATOR (mix)->info),
      mix->fbo, mix->depthbuffer,
      out_tex, gst_gl_video_mixer_callback, (gpointer) video_mixer);

  return TRUE;
}
Developer ID: ndufresne, Project: gst-plugins-bad, Lines of code: 16, Source file: gstglvideomixer.c


Note: The GST_VIDEO_INFO_WIDTH examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and its distribution and use are governed by each project's license. Please do not republish this article without permission.