本文整理汇总了C++中GST_VIDEO_CAPS_MAKE函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_VIDEO_CAPS_MAKE函数的具体用法?C++ GST_VIDEO_CAPS_MAKE怎么用?C++ GST_VIDEO_CAPS_MAKE使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GST_VIDEO_CAPS_MAKE函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: gst_gl_mixer_update_caps
/* Compute the full set of caps the mixer can handle for @caps: each
 * GL-related caps-feature variant, plain raw video in the convertible
 * formats, and @caps with its format info removed. Returns a new caps
 * ref owned by the caller. */
GstCaps *
gst_gl_mixer_update_caps (GstGLMixer * mix, GstCaps * caps)
{
  GstCaps *out;

  out = gst_caps_new_empty ();

  /* GLMemory-backed caps first — merge order sets preference. */
  out = gst_caps_merge (out,
      gst_gl_mixer_set_caps_features (caps, GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
#if GST_GL_HAVE_PLATFORM_EGL
  /* EGLImage variant, only when built with EGL platform support. */
  out = gst_caps_merge (out,
      gst_gl_mixer_set_caps_features (caps, GST_CAPS_FEATURE_MEMORY_EGL_IMAGE));
#endif
  /* Texture-upload-meta variant. */
  out = gst_caps_merge (out,
      gst_gl_mixer_set_caps_features (caps,
          GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META));
  /* Plain system-memory raw video in every format the GL converter takes. */
  out = gst_caps_merge (out,
      gst_caps_from_string (GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS)));
  /* Finally, @caps itself with format fields stripped. */
  out = gst_caps_merge (out, gst_gl_mixer_caps_remove_format_info (caps));

  GST_DEBUG_OBJECT (mix, "returning %" GST_PTR_FORMAT, out);
  return out;
}
示例2: GST_DEBUG_CATEGORY_STATIC
#define orc_memset memset
#else
#include <orc/orcfunctions.h>
#endif
/* File-local debug category; GST_CAT_DEFAULT routes this file's
 * GST_DEBUG()/GST_LOG() output to it. */
GST_DEBUG_CATEGORY_STATIC (gst_compositor_debug);
#define GST_CAT_DEFAULT gst_compositor_debug
/* Raw video formats the compositor supports on both src and sink pads. */
#define FORMATS " { AYUV, BGRA, ARGB, RGBA, ABGR, Y444, Y42B, YUY2, UYVY, "\
" YVYU, I420, YV12, NV12, NV21, Y41B, RGB, BGR, xRGB, xBGR, "\
" RGBx, BGRx } "
/* Single always-present source pad. */
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
);
/* Request sink pads ("sink_0", "sink_1", ...), one per input stream. */
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
GST_PAD_SINK,
GST_PAD_REQUEST,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
);
/* Per-pad property defaults: position/size 0 (0 size presumably means
 * "use the stream's own size" — confirm against the pad implementation),
 * fully opaque alpha. */
#define DEFAULT_PAD_XPOS 0
#define DEFAULT_PAD_YPOS 0
#define DEFAULT_PAD_WIDTH 0
#define DEFAULT_PAD_HEIGHT 0
#define DEFAULT_PAD_ALPHA 1.0
enum
{
示例3: GST_DEBUG_CATEGORY_EXTERN
#include "gstmsdksystemmemory.h"
#include "gstmsdkcontextutil.h"
#include "gstmsdkvpputil.h"
#ifndef _WIN32
#include "gstmsdkallocator_libva.h"
#endif
/* Debug category is defined in another translation unit; reference it here. */
GST_DEBUG_CATEGORY_EXTERN (gst_msdkvpp_debug);
#define GST_CAT_DEFAULT gst_msdkvpp_debug
/* Sink pad: system-memory raw video in the listed formats restricted to the
 * listed interlace modes, or DMABuf-backed video in a narrower format set. */
static GstStaticPadTemplate gst_msdkvpp_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
("{ NV12, YV12, I420, YUY2, UYVY, BGRA, BGRx }")
", " "interlace-mode = (string){ progressive, interleaved, mixed }" ";"
GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_DMABUF,
"{ NV12, BGRA, YUY2}")));
/* Src pad: DMABuf output listed first (preferred), then system memory
 * with the supported interlace modes. */
static GstStaticPadTemplate gst_msdkvpp_src_factory =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
(GST_CAPS_FEATURE_MEMORY_DMABUF,
"{ BGRA, YUY2, NV12}") ";"
GST_VIDEO_CAPS_MAKE ("{ NV12, YUY2, BGRA, BGRx }") ", "
"interlace-mode = (string){ progressive, interleaved, mixed }" ";"));
enum
示例4: GST_DEBUG_CATEGORY_STATIC
/* Element identity strings used at plugin registration. */
#define GST_PLUGIN_NAME "vaapiencode_mpeg2"
#define GST_PLUGIN_DESC "A VA-API based MPEG-2 video encoder"
GST_DEBUG_CATEGORY_STATIC (gst_vaapi_mpeg2_encode_debug);
#define GST_CAT_DEFAULT gst_vaapi_mpeg2_encode_debug
/* Output codec caps: MPEG-2 elementary stream (no system stream). */
#define GST_CODEC_CAPS \
"video/mpeg, mpegversion = (int) 2, " \
"systemstream = (boolean) false"
/* Sink caps: VA-API surface caps or any raw video format, both restricted
 * to progressive (non-interlaced) input. */
/* *INDENT-OFF* */
static const char gst_vaapiencode_mpeg2_sink_caps_str[] =
GST_VAAPI_MAKE_ENC_SURFACE_CAPS ", "
GST_CAPS_INTERLACED_FALSE "; "
GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL) ", "
GST_CAPS_INTERLACED_FALSE;
/* *INDENT-ON* */
/* Src caps: the encoded MPEG-2 stream. */
/* *INDENT-OFF* */
static const char gst_vaapiencode_mpeg2_src_caps_str[] =
GST_CODEC_CAPS;
/* *INDENT-ON* */
/* *INDENT-OFF* */
static GstStaticPadTemplate gst_vaapiencode_mpeg2_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (gst_vaapiencode_mpeg2_sink_caps_str));
/* *INDENT-ON* */
/* *INDENT-ON* */
示例5: gst_pngdec_parse
GstVideoCodecState * state);
/* GstVideoDecoder virtual method implementations (defined below). */
static GstFlowReturn gst_pngdec_parse (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_pngdec_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame);
static gboolean gst_pngdec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query);
#define parent_class gst_pngdec_parent_class
G_DEFINE_TYPE (GstPngDec, gst_pngdec, GST_TYPE_VIDEO_DECODER);
/* Src pad: decoded raw video; formats cover the PNG color types the
 * decoder can output (RGB/RGBA, 16-bit RGBA, 8/16-bit grayscale). */
static GstStaticPadTemplate gst_pngdec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
("{ RGBA, RGB, ARGB64, GRAY8, GRAY16_BE }"))
);
/* Sink pad: encoded PNG data. */
static GstStaticPadTemplate gst_pngdec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("image/png")
);
static void
gst_pngdec_class_init (GstPngDecClass * klass)
{
GstElementClass *element_class = (GstElementClass *) klass;
GstVideoDecoderClass *vdec_class = (GstVideoDecoderClass *) klass;
示例6: GST_STATIC_PAD_TEMPLATE
/* GObject property IDs. */
enum
{
PROP_0,
PROP_POSITION,
PROP_BORDER
};
#define DEFAULT_POSITION 0.0
#define DEFAULT_BORDER 0.0
/* Video input: raw video formats with an alpha channel. */
static GstStaticPadTemplate video_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("video_sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }")));
/* Mask input: 8-bit or native-endian 16-bit grayscale; framerate pinned
 * to 0/1 (a still image rather than a timed stream). */
static GstStaticPadTemplate mask_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("mask_sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw, "
"format = (string) GRAY8, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1 ; "
"video/x-raw, " "format = (string) " GST_VIDEO_NE (GRAY16) ", "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1"));
static GstStaticPadTemplate src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
示例7: GST_STATIC_PAD_TEMPLATE
GstBuffer ** buf);
static GstCaps *gst_inter_video_src_fixate (GstBaseSrc * src, GstCaps * caps);
/* GObject property IDs. */
enum
{
PROP_0,
PROP_CHANNEL
};
/* pad templates */
/* Src pad: raw I420 video only. */
static GstStaticPadTemplate gst_inter_video_src_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
);
/* class initialization */
G_DEFINE_TYPE (GstInterVideoSrc, gst_inter_video_src, GST_TYPE_BASE_SRC);
static void
gst_inter_video_src_class_init (GstInterVideoSrcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (gst_inter_video_src_debug_category, "intervideosrc",
示例8: main
/* GLES2 fragment shader used to redisplay the uploaded texture: samples
 * s_texture at the interpolated coordinate, no further processing. */
static const gchar *redisplay_fragment_shader_str_gles2 =
"precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D s_texture; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D( s_texture, v_texCoord );\n"
"} \n";
/* *INDENT-ON* */
#endif
/* Sink pad: raw video in the GL-uploadable formats, either as plain
 * system memory or carrying the GL texture-upload meta feature. */
static GstStaticPadTemplate gst_glimage_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_GL_UPLOAD_FORMATS) "; "
GST_VIDEO_CAPS_MAKE_WITH_FEATURES
(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
GST_GL_UPLOAD_FORMATS))
);
enum
{
ARG_0,
ARG_DISPLAY,
PROP_CLIENT_RESHAPE_CALLBACK,
PROP_CLIENT_DRAW_CALLBACK,
PROP_CLIENT_DATA,
PROP_FORCE_ASPECT_RATIO,
PROP_PIXEL_ASPECT_RATIO,
PROP_OTHER_CONTEXT
开发者ID:freedesktop-unofficial-mirror,项目名称:gstreamer__attic__gst-plugins-gl,代码行数:31,代码来源:gstglimagesink.c
示例9: GST_DEBUG_CATEGORY_STATIC
#include <string.h>
#include "gstvp8utils.h"
#include "gstvp9enc.h"
GST_DEBUG_CATEGORY_STATIC (gst_vp9enc_debug);
#define GST_CAT_DEFAULT gst_vp9enc_debug
/* FIXME: Y42B and Y444 do not work yet it seems */
/* Sink pad: raw I420/YV12 only; the wider format list above is kept
 * commented out until the FIXME is resolved. */
static GstStaticPadTemplate gst_vp9_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
/*GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12, Y42B, Y444 }")) */
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12 }"))
);
/* Src pad: encoded VP9 bitstream, any of profiles 0-3. */
static GstStaticPadTemplate gst_vp9_enc_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-vp9, " "profile = (string) {0, 1, 2, 3}")
);
#define parent_class gst_vp9_enc_parent_class
G_DEFINE_TYPE (GstVP9Enc, gst_vp9_enc, GST_TYPE_VPX_ENC);
/* GstVPXEnc virtual method implementations (defined below). */
static vpx_codec_iface_t *gst_vp9_enc_get_algo (GstVPXEnc * enc);
static gboolean gst_vp9_enc_enable_scaling (GstVPXEnc * enc);
static void gst_vp9_enc_set_image_format (GstVPXEnc * enc, vpx_image_t * image);
示例10: GST_STATIC_PAD_TEMPLATE
#endif
#include <stdlib.h>
#include "gstspacescope.h"
/* Pick the RGB byte order matching the host endianness so pixels can be
 * written as native 32-bit words. */
#if G_BYTE_ORDER == G_BIG_ENDIAN
#define RGB_ORDER "xRGB"
#else
#define RGB_ORDER "BGRx"
#endif
/* Src pad: rendered video in the native-endian RGB format. */
static GstStaticPadTemplate gst_space_scope_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (RGB_ORDER))
);
/* Sink pad: interleaved native-endian S16 stereo audio, 8-96 kHz,
 * front-left/front-right channel mask. */
static GstStaticPadTemplate gst_space_scope_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-raw, "
"format = (string) " GST_AUDIO_NE (S16) ", "
"layout = (string) interleaved, "
"rate = (int) [ 8000, 96000 ], "
"channels = (int) 2, " "channel-mask = (bitmask) 0x3")
);
GST_DEBUG_CATEGORY_STATIC (space_scope_debug);
示例11: gst_fbdevsink_setcaps
/* GstBaseSink / GObject / GstElement virtual method implementations
 * (defined below). */
static gboolean gst_fbdevsink_setcaps (GstBaseSink * bsink, GstCaps * caps);
static void gst_fbdevsink_finalize (GObject * object);
static void gst_fbdevsink_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_fbdevsink_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_fbdevsink_change_state (GstElement * element,
    GstStateChange transition);

/* Raw RGB pixel formats the framebuffer sink accepts.
 * Fix: the original list contained "RGB" twice; the redundant duplicate
 * entry has been removed (it added nothing to negotiation). */
#define VIDEO_CAPS "{ RGB, BGR, BGRx, xBGR, RGBx, xRGB, RGB15, RGB16 }"

/* Single always-present sink pad taking system-memory raw video. */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (VIDEO_CAPS))
    );

#define parent_class gst_fbdevsink_parent_class
G_DEFINE_TYPE (GstFBDEVSink, gst_fbdevsink, GST_TYPE_VIDEO_SINK);
/* Instance initializer; all real setup happens on state change, so this
 * is intentionally empty. */
static void
gst_fbdevsink_init (GstFBDEVSink * fbdevsink)
{
/* nothing to do here yet */
}
#if 0
static void
gst_fbdevsink_get_times (GstBaseSink * basesink, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
示例12: GST_CAPS_CODEC
GST_CAPS_CODEC("video/x-wmv")
#if USE_VP8_DECODER
GST_CAPS_CODEC("video/x-vp8")
#endif
#if USE_JPEG_DECODER
GST_CAPS_CODEC("image/jpeg")
#endif
#if USE_VP9_DECODER
GST_CAPS_CODEC("video/x-vp9")
#endif
;
/* Src caps: VA-API surface caps, GL-texture-upload caps, or plain raw
 * video in the listed YUV formats (in order of preference). */
static const char gst_vaapidecode_src_caps_str[] =
GST_VAAPI_MAKE_SURFACE_CAPS ";"
GST_VAAPI_MAKE_GLTEXUPLOAD_CAPS ";"
GST_VIDEO_CAPS_MAKE("{ I420, YV12, NV12 }");
/* Sink pad: encoded bitstream caps assembled above (codec list depends
 * on build-time USE_*_DECODER switches). */
static GstStaticPadTemplate gst_vaapidecode_sink_factory =
GST_STATIC_PAD_TEMPLATE(
"sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS(gst_vaapidecode_sink_caps_str));
/* Src pad: decoded video using the caps string above. */
static GstStaticPadTemplate gst_vaapidecode_src_factory =
GST_STATIC_PAD_TEMPLATE(
"src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS(gst_vaapidecode_src_caps_str));
示例13: gst_qt_quick2_video_sink_class_init
//.........这里部分代码省略.........
qtquick2_class->update_node = gst_qt_quick2_video_sink_update_node;
/**
* GstQtQuick2VideoSink::pixel-aspect-ratio
*
* The pixel aspect ratio of the display device.
**/
g_object_class_install_property(gobject_class, PROP_PIXEL_ASPECT_RATIO,
g_param_spec_string("pixel-aspect-ratio", "Pixel aspect ratio",
"The pixel aspect ratio of the display device",
"1/1", static_cast<GParamFlags>(G_PARAM_READWRITE)));
/**
* GstQtQuick2VideoSink::force-aspect-ratio
*
* If set to TRUE, the sink will scale the video respecting its original aspect ratio
* and any remaining space will be filled with black.
* If set to FALSE, the sink will scale the video to fit the whole drawing area.
**/
g_object_class_install_property(gobject_class, PROP_FORCE_ASPECT_RATIO,
g_param_spec_boolean("force-aspect-ratio", "Force aspect ratio",
"When enabled, scaling will respect original aspect ratio",
FALSE, static_cast<GParamFlags>(G_PARAM_READWRITE)));
g_object_class_install_property(gobject_class, PROP_CONTRAST,
g_param_spec_int("contrast", "Contrast", "The contrast of the video",
-100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));
g_object_class_install_property(gobject_class, PROP_BRIGHTNESS,
g_param_spec_int("brightness", "Brightness", "The brightness of the video",
-100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));
g_object_class_install_property(gobject_class, PROP_HUE,
g_param_spec_int("hue", "Hue", "The hue of the video",
-100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));
g_object_class_install_property(gobject_class, PROP_SATURATION,
g_param_spec_int("saturation", "Saturation", "The saturation of the video",
-100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));
/**
* GstQtQuick2VideoSink::update-node
* @node: The QSGNode to update
* @x: The x coordinate of the target area rectangle
* @y: The y coordinate of the target area rectangle
* @width: The width of the target area rectangle
* @height: The height of the target area rectangle
* @returns: The updated QGSNode
*
* This is an action signal that you can call from your QQuickItem subclass
* inside its updateNode function to render the video. It takes a QSGNode*
* and the item's area rectangle as arguments. You should schedule to call
* this function to repaint the surface whenever the ::update signal is
* emited.
*
* Note that the x,y,width and height arguments are actually qreal.
* This means that on architectures like arm they will be float instead
* of double. You should cast the arguments to qreal if they are not
* already when emitting this signal.
*/
s_signals[ACTION_UPDATE_NODE] =
g_signal_new("update-node", G_TYPE_FROM_CLASS(klass),
static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
G_STRUCT_OFFSET(GstQtQuick2VideoSinkClass, update_node),
NULL, NULL,
qRealIsDouble() ?
g_cclosure_user_marshal_POINTER__POINTER_DOUBLE_DOUBLE_DOUBLE_DOUBLE :
g_cclosure_user_marshal_POINTER__POINTER_FLOAT_FLOAT_FLOAT_FLOAT,
G_TYPE_POINTER, 5,
G_TYPE_POINTER, G_TYPE_QREAL, G_TYPE_QREAL, G_TYPE_QREAL, G_TYPE_QREAL);
/**
* GstQtQuick2VideoSink::update
*
* This signal is emited when the surface should be repainted. It should
* be connected to QQuickItem::update().
*/
s_signals[SIGNAL_UPDATE] =
g_signal_new("update", G_TYPE_FROM_CLASS(klass),
G_SIGNAL_RUN_LAST,
0, NULL, NULL,
g_cclosure_marshal_VOID__VOID,
G_TYPE_NONE, 0);
g_type_class_add_private (klass, sizeof (GstQtQuick2VideoSinkPrivate));
static GstStaticPadTemplate sink_pad_template =
GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (CAPS_FORMATS))
);
gst_element_class_add_pad_template(
element_class, gst_static_pad_template_get(&sink_pad_template));
gst_element_class_set_details_simple(element_class,
"QtQuick2 video sink", "Sink/Video",
"A video sink that can draw on a QQuickItem",
"George Kiagiadakis <[email protected]>");
}
示例14: gst_vtenc_encode_frame
/* Internal helpers (defined below). The unnamed a2..a7 ints mirror the
 * VideoToolbox callback signature; their meaning is not established here. */
static GstFlowReturn gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf);
static VTStatus gst_vtenc_enqueue_buffer (void *data, int a2, int a3, int a4,
CMSampleBufferRef sbuf, int a6, int a7);
static gboolean gst_vtenc_buffer_is_keyframe (GstVTEnc * self,
CMSampleBufferRef sbuf);
static GstVTEncFrame *gst_vtenc_frame_new (GstBuffer * buf,
GstVideoInfo * videoinfo);
static void gst_vtenc_frame_free (GstVTEncFrame * frame);
/* Release callback handed to CoreVideo for pixel buffers we wrap. */
static void gst_pixel_buffer_release_cb (void *releaseRefCon,
const void *dataPtr, size_t dataSize, size_t numberOfPlanes,
const void *planeAddresses[]);
/* Sink caps: raw NV12 or I420 input frames. */
static GstStaticCaps sink_caps =
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ NV12, I420 }"));
static void
gst_vtenc_base_init (GstVTEncClass * klass)
{
const GstVTEncoderDetails *codec_details =
GST_VTENC_CLASS_GET_CODEC_DETAILS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
const int min_width = 1, max_width = G_MAXINT;
const int min_height = 1, max_height = G_MAXINT;
const int min_fps_n = 0, max_fps_n = G_MAXINT;
const int min_fps_d = 1, max_fps_d = 1;
GstPadTemplate *sink_template, *src_template;
GstCaps *src_caps;
gchar *longname, *description;
示例15: gst_dshow_new_video_caps
GstCaps *
gst_dshow_new_video_caps (GstVideoFormat video_format, const gchar * name,
GstCapturePinMediaType * pin_mediatype)
{
GstCaps *video_caps = NULL;
GstStructure *video_structure = NULL;
gint min_w, max_w;
gint min_h, max_h;
gint min_fr, max_fr;
/* raw video format */
switch (video_format) {
case GST_VIDEO_FORMAT_BGR:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR"));
break;
case GST_VIDEO_FORMAT_I420:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("I420"));
break;
case GST_VIDEO_FORMAT_YUY2:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("YUY2"));
break;
case GST_VIDEO_FORMAT_UYVY:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("UYVY"));
break;
case GST_VIDEO_FORMAT_BGRx:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGRx"));
break;
case GST_VIDEO_FORMAT_BGR16:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR16"));
break;
case GST_VIDEO_FORMAT_BGR15:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR15"));
break;
case GST_VIDEO_FORMAT_GRAY8:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY8"));
break;
default:
break;
}
/* other video format */
if (!video_caps) {
if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=FALSE", 31) == 0) {
video_caps = gst_caps_new_simple ("video/x-dv",
"systemstream", G_TYPE_BOOLEAN, FALSE,
"format", G_TYPE_STRING, "dvsd",
NULL);
} else if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=TRUE", 31) == 0) {
video_caps = gst_caps_new_simple ("video/x-dv",
"systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
return video_caps;
} else if (g_ascii_strncasecmp (name, "image/jpeg", 10) == 0) {
video_caps = gst_caps_new_simple ("image/jpeg", NULL);
} else if (g_ascii_strncasecmp (name, "video/x-h264", 12) == 0) {
video_caps = gst_caps_new_simple ("video/x-h264", NULL);
}
}
if (!video_caps)
return NULL;
video_structure = gst_caps_get_structure (video_caps, 0);
/* Hope GST_TYPE_INT_RANGE_STEP will exits in future gstreamer releases */
/* because we could use : */
/* "width", GST_TYPE_INT_RANGE_STEP, video_default->minWidth, video_default->maxWidth, video_default->granularityWidth */
/* instead of : */
/* "width", GST_TYPE_INT_RANGE, video_default->minWidth, video_default->maxWidth */
/* For framerate we do not need a step (granularity) because */
/* "The IAMStreamConfig::SetFormat method will set the frame rate to the closest */
/* value that the filter supports" as it said in the VIDEO_STREAM_CONFIG_CAPS dshwo doc */
min_w = pin_mediatype->vscc.MinOutputSize.cx;
max_w = pin_mediatype->vscc.MaxOutputSize.cx;
min_h = pin_mediatype->vscc.MinOutputSize.cy;
max_h = pin_mediatype->vscc.MaxOutputSize.cy;
min_fr = (gint) (10000000 / pin_mediatype->vscc.MaxFrameInterval);
max_fr = (gint)(10000000 / pin_mediatype->vscc.MinFrameInterval);
if (min_w == max_w)
gst_structure_set (video_structure, "width", G_TYPE_INT, min_w, NULL);
else
gst_structure_set (video_structure,
"width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
if (min_h == max_h)
gst_structure_set (video_structure, "height", G_TYPE_INT, min_h, NULL);
else
gst_structure_set (video_structure,
"height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
if (min_fr == max_fr)
gst_structure_set (video_structure, "framerate",
GST_TYPE_FRACTION, min_fr, 1, NULL);
else
gst_structure_set (video_structure, "framerate",
GST_TYPE_FRACTION_RANGE, min_fr, 1, max_fr, 1, NULL);
return video_caps;
//.........这里部分代码省略.........