本文整理汇总了C++中GST_ELEMENT函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_ELEMENT函数的具体用法?C++ GST_ELEMENT怎么用?C++ GST_ELEMENT使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GST_ELEMENT函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: OpenDecoder
/*****************************************************************************
* OpenDecoder: probe the decoder and return score
*****************************************************************************/
/*
 * Probe for a GStreamer decoder matching the VLC input format and build the
 * decode setup: sink/src caps, a concrete decoder element (or decodebin),
 * the inter-thread buffer queue and the appsrc feeding the pipeline.
 * Returns a VLC_* status code.
 * NOTE: this listing is truncated in the original page; the tail of the
 * function — including the `fail:` label the macro below jumps to — is not
 * visible here.
 */
static int OpenDecoder( vlc_object_t *p_this )
{
decoder_t *p_dec = ( decoder_t* )p_this;
decoder_sys_t *p_sys;
GstStateChangeReturn i_ret;
gboolean b_ret;
sink_src_caps_t caps = { NULL, NULL };
GstStructure *p_str;
GstAppSrcCallbacks cb;
int i_rval = VLC_SUCCESS;
GList *p_list;
bool dbin;
/* Error helper: when r == v, log s, record status t and jump to the fail:
 * cleanup label (located in the omitted tail of this function). */
#define VLC_GST_CHECK( r, v, s, t ) \
{ if( r == v ){ msg_Err( p_dec, s ); i_rval = t; goto fail; } }
/* One-time GStreamer init + registration of the custom vlcvideosink. */
if( !vlc_gst_init( ))
{
msg_Err( p_dec, "failed to register vlcvideosink" );
return VLC_EGENERIC;
}
/* Translate the VLC input format into a GStreamer caps structure. */
p_str = vlc_to_gst_fmt( &p_dec->fmt_in );
if( !p_str )
return VLC_EGENERIC;
/* Allocate the memory needed to store the decoder's structure */
p_sys = p_dec->p_sys = calloc( 1, sizeof( *p_sys ) );
if( p_sys == NULL )
{
gst_structure_free( p_str );
return VLC_ENOMEM;
}
dbin = var_CreateGetBool( p_dec, "use-decodebin" );
msg_Dbg( p_dec, "Using decodebin? %s", dbin ? "yes ":"no" );
/* Sink caps describe the encoded input; p_str is appended into the caps
 * (ownership presumably transfers to the caps — verify against GStreamer
 * gst_caps_append_structure docs). */
caps.p_sinkcaps = gst_caps_new_empty( );
gst_caps_append_structure( caps.p_sinkcaps, p_str );
/* Currently supports only system memory raw output format */
caps.p_srccaps = gst_caps_new_empty_simple( "video/x-raw" );
/* Get the list of all the available gstreamer decoders */
p_list = gst_element_factory_list_get_elements(
GST_ELEMENT_FACTORY_TYPE_DECODER, GST_RANK_MARGINAL );
VLC_GST_CHECK( p_list, NULL, "no decoder list found", VLC_ENOMOD );
if( !dbin )
{
GList *p_l;
/* Sort them as per ranks */
p_list = g_list_sort( p_list, gst_plugin_feature_rank_compare_func );
VLC_GST_CHECK( p_list, NULL, "failed to sort decoders list",
VLC_ENOMOD );
p_l = g_list_find_custom( p_list, &caps, find_decoder_func );
VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",
VLC_ENOMOD );
/* create the decoder with highest rank */
p_sys->p_decode_in = gst_element_factory_create(
( GstElementFactory* )p_l->data, NULL );
VLC_GST_CHECK( p_sys->p_decode_in, NULL,
"failed to create decoder", VLC_ENOMOD );
}
else
{
GList *p_l;
/* Just check if any suitable decoder exists, rest will be
* handled by decodebin */
p_l = g_list_find_custom( p_list, &caps, find_decoder_func );
VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",
VLC_ENOMOD );
}
gst_plugin_feature_list_free( p_list );
p_list = NULL;
/* Source caps were only needed for decoder matching; drop them now. */
gst_caps_unref( caps.p_srccaps );
caps.p_srccaps = NULL;
p_sys->b_prerolled = false;
p_sys->b_running = false;
/* Queue: GStreamer thread will dump buffers into this queue,
* DecodeBlock() will pop out the buffers from the queue */
p_sys->p_que = gst_atomic_queue_new( 0 );
VLC_GST_CHECK( p_sys->p_que, NULL, "failed to create queue",
VLC_ENOMEM );
p_sys->p_decode_src = gst_element_factory_make( "appsrc", NULL );
VLC_GST_CHECK( p_sys->p_decode_src, NULL, "appsrc not found",
VLC_ENOMOD );
g_object_set( G_OBJECT( p_sys->p_decode_src ), "caps", caps.p_sinkcaps,
"emit-signals", TRUE, "format", GST_FORMAT_BYTES,
"stream-type", GST_APP_STREAM_TYPE_SEEKABLE,
/* Making DecodeBlock() to block on appsrc with max queue size of 1 byte.
* This will make the push_buffer() tightly coupled with the buffer
* flow from appsrc -> decoder. push_buffer() will only return when
* the same buffer it just fed to appsrc has also been fed to the
* decoder element as well */
"block", TRUE, "max-bytes", ( guint64 )1, NULL );
//.........这里部分代码省略.........
示例2: close
/*
 * Open a capture from a local file path, a URI, or a manual gst-launch
 * pipeline description.  Builds (or adopts) a pipeline that terminates in an
 * appsink delivering RGB frames, then brings it to READY.
 * NOTE: this listing is truncated in the original page — the final
 * state-change check is cut off mid-expression.
 */
bool CvCapture_GStreamer::open( int type, const char* filename )
{
close();
CV_FUNCNAME("cvCaptureFromCAM_GStreamer");
__BEGIN__;
/* Process-wide, one-time gst_init() via helper singleton. */
gst_initializer::init();
// if(!isInited) {
// printf("gst_init\n");
// gst_init (NULL, NULL);
// gst_debug_set_active(TRUE);
// gst_debug_set_colored(TRUE);
// gst_debug_set_default_threshold(GST_LEVEL_WARNING);
// isInited = true;
// }
bool stream = false;
bool manualpipeline = false;
char *uri = NULL;
uridecodebin = NULL;
/* Only file/URI capture is handled here. */
if(type != CV_CAP_GSTREAMER_FILE) {
close();
return false;
}
/* Not a valid URI: resolve as a local file, else treat the string as a
 * gst-launch pipeline description. */
if(!gst_uri_is_valid(filename)) {
/* NOTE(review): realpath() allocates; the pointer is overwritten by
 * g_filename_to_uri() below, leaking the canonical-path buffer — verify. */
uri = realpath(filename, NULL);
stream=false;
if(uri) {
uri = g_filename_to_uri(uri, NULL, NULL);
if(!uri) {
CV_WARN("GStreamer: Error opening file\n");
close();
return false;
}
} else {
GError *err = NULL;
//uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
uridecodebin = gst_parse_launch(filename, &err);
if(!uridecodebin) {
CV_WARN("GStreamer: Error opening bin\n");
close();
return false;
}
stream = true;
manualpipeline = true;
}
} else {
stream = true;
uri = g_strdup(filename);
}
/* No manual pipeline parsed: fall back to uridecodebin on the URI. */
if(!uridecodebin) {
uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
if(!uridecodebin) {
CV_WARN("GStreamer: Failed to create uridecodebin\n");
close();
return false;
}
}
if(manualpipeline) {
/* The user-supplied pipeline must already contain a sink; grab the
 * first one.  NOTE(review): the iterator is not freed on either path
 * here — verify. */
GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
return false;
}
pipeline = uridecodebin;
} else {
/* Build: uridecodebin -> ffmpegcolorspace -> appsink; decodebin's
 * dynamic pad is linked to the converter via the "pad-added" signal. */
pipeline = gst_pipeline_new (NULL);
color = gst_element_factory_make("ffmpegcolorspace", NULL);
sink = gst_element_factory_make("appsink", NULL);
gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
if(!gst_element_link(color, sink)) {
CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
gst_object_unref(pipeline);
return false;
}
}
/* Keep at most one queued frame; drop stale frames for live streams. */
gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
/* Request packed RGB output (GStreamer 0.10-era caps) from the appsink. */
caps = gst_caps_new_simple("video/x-raw-rgb",
"red_mask", G_TYPE_INT, 0x0000FF,
"green_mask", G_TYPE_INT, 0x00FF00,
"blue_mask", G_TYPE_INT, 0xFF0000,
NULL);
gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
gst_caps_unref(caps);
/* (Truncated in the original listing.) */
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
//.........这里部分代码省略.........
示例3: CV_WARN
/**
 * Apply a capture property to the GStreamer pipeline.
 *
 * Position properties (msec / frames / ratio) are implemented as flushing,
 * accurate seeks; width/height/fps install or remove caps-filter fields;
 * CV_CAP_GSTREAMER_QUEUE_LENGTH resizes the appsink queue.
 *
 * @param propId  CV_CAP_PROP_* identifier.
 * @param value   new value; interpretation depends on propId.
 * @return true when the property was recognised and processed, false when
 *         there is no pipeline or the property is unhandled.
 *         (Fix: the previous revision returned false unconditionally, so
 *         callers could never detect success.)
 */
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;
    bool handled = true;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                    flags, (gint64) (value * GST_MSECOND))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                    flags, (gint64) value)) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                    flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        if(value > 0)
            setFilter("width", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("width");
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        if(value > 0)
            setFilter("height", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("height");
        break;
    case CV_CAP_PROP_FPS:
        if(value > 0) {
            int num, denom;
            num = (int) value;
            if(value != num) { // FIXME this supports only fractions x/1 and x/2
                num = (int) (value * 2);
                denom = 2;
            } else
                denom = 1;
            setFilter("framerate", GST_TYPE_FRACTION, num, denom);
        } else
            removeFilter("framerate");
        break;
    case CV_CAP_PROP_FOURCC:
    case CV_CAP_PROP_FRAME_COUNT:
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        /* Recognised but intentionally not implemented for this backend. */
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink)
            break;
        gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
        handled = false;
    }
    return handled;
}
示例4: _update_caps
/* Compute the caps this element can produce: take the maximum per-view
 * width/height and the highest frame rate over all negotiated sink pads,
 * build an RGBA two-view GLMemory caps from that, and intersect the result
 * with what downstream (and conversion) allows. */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GstVideoInfo *mix_info;
  GstCaps *blend_caps, *tmp_caps, *out_caps;
  GList *walk;
  gint max_w = -1, max_h = -1;
  gdouble max_fps = -1;
  gint max_fps_n = 0, max_fps_d = 1;

  GST_OBJECT_LOCK (vagg);
  for (walk = GST_ELEMENT (vagg)->sinkpads; walk != NULL; walk = walk->next) {
    GstVideoAggregatorPad *pad = walk->data;
    GstVideoInfo per_view = pad->info;
    gint w, h, fps_n, fps_d;
    gdouble fps;

    if (!pad->info.finfo)
      continue;
    /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Convert to per-view width/height for unpacked forms */
    gst_video_multiview_video_info_change_mode (&per_view,
        GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);
    w = GST_VIDEO_INFO_WIDTH (&per_view);
    h = GST_VIDEO_INFO_HEIGHT (&per_view);
    fps_n = GST_VIDEO_INFO_FPS_N (&per_view);
    fps_d = GST_VIDEO_INFO_FPS_D (&per_view);

    GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT
        " w %u h %u", pad, w, h);

    if (w == 0 || h == 0)
      continue;

    if (max_w < w)
      max_w = w;
    if (max_h < h)
      max_h = h;

    if (fps_d == 0)
      fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &fps);
    if (max_fps < fps) {
      max_fps = fps;
      max_fps_n = fps_n;
      max_fps_d = fps_d;
    }
    /* FIXME: Preserve PAR for at least one input when different sized inputs */
  }
  GST_OBJECT_UNLOCK (vagg);

  /* Describe the blended output: RGBA, two separated views, best rate. */
  mix_info = &mix->mix_info;
  gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, max_w, max_h);
  GST_VIDEO_INFO_FPS_N (mix_info) = max_fps_n;
  GST_VIDEO_INFO_FPS_D (mix_info) = max_fps_d;
  GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED;
  GST_VIDEO_INFO_VIEWS (mix_info) = 2;
  /* FIXME: If input is marked as flipped or flopped, preserve those flags */
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* Choose our output format based on downstream preferences */
  blend_caps = gst_video_info_to_caps (mix_info);
  gst_caps_set_features (blend_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
  tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps);
  gst_caps_unref (blend_caps);
  out_caps = gst_caps_intersect (caps, tmp_caps);
  gst_caps_unref (tmp_caps);

  GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT, out_caps);
  return out_caps;
}
示例5: gst_opus_dec_parse_header
/*
 * Parse an Opus ID header and populate the decoder fields: channel count,
 * sample rate, pre-skip, R128 gain and the channel mapping table, then
 * negotiate output caps with the derived channel positions.
 *
 * Fix: the scratch channel-position table was pos[64] while the channel
 * count read from data[9] is a byte and can reach 255; the NONE-layout loop
 * below could therefore overflow the stack.  The table is now sized for the
 * full range, and the gst_buffer_map() result is checked.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR on a malformed header.
 */
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
  const guint8 *data;
  GstAudioChannelPosition pos[256];
  const GstAudioChannelPosition *posn = NULL;
  GstMapInfo map;

  if (!gst_opus_header_is_id_header (buf)) {
    GST_ERROR_OBJECT (dec, "Header is not an Opus ID header");
    return GST_FLOW_ERROR;
  }

  if (!gst_buffer_map (buf, &map, GST_MAP_READ)) {
    GST_ERROR_OBJECT (dec, "Failed to map Opus ID header buffer");
    return GST_FLOW_ERROR;
  }
  data = map.data;

  /* A previously negotiated non-zero channel count must match the header. */
  if (!(dec->n_channels == 0 || dec->n_channels == data[9])) {
    gst_buffer_unmap (buf, &map);
    GST_ERROR_OBJECT (dec, "Opus ID header has invalid channels");
    return GST_FLOW_ERROR;
  }

  dec->n_channels = data[9];
  dec->sample_rate = GST_READ_UINT32_LE (data + 12);
  dec->pre_skip = GST_READ_UINT16_LE (data + 10);
  dec->r128_gain = GST_READ_UINT16_LE (data + 16);
  dec->r128_gain_volume = gst_opus_dec_get_r128_volume (dec->r128_gain);
  GST_INFO_OBJECT (dec,
      "Found pre-skip of %u samples, R128 gain %d (volume %f)",
      dec->pre_skip, dec->r128_gain, dec->r128_gain_volume);

  dec->channel_mapping_family = data[18];
  if (dec->channel_mapping_family == 0) {
    /* implicit mapping */
    GST_INFO_OBJECT (dec, "Channel mapping family 0, implicit mapping");
    dec->n_streams = dec->n_stereo_streams = 1;
    dec->channel_mapping[0] = 0;
    dec->channel_mapping[1] = 1;
  } else {
    dec->n_streams = data[19];
    dec->n_stereo_streams = data[20];
    /* NOTE(review): assumes dec->channel_mapping holds n_channels (up to
     * 255) bytes and that the mapped buffer contains 21 + n_channels bytes;
     * presumably gst_opus_header_is_id_header() validates the length —
     * confirm. */
    memcpy (dec->channel_mapping, data + 21, dec->n_channels);

    if (dec->channel_mapping_family == 1) {
      GST_INFO_OBJECT (dec, "Channel mapping family 1, Vorbis mapping");
      switch (dec->n_channels) {
        case 1:
        case 2:
          /* nothing */
          break;
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
        case 8:
          posn = gst_opus_channel_positions[dec->n_channels - 1];
          break;
        default:{
          gint i;

          GST_ELEMENT_WARNING (GST_ELEMENT (dec), STREAM, DECODE,
              (NULL), ("Using NONE channel layout for more than 8 channels"));
          for (i = 0; i < dec->n_channels; i++)
            pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
          posn = pos;
        }
      }
    } else {
      GST_INFO_OBJECT (dec, "Channel mapping family %d",
          dec->channel_mapping_family);
    }
  }

  gst_opus_dec_negotiate (dec, posn);

  gst_buffer_unmap (buf, &map);
  return GST_FLOW_OK;
}
示例6: webkitVideoSinkNew
// Instantiate a WebKit video sink element and attach the GStreamer GWorld
// it will render through.  The caller owns the (floating) returned element.
GstElement* webkitVideoSinkNew(WebCore::GStreamerGWorld* gstGWorld)
{
    gpointer instance = g_object_new(WEBKIT_TYPE_VIDEO_SINK, 0);
    GstElement* sink = GST_ELEMENT(instance);
    WEBKIT_VIDEO_SINK(sink)->priv->gstGWorld = gstGWorld;
    return sink;
}
示例7: gst_curl_smtp_sink_set_property
/*
 * GObject property setter for the curl SMTP sink.
 * Most properties may only be changed while the element is below PAUSED;
 * once PAUSED or PLAYING, only content-type can still be updated.  All
 * field writes happen under the object lock.
 */
static void
gst_curl_smtp_sink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstCurlSmtpSink *sink;
GstState cur_state;
g_return_if_fail (GST_IS_CURL_SMTP_SINK (object));
sink = GST_CURL_SMTP_SINK (object);
/* Zero timeout: returns the last known state without blocking. */
gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);
if (cur_state != GST_STATE_PLAYING && cur_state != GST_STATE_PAUSED) {
GST_OBJECT_LOCK (sink);
switch (prop_id) {
case PROP_MAIL_RCPT:
/* g_free of the previous value is NULL-safe. */
g_free (sink->mail_rcpt);
sink->mail_rcpt = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "mail-rcpt set to %s", sink->mail_rcpt);
break;
case PROP_MAIL_FROM:
g_free (sink->mail_from);
sink->mail_from = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "mail-from set to %s", sink->mail_from);
break;
case PROP_SUBJECT:
g_free (sink->subject);
sink->subject = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "subject set to %s", sink->subject);
break;
case PROP_MESSAGE_BODY:
g_free (sink->message_body);
sink->message_body = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "message-body set to %s", sink->message_body);
break;
case PROP_CONTENT_TYPE:
g_free (sink->content_type);
sink->content_type = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "content-type set to %s", sink->content_type);
break;
case PROP_USE_SSL:
sink->use_ssl = g_value_get_boolean (value);
GST_DEBUG_OBJECT (sink, "use-ssl set to %d", sink->use_ssl);
break;
case PROP_NBR_ATTACHMENTS:
/* Also resets the remaining-attachments counter. */
sink->nbr_attachments = g_value_get_int (value);
sink->nbr_attachments_left = sink->nbr_attachments;
GST_DEBUG_OBJECT (sink, "nbr-attachments set to %d",
sink->nbr_attachments);
break;
case PROP_POP_USER_NAME:
g_free (sink->pop_user);
sink->pop_user = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "pop-user set to %s", sink->pop_user);
break;
case PROP_POP_USER_PASSWD:
g_free (sink->pop_passwd);
sink->pop_passwd = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "pop-passwd set to %s", sink->pop_passwd);
break;
case PROP_POP_LOCATION:
g_free (sink->pop_location);
sink->pop_location = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "pop-location set to %s", sink->pop_location);
break;
default:
GST_DEBUG_OBJECT (sink, "invalid property id %d", prop_id);
break;
}
GST_OBJECT_UNLOCK (sink);
return;
}
/* in PLAYING or PAUSED state */
GST_OBJECT_LOCK (sink);
switch (prop_id) {
case PROP_CONTENT_TYPE:
/* The only property that may change while streaming. */
g_free (sink->content_type);
sink->content_type = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
break;
default:
GST_WARNING_OBJECT (sink, "cannot set property when PLAYING");
break;
}
GST_OBJECT_UNLOCK (sink);
}
示例8: gst_curl_http_sink_set_property
/*
 * GObject property setter for the curl HTTP sink.
 * Mirrors the SMTP sink setter: proxy/transfer options may only change
 * while the element is below PAUSED; once PAUSED or PLAYING, only
 * content-type can still be updated.  Field writes happen under the object
 * lock.
 */
static void
gst_curl_http_sink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstCurlHttpSink *sink;
GstState cur_state;
g_return_if_fail (GST_IS_CURL_HTTP_SINK (object));
sink = GST_CURL_HTTP_SINK (object);
/* Zero timeout: returns the last known state without blocking. */
gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);
if (cur_state != GST_STATE_PLAYING && cur_state != GST_STATE_PAUSED) {
GST_OBJECT_LOCK (sink);
switch (prop_id) {
case PROP_PROXY:
/* g_free of the previous value is NULL-safe. */
g_free (sink->proxy);
sink->proxy = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "proxy set to %s", sink->proxy);
break;
case PROP_PROXY_PORT:
sink->proxy_port = g_value_get_int (value);
GST_DEBUG_OBJECT (sink, "proxy port set to %d", sink->proxy_port);
break;
case PROP_PROXY_USER_NAME:
g_free (sink->proxy_user);
sink->proxy_user = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "proxy user set to %s", sink->proxy_user);
break;
case PROP_PROXY_USER_PASSWD:
g_free (sink->proxy_passwd);
sink->proxy_passwd = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "proxy password set to %s", sink->proxy_passwd);
break;
case PROP_USE_CONTENT_LENGTH:
sink->use_content_length = g_value_get_boolean (value);
GST_DEBUG_OBJECT (sink, "use_content_length set to %d",
sink->use_content_length);
break;
case PROP_CONTENT_TYPE:
g_free (sink->content_type);
sink->content_type = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
break;
default:
GST_DEBUG_OBJECT (sink, "invalid property id %d", prop_id);
break;
}
GST_OBJECT_UNLOCK (sink);
return;
}
/* in PLAYING or PAUSED state */
GST_OBJECT_LOCK (sink);
switch (prop_id) {
case PROP_CONTENT_TYPE:
/* The only property that may change while streaming. */
g_free (sink->content_type);
sink->content_type = g_value_dup_string (value);
GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
break;
default:
GST_WARNING_OBJECT (sink, "cannot set property when PLAYING");
break;
}
GST_OBJECT_UNLOCK (sink);
}
示例9: gst_opus_dec_parse_header
/*
 * Parse an Opus ID header (GStreamer 0.10 variant, using GST_BUFFER_DATA)
 * and populate the decoder fields: channel count, pre-skip, R128 gain and
 * channel mapping, then negotiate and set the source caps.
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR on a malformed header.
 * NOTE(review): no explicit buffer-length validation before reading
 * data[9..20] and data[21..21+n_channels); presumably
 * gst_opus_header_is_id_header() checks the size — confirm.
 */
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
const guint8 *data = GST_BUFFER_DATA (buf);
GstCaps *caps;
const GstAudioChannelPosition *pos = NULL;
if (!gst_opus_header_is_id_header (buf)) {
GST_ERROR_OBJECT (dec, "Header is not an Opus ID header");
return GST_FLOW_ERROR;
}
/* A previously negotiated non-zero channel count must match the header. */
if (!(dec->n_channels == 0 || dec->n_channels == data[9])) {
GST_ERROR_OBJECT (dec, "Opus ID header has invalid channels");
return GST_FLOW_ERROR;
}
dec->n_channels = data[9];
dec->pre_skip = GST_READ_UINT16_LE (data + 10);
dec->r128_gain = GST_READ_UINT16_LE (data + 16);
dec->r128_gain_volume = gst_opus_dec_get_r128_volume (dec->r128_gain);
GST_INFO_OBJECT (dec,
"Found pre-skip of %u samples, R128 gain %d (volume %f)",
dec->pre_skip, dec->r128_gain, dec->r128_gain_volume);
dec->channel_mapping_family = data[18];
if (dec->channel_mapping_family == 0) {
/* implicit mapping */
GST_INFO_OBJECT (dec, "Channel mapping family 0, implicit mapping");
dec->n_streams = dec->n_stereo_streams = 1;
dec->channel_mapping[0] = 0;
dec->channel_mapping[1] = 1;
} else {
dec->n_streams = data[19];
dec->n_stereo_streams = data[20];
/* NOTE(review): assumes dec->channel_mapping can hold n_channels (up
 * to 255) bytes — verify the field size. */
memcpy (dec->channel_mapping, data + 21, dec->n_channels);
if (dec->channel_mapping_family == 1) {
GST_INFO_OBJECT (dec, "Channel mapping family 1, Vorbis mapping");
switch (dec->n_channels) {
case 1:
case 2:
/* nothing */
break;
case 3:
case 4:
case 5:
case 6:
case 7:
case 8:
/* Static table entry — must NOT be freed (the n_channels > 8 check
 * below keeps the free below from touching it). */
pos = gst_opus_channel_positions[dec->n_channels - 1];
break;
default:{
gint i;
/* Heap-allocated NONE layout for >8 channels; freed after use. */
GstAudioChannelPosition *posn =
g_new (GstAudioChannelPosition, dec->n_channels);
GST_ELEMENT_WARNING (GST_ELEMENT (dec), STREAM, DECODE,
(NULL), ("Using NONE channel layout for more than 8 channels"));
for (i = 0; i < dec->n_channels; i++)
posn[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
pos = posn;
}
}
} else {
GST_INFO_OBJECT (dec, "Channel mapping family %d",
dec->channel_mapping_family);
}
}
caps = gst_opus_dec_negotiate (dec);
if (pos) {
GST_DEBUG_OBJECT (dec, "Setting channel positions on caps");
gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
}
/* Only the >8-channel path allocated pos; const cast undoes g_new's type. */
if (dec->n_channels > 8) {
g_free ((GstAudioChannelPosition *) pos);
}
GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);
gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);
gst_caps_unref (caps);
return GST_FLOW_OK;
}
示例10: GStreamer_init
int GStreamer_init(const char *mplayer)
{
GError* error;
GstBus *bus;
GstElement *videosink, *audiosink;
int err;
if (g_initialized)
g_error("GStreamer: already initialized, call destroy first!\n");
g_state_callback = NULL;
g_duration = 0;
g_position = 0;
/* pthread synchronization */
pthread_mutex_init(&g_mutex, NULL);
err = pthread_cond_init(&g_main_cond, NULL);
if (err) {
g_error("GStreamer: failed to initialize main condition %s\n",
strerror(errno));
return -1;
}
/* init gstreamer library */
if (!gst_init_check(NULL, NULL, &error)) {
g_error("GStreamer: failed to initialize gstreamer library: [%d] %s\n",
error->code, error->message);
g_error_free(error);
return -1;
}
/* create pipeline */
g_pipeline = gst_pipeline_new("pipeline");
g_pipeline_name = gst_element_get_name(GST_ELEMENT(g_pipeline));
/* register callback */
bus = gst_pipeline_get_bus(GST_PIPELINE(g_pipeline));
gst_bus_add_watch(bus, my_bus_callback, NULL);
gst_object_unref(bus);
#if 0
/* TODO unlinked when removed from pipeline */
/* hardcode audio/video sink */
g_videosink = create_video_sink();
g_audiosink = create_audio_sink();
if (!g_videosink || !g_audiosink) {
/* TODO memory leak */
g_error("GStreamer: failed to create sink elements\n");
return -1;
}
#endif
/* prepare http/file src */
g_filesrc = gst_element_factory_make ("filesrc", "filesrc");
g_httpsrc = gst_element_factory_make ("souphttpsrc", "httpsrc");
if (!g_filesrc || !g_httpsrc) {
/* TODO memory leak */
g_error("GStreamer: failed to create src elements %x %x\n", g_filesrc, g_httpsrc);
return -1;
}
g_object_ref(g_filesrc);
g_object_ref(g_httpsrc);
/* initialize pipeline */
/* TODO do for audio/video pipe separately */
if (gst_element_set_state(g_pipeline, GST_STATE_READY) ==
GST_STATE_CHANGE_FAILURE) {
g_error("GStreamer: could not set pipeline to ready\n");
}
/* start main loop */
g_main_loop = g_main_loop_new(NULL, FALSE);
err = pthread_create(&g_reader_thread, NULL, main_thread_proc, NULL);
if (err) {
g_error("GStreamer: failed to launch gstreamer main thread %s\n",
strerror(errno));
goto err_pthread;
}
g_print("GStreamer: SUCCESSFULLY INITIALIZED\n");
g_initialized = 1;
return 0;
err_pthread:
pthread_cond_destroy(&g_main_cond);
pthread_mutex_destroy(&g_mutex);
return err;
}
示例11: gst_auto_video_sink_find_best
/*
 * Probe the registry for a usable video sink: filter and rank-sort the sink
 * factories, then instantiate each candidate in turn and test whether it
 * reaches READY.  Returns the first working element (left in READY), or a
 * fakesink fallback; error messages collected during probing are reposted
 * if nothing works.  The caller owns the returned element.
 */
static GstElement *
gst_auto_video_sink_find_best (GstAutoVideoSink * sink)
{
GList *list, *item;
GstElement *choice = NULL;
GstMessage *message = NULL;
GSList *errors = NULL;
/* Private bus so candidate error messages don't leak to the application. */
GstBus *bus = gst_bus_new ();
GstPad *el_pad = NULL;
GstCaps *el_caps = NULL;
gboolean no_match = TRUE;
list = gst_registry_feature_filter (gst_registry_get (),
(GstPluginFeatureFilter) gst_auto_video_sink_factory_filter, FALSE, sink);
list = g_list_sort (list, (GCompareFunc) gst_auto_video_sink_compare_ranks);
GST_LOG_OBJECT (sink, "Trying to find usable video devices ...");
for (item = list; item != NULL; item = item->next) {
GstElementFactory *f = GST_ELEMENT_FACTORY (item->data);
GstElement *el;
if ((el = gst_auto_video_sink_create_element_with_pretty_name (sink, f))) {
GstStateChangeReturn ret;
GST_DEBUG_OBJECT (sink, "Testing %s", GST_OBJECT_NAME (f));
/* If autovideosink has been provided with filter caps,
* accept only sinks that match with the filter caps */
if (sink->filter_caps) {
el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
el_caps = gst_pad_query_caps (el_pad, NULL);
gst_object_unref (el_pad);
GST_DEBUG_OBJECT (sink,
"Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
sink->filter_caps, el_caps);
no_match = !gst_caps_can_intersect (sink->filter_caps, el_caps);
gst_caps_unref (el_caps);
if (no_match) {
GST_DEBUG_OBJECT (sink, "Incompatible caps");
gst_object_unref (el);
continue;
} else {
GST_DEBUG_OBJECT (sink, "Found compatible caps");
}
}
/* Route the candidate's messages to our private bus, then try READY:
 * for sinks this typically opens the underlying device. */
gst_element_set_bus (el, bus);
ret = gst_element_set_state (el, GST_STATE_READY);
if (ret == GST_STATE_CHANGE_SUCCESS) {
GST_DEBUG_OBJECT (sink, "This worked!");
choice = el;
break;
}
/* collect all error messages */
while ((message = gst_bus_pop_filtered (bus, GST_MESSAGE_ERROR))) {
GST_DEBUG_OBJECT (sink, "error message %" GST_PTR_FORMAT, message);
errors = g_slist_append (errors, message);
}
/* Candidate failed: tear it down and drop our reference. */
gst_element_set_state (el, GST_STATE_NULL);
gst_object_unref (el);
}
}
GST_DEBUG_OBJECT (sink, "done trying");
if (!choice) {
if (errors) {
/* FIXME: we forward the first error for now; but later on it might make
* sense to actually analyse them */
gst_message_ref (GST_MESSAGE (errors->data));
GST_DEBUG_OBJECT (sink, "reposting message %p", errors->data);
gst_element_post_message (GST_ELEMENT (sink), GST_MESSAGE (errors->data));
} else {
/* send warning message to application and use a fakesink */
GST_ELEMENT_WARNING (sink, RESOURCE, NOT_FOUND, (NULL),
("Failed to find a usable video sink"));
choice = gst_element_factory_make ("fakesink", "fake-video-sink");
if (g_object_class_find_property (G_OBJECT_GET_CLASS (choice), "sync"))
g_object_set (choice, "sync", TRUE, NULL);
gst_element_set_state (choice, GST_STATE_READY);
}
}
/* Release probing resources; collected error messages drop their refs. */
gst_object_unref (bus);
gst_plugin_feature_list_free (list);
g_slist_foreach (errors, (GFunc) gst_mini_object_unref, NULL);
g_slist_free (errors);
return choice;
}
示例12: gst_parse_launch_full
GstElement *create_video_sink()
{
GstElement *bin, *decoder = NULL;
GstIterator *iter;
GstIteratorResult res;
GError *error = NULL;
GstPad *pad;
gpointer element = NULL;
const char* decoder_name;
#ifndef DESKTOP
/* create pipeline */
decoder_name = "tividdec20";
bin = gst_parse_launch_full("TIViddec2 genTimeStamps=FALSE \
engineName=decode \
codecName=h264dec numFrames=-1 \
! videoscale method=0 \
! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 \
! ffmpegcolorspace \
! video/x-raw-rgb, bpp=16 \
! TIDmaiVideoSink displayStd=fbdev displayDevice=/dev/fb0 videoStd=QVGA \
videoOutput=LCD resizer=FALSE accelFrameCopy=TRUE",
NULL, 0, &error);
#else
decoder_name = "decodebin";
bin = gst_parse_launch_full("decodebin \
! videoscale method=0 \
! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 \
! xvimagesink",
NULL, 0, &error);
#endif
if (!bin) {
g_error("GStreamer: failed to parse video sink pipeline\n");
return NULL;
}
gst_object_set_name(GST_OBJECT(bin), "video-sink");
iter = gst_bin_iterate_elements(GST_BIN(bin));
res = gst_iterator_next (iter, &element);
while (res == GST_ITERATOR_OK) {
gchar *name;
name = gst_object_get_name(GST_OBJECT (element));
if (name) {
if (!strncmp(name, decoder_name, strlen(decoder_name))) {
decoder = GST_ELEMENT(element);
}
g_printf("GS: video sink element: %s \n", name);
g_free (name);
}
gst_object_unref (element);
element = NULL;
res = gst_iterator_next (iter, &element);
}
gst_iterator_free (iter);
if (!decoder) {
/* mem leak */
g_printf("decoder element not found\n");
return NULL;
}
/* add ghostpad */
pad = gst_element_get_static_pad (decoder, "sink");
gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));
gst_object_unref(GST_OBJECT(pad));
return bin;
}
示例13: my_bus_callback
/* Bus watch: reacts to pipeline messages (EOS, errors, state changes).
 * Returning TRUE (1) keeps the watch installed.
 * See http://<xxx>/manual/html/section-bus-message-types.html
 *
 * Fixes vs. the previous revision:
 *  - the ERROR branch used g_error(), which aborts the process, so
 *    g_error_free() and the pipeline reset below it were unreachable;
 *    g_printerr() is used instead.
 *  - unused locals (msgSrc indirection, `owner`) removed. */
static gboolean my_bus_callback(GstBus *bus, GstMessage *msg,
                gpointer user_data)
{
    GstMessageType msgType = GST_MESSAGE_TYPE(msg);
    gchar *msgSrcName = GST_OBJECT_NAME(GST_MESSAGE_SRC(msg));
    /* used in switch */
    /* error message */
    gchar *debug;
    GError *err;
    GstState oldstate, newstate, pending;

    switch (msgType) {
    case GST_MESSAGE_EOS:
        g_print("GStreamer: end-of-stream\n");
        pthread_mutex_lock(&g_mutex);
        gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
        trigger_callback(GST_STATE_NULL);
        pthread_mutex_unlock(&g_mutex);
        break;
    case GST_MESSAGE_ERROR:
        gst_message_parse_error(msg, &err, &debug);
        g_free (debug);
        /* report non-fatally; g_error() would abort before the cleanup */
        g_printerr("GStreamer: error: [%d] %s\n", err->code, err->message);
        g_error_free(err);
        /* TODO no sleep in callback */
        pthread_mutex_lock(&g_mutex);
        /* setting state to null flushes pipeline */
        gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
        trigger_callback(GST_STATE_NULL);
        pthread_mutex_unlock(&g_mutex);
        break;
    case GST_MESSAGE_STATE_CHANGED:
        gst_message_parse_state_changed(msg, &oldstate, &newstate, &pending);
        /* only the top-level pipeline's state is forwarded to the client */
        if (!strcmp(msgSrcName, g_pipeline_name))
            trigger_callback(newstate); /* TODO GstState != GStreamer_state */
        break;
    case GST_MESSAGE_WARNING:
    case GST_MESSAGE_INFO:
        /* TODO */
        break;
    case GST_MESSAGE_APPLICATION: /* marshal information into the main thread */
    case GST_MESSAGE_ASYNC_START:
    case GST_MESSAGE_ASYNC_DONE:
    case GST_MESSAGE_BUFFERING: /* caching of network streams */
    case GST_MESSAGE_CLOCK_LOST:
    case GST_MESSAGE_CLOCK_PROVIDE:
    case GST_MESSAGE_ELEMENT: /* custom message, e.g. qtdemux redirect */
    case GST_MESSAGE_LATENCY:
    case GST_MESSAGE_NEW_CLOCK:
    case GST_MESSAGE_REQUEST_STATE:
    case GST_MESSAGE_SEGMENT_DONE:
    case GST_MESSAGE_SEGMENT_START:
    case GST_MESSAGE_STATE_DIRTY:
    case GST_MESSAGE_STEP_DONE:
    case GST_MESSAGE_STRUCTURE_CHANGE:
    case GST_MESSAGE_TAG: /* meta data: artist, title */
        /* ignore */
        break;
    case GST_MESSAGE_DURATION:
    default:
        g_print("GStreamer: BUS_CALL %s %d\n",
                gst_message_type_get_name(msgType),
                msgType);
        break;
    }
    return 1;
}
示例14: main
//.........这里部分代码省略.........
/* NOTE: this listing is truncated — main()'s declarations and the start of
 * the getopt_long option loop are above this excerpt.  The visible part
 * finishes option parsing, builds a v4l2src -> xvimagesink pipeline, runs a
 * GLib main loop with a user-input thread, and tears everything down. */
/* NOTE(review): strncpy() does not guarantee NUL-termination when optarg
 * fills the buffer — confirm `input` is terminated before use. */
strncpy (input, optarg, sizeof (input) / sizeof (input[0]));
break;
case 'f':
frequency = atol (optarg);
break;
case 'h':
printf ("Usage: v4l2src-test [OPTION]...\n");
for (c = 0; long_options[c].name; ++c) {
printf ("-%c, --%s\r\t\t\t\t%s\n", long_options[c].val,
long_options[c].name, long_options_desc[c]);
}
exit (0);
break;
case '?':
/* getopt_long already printed an error message. */
printf ("Use -h to see help message.\n");
break;
default:
abort ();
}
}
/* Print any remaining command line arguments (not options). */
if (optind < argc) {
printf ("Use -h to see help message.\n" "non-option ARGV-elements: ");
while (optind < argc)
printf ("%s ", argv[optind++]);
putchar ('\n');
}
/* init */
gst_init (&argc, &argv);
/* create elements */
if (!(pipeline = gst_pipeline_new ("my_pipeline"))) {
fprintf (stderr, "error: gst_pipeline_new return NULL");
return -1;
}
if (!(source = gst_element_factory_make ("v4l2src", NULL))) {
fprintf (stderr,
"error: gst_element_factory_make (\"v4l2src\", NULL) return NULL");
return -1;
}
if (!(sink = gst_element_factory_make ("xvimagesink", NULL))) {
fprintf (stderr,
"error: gst_element_factory_make (\"xvimagesink\", NULL) return NULL");
return -1;
}
/* Forward the parsed command-line options to the source element. */
if (numbuffers > -1) {
g_object_set (source, "num-buffers", numbuffers, NULL);
}
if (device[0]) {
g_object_set (source, "device", device, NULL);
}
if (input[0]) {
g_object_set (source, "input", input, NULL);
}
if (frequency) {
g_object_set (source, "frequency", frequency, NULL);
}
/* you would normally check that the elements were created properly */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, my_bus_callback, NULL);
/* put together a pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
gst_element_link_pads (source, "src", sink, "sink");
/* start the pipeline */
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
loop = g_main_loop_new (NULL, FALSE);
/* Separate thread reads interactive commands while the loop runs. */
input_thread = g_thread_try_new ("v4l2src-test", read_user, source, NULL);
if (input_thread == NULL) {
fprintf (stderr, "error: g_thread_try_new() failed");
return -1;
}
g_main_loop_run (loop);
g_thread_join (input_thread);
/* shut down and release the pipeline */
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
gst_object_unref (bus);
gst_object_unref (pipeline);
gst_deinit ();
return 0;
}
示例15: gst_hls_demux_cache_fragments
/*
 * Pre-buffer the first fragments of an HLS stream: pick and refresh the
 * variant playlist (by configured connection speed, if any), post the
 * stream duration for non-live streams, then download fragments_cache
 * fragments while posting buffering progress messages.
 * Returns TRUE on success, FALSE on error or cancellation.
 *
 * Fix: gst_m3u8_client_get_playlist_for_bitrate() may return NULL (no
 * playlist for the requested bitrate); the previous code dereferenced
 * tmp->data unconditionally.
 */
static gboolean
gst_hls_demux_cache_fragments (GstHLSDemux * demux)
{
  gint i;

  /* If this playlist is a variant playlist, select the first one
   * and update it */
  if (gst_m3u8_client_has_variant_playlist (demux->client)) {
    GstM3U8 *child = NULL;

    if (demux->connection_speed == 0) {
      GST_M3U8_CLIENT_LOCK (demux->client);
      child = demux->client->main->current_variant->data;
      GST_M3U8_CLIENT_UNLOCK (demux->client);
    } else {
      GList *tmp = gst_m3u8_client_get_playlist_for_bitrate (demux->client,
          demux->connection_speed);

      if (tmp == NULL) {
        GST_ERROR_OBJECT (demux,
            "Could not find a playlist for the configured connection speed");
        return FALSE;
      }
      child = GST_M3U8 (tmp->data);
    }

    gst_m3u8_client_set_current (demux->client, child);
    if (!gst_hls_demux_update_playlist (demux, FALSE)) {
      GST_ERROR_OBJECT (demux, "Could not fetch the child playlist %s",
          child->uri);
      return FALSE;
    }
  }

  /* VOD streams have a known total duration: announce it. */
  if (!gst_m3u8_client_is_live (demux->client)) {
    GstClockTime duration = gst_m3u8_client_get_duration (demux->client);

    GST_DEBUG_OBJECT (demux, "Sending duration message : %" GST_TIME_FORMAT,
        GST_TIME_ARGS (duration));
    if (duration != GST_CLOCK_TIME_NONE)
      gst_element_post_message (GST_ELEMENT (demux),
          gst_message_new_duration (GST_OBJECT (demux),
              GST_FORMAT_TIME, duration));
  }

  /* Cache the first fragments */
  for (i = 0; i < demux->fragments_cache; i++) {
    gst_element_post_message (GST_ELEMENT (demux),
        gst_message_new_buffering (GST_OBJECT (demux),
            100 * i / demux->fragments_cache));
    g_get_current_time (&demux->next_update);
    if (!gst_hls_demux_get_next_fragment (demux, TRUE)) {
      if (demux->end_of_playlist)
        break;
      if (!demux->cancelled)
        GST_ERROR_OBJECT (demux, "Error caching the first fragments");
      return FALSE;
    }
    /* make sure we stop caching fragments if something cancelled it */
    if (demux->cancelled)
      return FALSE;
    gst_hls_demux_switch_playlist (demux);
  }
  gst_element_post_message (GST_ELEMENT (demux),
      gst_message_new_buffering (GST_OBJECT (demux), 100));

  g_get_current_time (&demux->next_update);

  demux->need_cache = FALSE;
  return TRUE;
}