This article collects typical usage examples of GST_OBJECT in C/C++ (strictly speaking a GStreamer cast macro, although this page refers to it as a function). If you have been wondering what GST_OBJECT does, how to use it, or what real-world code that uses it looks like, the hand-picked examples below should help.
Fifteen code examples using GST_OBJECT are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
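Before the examples, a quick note: GST_OBJECT() casts any GstObject-derived instance (a GstElement, GstPad, GstBus, and so on) to GstObject * so it can be passed to the generic gst_object_* API. The following minimal sketch is illustrative only and is not taken from the listings below:
#include <gst/gst.h>
int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  gst_init (&argc, &argv);
  /* A GstPipeline is a GstElement, which in turn is a GstObject. */
  pipeline = gst_pipeline_new ("demo");
  /* GST_OBJECT() casts the element so it fits the GstObject API. */
  gst_object_set_name (GST_OBJECT (pipeline), "renamed-demo");
  g_print ("pipeline name: %s\n", GST_OBJECT_NAME (pipeline));
  /* Drop our reference. GST_OBJECT() is optional here because
   * gst_object_unref() takes a gpointer, but it documents intent. */
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}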
Example 1: music_destroy_pipeline
void music_destroy_pipeline(struct music_rtp_pipeline *pipe)
{
gst_object_unref(GST_OBJECT(pipe->pipeline));
}
Example 2: main
int main(int argc, char *argv[])
{
GstElement *pipeline;
GstElement *src, *colorspace, *codec, *wrapper, *netsink;
gboolean status;
GstCaps *capsRaw;
gchar *params;
FILE *lf;
int width, height;
/* Initialize GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
/* Create pipeline */
pipeline = gst_pipeline_new("truba");
//src = gst_element_factory_make("videotestsrc", "src");
src = gst_element_factory_make("appsrc", "src");
colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspaceconverter");
codec = gst_element_factory_make(CODEC, "codec");
wrapper = gst_element_factory_make("rtph264pay", "wrapper");
netsink = gst_element_factory_make("udpsink", "netsink");
if (NULL == getenv("VIT_WIDTH")) {
width = IMWIDTH;
}
else {
width = atoi(getenv("VIT_WIDTH"));
}
if (NULL == getenv("VIT_HEIGHT")) {
height = IMHEIGHT;
}
else {
height = atoi(getenv("VIT_HEIGHT"));
}
/* Set up pipeline */
capsRaw = gst_caps_new_simple( "video/x-raw-gray",
"bpp", G_TYPE_INT, 8,
"depth", G_TYPE_INT, 8,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, 25, 1,
NULL);
g_signal_connect(src, "need-data", G_CALLBACK(cb_need_data), NULL);
g_object_set(G_OBJECT(src), "caps", capsRaw, NULL);
g_object_set(G_OBJECT(src), "stream-type", 0, "format",
GST_FORMAT_TIME, NULL);
if (NULL == getenv("VIT_HOST"))
g_object_set(G_OBJECT(netsink), "host", HOST, NULL);
else {
g_object_set(G_OBJECT(netsink), "host", getenv("VIT_HOST"), NULL);
printf("Connected to host %s\n", getenv("VIT_HOST"));
}
g_object_set(G_OBJECT(netsink), "port", PORT, NULL);
gst_bin_add_many(GST_BIN(pipeline), src, colorspace, codec, wrapper,
netsink, NULL);
status = gst_element_link_many(src, colorspace, codec, wrapper, netsink,
NULL);
if(!status) {
printf("Linking elements failed!\n");
}
else {
printf("Linking elements succeed!\n");
}
params = NULL;
/* Create lock file */
lf = fopen(LOCKFILE, "w");
fprintf(lf, "%d", getpid());
fclose(lf);
/* Setup signal handler */
if (SIG_ERR == signal(SIGUSR1, _t_sigusr1))
{
printf("Failed to spoof signal handler\n");
}
//Run
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
gst_element_set_state(pipeline, GST_STATE_NULL);
//Deinit
gst_object_unref(GST_OBJECT(pipeline));
g_main_loop_unref(loop);
return 0;
}
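Example 2 connects an appsrc "need-data" callback named cb_need_data that is not part of the listing above. Purely as an illustration, a handler for the 8-bit grayscale, 25 fps caps set on the appsrc could look like the sketch below (GStreamer 0.10-era buffer API, to match video/x-raw-gray and ffmpegcolorspace; the frame geometry and timestamp variables are assumptions, not taken from the original code):
#include <string.h>
#include <gst/gst.h>
/* Illustrative file-scope state; the original program presumably keeps
 * equivalents of these elsewhere. */
static int frame_width = 320, frame_height = 240;
static GstClockTime frame_ts = 0;
static void
cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
{
  GstBuffer *buffer;
  GstFlowReturn ret;
  /* One 8 bpp grayscale frame, filled with mid-gray. */
  buffer = gst_buffer_new_and_alloc (frame_width * frame_height);
  memset (GST_BUFFER_DATA (buffer), 0x80, GST_BUFFER_SIZE (buffer));
  GST_BUFFER_TIMESTAMP (buffer) = frame_ts;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 25);
  frame_ts += GST_BUFFER_DURATION (buffer);
  /* appsrc takes its own reference on "push-buffer", so drop ours afterwards. */
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);
}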
Example 3: gst_gme_dec_src_event
static gboolean
gst_gme_dec_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstGmeDec *gme = GST_GME_DEC (parent);
gboolean result = FALSE;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
{
gdouble rate;
GstFormat format;
GstSeekFlags flags;
GstSeekType start_type, stop_type;
gint64 start, stop;
gboolean flush;
gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
&stop_type, &stop);
gst_event_unref (event);
if (format != GST_FORMAT_TIME) {
GST_DEBUG_OBJECT (gme, "seeking is only supported in TIME format");
break;
}
if (start_type != GST_SEEK_TYPE_SET || stop_type != GST_SEEK_TYPE_NONE) {
GST_DEBUG_OBJECT (gme, "unsupported seek type");
break;
}
if (stop_type == GST_SEEK_TYPE_NONE)
stop = GST_CLOCK_TIME_NONE;
if (start_type == GST_SEEK_TYPE_SET) {
GstSegment seg;
guint64 cur = gme_tell (gme->player) * GST_MSECOND;
guint64 dest = (guint64) start;
if (gme->total_duration != GST_CLOCK_TIME_NONE)
dest = CLAMP (dest, 0, gme->total_duration);
else
dest = MAX (0, dest);
if (dest == cur)
break;
flush = (flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH;
if (flush) {
gst_pad_push_event (gme->srcpad, gst_event_new_flush_start ());
} else {
gst_pad_stop_task (gme->srcpad);
}
GST_PAD_STREAM_LOCK (gme->srcpad);
if (flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT (gme),
gst_message_new_segment_start (GST_OBJECT (gme), format, cur));
}
if (flush) {
gst_pad_push_event (gme->srcpad, gst_event_new_flush_stop (TRUE));
}
if (stop == GST_CLOCK_TIME_NONE
&& gme->total_duration != GST_CLOCK_TIME_NONE)
stop = gme->total_duration;
gst_segment_init (&seg, GST_FORMAT_TIME);
seg.rate = rate;
seg.start = dest;
seg.stop = stop;
seg.time = dest;
gst_pad_push_event (gme->srcpad, gst_event_new_segment (&seg));
gme->seekpoint = dest / GST_MSECOND; /* nsecs to msecs */
gme->seeking = TRUE;
gst_pad_start_task (gme->srcpad, (GstTaskFunction) gst_gme_play,
gme->srcpad, NULL);
GST_PAD_STREAM_UNLOCK (gme->srcpad);
result = TRUE;
}
break;
}
default:
result = gst_pad_push_event (gme->sinkpad, event);
break;
}
return result;
}
Example 4: debug_dump_element
/*
 * debug_dump_element:
 * @bin: the bin that should be analyzed
 * @details: the #GstDebugGraphDetails to show
 * @str: the string to append the dot graph description to
 * @indent: level of graph indentation
 *
 * Helper for gst_debug_bin_to_dot_file() to recursively dump a pipeline.
 */
static void
debug_dump_element (GstBin * bin, GstDebugGraphDetails details,
GString * str, const gint indent)
{
GstIterator *element_iter, *pad_iter;
gboolean elements_done, pads_done;
GValue item = { 0, };
GValue item2 = { 0, };
GstElement *element;
GstPad *pad = NULL;
guint src_pads, sink_pads;
gchar *src_pad_name = NULL, *sink_pad_name = NULL;
gchar *element_name;
gchar *state_name = NULL;
gchar *param_name = NULL;
const gchar *spc = MAKE_INDENT (indent);
element_iter = gst_bin_iterate_elements (bin);
elements_done = FALSE;
while (!elements_done) {
switch (gst_iterator_next (element_iter, &item)) {
case GST_ITERATOR_OK:
element = g_value_get_object (&item);
element_name = debug_dump_make_object_name (GST_OBJECT (element));
if (details & GST_DEBUG_GRAPH_SHOW_STATES) {
state_name = debug_dump_get_element_state (GST_ELEMENT (element));
}
if (details & GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS) {
param_name = debug_dump_get_object_params (G_OBJECT (element),
details, NULL);
}
/* elements */
g_string_append_printf (str, "%ssubgraph cluster_%s {\n", spc,
element_name);
g_string_append_printf (str, "%s fontname=\"Bitstream Vera Sans\";\n",
spc);
g_string_append_printf (str, "%s fontsize=\"8\";\n", spc);
g_string_append_printf (str, "%s style=\"filled,rounded\";\n", spc);
g_string_append_printf (str, "%s color=black;\n", spc);
g_string_append_printf (str, "%s label=\"%s\\n%s%s%s\";\n", spc,
G_OBJECT_TYPE_NAME (element), GST_OBJECT_NAME (element),
(state_name ? state_name : ""), (param_name ? param_name : "")
);
if (state_name) {
g_free (state_name);
state_name = NULL;
}
if (param_name) {
g_free (param_name);
param_name = NULL;
}
src_pads = sink_pads = 0;
if ((pad_iter = gst_element_iterate_sink_pads (element))) {
gchar *cluster_name = g_strdup_printf ("%s_sink", element_name);
debug_dump_element_pads (pad_iter, pad, element, details, str,
indent + 1, &sink_pads, cluster_name, &sink_pad_name);
g_free (cluster_name);
gst_iterator_free (pad_iter);
}
if ((pad_iter = gst_element_iterate_src_pads (element))) {
gchar *cluster_name = g_strdup_printf ("%s_src", element_name);
debug_dump_element_pads (pad_iter, pad, element, details, str,
indent + 1, &src_pads, cluster_name, &src_pad_name);
g_free (cluster_name);
gst_iterator_free (pad_iter);
}
if (sink_pads && src_pads) {
/* add invisible link from first sink to first src pad */
g_string_append_printf (str,
"%s %s_%s -> %s_%s [style=\"invis\"];\n",
spc, element_name, sink_pad_name, element_name, src_pad_name);
}
g_free (sink_pad_name);
g_free (src_pad_name);
g_free (element_name);
sink_pad_name = src_pad_name = NULL;
if (GST_IS_BIN (element)) {
g_string_append_printf (str, "%s fillcolor=\"#ffffff\";\n", spc);
/* recurse */
debug_dump_element (GST_BIN (element), details, str, indent + 1);
} else {
if (src_pads && !sink_pads)
g_string_append_printf (str, "%s fillcolor=\"#ffaaaa\";\n", spc);
else if (!src_pads && sink_pads)
g_string_append_printf (str, "%s fillcolor=\"#aaaaff\";\n", spc);
else if (src_pads && sink_pads)
g_string_append_printf (str, "%s fillcolor=\"#aaffaa\";\n", spc);
else
g_string_append_printf (str, "%s fillcolor=\"#ffffff\";\n", spc);
}
//......... the remainder of this example is omitted .........
Example 5: debug_dump_element_pad_link
static void
debug_dump_element_pad_link (GstPad * pad, GstElement * element,
GstDebugGraphDetails details, FILE * out, const gint indent)
{
GstElement *peer_element;
GstPad *peer_pad;
GstCaps *caps, *peer_caps;
gchar *media = NULL;
gchar *media_src = NULL, *media_sink = NULL;
gchar *pad_name, *element_name;
gchar *peer_pad_name, *peer_element_name;
const gchar *spc = &spaces[MAX (sizeof (spaces) - (1 + indent * 2), 0)];
if ((peer_pad = gst_pad_get_peer (pad))) {
if ((details & GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE) ||
(details & GST_DEBUG_GRAPH_SHOW_CAPS_DETAILS)
) {
caps = gst_pad_get_current_caps (pad);
if (!caps)
caps = gst_pad_get_pad_template_caps (pad);
peer_caps = gst_pad_get_current_caps (peer_pad);
if (!peer_caps)
peer_caps = gst_pad_get_pad_template_caps (peer_pad);
media = debug_dump_describe_caps (caps, details);
/* check if peer caps are different */
if (peer_caps && !gst_caps_is_equal (caps, peer_caps)) {
gchar *tmp;
tmp = debug_dump_describe_caps (peer_caps, details);
if (gst_pad_get_direction (pad) == GST_PAD_SRC) {
media_src = media;
media_sink = tmp;
} else {
media_src = tmp;
media_sink = media;
}
media = NULL;
}
gst_caps_unref (peer_caps);
gst_caps_unref (caps);
}
pad_name = debug_dump_make_object_name (GST_OBJECT (pad));
if (element) {
element_name = debug_dump_make_object_name (GST_OBJECT (element));
} else {
element_name = g_strdup ("");
}
peer_pad_name = debug_dump_make_object_name (GST_OBJECT (peer_pad));
if ((peer_element = gst_pad_get_parent_element (peer_pad))) {
peer_element_name =
debug_dump_make_object_name (GST_OBJECT (peer_element));
} else {
peer_element_name = g_strdup ("");
}
/* pad link */
if (media) {
fprintf (out, "%s%s_%s -> %s_%s [label=\"%s\"]\n", spc,
element_name, pad_name, peer_element_name, peer_pad_name, media);
g_free (media);
} else if (media_src && media_sink) {
/* dot has some issues with placement of head and taillabels,
* we need an empty label to make space */
fprintf (out, "%s%s_%s -> %s_%s [labeldistance=\"10\", labelangle=\"0\", "
"label=\" \", "
"taillabel=\"%s\", headlabel=\"%s\"]\n",
spc, element_name, pad_name, peer_element_name, peer_pad_name,
media_src, media_sink);
g_free (media_src);
g_free (media_sink);
} else {
fprintf (out, "%s%s_%s -> %s_%s\n", spc,
element_name, pad_name, peer_element_name, peer_pad_name);
}
g_free (pad_name);
g_free (element_name);
g_free (peer_pad_name);
g_free (peer_element_name);
if (peer_element)
gst_object_unref (peer_element);
gst_object_unref (peer_pad);
}
}
Example 6: vorbis_handle_comment_packet
static GstFlowReturn
vorbis_handle_comment_packet (GstVorbisDec * vd, ogg_packet * packet)
{
guint bitrate = 0;
gchar *encoder = NULL;
GstTagList *list, *old_list;
GstBuffer *buf;
GST_DEBUG_OBJECT (vd, "parsing comment packet");
buf = gst_buffer_new ();
GST_BUFFER_DATA (buf) = gst_ogg_packet_data (packet);
GST_BUFFER_SIZE (buf) = gst_ogg_packet_size (packet);
list =
gst_tag_list_from_vorbiscomment_buffer (buf, (guint8 *) "\003vorbis", 7,
&encoder);
old_list = vd->taglist;
vd->taglist = gst_tag_list_merge (vd->taglist, list, GST_TAG_MERGE_REPLACE);
if (old_list)
gst_tag_list_free (old_list);
gst_tag_list_free (list);
gst_buffer_unref (buf);
if (!vd->taglist) {
GST_ERROR_OBJECT (vd, "couldn't decode comments");
vd->taglist = gst_tag_list_new ();
}
if (encoder) {
if (encoder[0])
gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_ENCODER, encoder, NULL);
g_free (encoder);
}
gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_ENCODER_VERSION, vd->vi.version,
GST_TAG_AUDIO_CODEC, "Vorbis", NULL);
if (vd->vi.bitrate_nominal > 0 && vd->vi.bitrate_nominal <= 0x7FFFFFFF) {
gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_NOMINAL_BITRATE, (guint) vd->vi.bitrate_nominal, NULL);
bitrate = vd->vi.bitrate_nominal;
}
if (vd->vi.bitrate_upper > 0 && vd->vi.bitrate_upper <= 0x7FFFFFFF) {
gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_MAXIMUM_BITRATE, (guint) vd->vi.bitrate_upper, NULL);
if (!bitrate)
bitrate = vd->vi.bitrate_upper;
}
if (vd->vi.bitrate_lower > 0 && vd->vi.bitrate_lower <= 0x7FFFFFFF) {
gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_MINIMUM_BITRATE, (guint) vd->vi.bitrate_lower, NULL);
if (!bitrate)
bitrate = vd->vi.bitrate_lower;
}
if (bitrate) {
gst_tag_list_add (vd->taglist, GST_TAG_MERGE_REPLACE,
GST_TAG_BITRATE, (guint) bitrate, NULL);
}
if (vd->initialized) {
gst_element_found_tags_for_pad (GST_ELEMENT_CAST (vd), vd->srcpad,
vd->taglist);
vd->taglist = NULL;
} else {
/* Only post them as messages for the time being. *
* They will be pushed on the pad once the decoder is initialized */
gst_element_post_message (GST_ELEMENT_CAST (vd),
gst_message_new_tag (GST_OBJECT (vd), gst_tag_list_copy (vd->taglist)));
}
return GST_FLOW_OK;
}
Example 7: debug_dump_pad
static void
debug_dump_pad (GstPad * pad, const gchar * color_name,
const gchar * element_name, GstDebugGraphDetails details, GString * str,
const gint indent)
{
GstPadTemplate *pad_templ;
GstPadPresence presence;
gchar *pad_name, *param_name = NULL;
const gchar *style_name;
static const char *const ignore_propnames[] =
{ "parent", "direction", "template",
"caps", NULL
};
const gchar *spc = MAKE_INDENT (indent);
pad_name = debug_dump_make_object_name (GST_OBJECT (pad));
/* pad availability */
style_name = "filled,solid";
if ((pad_templ = gst_pad_get_pad_template (pad))) {
presence = GST_PAD_TEMPLATE_PRESENCE (pad_templ);
gst_object_unref (pad_templ);
if (presence == GST_PAD_SOMETIMES) {
style_name = "filled,dotted";
} else if (presence == GST_PAD_REQUEST) {
style_name = "filled,dashed";
}
}
param_name =
debug_dump_get_object_params (G_OBJECT (pad), details, ignore_propnames);
if (details & GST_DEBUG_GRAPH_SHOW_STATES) {
gchar pad_flags[4];
const gchar *activation_mode = "-><";
const gchar *task_mode = "";
GstTask *task;
GST_OBJECT_LOCK (pad);
task = GST_PAD_TASK (pad);
if (task) {
switch (gst_task_get_state (task)) {
case GST_TASK_STARTED:
task_mode = "[T]";
break;
case GST_TASK_PAUSED:
task_mode = "[t]";
break;
default:
/* Invalid task state, ignoring */
break;
}
}
GST_OBJECT_UNLOCK (pad);
/* check if pad flags */
pad_flags[0] =
GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_BLOCKED) ? 'B' : 'b';
pad_flags[1] =
GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_FLUSHING) ? 'F' : 'f';
pad_flags[2] =
GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_BLOCKING) ? 'B' : 'b';
pad_flags[3] = '\0';
g_string_append_printf (str,
"%s %s_%s [color=black, fillcolor=\"%s\", label=\"%s%s\\n[%c][%s]%s\", height=\"0.2\", style=\"%s\"];\n",
spc, element_name, pad_name, color_name, GST_OBJECT_NAME (pad),
(param_name ? param_name : ""),
activation_mode[pad->mode], pad_flags, task_mode, style_name);
} else {
g_string_append_printf (str,
"%s %s_%s [color=black, fillcolor=\"%s\", label=\"%s%s\", height=\"0.2\", style=\"%s\"];\n",
spc, element_name, pad_name, color_name, GST_OBJECT_NAME (pad),
(param_name ? param_name : ""), style_name);
}
g_free (param_name);
g_free (pad_name);
}
Example 8: gst_swfdec_render
//......... the beginning of this example is omitted .........
SwfdecBuffer *audio_buffer;
SwfdecBuffer *video_buffer;
GstBuffer *videobuf;
GstBuffer *audiobuf;
gboolean ret;
GstFlowReturn res;
const char *url;
GST_DEBUG_OBJECT (swfdec, "render:SWF_EOF");
swfdec_decoder_set_mouse (swfdec->decoder, swfdec->x, swfdec->y,
swfdec->button);
ret = swfdec_render_iterate (swfdec->decoder);
if (swfdec->decoder->using_experimental) {
GST_ELEMENT_ERROR (swfdec, LIBRARY, FAILED,
("SWF file contains features known to trigger bugs."),
("SWF file contains features known to trigger bugs."));
gst_task_stop (swfdec->task);
}
if (!ret) {
gst_task_stop (swfdec->task);
res = gst_pad_push_event (swfdec->videopad, gst_event_new_eos ());
res = gst_pad_push_event (swfdec->audiopad, gst_event_new_eos ());
return;
}
if (swfdec->send_discont) {
GstEvent *event;
swfdec->timestamp = swfdec_render_get_frame_index (swfdec->decoder) *
swfdec->interval;
GST_DEBUG ("sending discont %" G_GINT64_FORMAT, swfdec->timestamp);
event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
swfdec->timestamp, GST_CLOCK_TIME_NONE, 0);
gst_pad_push_event (swfdec->videopad, event);
event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
swfdec->timestamp, GST_CLOCK_TIME_NONE, 0);
gst_pad_push_event (swfdec->audiopad, event);
swfdec->send_discont = FALSE;
}
GST_DEBUG ("pushing image/sound %" G_GINT64_FORMAT, swfdec->timestamp);
if (swfdec->skip_index) {
video_buffer = NULL;
swfdec->skip_index--;
} else {
video_buffer = swfdec_render_get_image (swfdec->decoder);
if (!video_buffer) {
gst_task_stop (swfdec->task);
gst_pad_push_event (swfdec->videopad, gst_event_new_eos ());
gst_pad_push_event (swfdec->audiopad, gst_event_new_eos ());
return;
}
swfdec->skip_index = swfdec->skip_frames - 1;
videobuf = gst_swfdec_buffer_from_swf (video_buffer);
GST_BUFFER_TIMESTAMP (videobuf) = swfdec->timestamp;
gst_buffer_set_caps (videobuf, GST_PAD_CAPS (swfdec->videopad));
gst_pad_push (swfdec->videopad, videobuf);
}
audio_buffer = swfdec_render_get_audio (swfdec->decoder);
if (audio_buffer) {
audiobuf = gst_swfdec_buffer_from_swf (audio_buffer);
GST_BUFFER_TIMESTAMP (audiobuf) = swfdec->timestamp;
gst_buffer_set_caps (audiobuf, GST_PAD_CAPS (swfdec->audiopad));
gst_pad_push (swfdec->audiopad, audiobuf);
}
swfdec->timestamp += swfdec->interval;
url = swfdec_decoder_get_url (swfdec->decoder);
if (url) {
GstStructure *s;
GstMessage *msg;
s = gst_structure_new ("embedded-url", "url", G_TYPE_STRING, url,
"target", G_TYPE_STRING, "_self", NULL);
msg = gst_message_new_element (GST_OBJECT (swfdec), s);
gst_element_post_message (GST_ELEMENT (swfdec), msg);
}
}
}
Example 9: gst_object_unref
Pipeline::~Pipeline()
{
gst_object_unref(GST_OBJECT(camerabin));
}
Example 10: gst_video_rate_transform_ip
static GstFlowReturn
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
GstVideoRate *videorate;
GstFlowReturn res = GST_BASE_TRANSFORM_FLOW_DROPPED;
GstClockTime intime, in_ts, in_dur;
GstClockTime avg_period;
gboolean skip = FALSE;
videorate = GST_VIDEO_RATE (trans);
/* make sure the denominators are not 0 */
if (videorate->from_rate_denominator == 0 ||
videorate->to_rate_denominator == 0)
goto not_negotiated;
GST_OBJECT_LOCK (videorate);
avg_period = videorate->average_period_set;
GST_OBJECT_UNLOCK (videorate);
/* MT-safe switching between modes */
if (G_UNLIKELY (avg_period != videorate->average_period)) {
gboolean switch_mode = (avg_period == 0 || videorate->average_period == 0);
videorate->average_period = avg_period;
videorate->last_ts = GST_CLOCK_TIME_NONE;
if (switch_mode) {
if (avg_period) {
/* enabling average mode */
videorate->average = 0;
/* make sure no cached buffers from regular mode are left */
gst_video_rate_swap_prev (videorate, NULL, 0);
} else {
/* enable regular mode */
videorate->next_ts = GST_CLOCK_TIME_NONE;
skip = TRUE;
}
/* max averaging mode has no latency, normal mode does */
gst_element_post_message (GST_ELEMENT (videorate),
gst_message_new_latency (GST_OBJECT (videorate)));
}
}
if (videorate->average_period > 0)
return gst_video_rate_trans_ip_max_avg (videorate, buffer);
in_ts = GST_BUFFER_TIMESTAMP (buffer);
in_dur = GST_BUFFER_DURATION (buffer);
if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
in_ts = videorate->last_ts;
if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
goto invalid_buffer;
}
/* get the time of the next expected buffer timestamp, we use this when the
* next buffer has -1 as a timestamp */
videorate->last_ts = in_ts;
if (in_dur != GST_CLOCK_TIME_NONE)
videorate->last_ts += in_dur;
GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (in_ts));
/* the input time is the time in the segment + all previously accumulated
* segments */
intime = in_ts + videorate->segment.base;
/* we need to have two buffers to compare */
if (videorate->prevbuf == NULL) {
gst_video_rate_swap_prev (videorate, buffer, intime);
videorate->in++;
if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
/* new buffer, we expect to output a buffer that matches the first
* timestamp in the segment */
if (videorate->skip_to_first || skip) {
videorate->next_ts = intime;
videorate->base_ts = in_ts - videorate->segment.start;
videorate->out_frame_count = 0;
} else {
videorate->next_ts = videorate->segment.start + videorate->segment.base;
}
}
} else {
GstClockTime prevtime;
gint count = 0;
gint64 diff1, diff2;
prevtime = videorate->prev_ts;
GST_LOG_OBJECT (videorate,
"BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
" outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));
videorate->in++;
/* drop new buffer if it's before previous one */
if (intime < prevtime) {
//......... the remainder of this example is omitted .........
Example 11: message_loop_to_state_change
static gboolean
message_loop_to_state_change (MetadataExtractor *extractor,
GstState state)
{
GstBus *bus;
GstMessageType events;
g_return_val_if_fail (extractor, FALSE);
g_return_val_if_fail (extractor->playbin, FALSE);
bus = gst_element_get_bus (extractor->playbin);
events = (GST_MESSAGE_TAG | GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
for (;;) {
GstMessage *message;
message = gst_bus_timed_pop_filtered (bus, GST_SECOND * 5, events);
if (message == NULL)
goto timed_out;
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state;
GstState new_state;
old_state = new_state = GST_STATE_NULL;
gst_message_parse_state_changed (message, &old_state, &new_state, NULL);
if (old_state == new_state)
break;
/* we only care about playbin (pipeline) state changes */
if (GST_MESSAGE_SRC (message) != GST_OBJECT (extractor->playbin))
break;
if ((old_state == GST_STATE_READY) && (new_state == GST_STATE_PAUSED))
update_stream_info (extractor);
else if ((old_state == GST_STATE_PAUSED) && (new_state == GST_STATE_READY))
reset_extractor_data (extractor);
if (new_state == state) {
gst_message_unref (message);
goto success;
}
break;
}
case GST_MESSAGE_TAG: {
GstTagList *tag_list;
GstTagList *result;
tag_list = NULL;
gst_message_parse_tag (message, &tag_list);
result = gst_tag_list_merge (extractor->tagcache, tag_list, GST_TAG_MERGE_KEEP);
if (extractor->tagcache != NULL)
gst_tag_list_unref (extractor->tagcache);
extractor->tagcache = result;
gst_tag_list_free (tag_list);
break;
}
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *gsterror = NULL;
gst_message_parse_error (message, &gsterror, &debug);
/*g_warning ("Error: %s (%s)", gsterror->message, debug);*/
g_error_free (gsterror);
gst_message_unref (message);
g_free (debug);
goto error;
}
break;
case GST_MESSAGE_EOS: {
g_warning ("Media file could not be played.");
gst_message_unref (message);
goto error;
}
break;
default:
g_assert_not_reached ();
break;
}
gst_message_unref (message);
}
g_assert_not_reached ();
success:
/* state change succeeded */
GST_DEBUG ("state change to %s succeeded", gst_element_state_get_name (state));
//......... the remainder of this example is omitted .........
Example 12: build_pipeline
static void
build_pipeline (SjExtractor *extractor)
{
SjExtractorPrivate *priv;
GstBus *bus;
g_return_if_fail (SJ_IS_EXTRACTOR (extractor));
priv = extractor->priv;
if (priv->pipeline != NULL) {
gst_object_unref (GST_OBJECT (priv->pipeline));
}
priv->pipeline = gst_pipeline_new ("pipeline");
bus = gst_element_get_bus (priv->pipeline);
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message::error", G_CALLBACK (error_cb), extractor);
/* Read from CD */
priv->cdsrc = gst_element_make_from_uri (GST_URI_SRC, "cdda://1", "cd_src", NULL);
if (priv->cdsrc == NULL) {
g_set_error (&priv->construct_error,
SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
_("Could not create GStreamer CD reader"));
return;
}
g_object_set (G_OBJECT (priv->cdsrc), "device", priv->device_path, NULL);
if (g_object_class_find_property (G_OBJECT_GET_CLASS (priv->cdsrc), "paranoia-mode")) {
g_object_set (G_OBJECT (priv->cdsrc), "paranoia-mode", priv->paranoia_mode, NULL);
}
/* Get the track format for seeking later */
priv->track_format = gst_format_get_by_nick ("track");
g_assert (priv->track_format != 0);
/* Encode */
priv->encodebin = build_encoder (extractor);
if (priv->encodebin == NULL) {
g_set_error (&priv->construct_error,
SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
_("Could not create GStreamer encoders for %s"),
gst_encoding_profile_get_name (priv->profile));
return;
}
/* Connect to the eos so we know when its finished */
g_signal_connect (bus, "message::eos", G_CALLBACK (eos_cb), extractor);
/* Write to disk */
priv->filesink = gst_element_factory_make (FILE_SINK, "file_sink");
if (priv->filesink == NULL) {
g_set_error (&priv->construct_error,
SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
_("Could not create GStreamer file output"));
return;
}
#if 0
g_signal_connect (G_OBJECT (priv->filesink), "allow-overwrite", G_CALLBACK (just_say_yes), extractor);
#endif
/* Add the elements to the pipeline */
gst_bin_add_many (GST_BIN (priv->pipeline), priv->cdsrc, priv->encodebin, priv->filesink, NULL);
/* Link it all together */
if (!gst_element_link_many (priv->cdsrc, priv->encodebin, priv->filesink, NULL)) {
g_set_error (&priv->construct_error,
SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
_("Could not link pipeline"));
return;
}
priv->rebuild_pipeline = FALSE;
}
Example 13: main
gint
main (gint argc, gchar ** argv)
{
GstElement *pipeline;
GstElement *shapewipe;
GstControlSource *cs;
GMainLoop *loop;
GstBus *bus;
gchar *pipeline_string;
gfloat border = 0.05;
if (argc < 2) {
g_print ("Usage: shapewipe mask.png <border>\n");
return -1;
}
gst_init (&argc, &argv);
if (argc > 2) {
border = atof (argv[2]);
}
pipeline_string =
g_strdup_printf
("videotestsrc ! video/x-raw,format=(string)AYUV,width=640,height=480 ! shapewipe name=shape border=%f ! videomixer name=mixer ! videoconvert ! autovideosink filesrc location=%s ! typefind ! decodebin2 ! videoconvert ! videoscale ! queue ! shape.mask_sink videotestsrc pattern=snow ! video/x-raw,format=(string)AYUV,width=640,height=480 ! queue ! mixer.",
border, argv[1]);
pipeline = gst_parse_launch (pipeline_string, NULL);
g_free (pipeline_string);
if (pipeline == NULL) {
g_print ("Failed to create pipeline\n");
return -2;
}
shapewipe = gst_bin_get_by_name (GST_BIN (pipeline), "shape");
cs = gst_lfo_control_source_new ();
gst_object_add_control_binding (GST_OBJECT_CAST (shapewipe),
gst_direct_control_binding_new (GST_OBJECT_CAST (shapewipe), "position",
cs));
g_object_set (cs,
"amplitude", 0.5,
"offset", 0.5, "frequency", 0.25, "timeshift", 500 * GST_MSECOND, NULL);
g_object_unref (cs);
loop = g_main_loop_new (NULL, FALSE);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message", G_CALLBACK (on_message), loop);
gst_object_unref (GST_OBJECT (bus));
if (gst_element_set_state (pipeline,
GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
return -4;
}
g_main_loop_run (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (G_OBJECT (pipeline));
return 0;
}
Example 14: gst_element_post_message
void PlaybackPipeline::notifyDurationChanged()
{
gst_element_post_message(GST_ELEMENT(m_webKitMediaSrc.get()), gst_message_new_duration_changed(GST_OBJECT(m_webKitMediaSrc.get())));
// WebKitMediaSrc will ask MediaPlayerPrivateGStreamerMSE for the new duration later, when somebody asks for it.
}
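The duration-changed message posted in Example 14 carries no value of its own; as the comment notes, listeners are expected to query the element for the new duration when they need it. A minimal sketch of an application-side bus handler reacting to it (GStreamer 1.x; the function name and user_data type are illustrative):
static gboolean
on_bus_message (GstBus *bus, GstMessage *message, gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_DURATION_CHANGED) {
    gint64 duration = GST_CLOCK_TIME_NONE;
    /* Re-query the pipeline; the message itself does not contain the duration. */
    if (gst_element_query_duration (pipeline, GST_FORMAT_TIME, &duration))
      g_print ("new duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (duration));
  }
  return TRUE; /* keep the bus watch installed */
}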
Example 15: gst_visual_gl_change_state
static GstStateChangeReturn
gst_visual_gl_change_state (GstElement * element, GstStateChange transition)
{
GstVisualGL *visual = GST_VISUAL_GL (element);
GstStateChangeReturn ret;
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
GstElement *parent = GST_ELEMENT (gst_element_get_parent (visual));
GstStructure *structure = NULL;
GstQuery *query = NULL;
gboolean isPerformed = FALSE;
gchar *name;
if (!parent) {
GST_ELEMENT_ERROR (visual, CORE, STATE_CHANGE, (NULL),
("A parent bin is required"));
return FALSE;
}
name = gst_element_get_name (visual);
structure = gst_structure_new (name, NULL);
query = gst_query_new_application (GST_QUERY_CUSTOM, structure);
g_free (name);
isPerformed = gst_element_query (parent, query);
if (isPerformed) {
const GValue *id_value =
gst_structure_get_value (structure, "gstgldisplay");
if (G_VALUE_HOLDS_POINTER (id_value))
/* at least one gl element is after in our gl chain */
visual->display =
gst_object_ref (GST_GL_DISPLAY (g_value_get_pointer (id_value)));
else {
/* this gl filter is a sink in terms of the gl chain */
visual->display = gst_gl_display_new ();
gst_gl_display_create_context (visual->display, 0);
//TODO visual->external_gl_context);
}
gst_visual_gl_reset (visual);
visual->actor =
visual_actor_new (GST_VISUAL_GL_GET_CLASS (visual)->plugin->info->
plugname);
visual->video = visual_video_new ();
visual->audio = visual_audio_new ();
if (!visual->actor || !visual->video)
goto actor_setup_failed;
gst_gl_display_thread_add (visual->display,
(GstGLDisplayThreadFunc) actor_setup, visual);
if (visual->actor_setup_result != 0)
goto actor_setup_failed;
else
visual_actor_set_video (visual->actor, visual->video);
}
gst_query_unref (query);
gst_object_unref (GST_OBJECT (parent));
if (!isPerformed)
return GST_STATE_CHANGE_FAILURE;
}
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
{
if (visual->fbo) {
gst_gl_display_del_fbo (visual->display, visual->fbo,
visual->depthbuffer);
visual->fbo = 0;
visual->depthbuffer = 0;
}
if (visual->midtexture) {
gst_gl_display_del_texture (visual->display, visual->midtexture,
visual->width, visual->height);
visual->midtexture = 0;
}
if (visual->display) {
gst_object_unref (visual->display);
visual->display = NULL;
}
//......... the remainder of this example is omitted .........