This page collects typical usage examples of GST_EVENT_TYPE (a GStreamer macro) in C/C++ code. If you have been wondering what GST_EVENT_TYPE does in practice, how to call it, or what real-world uses look like, the hand-picked examples below should help.
Fifteen code examples of GST_EVENT_TYPE are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
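Before the individual examples, here is a minimal sketch of the pattern that all fifteen share: a pad event handler that switches on GST_EVENT_TYPE (event), handles the event types it cares about, and forwards or eats the rest. The sketch is not taken from any project below; the element name and function name are hypothetical. It uses the GStreamer 1.0 pad-event signature, which some of the later examples (e.g. Example 8 and Example 10) also use; the older examples use the 0.10 two-argument signature instead.
#include <gst/gst.h>
/* Hypothetical element: a minimal sink-pad event handler built around
 * GST_EVENT_TYPE (). Handle the types you care about, forward the rest. */
static gboolean
my_element_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
gboolean ret;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
/* a real element would reconfigure itself for the new caps here */
ret = gst_pad_event_default (pad, parent, event);
break;
}
case GST_EVENT_EOS:
/* a real element would drain any buffered data before EOS is forwarded */
ret = gst_pad_event_default (pad, parent, event);
break;
default:
/* everything else is simply passed to the default handler */
ret = gst_pad_event_default (pad, parent, event);
break;
}
return ret;
}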
Example 1: vorbis_dec_sink_event
static gboolean
vorbis_dec_sink_event (GstPad * pad, GstEvent * event)
{
gboolean ret = FALSE;
GstVorbisDec *dec;
dec = GST_VORBIS_DEC (gst_pad_get_parent (pad));
GST_LOG_OBJECT (dec, "handling event");
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
if (dec->segment.rate < 0.0)
vorbis_dec_chain_reverse (dec, TRUE, NULL);
ret = gst_pad_push_event (dec->srcpad, event);
break;
case GST_EVENT_FLUSH_START:
ret = gst_pad_push_event (dec->srcpad, event);
break;
case GST_EVENT_FLUSH_STOP:
/* here we must clean any state in the decoder */
#ifdef HAVE_VORBIS_SYNTHESIS_RESTART
vorbis_synthesis_restart (&dec->vd);
#endif
gst_vorbis_dec_reset (dec);
ret = gst_pad_push_event (dec->srcpad, event);
break;
case GST_EVENT_NEWSEGMENT:
{
GstFormat format;
gdouble rate, arate;
gint64 start, stop, time;
gboolean update;
gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
&start, &stop, &time);
/* we need time for now */
if (format != GST_FORMAT_TIME)
goto newseg_wrong_format;
GST_DEBUG_OBJECT (dec,
"newsegment: update %d, rate %g, arate %g, start %" GST_TIME_FORMAT
", stop %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT,
update, rate, arate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
GST_TIME_ARGS (time));
/* now configure the values */
gst_segment_set_newsegment_full (&dec->segment, update,
rate, arate, format, start, stop, time);
dec->seqnum = gst_event_get_seqnum (event);
if (dec->initialized)
/* and forward */
ret = gst_pad_push_event (dec->srcpad, event);
else {
/* store it to send once we're initialized */
dec->pendingevents = g_list_append (dec->pendingevents, event);
ret = TRUE;
}
break;
}
case GST_EVENT_TAG:
{
if (dec->initialized)
/* and forward */
ret = gst_pad_push_event (dec->srcpad, event);
else {
/* store it to send once we're initialized */
dec->pendingevents = g_list_append (dec->pendingevents, event);
ret = TRUE;
}
break;
}
default:
ret = gst_pad_push_event (dec->srcpad, event);
break;
}
done:
gst_object_unref (dec);
return ret;
/* ERRORS */
newseg_wrong_format:
{
GST_DEBUG_OBJECT (dec, "received non TIME newsegment");
goto done;
}
}
Example 2: dxr3videosink_handle_event
static gboolean
dxr3videosink_handle_event (GstPad * pad, GstEvent * event)
{
GstEventType type;
Dxr3VideoSink *sink;
sink = DXR3VIDEOSINK (gst_pad_get_parent (pad));
type = event ? GST_EVENT_TYPE (event) : GST_EVENT_UNKNOWN;
switch (type) {
case GST_EVENT_EMPTY:
//fprintf (stderr, "++++++ Video empty event\n");
{
/* FIXME: Handle this with a discontinuity or something. */
/* Write an MPEG2 sequence end code, to ensure that the card
actually displays the last picture. Apparently some DVDs are
encoded without proper sequence end codes. */
static const guint8 sec[4] = { 0x00, 0x00, 0x01, 0xb7 };
if (sink->cur_buf != NULL) {
dxr3videosink_write_data (sink, 0);
}
write (sink->video_fd, &sec, 4);
}
break;
case GST_EVENT_DISCONTINUOUS:
//fprintf (stderr, "++++++ Video discont event\n");
{
gint64 time;
gboolean has_time;
unsigned cur_scr, mpeg_scr, diff;
has_time = gst_event_discont_get_value (event, GST_FORMAT_TIME, &time);
if (has_time) {
/* fprintf (stderr, "^^^^^^ Discontinuous event has time %.4f\n", */
/* (double) time / GST_SECOND); */
/* If the SCR in the card is way off, fix it. */
ioctl (sink->control_fd, EM8300_IOCTL_SCR_GET, &cur_scr);
mpeg_scr = MPEGTIME_TO_DXRTIME (GSTTIME_TO_MPEGTIME (time));
diff = cur_scr > mpeg_scr ? cur_scr - mpeg_scr : mpeg_scr - cur_scr;
if (diff > 1800) {
unsigned zero = 0;
/* fprintf (stderr, "====== Adjusting SCR from video\n"); */
ioctl (sink->control_fd, EM8300_IOCTL_SCR_SET, &zero);
ioctl (sink->control_fd, EM8300_IOCTL_SCR_SET, &mpeg_scr);
}
} else {
/* fprintf (stderr, "^^^^^^ Discontinuous event has no time\n"); */
}
}
break;
case GST_EVENT_FLUSH:
dxr3videosink_reset_parser (sink);
break;
default:
gst_pad_event_default (pad, event);
break;
}
return TRUE;
}
Example 3: handle_mq_input
static GstPadProbeReturn
handle_mq_input (GstPad * pad, GstPadProbeInfo * info, MqStreamCtx * ctx)
{
GstSplitMuxSink *splitmux = ctx->splitmux;
GstBuffer *buf;
MqStreamBuf *buf_info = NULL;
GstClockTime ts;
gboolean loop_again;
gboolean keyframe = FALSE;
GST_LOG_OBJECT (pad, "Fired probe type 0x%x\n", info->type);
/* FIXME: Handle buffer lists, until then make it clear they won't work */
if (info->type & GST_PAD_PROBE_TYPE_BUFFER_LIST) {
g_warning ("Buffer list handling not implemented");
return GST_PAD_PROBE_DROP;
}
if (info->type & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
GstEvent *event = gst_pad_probe_info_get_event (info);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEGMENT:
gst_event_copy_segment (event, &ctx->in_segment);
break;
case GST_EVENT_FLUSH_STOP:
GST_SPLITMUX_LOCK (splitmux);
gst_segment_init (&ctx->in_segment, GST_FORMAT_UNDEFINED);
ctx->in_eos = FALSE;
ctx->in_bytes = 0;
ctx->in_running_time = 0;
GST_SPLITMUX_UNLOCK (splitmux);
break;
case GST_EVENT_EOS:
GST_SPLITMUX_LOCK (splitmux);
ctx->in_eos = TRUE;
if (splitmux->state == SPLITMUX_STATE_STOPPED)
goto beach;
if (ctx->is_video) {
GST_INFO_OBJECT (splitmux, "Got Video EOS. Finishing up");
/* Act as if this is a new keyframe with infinite timestamp */
splitmux->max_in_running_time = GST_CLOCK_TIME_NONE;
splitmux->state = SPLITMUX_STATE_WAITING_GOP_COMPLETE;
/* Wake up other input pads to collect this GOP */
GST_SPLITMUX_BROADCAST (splitmux);
check_completed_gop (splitmux, ctx);
} else if (splitmux->state == SPLITMUX_STATE_WAITING_GOP_COMPLETE) {
/* If we are waiting for a GOP to be completed (ie, for aux
* pads to catch up), then this pad is complete, so check
* if the whole GOP is.
*/
check_completed_gop (splitmux, ctx);
}
GST_SPLITMUX_UNLOCK (splitmux);
break;
default:
break;
}
return GST_PAD_PROBE_PASS;
}
buf = gst_pad_probe_info_get_buffer (info);
ctx->in_running_time = gst_segment_to_running_time (&ctx->in_segment,
GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));
buf_info = mq_stream_buf_new ();
if (GST_BUFFER_PTS_IS_VALID (buf))
ts = GST_BUFFER_PTS (buf);
else
ts = GST_BUFFER_DTS (buf);
GST_SPLITMUX_LOCK (splitmux);
if (splitmux->state == SPLITMUX_STATE_STOPPED)
goto beach;
/* If this buffer has a timestamp, advance the input timestamp of the
* stream */
if (GST_CLOCK_TIME_IS_VALID (ts)) {
GstClockTime running_time =
gst_segment_to_running_time (&ctx->in_segment, GST_FORMAT_TIME,
GST_BUFFER_TIMESTAMP (buf));
if (GST_CLOCK_TIME_IS_VALID (running_time) &&
(ctx->in_running_time == GST_CLOCK_TIME_NONE
|| running_time > ctx->in_running_time))
ctx->in_running_time = running_time;
}
/* Try to make sure we have a valid running time */
if (!GST_CLOCK_TIME_IS_VALID (ctx->in_running_time)) {
ctx->in_running_time =
gst_segment_to_running_time (&ctx->in_segment, GST_FORMAT_TIME,
ctx->in_segment.start);
}
buf_info->run_ts = ctx->in_running_time;
buf_info->buf_size = gst_buffer_get_size (buf);
/* Update total input byte counter for overflow detect */
//......... part of the code is omitted here .........
Example 4: pad_event
static gboolean
pad_event (GstPad *pad,
GstEvent *event)
{
GstOmxBaseFilter *self;
GOmxCore *gomx;
GOmxPort *in_port;
gboolean ret = TRUE;
self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad));
gomx = self->gomx;
in_port = self->in_port;
GST_LOG_OBJECT (self, "begin");
GST_INFO_OBJECT (self, "event: %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event))
{
case GST_EVENT_EOS:
/* if we are init'ed, and there is a running loop; then
* if we get a buffer to inform it of EOS, let it handle the rest
* in any other case, we send EOS */
if (self->ready && self->last_pad_push_return == GST_FLOW_OK)
{
/* send buffer with eos flag */
/** @todo move to util */
{
OMX_BUFFERHEADERTYPE *omx_buffer;
GST_LOG_OBJECT (self, "request buffer");
omx_buffer = g_omx_port_request_buffer (in_port);
if (G_LIKELY (omx_buffer))
{
omx_buffer->nFlags |= OMX_BUFFERFLAG_EOS;
GST_LOG_OBJECT (self, "release_buffer");
/* foo_buffer_untaint (omx_buffer); */
g_omx_port_release_buffer (in_port, omx_buffer);
/* loop handles EOS, eat it here */
gst_event_unref (event);
break;
}
}
}
/* we tried, but it's up to us here */
ret = gst_pad_push_event (self->srcpad, event);
break;
case GST_EVENT_FLUSH_START:
gst_pad_push_event (self->srcpad, event);
self->last_pad_push_return = GST_FLOW_WRONG_STATE;
g_omx_core_flush_start (gomx);
gst_pad_pause_task (self->srcpad);
ret = TRUE;
break;
case GST_EVENT_FLUSH_STOP:
gst_pad_push_event (self->srcpad, event);
self->last_pad_push_return = GST_FLOW_OK;
g_omx_core_flush_stop (gomx);
if (self->ready)
gst_pad_start_task (self->srcpad, output_loop, self->srcpad);
ret = TRUE;
break;
case GST_EVENT_NEWSEGMENT:
ret = gst_pad_push_event (self->srcpad, event);
break;
default:
ret = gst_pad_push_event (self->srcpad, event);
break;
}
GST_LOG_OBJECT (self, "end");
return ret;
}
Example 5: celt_dec_sink_event
static gboolean
celt_dec_sink_event (GstPad * pad, GstEvent * event)
{
GstCeltDec *dec;
gboolean ret = FALSE;
dec = GST_CELT_DEC (gst_pad_get_parent (pad));
GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NEWSEGMENT:{
GstFormat format;
gdouble rate, arate;
gint64 start, stop, time;
gboolean update;
gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
&start, &stop, &time);
if (format != GST_FORMAT_TIME)
goto newseg_wrong_format;
if (rate <= 0.0)
goto newseg_wrong_rate;
if (update) {
/* time progressed without data, see if we can fill the gap with
* some concealment data */
if (dec->segment.last_stop < start) {
GstClockTime duration;
duration = start - dec->segment.last_stop;
celt_dec_chain_parse_data (dec, NULL, dec->segment.last_stop,
duration);
}
}
/* now configure the values */
gst_segment_set_newsegment_full (&dec->segment, update,
rate, arate, GST_FORMAT_TIME, start, stop, time);
dec->granulepos = -1;
GST_DEBUG_OBJECT (dec, "segment now: cur = %" GST_TIME_FORMAT " [%"
GST_TIME_FORMAT " - %" GST_TIME_FORMAT "]",
GST_TIME_ARGS (dec->segment.last_stop),
GST_TIME_ARGS (dec->segment.start),
GST_TIME_ARGS (dec->segment.stop));
ret = gst_pad_push_event (dec->srcpad, event);
break;
}
default:
ret = gst_pad_event_default (pad, event);
break;
}
gst_object_unref (dec);
return ret;
/* ERRORS */
newseg_wrong_format:
{
GST_DEBUG_OBJECT (dec, "received non TIME newsegment");
gst_object_unref (dec);
return FALSE;
}
newseg_wrong_rate:
{
GST_DEBUG_OBJECT (dec, "negative rates not supported yet");
gst_object_unref (dec);
return FALSE;
}
}
Example 6: rsn_audiomunge_sink_event
static gboolean
rsn_audiomunge_sink_event (GstPad * pad, GstEvent * event)
{
gboolean ret = FALSE;
RsnAudioMunge *munge = RSN_AUDIOMUNGE (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
rsn_audiomunge_reset (munge);
ret = gst_pad_push_event (munge->srcpad, event);
break;
case GST_EVENT_NEWSEGMENT:
{
GstSegment *segment;
gboolean update;
GstFormat format;
gdouble rate, arate;
gint64 start, stop, time;
gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
&start, &stop, &time);
/* we need TIME format */
if (format != GST_FORMAT_TIME)
goto newseg_wrong_format;
/* now configure the values */
segment = &munge->sink_segment;
gst_segment_set_newsegment_full (segment, update,
rate, arate, format, start, stop, time);
if (munge->have_audio) {
ret = gst_pad_push_event (munge->srcpad, event);
break;
}
/*
* FIXME:
* If the accum >= threshold or we're in a still frame and there's been
* no audio received, then we need to generate some audio data.
* If caused by a segment start update (time advancing in a gap) adjust
* the new-segment and send the buffer.
*
* Otherwise, send the buffer before the newsegment, so that it appears
* in the closing segment.
*/
if (!update) {
GST_DEBUG_OBJECT (munge, "Sending newsegment: start %" GST_TIME_FORMAT
" stop %" GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT,
GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
GST_TIME_ARGS (segment->accum));
ret = gst_pad_push_event (munge->srcpad, event);
}
if (segment->accum >= AUDIO_FILL_THRESHOLD || munge->in_still) {
g_print ("*********** Sending audio fill: accum = %" GST_TIME_FORMAT
" still-state=%d\n", GST_TIME_ARGS (segment->accum),
munge->in_still);
/* Just generate a 100ms silence buffer for now. FIXME: Fill the gap */
if (rsn_audiomunge_make_audio (munge, segment->start,
GST_SECOND / 10) == GST_FLOW_OK)
munge->have_audio = TRUE;
} else {
GST_LOG_OBJECT (munge, "Not sending audio fill buffer: "
"segment accum below thresh: accum = %" GST_TIME_FORMAT,
GST_TIME_ARGS (segment->accum));
}
if (update) {
GST_DEBUG_OBJECT (munge, "Sending newsegment: start %" GST_TIME_FORMAT
" stop %" GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT,
GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
GST_TIME_ARGS (segment->accum));
ret = gst_pad_push_event (munge->srcpad, event);
}
break;
}
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
const GstStructure *s = gst_event_get_structure (event);
if (s && gst_structure_has_name (s, "application/x-gst-dvd"))
rsn_audiomunge_handle_dvd_event (munge, event);
ret = gst_pad_push_event (munge->srcpad, event);
break;
}
default:
ret = gst_pad_push_event (munge->srcpad, event);
break;
}
return ret;
newseg_wrong_format:
//......... part of the code is omitted here .........
Example 7: gst_base_video_codec_src_event
static gboolean
gst_base_video_codec_src_event (GstPad * pad, GstEvent * event)
{
GstBaseVideoCodec *base_video_codec;
gboolean res = FALSE;
base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
{
GstFormat format, tformat;
gdouble rate;
GstEvent *real_seek;
GstSeekFlags flags;
GstSeekType cur_type, stop_type;
gint64 cur, stop;
gint64 tcur, tstop;
gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
&cur, &stop_type, &stop);
gst_event_unref (event);
tformat = GST_FORMAT_TIME;
res = gst_base_video_encoded_video_convert (&base_video_codec->state,
format, cur, &tformat, &tcur);
if (!res)
goto convert_error;
res = gst_base_video_encoded_video_convert (&base_video_codec->state,
format, stop, &tformat, &tstop);
if (!res)
goto convert_error;
real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
flags, cur_type, tcur, stop_type, tstop);
res = gst_pad_push_event (base_video_codec->sinkpad, real_seek);
break;
}
#if 0
case GST_EVENT_QOS:
{
gdouble proportion;
GstClockTimeDiff diff;
GstClockTime timestamp;
gst_event_parse_qos (event, &proportion, &diff, &timestamp);
GST_OBJECT_LOCK (base_video_codec);
base_video_codec->proportion = proportion;
base_video_codec->earliest_time = timestamp + diff;
GST_OBJECT_UNLOCK (base_video_codec);
GST_DEBUG_OBJECT (base_video_codec,
"got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
GST_TIME_ARGS (timestamp), diff);
res = gst_pad_push_event (base_video_codec->sinkpad, event);
break;
}
#endif
default:
res = gst_pad_push_event (base_video_codec->sinkpad, event);
break;
}
done:
gst_object_unref (base_video_codec);
return res;
convert_error:
GST_DEBUG_OBJECT (base_video_codec, "could not convert format");
goto done;
}
Example 8: gst_interlace_sink_event
static gboolean
gst_interlace_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
gboolean ret;
GstInterlace *interlace;
interlace = GST_INTERLACE (parent);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
GST_DEBUG_OBJECT (interlace, "handling FLUSH_START");
ret = gst_pad_push_event (interlace->srcpad, event);
break;
case GST_EVENT_FLUSH_STOP:
GST_DEBUG_OBJECT (interlace, "handling FLUSH_STOP");
gst_interlace_reset (interlace);
ret = gst_pad_push_event (interlace->srcpad, event);
break;
case GST_EVENT_EOS:
#if 0
/* FIXME revive this when we output ONEFIELD and RFF buffers */
{
gint num_fields;
const PulldownFormat *format = &formats[interlace->pattern];
num_fields =
format->n_fields[interlace->phase_index] -
interlace->stored_fields_pushed;
interlace->stored_fields_pushed = 0;
/* on EOS we want to push as many sane frames as are left */
while (num_fields > 1) {
GstBuffer *output_buffer;
/* make metadata writable before editing it */
interlace->stored_frame =
gst_buffer_make_metadata_writable (interlace->stored_frame);
num_fields -= 2;
gst_interlace_decorate_buffer (interlace, interlace->stored_frame,
num_fields, FALSE);
/* ref output_buffer/stored frame because we want to keep it for now
* and pushing gives away a ref */
output_buffer = gst_buffer_ref (interlace->stored_frame);
if (gst_pad_push (interlace->srcpad, output_buffer)) {
GST_DEBUG_OBJECT (interlace, "Failed to push buffer %p",
output_buffer);
return FALSE;
}
output_buffer = NULL;
if (num_fields <= 1) {
gst_buffer_unref (interlace->stored_frame);
interlace->stored_frame = NULL;
break;
}
}
/* increment the phase index */
interlace->phase_index++;
if (!format->n_fields[interlace->phase_index]) {
interlace->phase_index = 0;
}
}
#endif
ret = gst_pad_push_event (interlace->srcpad, event);
break;
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
ret = gst_interlace_setcaps (interlace, caps);
gst_event_unref (event);
break;
}
default:
ret = gst_pad_push_event (interlace->srcpad, event);
break;
}
return ret;
}
Example 9: _sink_event
/* GstAggregator vmethods default implementations */
static gboolean
_sink_event (GstAggregator * self, GstAggregatorPad * aggpad, GstEvent * event)
{
gboolean res = TRUE;
GstPad *pad = GST_PAD (aggpad);
GstAggregatorPrivate *priv = self->priv;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
{
_flush_start (self, aggpad, event);
/* We forward only in one case: right after flush_seeking */
event = NULL;
goto eat;
}
case GST_EVENT_FLUSH_STOP:
{
GST_DEBUG_OBJECT (aggpad, "Got FLUSH_STOP");
_aggpad_flush (aggpad, self);
if (g_atomic_int_get (&priv->flush_seeking)) {
g_atomic_int_set (&aggpad->priv->pending_flush_stop, FALSE);
if (g_atomic_int_get (&priv->flush_seeking)) {
if (_all_flush_stop_received (self)) {
/* That means we received FLUSH_STOP/FLUSH_STOP on
* all sinkpads -- Seeking is Done... sending FLUSH_STOP */
_flush (self);
gst_pad_push_event (self->srcpad, event);
priv->send_eos = TRUE;
event = NULL;
QUEUE_PUSH (self);
GST_INFO_OBJECT (self, "Releasing source pad STREAM_LOCK");
GST_PAD_STREAM_UNLOCK (self->srcpad);
_start_srcpad_task (self);
}
}
}
/* We never forward the event */
goto eat;
}
case GST_EVENT_EOS:
{
GST_DEBUG_OBJECT (aggpad, "EOS");
/* We still have a buffer, and we don't want the subclass to have to
* check for it. Mark pending_eos, eos will be set when steal_buffer is
* called
*/
PAD_LOCK_EVENT (aggpad);
if (!aggpad->buffer) {
aggpad->eos = TRUE;
} else {
aggpad->priv->pending_eos = TRUE;
}
PAD_UNLOCK_EVENT (aggpad);
QUEUE_PUSH (self);
goto eat;
}
case GST_EVENT_SEGMENT:
{
PAD_LOCK_EVENT (aggpad);
gst_event_copy_segment (event, &aggpad->segment);
self->priv->seqnum = gst_event_get_seqnum (event);
PAD_UNLOCK_EVENT (aggpad);
goto eat;
}
case GST_EVENT_STREAM_START:
{
goto eat;
}
case GST_EVENT_TAG:
{
GstTagList *tags;
gst_event_parse_tag (event, &tags);
if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
gst_aggregator_merge_tags (self, tags, GST_TAG_MERGE_REPLACE);
gst_event_unref (event);
event = NULL;
goto eat;
}
break;
}
default:
{
break;
}
}
GST_DEBUG_OBJECT (pad, "Forwarding event: %" GST_PTR_FORMAT, event);
return gst_pad_event_default (pad, GST_OBJECT (self), event);
eat:
GST_DEBUG_OBJECT (pad, "Eating event: %" GST_PTR_FORMAT, event);
//......... part of the code is omitted here .........
Example 10: gst_merger_event_srcv
static gboolean
gst_merger_event_srcv (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstMerger *merger = GST_MERGER (parent);
gboolean ret = TRUE;          /* the CAPS and SEGMENT cases below fall through without setting ret */
GST_DEBUG_OBJECT (pad, "got source event %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
{
GstCaps *caps;
gst_event_parse_caps (event, &caps);
GST_DEBUG_OBJECT (pad, "event caps are %" GST_PTR_FORMAT, caps);
break;
}
case GST_EVENT_EOS:
process_eos (merger);
gst_event_unref (event);
ret = TRUE;
break;
case GST_EVENT_SEGMENT:
gst_event_copy_segment (event, &merger->s_segment);
GST_DEBUG_OBJECT (merger, "segment: %" GST_SEGMENT_FORMAT,
&merger->s_segment);
break;
case GST_EVENT_QOS:
{
GstQOSType type;
gdouble proportion;
GstClockTimeDiff diff;
GstClockTime timestamp;
gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
if (type == GST_QOS_TYPE_UNDERFLOW) {
GST_INFO_OBJECT (pad,
"got QOS event UNDERFLOW proportion %f diff %" PRIu64 " ts %"
PRIu64, proportion, diff, timestamp);
GST_OBJECT_LOCK (merger);
gint num, den;
{
GstCaps *caps = gst_pad_get_current_caps (merger->srcv_pad);
GstStructure *s = gst_caps_get_structure (caps, 0);
gst_structure_get_fraction (s, "framerate", &num, &den);
gst_caps_unref (caps);
}
int nb_bufs = 0.5 + diff / (1e9 * den / num);
GST_WARNING_OBJECT (merger, "Discarding %d buffers", nb_bufs);
g_queue_pop_head (&merger->bufs_l);
g_queue_pop_head (&merger->bufs_r);
GST_OBJECT_UNLOCK (merger);
} else {
GST_WARNING_OBJECT (pad, "QOS type %d not implemented", type);
ret = gst_pad_event_default (pad, parent, event);
}
}
break;
default:
GST_INFO_OBJECT (pad, "got source event %s", GST_EVENT_TYPE_NAME (event));
ret = gst_pad_event_default (pad, parent, event);
break;
}
return ret;
}
Example 11: gst_gme_dec_src_event
static gboolean
gst_gme_dec_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
GstGmeDec *gme = GST_GME_DEC (parent);
gboolean result = FALSE;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
{
gdouble rate;
GstFormat format;
GstSeekFlags flags;
GstSeekType start_type, stop_type;
gint64 start, stop;
gboolean flush;
gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
&stop_type, &stop);
gst_event_unref (event);
if (format != GST_FORMAT_TIME) {
GST_DEBUG_OBJECT (gme, "seeking is only supported in TIME format");
break;
}
if (start_type != GST_SEEK_TYPE_SET || stop_type != GST_SEEK_TYPE_NONE) {
GST_DEBUG_OBJECT (gme, "unsupported seek type");
break;
}
if (stop_type == GST_SEEK_TYPE_NONE)
stop = GST_CLOCK_TIME_NONE;
if (start_type == GST_SEEK_TYPE_SET) {
GstSegment seg;
guint64 cur = gme_tell (gme->player) * GST_MSECOND;
guint64 dest = (guint64) start;
if (gme->total_duration != GST_CLOCK_TIME_NONE)
dest = CLAMP (dest, 0, gme->total_duration);
else
dest = MAX (0, dest);
if (dest == cur)
break;
flush = (flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH;
if (flush) {
gst_pad_push_event (gme->srcpad, gst_event_new_flush_start ());
} else {
gst_pad_stop_task (gme->srcpad);
}
GST_PAD_STREAM_LOCK (gme->srcpad);
if (flags & GST_SEEK_FLAG_SEGMENT) {
gst_element_post_message (GST_ELEMENT (gme),
gst_message_new_segment_start (GST_OBJECT (gme), format, cur));
}
if (flush) {
gst_pad_push_event (gme->srcpad, gst_event_new_flush_stop (TRUE));
}
if (stop == GST_CLOCK_TIME_NONE
&& gme->total_duration != GST_CLOCK_TIME_NONE)
stop = gme->total_duration;
gst_segment_init (&seg, GST_FORMAT_TIME);
seg.rate = rate;
seg.start = dest;
seg.stop = stop;
seg.time = dest;
gst_pad_push_event (gme->srcpad, gst_event_new_segment (&seg));
gme->seekpoint = dest / GST_MSECOND; /* nsecs to msecs */
gme->seeking = TRUE;
gst_pad_start_task (gme->srcpad, (GstTaskFunction) gst_gme_play,
gme->srcpad, NULL);
GST_PAD_STREAM_UNLOCK (gme->srcpad);
result = TRUE;
}
break;
}
default:
result = gst_pad_push_event (gme->sinkpad, event);
break;
}
return result;
}
Example 12: gst_two_lame_sink_event
static gboolean
gst_two_lame_sink_event (GstPad * pad, GstEvent * event)
{
gboolean ret;
GstTwoLame *twolame;
twolame = GST_TWO_LAME (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:{
GST_DEBUG_OBJECT (twolame, "handling EOS event");
if (twolame->glopts != NULL) {
GstBuffer *buf;
gint size;
buf = gst_buffer_new_and_alloc (16384);
size =
twolame_encode_flush (twolame->glopts, GST_BUFFER_DATA (buf),
16384);             /* must not exceed the 16384 bytes allocated above */
if (size > 0 && twolame->last_flow == GST_FLOW_OK) {
gint64 duration;
duration = gst_util_uint64_scale (size, 8 * GST_SECOND,
1000 * twolame->bitrate);
if (twolame->last_ts == GST_CLOCK_TIME_NONE) {
twolame->last_ts = twolame->eos_ts;
twolame->last_duration = duration;
} else {
twolame->last_duration += duration;
}
GST_BUFFER_TIMESTAMP (buf) = twolame->last_ts;
GST_BUFFER_DURATION (buf) = twolame->last_duration;
twolame->last_ts = GST_CLOCK_TIME_NONE;
GST_BUFFER_SIZE (buf) = size;
GST_DEBUG_OBJECT (twolame, "pushing final packet of %u bytes", size);
gst_buffer_set_caps (buf, GST_PAD_CAPS (twolame->srcpad));
gst_pad_push (twolame->srcpad, buf);
} else {
GST_DEBUG_OBJECT (twolame, "no final packet (size=%d, last_flow=%s)",
size, gst_flow_get_name (twolame->last_flow));
gst_buffer_unref (buf);
}
}
ret = gst_pad_event_default (pad, event);
break;
}
case GST_EVENT_FLUSH_START:
GST_DEBUG_OBJECT (twolame, "handling FLUSH start event");
/* forward event */
ret = gst_pad_push_event (twolame->srcpad, event);
break;
case GST_EVENT_FLUSH_STOP:
{
guchar *mp3_data = NULL;
gint mp3_buffer_size, mp3_size = 0;
GST_DEBUG_OBJECT (twolame, "handling FLUSH stop event");
/* clear buffers */
mp3_buffer_size = 16384;
mp3_data = g_malloc (mp3_buffer_size);
mp3_size =
twolame_encode_flush (twolame->glopts, mp3_data, mp3_buffer_size);
ret = gst_pad_push_event (twolame->srcpad, event);
g_free (mp3_data);
break;
}
default:
ret = gst_pad_event_default (pad, event);
break;
}
gst_object_unref (twolame);
return ret;
}
Example 13: gst_identity_sink_event
static gboolean
gst_identity_sink_event (GstBaseTransform * trans, GstEvent * event)
{
GstIdentity *identity;
gboolean ret = TRUE;
identity = GST_IDENTITY (trans);
if (!identity->silent) {
const GstStructure *s;
const gchar *tstr;
gchar *sstr;
GST_OBJECT_LOCK (identity);
g_free (identity->last_message);
tstr = gst_event_type_get_name (GST_EVENT_TYPE (event));
if ((s = gst_event_get_structure (event)))
sstr = gst_structure_to_string (s);
else
sstr = g_strdup ("");
identity->last_message =
g_strdup_printf ("event ******* (%s:%s) E (type: %s (%d), %s) %p",
GST_DEBUG_PAD_NAME (trans->sinkpad), tstr, GST_EVENT_TYPE (event),
sstr, event);
g_free (sstr);
GST_OBJECT_UNLOCK (identity);
gst_identity_notify_last_message (identity);
}
if (identity->single_segment && (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT)) {
if (!trans->have_segment) {
GstEvent *news;
GstSegment segment;
gst_event_copy_segment (event, &segment);
gst_event_copy_segment (event, &trans->segment);
trans->have_segment = TRUE;
/* This is the first segment, send out a (0, -1) segment */
gst_segment_init (&segment, segment.format);
news = gst_event_new_segment (&segment);
gst_pad_event_default (trans->sinkpad, GST_OBJECT_CAST (trans), news);
} else {
/* need to track segment for proper running time */
gst_event_copy_segment (event, &trans->segment);
}
}
if (GST_EVENT_TYPE (event) == GST_EVENT_GAP &&
trans->have_segment && trans->segment.format == GST_FORMAT_TIME) {
GstClockTime start, dur;
gst_event_parse_gap (event, &start, &dur);
if (GST_CLOCK_TIME_IS_VALID (start)) {
start = gst_segment_to_running_time (&trans->segment,
GST_FORMAT_TIME, start);
gst_identity_do_sync (identity, start);
/* also transform GAP timestamp similar to buffer timestamps */
if (identity->single_segment) {
gst_event_unref (event);
event = gst_event_new_gap (start, dur);
}
}
}
/* Reset previous timestamp, duration and offsets on SEGMENT
* to prevent false warnings when checking for perfect streams */
if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
identity->prev_timestamp = identity->prev_duration = GST_CLOCK_TIME_NONE;
identity->prev_offset = identity->prev_offset_end = GST_BUFFER_OFFSET_NONE;
}
if (identity->single_segment && GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
/* eat up segments */
gst_event_unref (event);
ret = TRUE;
} else {
if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) {
GST_OBJECT_LOCK (identity);
if (identity->clock_id) {
GST_DEBUG_OBJECT (identity, "unlock clock wait");
gst_clock_id_unschedule (identity->clock_id);
gst_clock_id_unref (identity->clock_id);
identity->clock_id = NULL;
}
GST_OBJECT_UNLOCK (identity);
}
ret = GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
}
return ret;
}
Example 14: gst_vdp_mpeg_dec_sink_event
static gboolean
gst_vdp_mpeg_dec_sink_event (GstPad * pad, GstEvent * event)
{
GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
gboolean res;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
{
GST_DEBUG_OBJECT (mpeg_dec, "flush stop");
gst_vdp_mpeg_dec_flush (mpeg_dec);
res = gst_pad_push_event (mpeg_dec->src, event);
break;
}
case GST_EVENT_NEWSEGMENT:
{
gboolean update;
gdouble rate;
GstFormat format;
gint64 start;
gint64 stop;
gint64 position;
gst_event_parse_new_segment (event, &update, &rate, &format,
&start, &stop, &position);
if (format != GST_FORMAT_TIME) {
if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, start,
GST_FORMAT_TIME, &start))
goto convert_error;
if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, stop,
GST_FORMAT_TIME, &stop))
goto convert_error;
if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, position,
GST_FORMAT_TIME, &position))
goto convert_error;
gst_event_unref (event);
event = gst_event_new_new_segment (update, rate, GST_FORMAT_TIME, start,
stop, position);
}
g_mutex_lock (mpeg_dec->mutex);
/* if we seek ourselves we don't push out a newsegment now since we
* use the calculated timestamp of the first frame for this */
if (mpeg_dec->seeking) {
gst_event_unref (event);
res = TRUE;
g_mutex_unlock (mpeg_dec->mutex);
goto done;
}
g_mutex_unlock (mpeg_dec->mutex);
GST_DEBUG_OBJECT (mpeg_dec,
"Pushing new segment update %d format %d start %"
GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " position %"
GST_TIME_FORMAT, update, format, GST_TIME_ARGS (start),
GST_TIME_ARGS (stop), GST_TIME_ARGS (position));
convert_error:
res = gst_pad_push_event (mpeg_dec->src, event);
break;
}
default:
res = gst_pad_event_default (pad, event);
}
done:
gst_object_unref (mpeg_dec);
return res;
}
Example 15: xing_mp3_encoder_chain
static GstFlowReturn
xing_mp3_encoder_chain(GstPad *pad, GstBuffer *buf)
{
XingMp3Encoder *encoder;
GstFlowReturn ret = GST_FLOW_OK;
g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
g_return_val_if_fail(GST_IS_PAD(pad), GST_FLOW_ERROR);
g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);
encoder = XING_MP3_ENCODER(gst_pad_get_parent(pad));
if(GST_IS_EVENT(buf)) {
GstEvent *event = GST_EVENT(buf);
switch(GST_EVENT_TYPE(event)) {
case GST_EVENT_EOS:
encoder->at_end_of_stream = TRUE;
gst_event_unref(event);
break;
default:
gst_pad_event_default(pad, event);
return ret;
}
} else {
guchar *data;
gulong size;
gulong i, j;
float **buffer;
gint buf_size;
gint buf_pos;
if(!encoder->is_initialized) {
gst_buffer_unref(buf);
GST_ELEMENT_ERROR(encoder, CORE, NEGOTIATION, (NULL), ("Encoder not initialized"));
return GST_FLOW_UNEXPECTED;
}
if(!encoder->header_sent) {
E_CONTROL control;
MPEG_HEAD head;
guchar output_buffer[OUTPUT_BUFFER_SIZE];
gint buf_len;
if(!encoder->use_cbr) {
GstFlowReturn push_ret;
hx_mp3enc_l3_info_ec(encoder->xing_encoder, &control);
hx_mp3enc_l3_info_head(encoder->xing_encoder, &head);
buf_len = XingHeader(control.samprate, head.mode, control.cr_bit, control.original,
VBR_SCALE_FLAG /* FRAMES_FLAG | BYTES_FLAG | TOC_FLAG */, 0, 0,
control.vbr_flag ? control.vbr_mnr : -1, NULL,
output_buffer, 0, 0, 0);
if((push_ret = xing_mp3_encoder_push_buffer(encoder, output_buffer,
buf_len)) != GST_FLOW_OK) {
gst_buffer_unref(buf);
return push_ret;
}
}
encoder->header_sent = TRUE;
}
data = (guchar *)GST_BUFFER_DATA(buf);
buf_size = GST_BUFFER_SIZE(buf);
buf_pos = 0;
/* Transfer incoming data to internal buffer.
* TODO: Use a ring buffer, avoid memmove () */
while(buf_pos < buf_size) {
gint gulp = MIN(buf_size - buf_pos, INPUT_BUFFER_SIZE - encoder->input_buffer_pos);
memcpy(encoder->input_buffer + encoder->input_buffer_pos, data + buf_pos, gulp);
encoder->samples_in += gulp / (2 * encoder->channels);
encoder->input_buffer_pos += gulp;
buf_pos += gulp;
/* Pass data on to encoder */
while(encoder->input_buffer_pos >= encoder->bytes_per_frame) {
guchar output_buffer[OUTPUT_BUFFER_SIZE];
IN_OUT x;
x = hx_mp3enc_mp3_encode_frame(encoder->xing_encoder,
encoder->input_buffer, output_buffer);
if(x.in_bytes == 0 && x.out_bytes == 0) {
break;
}
memmove(encoder->input_buffer, encoder->input_buffer + x.in_bytes,
encoder->input_buffer_pos - x.in_bytes);
encoder->input_buffer_pos -= x.in_bytes;
/* Accept output from encoder and pass it on.
* TODO: Do this less often and save CPU */
//......... part of the code is omitted here .........