This article compiles typical usage examples of GST_CLOCK_TIME_IS_VALID in C++ (strictly speaking it is a macro from GStreamer's gst/gstclock.h, equally usable from C). If you are wondering what GST_CLOCK_TIME_IS_VALID does and how to use it, the hand-picked code examples below should help.
The following 15 code examples of GST_CLOCK_TIME_IS_VALID are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ examples.
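Before diving into the examples, it helps to know what the macro actually does: gstclock.h defines it as a comparison against the sentinel value GST_CLOCK_TIME_NONE, roughly ((GstClockTime)(time) != GST_CLOCK_TIME_NONE). A minimal sketch of the usual guard follows (the helper name buffer_end_time is made up for illustration):
/* Sketch: guard against unset timestamps before doing arithmetic on them.
 * GstClockTime is unsigned and GST_CLOCK_TIME_NONE is all-ones, so adding
 * to an invalid timestamp silently wraps around instead of failing. */
#include <gst/gst.h>
static GstClockTime
buffer_end_time (GstBuffer * buf)   /* hypothetical helper */
{
GstClockTime ts = GST_BUFFER_TIMESTAMP (buf);
GstClockTime dur = GST_BUFFER_DURATION (buf);
if (GST_CLOCK_TIME_IS_VALID (ts) && GST_CLOCK_TIME_IS_VALID (dur))
return ts + dur;            /* both fields set: end time is well defined */
return GST_CLOCK_TIME_NONE;   /* propagate "unknown" rather than garbage */
}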
Example 1: _set_duration
static gboolean
_set_duration (GESTimelineElement * element, GstClockTime duration)
{
GESTrackElement *object = GES_TRACK_ELEMENT (element);
GESTrackElementPrivate *priv = object->priv;
if (GST_CLOCK_TIME_IS_VALID (_MAXDURATION (element)) &&
duration > _INPOINT (object) + _MAXDURATION (element))
duration = _MAXDURATION (element) - _INPOINT (object);
if (priv->gnlobject != NULL) {
if (G_UNLIKELY (duration == _DURATION (object)))
return FALSE;
g_object_set (priv->gnlobject, "duration", duration, NULL);
} else
priv->pending_duration = duration;
_update_control_bindings (element, ges_timeline_element_get_inpoint (element),
duration);
return TRUE;
}
Example 2: g_signal_emit_by_name
void AudioTestSource_i::_new_gst_buffer(GstElement *sink, AudioTestSource_i* comp) {
static GstBuffer *buffer;
static std::vector<short> packet;
/* Retrieve the buffer */
g_signal_emit_by_name (sink, "pull-buffer", &buffer);
if (buffer) {
BULKIO::PrecisionUTCTime T;
/* Stamp the packet: use the buffer's timestamp when valid, otherwise fall back to the current host time */
if (GST_CLOCK_TIME_IS_VALID(buffer->timestamp)) {
T = _from_gst_timestamp(buffer->timestamp);
} else {
T = _now();
}
packet.resize(buffer->size / 2); // TODO the division should come from reading buffer->caps
memcpy(&packet[0], buffer->data, buffer->size);
comp->audio_out->pushPacket(packet, T, false, comp->stream_id);
gst_buffer_unref (buffer);
}
}
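Note that the "pull-buffer" signal and the direct buffer->timestamp / buffer->size / buffer->data accesses are the GStreamer 0.10 appsink API. Under GStreamer 1.x the equivalent is the "pull-sample" signal returning a GstSample, with GST_BUFFER_PTS() and gst_buffer_map() replacing the raw field accesses.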
Example 3: gst_audio_panorama_transform
/* this function does the actual processing
*/
static GstFlowReturn
gst_audio_panorama_transform (GstBaseTransform * base, GstBuffer * inbuf,
GstBuffer * outbuf)
{
GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
GstClockTime timestamp, stream_time;
GstMapInfo inmap, outmap;
timestamp = GST_BUFFER_TIMESTAMP (inbuf);
stream_time =
gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time);
gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
memset (outmap.data, 0, outmap.size);
} else {
/* output always stereo, input mono or stereo,
* and info describes input format */
guint num_samples = outmap.size / (2 * GST_AUDIO_INFO_BPS (&filter->info));
filter->process (filter, inmap.data, outmap.data, num_samples);
}
gst_buffer_unmap (inbuf, &inmap);
gst_buffer_unmap (outbuf, &outmap);
return GST_FLOW_OK;
}
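Example 3 above and examples 5 and 12 below all share the same GstController idiom: convert the buffer timestamp to stream time, then synchronize any controlled properties before touching the data. Distilled to its core, with hypothetical variable names (base is the GstBaseTransform, element whatever GstObject owns the controlled properties):
/* Recurring controller-sync idiom (sketch).
 * gst_segment_to_stream_time () returns GST_CLOCK_TIME_NONE when the
 * timestamp is invalid or falls outside the configured segment, so the
 * validity check doubles as an in-segment check. */
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buf);
GstClockTime stream_time =
gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (element), stream_time);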
Example 4: gst_mpegv_parse_parse_frame
static GstFlowReturn
gst_mpegv_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
GstBuffer *buffer = frame->buffer;
if (G_UNLIKELY (mpvparse->pichdr.pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_I))
GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
else
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
/* maybe only sequence in this buffer, though not recommended,
* so mark it as such and force 0 duration */
if (G_UNLIKELY (mpvparse->pic_offset < 0)) {
GST_DEBUG_OBJECT (mpvparse, "frame holds no picture data");
frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
GST_BUFFER_DURATION (buffer) = 0;
}
if (mpvparse->pic_offset > 4) {
gst_base_parse_set_ts_at_offset (parse, mpvparse->pic_offset - 4);
}
if (mpvparse->frame_repeat_count
&& GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
GST_BUFFER_DURATION (buffer) =
(1 + mpvparse->frame_repeat_count) * GST_BUFFER_DURATION (buffer) / 2;
}
if (G_UNLIKELY (mpvparse->drop && !mpvparse->config)) {
GST_DEBUG_OBJECT (mpvparse, "dropping frame as no config yet");
return GST_BASE_PARSE_FLOW_DROPPED;
}
gst_mpegv_parse_update_src_caps (mpvparse);
return GST_FLOW_OK;
}
Example 5: gst_burn_transform_frame
/* Actual processing. */
static GstFlowReturn
gst_burn_transform_frame (GstVideoFilter * vfilter,
GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
GstBurn *filter = GST_BURN (vfilter);
gint video_size, adjustment;
guint32 *src, *dest;
GstClockTime timestamp;
gint64 stream_time;
src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
video_size = GST_VIDEO_FRAME_WIDTH (in_frame) *
GST_VIDEO_FRAME_HEIGHT (in_frame);
/* GstController: update the properties */
timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
stream_time =
gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time);
GST_OBJECT_LOCK (filter);
adjustment = filter->adjustment;
GST_OBJECT_UNLOCK (filter);
/*** Now the image processing work.... ***/
gaudi_orc_burn (dest, src, adjustment, video_size);
return GST_FLOW_OK;
}
Example 6: gst_rtp_mpv_pay_handle_buffer
static GstFlowReturn
gst_rtp_mpv_pay_handle_buffer (GstBaseRTPPayload * basepayload,
GstBuffer * buffer)
{
GstRTPMPVPay *rtpmpvpay;
guint avail, packet_len;
GstClockTime timestamp, duration;
GstFlowReturn ret;
rtpmpvpay = GST_RTP_MPV_PAY (basepayload);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
gst_adapter_push (rtpmpvpay->adapter, buffer);
avail = gst_adapter_available (rtpmpvpay->adapter);
/* Initialize new RTP payload */
if (avail == 0) {
rtpmpvpay->first_ts = timestamp;
rtpmpvpay->duration = duration;
}
/* get packet length of previous data and this new data,
* payload length includes a 4 byte MPEG video-specific header */
packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);
if (gst_basertppayload_is_filled (basepayload,
packet_len, rtpmpvpay->duration + duration)) {
ret = gst_rtp_mpv_pay_flush (rtpmpvpay, timestamp, duration);
} else {
if (GST_CLOCK_TIME_IS_VALID (duration))
rtpmpvpay->duration += duration;
ret = GST_FLOW_OK;
}
return ret;
}
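The validity check before accumulating matters here: GST_CLOCK_TIME_NONE is the all-ones value of an unsigned 64-bit integer, so adding an invalid duration to rtpmpvpay->duration would wrap the running total and break the gst_basertppayload_is_filled() estimate on every subsequent buffer.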
Example 7: gst_goo_timestamp_gst2omx
/**
* Utility function to handle transferring Gstreamer timestamp to OMX
* timestamp. This function handles discontinuities and timestamp
* renormalization.
*
* @omx_buffer the destination OMX buffer for the timestamp
* @buffer the source Gstreamer buffer for the timestamp
* @normalize should this buffer be the one that we renormalize on
* (iff normalization is required)? (i.e. with TI OMX, you should
* only re-normalize on a video buffer)
*/
gboolean
gst_goo_timestamp_gst2omx (
OMX_BUFFERHEADERTYPE* omx_buffer,
GstBuffer* buffer,
gboolean normalize)
{
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
if (GST_GOO_UTIL_IS_DISCONT (buffer))
{
needs_normalization = TRUE;
GST_DEBUG ("needs_normalization");
}
if (needs_normalization && normalize)
{
GST_INFO ("Setting OMX_BUFFER_STARTTIME..");
omx_buffer->nFlags |= OMX_BUFFERFLAG_STARTTIME;
omx_normalize_timestamp = GST2OMX_TIMESTAMP ((gint64)timestamp);
needs_normalization = FALSE;
GST_DEBUG ("omx_normalize_timestamp=%lld", omx_normalize_timestamp);
}
/* transfer timestamp to openmax */
if (GST_CLOCK_TIME_IS_VALID (timestamp))
{
omx_buffer->nTimeStamp = GST2OMX_TIMESTAMP ((gint64)timestamp) - omx_normalize_timestamp;
GST_INFO ("OMX timestamp = %lld (%lld - %lld)", omx_buffer->nTimeStamp, GST2OMX_TIMESTAMP ((gint64)timestamp), omx_normalize_timestamp);
return TRUE;
}
else
{
GST_WARNING ("Invalid timestamp!");
return FALSE;
}
}
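needs_normalization, omx_normalize_timestamp and the GST2OMX_TIMESTAMP macro are defined at file scope outside this excerpt; the two variables carry the renormalization state (armed on a discontinuity, consumed by the next normalizing buffer) across calls.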
Example 8: gst_timed_value_control_source_unset
/**
* gst_timed_value_control_source_unset:
* @self: the #GstTimedValueControlSource object
* @timestamp: the time the control-change should be removed from
*
* Used to remove the value of given controller-handled property at a certain
* time.
*
* Returns: FALSE if the value couldn't be unset (i.e. not found), TRUE otherwise.
*/
gboolean
gst_timed_value_control_source_unset (GstTimedValueControlSource * self,
GstClockTime timestamp)
{
GSequenceIter *iter;
gboolean res = FALSE;
GstControlPoint *cp = NULL;
g_return_val_if_fail (GST_IS_TIMED_VALUE_CONTROL_SOURCE (self), FALSE);
g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp), FALSE);
g_mutex_lock (&self->lock);
/* check if a control point for the timestamp exists */
if (G_LIKELY (self->values) && (iter =
g_sequence_lookup (self->values, &timestamp,
(GCompareDataFunc) gst_control_point_find, NULL))) {
/* g_sequence_lookup returns an iter that matches the timestamp exactly;
* copy the control point before removing it so it can still be reported
* via the value-removed signal
*/
cp = g_slice_dup (GstControlPoint, g_sequence_get (iter));
g_sequence_remove (iter);
self->nvalues--;
self->valid_cache = FALSE;
res = TRUE;
}
g_mutex_unlock (&self->lock);
if (cp) {
g_signal_emit (self,
gst_timed_value_control_source_signals[VALUE_REMOVED_SIGNAL], 0, cp);
g_slice_free (GstControlPoint, cp);
}
return res;
}
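A typical calling pattern for this API, as a hedged sketch (GstInterpolationControlSource is one concrete GstTimedValueControlSource; error handling elided):
GstControlSource *cs = gst_interpolation_control_source_new ();
GstTimedValueControlSource *tvcs = GST_TIMED_VALUE_CONTROL_SOURCE (cs);
/* add a control point at 1s, then remove it again */
gst_timed_value_control_source_set (tvcs, 1 * GST_SECOND, 0.5);
if (!gst_timed_value_control_source_unset (tvcs, 1 * GST_SECOND))
g_warning ("no control point at 1s");
gst_object_unref (cs);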
Example 9: gst_direct_control_binding_get_value
static GValue *
gst_direct_control_binding_get_value (GstControlBinding * _self,
GstClockTime timestamp)
{
GstDirectControlBinding *self = GST_DIRECT_CONTROL_BINDING (_self);
GValue *dst_val = NULL;
gdouble src_val;
g_return_val_if_fail (GST_IS_DIRECT_CONTROL_BINDING (self), NULL);
g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp), NULL);
g_return_val_if_fail (GST_CONTROL_BINDING_PSPEC (self), NULL); /* function returns a GValue *, so fail with NULL, not FALSE */
/* get current value via control source */
if (gst_control_source_get_value (self->cs, timestamp, &src_val)) {
dst_val = g_new0 (GValue, 1);
g_value_init (dst_val, G_PARAM_SPEC_VALUE_TYPE (_self->pspec));
self->convert_g_value (self, src_val, dst_val);
} else {
GST_LOG ("no control value for property %s at ts %" GST_TIME_FORMAT,
_self->name, GST_TIME_ARGS (timestamp));
}
return dst_val;
}
Example 10: gst_frame_positionner_transform_ip
static GstFlowReturn
gst_frame_positionner_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
GstFramePositionnerMeta *meta;
GstFramePositionner *framepositionner = GST_FRAME_POSITIONNER (trans);
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buf);
if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
gst_object_sync_values (GST_OBJECT (trans), timestamp);
}
meta =
(GstFramePositionnerMeta *) gst_buffer_add_meta (buf,
gst_frame_positionner_get_info (), NULL);
GST_OBJECT_LOCK (framepositionner);
meta->alpha = framepositionner->alpha;
meta->posx = framepositionner->posx;
meta->posy = framepositionner->posy;
meta->zorder = framepositionner->zorder;
GST_OBJECT_UNLOCK (framepositionner);
return GST_FLOW_OK;
}
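Unlike examples 3, 5 and 12, this element synchronizes controlled values against the raw buffer timestamp rather than converting it to stream time first; the GST_CLOCK_TIME_IS_VALID guard is what keeps gst_object_sync_values() from being fed GST_CLOCK_TIME_NONE.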
Example 11: ges_layer_add_asset
/**
* ges_layer_add_asset:
* @layer: a #GESLayer
* @asset: The asset to extract the new clip from
* @start: The start value to set on the new #GESClip
* @inpoint: The inpoint value to set on the new #GESClip
* @duration: The duration value to set on the new #GESClip
* @track_types: The #GESTrackType to set on the new #GESClip
*
* Creates a clip from @asset, adds it to @layer and
* returns a reference to it.
*
* Returns: (transfer none): Created #GESClip
*/
GESClip *
ges_layer_add_asset (GESLayer * layer,
GESAsset * asset, GstClockTime start, GstClockTime inpoint,
GstClockTime duration, GESTrackType track_types)
{
GESClip *clip;
g_return_val_if_fail (GES_IS_LAYER (layer), NULL);
g_return_val_if_fail (GES_IS_ASSET (asset), NULL);
g_return_val_if_fail (g_type_is_a (ges_asset_get_extractable_type
(asset), GES_TYPE_CLIP), NULL);
GST_DEBUG_OBJECT (layer, "Adding asset %s with: start: %" GST_TIME_FORMAT
" inpoint: %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT
" track types: %d (%s)", ges_asset_get_id (asset), GST_TIME_ARGS (start),
GST_TIME_ARGS (inpoint), GST_TIME_ARGS (duration), track_types,
ges_track_type_name (track_types));
clip = GES_CLIP (ges_asset_extract (asset, NULL));
_set_start0 (GES_TIMELINE_ELEMENT (clip), start);
_set_inpoint0 (GES_TIMELINE_ELEMENT (clip), inpoint);
if (track_types != GES_TRACK_TYPE_UNKNOWN)
ges_clip_set_supported_formats (clip, track_types);
if (GST_CLOCK_TIME_IS_VALID (duration)) {
_set_duration0 (GES_TIMELINE_ELEMENT (clip), duration);
}
if (!ges_layer_add_clip (layer, clip)) {
gst_object_unref (clip);
return NULL;
}
return clip;
}
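A hedged usage sketch (the URI is a placeholder, layer an existing GESLayer; ges_uri_clip_asset_request_sync() is one way to obtain a suitable asset):
GError *error = NULL;
GESUriClipAsset *asset =
ges_uri_clip_asset_request_sync ("file:///path/to/media.ogv", &error);
/* Passing GST_CLOCK_TIME_NONE as duration skips the _set_duration0 call
 * above, leaving the clip at the duration the asset extraction produced. */
GESClip *clip = ges_layer_add_asset (layer, GES_ASSET (asset),
0, 0, GST_CLOCK_TIME_NONE, GES_TRACK_TYPE_UNKNOWN);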
Example 12: gst_audio_fx_base_iir_filter_transform_ip
/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_fx_base_iir_filter_transform_ip (GstBaseTransform * base,
GstBuffer * buf)
{
GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
guint num_samples;
GstClockTime timestamp, stream_time;
GstMapInfo map;
timestamp = GST_BUFFER_TIMESTAMP (buf);
stream_time =
gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (GST_CLOCK_TIME_IS_VALID (stream_time))
gst_object_sync_values (GST_OBJECT (filter), stream_time);
gst_buffer_map (buf, &map, GST_MAP_READWRITE);
num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
g_mutex_lock (&filter->lock);
if (filter->a == NULL || filter->b == NULL) {
g_warn_if_fail (filter->a != NULL && filter->b != NULL);
gst_buffer_unmap (buf, &map);
g_mutex_unlock (&filter->lock);
return GST_FLOW_ERROR;
}
filter->process (filter, map.data, num_samples);
g_mutex_unlock (&filter->lock);
gst_buffer_unmap (buf, &map);
return GST_FLOW_OK;
}
Example 13: gst_v4l2_video_dec_decide_allocation
static gboolean
gst_v4l2_video_dec_decide_allocation (GstVideoDecoder * decoder,
GstQuery * query)
{
GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
GstClockTime latency;
gboolean ret = FALSE;
if (gst_v4l2_object_decide_allocation (self->v4l2capture, query))
ret = GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
query);
if (GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration)) {
latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT " (%"
G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS (latency),
self->v4l2capture->min_buffers, self->v4l2capture->duration);
gst_video_decoder_set_latency (decoder, latency, latency);
} else {
GST_WARNING_OBJECT (self, "Duration invalid, not setting latency");
}
return ret;
}
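For example, with min_buffers = 4 and a per-frame duration of 40 ms (25 fps), the latency reported to the decoder base class would be 160 ms.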
Example 14: ges_track_object_set_duration_internal
static inline gboolean
ges_track_object_set_duration_internal (GESTrackObject * object,
guint64 duration)
{
GESTrackObjectPrivate *priv = object->priv;
GST_DEBUG ("object:%p, duration:%" GST_TIME_FORMAT,
object, GST_TIME_ARGS (duration));
if (GST_CLOCK_TIME_IS_VALID (priv->maxduration) &&
duration > object->inpoint + priv->maxduration)
duration = priv->maxduration - object->inpoint;
if (priv->gnlobject != NULL) {
if (G_UNLIKELY (duration == object->duration))
return FALSE;
g_object_set (priv->gnlobject, "duration", duration,
"media-duration", duration, NULL);
} else
priv->pending_duration = duration;
return TRUE;
}
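This appears to be an older revision of the clamping logic shown in example 1, with the additional "media-duration" property suggesting the GNonLin-era gnlobject API.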
Example 15: gst_mim_dec_chain
//......... part of the code omitted here .........
if (gst_adapter_available (mimdec->adapter) < payload_size + 24)
return GST_FLOW_OK;
/* We have a whole packet and have read the header, lets flush it out */
gst_adapter_flush (mimdec->adapter, 24);
frame_body = gst_adapter_map (mimdec->adapter, payload_size);
if (mimdec->buffer_size < 0) {
/* Check if its a keyframe, otherwise skip it */
if (GUINT32_FROM_LE (*((guint32 *) (frame_body + 12))) != 0) {
gst_adapter_unmap (mimdec->adapter);
gst_adapter_flush (mimdec->adapter, payload_size);
return GST_FLOW_OK;
}
if (!mimic_decoder_init (mimdec->dec, frame_body)) {
gst_adapter_unmap (mimdec->adapter);
gst_adapter_flush (mimdec->adapter, payload_size);
GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
("mimic_decoder_init error"));
return GST_FLOW_ERROR;
}
if (!mimic_get_property (mimdec->dec, "buffer_size",
&mimdec->buffer_size)) {
gst_adapter_unmap (mimdec->adapter);
gst_adapter_flush (mimdec->adapter, payload_size);
GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
("mimic_get_property('buffer_size') error"));
return GST_FLOW_ERROR;
}
mimic_get_property (mimdec->dec, "width", &width);
mimic_get_property (mimdec->dec, "height", &height);
GST_DEBUG_OBJECT (mimdec,
"Initialised decoder with %d x %d payload size %d buffer_size %d",
width, height, payload_size, mimdec->buffer_size);
caps = gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "RGB",
"framerate", GST_TYPE_FRACTION, 0, 1,
"width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
gst_pad_set_caps (mimdec->srcpad, caps);
gst_caps_unref (caps);
}
if (mimdec->need_segment) {
GstSegment segment;
gst_segment_init (&segment, GST_FORMAT_TIME);
if (GST_CLOCK_TIME_IS_VALID (in_time))
segment.start = in_time;
else
segment.start = current_ts * GST_MSECOND;
event = gst_event_new_segment (&segment);
}
mimdec->need_segment = FALSE;
if (event)
result = gst_pad_push_event (mimdec->srcpad, event);
event = NULL;
if (!result) {
GST_WARNING_OBJECT (mimdec, "gst_pad_push_event failed");
return GST_FLOW_ERROR;
}
out_buf = gst_buffer_new_allocate (NULL, mimdec->buffer_size, NULL);
gst_buffer_map (out_buf, &map, GST_MAP_READWRITE);
if (!mimic_decode_frame (mimdec->dec, frame_body, map.data)) {
GST_WARNING_OBJECT (mimdec, "mimic_decode_frame error\n");
gst_adapter_flush (mimdec->adapter, payload_size);
gst_buffer_unmap (out_buf, &map);
gst_buffer_unref (out_buf);
GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
("mimic_decode_frame error"));
return GST_FLOW_ERROR;
}
gst_buffer_unmap (out_buf, &map);
gst_adapter_flush (mimdec->adapter, payload_size);
if (GST_CLOCK_TIME_IS_VALID (in_time))
GST_BUFFER_TIMESTAMP (out_buf) = in_time;
else
GST_BUFFER_TIMESTAMP (out_buf) = current_ts * GST_MSECOND;
res = gst_pad_push (mimdec->srcpad, out_buf);
if (res != GST_FLOW_OK)
break;
}
return res;
}