This article collects typical usage examples of GST_BUFFER_FLAG_UNSET in C/C++ (strictly speaking a GStreamer macro rather than a function: it clears a flag on a GstBuffer). If you have been wondering what GST_BUFFER_FLAG_UNSET does, how to call it, or what it looks like in real code, the curated examples here should help.
Below are 15 code examples of GST_BUFFER_FLAG_UNSET, ordered by popularity by default.
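Before the examples, here is a minimal sketch of the pattern nearly all of them share (the mark_keyframe helper below is made up purely for illustration): GST_BUFFER_FLAG_UNSET clears a flag on a GstBuffer, GST_BUFFER_FLAG_SET sets one, and GST_BUFFER_FLAG_IS_SET tests one. The most common use, as Examples 2, 4 and 15 show, is clearing GST_BUFFER_FLAG_DELTA_UNIT on keyframes so downstream elements know the buffer can be decoded on its own.

#include <gst/gst.h>

/* Hypothetical helper, for illustration only: mark a buffer as a
 * keyframe (not a delta unit) or as a delta unit. */
static void
mark_keyframe (GstBuffer * buf, gboolean is_keyframe)
{
  if (is_keyframe)
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT))
    GST_LOG ("buffer %p depends on earlier buffers", buf);
}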
Example 1: handle_buffer
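/* From the rtponvifparse element: reads the ONVIF RTP header extension and
 * maps its C (clean point) and D (discontinuity) bits onto the DELTA_UNIT
 * and DISCONT buffer flags. */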
static gboolean
handle_buffer (GstRtpOnvifParse * self, GstBuffer * buf)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  guint8 *data;
  guint16 bits;
  guint wordlen;
  guint8 flags;
  /*
     guint64 timestamp;
     guint8 cseq;
   */

  if (!gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp)) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        ("Failed to map RTP buffer"), (NULL));
    return FALSE;
  }

  /* Check if the ONVIF RTP extension is present in the packet */
  if (!gst_rtp_buffer_get_extension_data (&rtp, &bits, (gpointer) & data,
          &wordlen))
    goto out;

  if (bits != EXTENSION_ID || wordlen != EXTENSION_SIZE)
    goto out;

  /* timestamp = GST_READ_UINT64_BE (data); TODO */
  flags = GST_READ_UINT8 (data + 8);
  /* cseq = GST_READ_UINT8 (data + 9); TODO */

  /* C */
  if (flags & (1 << 7))
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);

  /* E */
  /* if (flags & (1 << 6)); TODO */

  /* D */
  if (flags & (1 << 5))
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  else
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);

out:
  gst_rtp_buffer_unmap (&rtp);
  return TRUE;
}
Example 2: gst_mpegv_parse_parse_frame
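/* MPEG video parser: I-pictures are keyframes, so DELTA_UNIT is cleared for
 * them and set for every other picture type. */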
static GstFlowReturn
gst_mpegv_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstBuffer *buffer = frame->buffer;

  gst_mpegv_parse_update_src_caps (mpvparse);

  if (G_UNLIKELY (mpvparse->pichdr.pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_I))
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* maybe only sequence in this buffer, though not recommended,
   * so mark it as such and force 0 duration */
  if (G_UNLIKELY (mpvparse->pic_offset < 0)) {
    GST_DEBUG_OBJECT (mpvparse, "frame holds no picture data");
    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
    GST_BUFFER_DURATION (buffer) = 0;
  }

  if (mpvparse->frame_repeat_count
      && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
    GST_BUFFER_DURATION (buffer) =
        (1 + mpvparse->frame_repeat_count) * GST_BUFFER_DURATION (buffer) / 2;
  }

  if (G_UNLIKELY (mpvparse->drop && !mpvparse->config)) {
    GST_DEBUG_OBJECT (mpvparse, "dropping frame as no config yet");
    return GST_BASE_PARSE_FLOW_DROPPED;
  } else
    return GST_FLOW_OK;
}
Example 3: gst_vtenc_enqueue_buffer
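/* VideoToolbox encoder output callback: non-keyframe buffers get DISCONT
 * cleared and DELTA_UNIT set before being queued for pushing. */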
static VTStatus
gst_vtenc_enqueue_buffer (void *data, int a2, int a3, int a4,
    CMSampleBufferRef sbuf, int a6, int a7)
{
  GstVTEnc *self = data;
  gboolean is_keyframe;
  GstBuffer *buf;

  /* This may happen if we don't have enough bitrate */
  if (sbuf == NULL)
    goto beach;

  is_keyframe = gst_vtenc_buffer_is_keyframe (self, sbuf);
  if (self->expect_keyframe) {
    if (!is_keyframe)
      goto beach;
    CFDictionaryRemoveValue (self->options,
        *(self->ctx->vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
  }
  self->expect_keyframe = FALSE;

  buf = gst_core_media_buffer_new (self->ctx, sbuf);
  gst_buffer_copy_metadata (buf, self->cur_inbuf, GST_BUFFER_COPY_TIMESTAMPS);
  if (is_keyframe) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  } else {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  g_ptr_array_add (self->cur_outbufs, buf);

beach:
  return kVTSuccess;
}
Example 4: gst_h264_parse_parse_frame
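/* H.264 parser: clears DELTA_UNIT when the parsed frame is a keyframe and
 * sets it otherwise. */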
static GstFlowReturn
gst_h264_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstH264Parse *h264parse;
  GstBuffer *buffer;
  guint av;

  h264parse = GST_H264_PARSE (parse);
  buffer = frame->buffer;

  gst_h264_parse_update_src_caps (h264parse);

  gst_h264_params_get_timestamp (h264parse->params,
      &GST_BUFFER_TIMESTAMP (buffer), &GST_BUFFER_DURATION (buffer),
      h264parse->frame_start);

  if (h264parse->keyframe)
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* replace with transformed AVC output if applicable */
  av = gst_adapter_available (h264parse->frame_out);
  if (av) {
    GstBuffer *buf;

    buf = gst_adapter_take_buffer (h264parse->frame_out, av);
    gst_buffer_copy_metadata (buf, buffer, GST_BUFFER_COPY_ALL);
    gst_buffer_replace (&frame->buffer, buf);
  }

  return GST_FLOW_OK;
}
Example 5: TRACE_MEDIA_MESSAGE
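// WebKit MSE playback: clears DECODE_ONLY on the sample's buffer before
// pushing it to appsrc, so the buffer is rendered rather than only decoded.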
void PlaybackPipeline::enqueueSample(PassRefPtr<MediaSample> prsample)
{
    RefPtr<MediaSample> rsample = prsample;
    AtomicString trackId = rsample->trackID();

    TRACE_MEDIA_MESSAGE("enqueuing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT,
        trackId.string().utf8().data(), rsample->presentationTime().toFloat(),
        rsample->presentationSize().width(), rsample->presentationSize().height(),
        GST_TIME_ARGS(floatToGstClockTime(rsample->presentationTime().toDouble())));

    ASSERT(WTF::isMainThread());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);
    if (!stream) {
        WARN_MEDIA_MESSAGE("No stream!");
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }
    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(rsample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GstSample* gstsample = gst_sample_ref(sample->sample());
        GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
        push_sample(GST_APP_SRC(appsrc), gstsample);
        // gst_app_src_push_sample() uses transfer-none for gstsample
        gst_sample_unref(gstsample);
    }
}
Example 6: do_alloc_buffer
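/* GstBufferPool core allocation path: TAG_MEMORY is cleared on a freshly
 * allocated buffer, since that is the state the pool expects when the
 * buffer is later released back to it. */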
static GstFlowReturn
do_alloc_buffer (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstBufferPoolPrivate *priv = pool->priv;
  GstFlowReturn result;
  gint cur_buffers, max_buffers;
  GstBufferPoolClass *pclass;

  pclass = GST_BUFFER_POOL_GET_CLASS (pool);

  if (G_UNLIKELY (!pclass->alloc_buffer))
    goto no_function;

  max_buffers = priv->max_buffers;

  /* increment the allocation counter */
  cur_buffers = g_atomic_int_add (&priv->cur_buffers, 1);
  if (max_buffers && cur_buffers >= max_buffers)
    goto max_reached;

  result = pclass->alloc_buffer (pool, buffer, params);
  if (G_UNLIKELY (result != GST_FLOW_OK))
    goto alloc_failed;

  /* lock all metadata and mark as pooled, we want this to remain on
   * the buffer and we want to remove any other metadata that gets added
   * later */
  gst_buffer_foreach_meta (*buffer, mark_meta_pooled, pool);

  /* un-tag memory, this is how we expect the buffer when it is
   * released again */
  GST_BUFFER_FLAG_UNSET (*buffer, GST_BUFFER_FLAG_TAG_MEMORY);

  GST_LOG_OBJECT (pool, "allocated buffer %d/%d, %p", cur_buffers,
      max_buffers, *buffer);

  return result;

  /* ERRORS */
no_function:
  {
    GST_ERROR_OBJECT (pool, "no alloc function");
    return GST_FLOW_NOT_SUPPORTED;
  }
max_reached:
  {
    GST_DEBUG_OBJECT (pool, "max buffers reached");
    g_atomic_int_add (&priv->cur_buffers, -1);
    return GST_FLOW_EOS;
  }
alloc_failed:
  {
    GST_WARNING_OBJECT (pool, "alloc function failed");
    g_atomic_int_add (&priv->cur_buffers, -1);
    return result;
  }
}
Example 7: gst_inter_sub_src_create
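/* intersubsrc (GStreamer 1.x): DISCONT is cleared on every outgoing buffer
 * and then re-set on the very first one. */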
static GstFlowReturn
gst_inter_sub_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
  GstBuffer *buffer;

  GST_DEBUG_OBJECT (intersubsrc, "create");

  buffer = NULL;

  g_mutex_lock (&intersubsrc->surface->mutex);
  if (intersubsrc->surface->sub_buffer) {
    buffer = gst_buffer_ref (intersubsrc->surface->sub_buffer);
    //intersubsrc->surface->sub_buffer_count++;
    //if (intersubsrc->surface->sub_buffer_count >= 30) {
    gst_buffer_unref (intersubsrc->surface->sub_buffer);
    intersubsrc->surface->sub_buffer = NULL;
    //}
  }
  g_mutex_unlock (&intersubsrc->surface->mutex);

  if (buffer == NULL) {
    GstMapInfo map;

    buffer = gst_buffer_new_and_alloc (1);
    gst_buffer_map (buffer, &map, GST_MAP_WRITE);
    map.data[0] = 0;
    gst_buffer_unmap (buffer, &map);
  }

  buffer = gst_buffer_make_writable (buffer);

  /* FIXME: does this make sense? Rate is always 0 */
#if 0
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (GST_SECOND, intersubsrc->n_frames,
      intersubsrc->rate);
  GST_DEBUG_OBJECT (intersubsrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (GST_SECOND, (intersubsrc->n_frames + 1),
      intersubsrc->rate) - GST_BUFFER_TIMESTAMP (buffer);
#endif
  GST_BUFFER_OFFSET (buffer) = intersubsrc->n_frames;
  GST_BUFFER_OFFSET_END (buffer) = -1;
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (intersubsrc->n_frames == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  intersubsrc->n_frames++;

  *buf = buffer;

  return GST_FLOW_OK;
}
Example 8: gst_raw_video_parse_process
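/* Raw video parser: for interlaced input, the TFF (top-field-first) buffer
 * flag is set or cleared to match the configured field order. */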
static gboolean
gst_raw_video_parse_process (GstRawBaseParse * raw_base_parse,
    GstRawBaseParseConfig config, GstBuffer * in_data,
    G_GNUC_UNUSED gsize total_num_in_bytes,
    G_GNUC_UNUSED gsize num_valid_in_bytes, GstBuffer ** processed_data)
{
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
  GstRawVideoParseConfig *config_ptr =
      gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
  guint frame_flags = 0;
  GstVideoInfo *video_info = &(config_ptr->info);
  GstVideoMeta *videometa;
  GstBuffer *out_data;

  /* In case of extra padding bytes, get a subbuffer without the padding bytes.
   * Otherwise, just add the video meta. */
  if (GST_VIDEO_INFO_SIZE (video_info) < config_ptr->frame_stride) {
    *processed_data = out_data =
        gst_buffer_copy_region (in_data,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
        GST_BUFFER_COPY_MEMORY, 0, GST_VIDEO_INFO_SIZE (video_info));
  } else {
    out_data = in_data;
    *processed_data = NULL;
  }

  if (config_ptr->interlaced) {
    GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_INTERLACED);
    frame_flags |= GST_VIDEO_FRAME_FLAG_INTERLACED;

    if (config_ptr->top_field_first) {
      GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
      frame_flags |= GST_VIDEO_FRAME_FLAG_TFF;
    } else
      GST_BUFFER_FLAG_UNSET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  /* Remove any existing videometa - it will be replaced by the new videometa
   * from here */
  while ((videometa = gst_buffer_get_video_meta (out_data))) {
    GST_LOG_OBJECT (raw_base_parse, "removing existing videometa from buffer");
    gst_buffer_remove_meta (out_data, (GstMeta *) videometa);
  }

  gst_buffer_add_video_meta_full (out_data,
      frame_flags,
      config_ptr->format,
      config_ptr->width,
      config_ptr->height,
      GST_VIDEO_INFO_N_PLANES (video_info),
      config_ptr->plane_offsets, config_ptr->plane_strides);

  return TRUE;
}
Example 9: gst_v4l2_buffer_finalize
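/* v4l2 buffer pool (GStreamer 0.10): a buffer revived for reuse gets its
 * DISCONT flag cleared before going back into the pool. */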
static void
gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
{
  GstV4l2BufferPool *pool;
  gboolean resuscitated = FALSE;
  gint index;

  pool = buffer->pool;

  index = buffer->vbuffer.index;

  GST_LOG_OBJECT (pool->v4l2elem, "finalizing buffer %p %d", buffer, index);

  GST_V4L2_BUFFER_POOL_LOCK (pool);
  if (pool->running) {
    if (pool->requeuebuf) {
      if (!gst_v4l2_buffer_pool_qbuf (pool, buffer)) {
        GST_WARNING ("could not requeue buffer %p %d", buffer, index);
      } else {
        resuscitated = TRUE;
      }
    } else {
      resuscitated = TRUE;
      /* XXX double check this... I think it is ok to not synchronize this
       * w.r.t. destruction of the pool, since the buffer is still live and
       * the buffer holds a ref to the pool..
       */
      g_async_queue_push (pool->avail_buffers, buffer);
    }
  } else {
    GST_LOG_OBJECT (pool->v4l2elem, "the pool is shutting down");
  }

  if (resuscitated) {
    /* FIXME: check that the caps didn't change */
    GST_LOG_OBJECT (pool->v4l2elem, "reviving buffer %p, %d", buffer, index);
    gst_buffer_ref (GST_BUFFER (buffer));
    GST_BUFFER_SIZE (buffer) = 0;
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
    pool->buffers[index] = buffer;
  }
  GST_V4L2_BUFFER_POOL_UNLOCK (pool);

  if (!resuscitated) {
    GST_LOG_OBJECT (pool->v4l2elem,
        "buffer %p (data %p, len %u) not recovered, unmapping",
        buffer, GST_BUFFER_DATA (buffer), buffer->mmap_length);
    gst_mini_object_unref (GST_MINI_OBJECT (pool));
    v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->mmap_length);
    GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT
        (buffer));
  }
}
Example 10: gst_inter_sub_src_create
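/* GStreamer 0.10 variant of Example 7: the same clear-then-set DISCONT
 * logic, using the 0.10 buffer API (GST_BUFFER_DATA, gst_buffer_set_caps). */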
static GstFlowReturn
gst_inter_sub_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
  GstBuffer *buffer;

  GST_DEBUG_OBJECT (intersubsrc, "create");

  buffer = NULL;

  g_mutex_lock (intersubsrc->surface->mutex);
  if (intersubsrc->surface->sub_buffer) {
    buffer = gst_buffer_ref (intersubsrc->surface->sub_buffer);
    //intersubsrc->surface->sub_buffer_count++;
    //if (intersubsrc->surface->sub_buffer_count >= 30) {
    gst_buffer_unref (intersubsrc->surface->sub_buffer);
    intersubsrc->surface->sub_buffer = NULL;
    //}
  }
  g_mutex_unlock (intersubsrc->surface->mutex);

  if (buffer == NULL) {
    guint8 *data;

    buffer = gst_buffer_new_and_alloc (1);
    data = GST_BUFFER_DATA (buffer);
    data[0] = 0;
  }

  buffer = gst_buffer_make_metadata_writable (buffer);
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (GST_SECOND, intersubsrc->n_frames,
      intersubsrc->rate);
  GST_DEBUG_OBJECT (intersubsrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (GST_SECOND, (intersubsrc->n_frames + 1),
      intersubsrc->rate) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = intersubsrc->n_frames;
  GST_BUFFER_OFFSET_END (buffer) = -1;
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (intersubsrc->n_frames == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (GST_BASE_SRC_PAD (intersubsrc)));
  intersubsrc->n_frames++;

  *buf = buffer;

  return GST_FLOW_OK;
}
Example 11: gst_video_parse_set_buffer_flags
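/* videoparse: sets or clears the TFF flag on interlaced buffers depending
 * on the top_field_first setting. */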
static void
gst_video_parse_set_buffer_flags (GstRawParse * rp, GstBuffer * buffer)
{
  GstVideoParse *vp = GST_VIDEO_PARSE (rp);

  if (vp->interlaced) {
    if (vp->top_field_first) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }
  }
}
Example 12: gst_audio_aggregator_mix_buffer
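/* Audio aggregator: once real (non-gap) samples have been mixed into the
 * output buffer, its GAP flag is cleared. */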
static gboolean
gst_audio_aggregator_mix_buffer (GstAudioAggregator * aagg,
    GstAudioAggregatorPad * pad, GstBuffer * inbuf, GstBuffer * outbuf)
{
  guint overlap;
  guint out_start;
  gboolean filled;
  guint blocksize;

  blocksize = gst_util_uint64_scale (aagg->priv->output_buffer_duration,
      GST_AUDIO_INFO_RATE (&aagg->info), GST_SECOND);
  blocksize = MAX (1, blocksize);

  /* Overlap => mix */
  if (aagg->priv->offset < pad->priv->output_offset)
    out_start = pad->priv->output_offset - aagg->priv->offset;
  else
    out_start = 0;

  overlap = pad->priv->size - pad->priv->position;
  if (overlap > blocksize - out_start)
    overlap = blocksize - out_start;

  if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP)) {
    /* skip gap buffer */
    GST_LOG_OBJECT (pad, "skipping GAP buffer");
    pad->priv->output_offset += pad->priv->size - pad->priv->position;
    pad->priv->position = pad->priv->size;

    gst_buffer_replace (&pad->priv->buffer, NULL);
    return FALSE;
  }

  filled = GST_AUDIO_AGGREGATOR_GET_CLASS (aagg)->aggregate_one_buffer (aagg,
      pad, inbuf, pad->priv->position, outbuf, out_start, overlap);

  if (filled)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);

  pad->priv->position += overlap;
  pad->priv->output_offset += overlap;

  if (pad->priv->position == pad->priv->size) {
    /* Buffer done, drop it */
    gst_buffer_replace (&pad->priv->buffer, NULL);
    GST_DEBUG_OBJECT (pad, "Finished mixing buffer, waiting for next");
    return FALSE;
  }

  return TRUE;
}
Example 13: gst_imx_blitter_video_transform_copy_metadata
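/* basetransform copy_metadata vfunc: copies timestamps and flags from input
 * to output but strips TAG_MEMORY, so the output buffer is not reallocated
 * on every frame. */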
static gboolean gst_imx_blitter_video_transform_copy_metadata(G_GNUC_UNUSED GstBaseTransform *trans, GstBuffer *input, GstBuffer *outbuf)
{
	/* Only copy timestamps; the rest of the metadata must not be copied */
	GST_BUFFER_DTS(outbuf) = GST_BUFFER_DTS(input);
	GST_BUFFER_PTS(outbuf) = GST_BUFFER_PTS(input);

	/* For GStreamer 1.3.1 and newer, make sure the GST_BUFFER_FLAG_TAG_MEMORY flag
	 * isn't copied, otherwise the output buffer will be reallocated all the time */
	GST_BUFFER_FLAGS(outbuf) = GST_BUFFER_FLAGS(input);
#if GST_CHECK_VERSION(1, 3, 1)
	GST_BUFFER_FLAG_UNSET(outbuf, GST_BUFFER_FLAG_TAG_MEMORY);
#endif

	return TRUE;
}
Example 14: gst_droid_buffer_pool_reset_buffer
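/* Droid buffer pool: removing all memory tags the buffer's memory, so
 * TAG_MEMORY is cleared before chaining up, letting the pool reuse the
 * buffer instead of discarding it. */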
static void
gst_droid_buffer_pool_reset_buffer (GstBufferPool * pool, GstBuffer * buffer)
{
  GstDroidBufferPool *dpool = GST_DROID_BUFFER_POOL (pool);

  gst_buffer_remove_all_memory (buffer);
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_TAG_MEMORY);

  g_mutex_lock (&dpool->lock);
  ++dpool->num_buffers;
  GST_DEBUG_OBJECT (dpool, "num buffers: %d", dpool->num_buffers);
  g_cond_signal (&dpool->cond);
  g_mutex_unlock (&dpool->lock);

  /* reset_buffer() returns void, so chain up without returning a value */
  GST_BUFFER_POOL_CLASS (parent_class)->reset_buffer (pool, buffer);
}
Example 15: deserialize_framenode
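/* Test utility: rebuilds a FrameNode from serialized attributes; keyframes
 * get DELTA_UNIT cleared on the reconstructed buffer, other frames get it
 * set. */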
static FrameNode *
deserialize_framenode (const gchar ** names, const gchar ** values)
{
  gint i;

  FrameNode *framenode = g_slice_new0 (FrameNode);

  for (i = 0; names[i] != NULL; i++) {
    if (g_strcmp0 (names[i], "id") == 0)
      framenode->id = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "offset") == 0)
      framenode->offset = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "offset-end") == 0)
      framenode->offset_end = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "duration") == 0)
      framenode->duration = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "pts") == 0)
      framenode->pts = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "dts") == 0)
      framenode->dts = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "checksum") == 0)
      framenode->checksum = g_strdup (values[i]);
    else if (g_strcmp0 (names[i], "is-keyframe") == 0) {
      if (!g_ascii_strcasecmp (values[i], "true"))
        framenode->is_keyframe = TRUE;
      else
        framenode->is_keyframe = FALSE;
    }
  }

  framenode->buf = gst_buffer_new_wrapped (framenode->checksum,
      strlen (framenode->checksum) + 1);

  GST_BUFFER_OFFSET (framenode->buf) = framenode->offset;
  GST_BUFFER_OFFSET_END (framenode->buf) = framenode->offset_end;
  GST_BUFFER_DURATION (framenode->buf) = framenode->duration;
  GST_BUFFER_PTS (framenode->buf) = framenode->pts;
  GST_BUFFER_DTS (framenode->buf) = framenode->dts;

  if (framenode->is_keyframe) {
    GST_BUFFER_FLAG_UNSET (framenode->buf, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (framenode->buf, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  return framenode;
}