This page collects typical usage examples of the GST_BUFFER_DURATION macro in C/C++ GStreamer code. If you are unsure what GST_BUFFER_DURATION does, how to call it, or what real-world uses look like, the hand-picked code examples below should help.
The following 15 code examples all use GST_BUFFER_DURATION; by default they are ordered by popularity.
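Before the examples, a quick orientation: GST_BUFFER_DURATION is the GStreamer accessor macro for the duration field of a GstBuffer, so it appears both on the left-hand side of assignments (to stamp a buffer) and on the right-hand side (to read the duration back). The minimal sketch below is not taken from any of the projects quoted later; it assumes the GStreamer 1.x API and uses a caller-supplied frame rate purely for illustration.

#include <gst/gst.h>

/* Illustrative sketch: stamp one video frame with a duration derived from
 * the frame rate fps_n/fps_d, then read the value back for logging. */
static void
stamp_one_frame (GstBuffer * buf, gint fps_n, gint fps_d)
{
  /* one frame lasts GST_SECOND * fps_d / fps_n nanoseconds */
  GST_BUFFER_DURATION (buf) =
      gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);

  if (GST_BUFFER_DURATION_IS_VALID (buf))
    GST_LOG ("frame duration: %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
}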
Example 1: gst_jpeg_parse_push_buffer
static GstFlowReturn
gst_jpeg_parse_push_buffer (GstJpegParse * parse, guint len)
{
GstBuffer *outbuf;
GstFlowReturn ret = GST_FLOW_OK;
gboolean header_ok;
/* reset the offset (only when we flushed) */
parse->priv->last_offset = 0;
parse->priv->last_entropy_len = 0;
outbuf = gst_adapter_take_buffer (parse->priv->adapter, len);
if (outbuf == NULL) {
GST_ELEMENT_ERROR (parse, STREAM, DECODE,
("Failed to take buffer of size %u", len),
("Failed to take buffer of size %u", len));
return GST_FLOW_ERROR;
}
header_ok = gst_jpeg_parse_read_header (parse, outbuf);
if (parse->priv->new_segment == TRUE
|| parse->priv->width != parse->priv->caps_width
|| parse->priv->height != parse->priv->caps_height
|| parse->priv->framerate_numerator !=
parse->priv->caps_framerate_numerator
|| parse->priv->framerate_denominator !=
parse->priv->caps_framerate_denominator) {
if (!gst_jpeg_parse_set_new_caps (parse, header_ok)) {
GST_ELEMENT_ERROR (parse, CORE, NEGOTIATION,
("Can't set caps to the src pad"), ("Can't set caps to the src pad"));
return GST_FLOW_ERROR;
}
if (parse->priv->tags) {
GST_DEBUG_OBJECT (parse, "Pushing tags: %" GST_PTR_FORMAT,
parse->priv->tags);
gst_element_found_tags_for_pad (GST_ELEMENT_CAST (parse),
parse->priv->srcpad, parse->priv->tags);
parse->priv->tags = NULL;
}
parse->priv->new_segment = FALSE;
parse->priv->caps_width = parse->priv->width;
parse->priv->caps_height = parse->priv->height;
parse->priv->caps_framerate_numerator = parse->priv->framerate_numerator;
parse->priv->caps_framerate_denominator =
parse->priv->framerate_denominator;
}
GST_BUFFER_TIMESTAMP (outbuf) = parse->priv->next_ts;
if (parse->priv->has_fps && GST_CLOCK_TIME_IS_VALID (parse->priv->next_ts)
&& GST_CLOCK_TIME_IS_VALID (parse->priv->duration)) {
parse->priv->next_ts += parse->priv->duration;
} else {
parse->priv->duration = GST_CLOCK_TIME_NONE;
parse->priv->next_ts = GST_CLOCK_TIME_NONE;
}
GST_BUFFER_DURATION (outbuf) = parse->priv->duration;
gst_buffer_set_caps (outbuf, GST_PAD_CAPS (parse->priv->srcpad));
GST_LOG_OBJECT (parse, "pushing buffer (ts=%" GST_TIME_FORMAT ", len=%u)",
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), len);
ret = gst_pad_push (parse->priv->srcpad, outbuf);
return ret;
}
Example 2: gst_ivf_parse_chain
/* chain function
* this function does the actual processing
*/
static GstFlowReturn
gst_ivf_parse_chain (GstPad * pad, GstBuffer * buf)
{
GstIvfParse *ivf = GST_IVF_PARSE (GST_OBJECT_PARENT (pad));
gboolean res;
/* lazy creation of the adapter */
if (G_UNLIKELY (ivf->adapter == NULL)) {
ivf->adapter = gst_adapter_new ();
}
GST_LOG_OBJECT (ivf, "Pushing buffer of size %u to adapter",
GST_BUFFER_SIZE (buf));
gst_adapter_push (ivf->adapter, buf); /* adapter takes ownership of buf */
res = GST_FLOW_OK;
switch (ivf->state) {
case GST_IVF_PARSE_START:
if (gst_adapter_available (ivf->adapter) >= 32) {
GstCaps *caps;
const guint8 *data = gst_adapter_peek (ivf->adapter, 32);
guint32 magic = GST_READ_UINT32_LE (data);
guint16 version = GST_READ_UINT16_LE (data + 4);
guint16 header_size = GST_READ_UINT16_LE (data + 6);
guint32 fourcc = GST_READ_UINT32_LE (data + 8);
guint16 width = GST_READ_UINT16_LE (data + 12);
guint16 height = GST_READ_UINT16_LE (data + 14);
guint32 rate_num = GST_READ_UINT32_LE (data + 16);
guint32 rate_den = GST_READ_UINT32_LE (data + 20);
#ifndef GST_DISABLE_GST_DEBUG
guint32 num_frames = GST_READ_UINT32_LE (data + 24);
#endif
/* last 4 bytes unused */
gst_adapter_flush (ivf->adapter, 32);
if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
version != 0 || header_size != 32 ||
fourcc != GST_MAKE_FOURCC ('V', 'P', '8', '0')) {
GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
return GST_FLOW_ERROR;
}
/* create src pad caps */
caps = gst_caps_new_simple ("video/x-vp8",
"width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, rate_num, rate_den, NULL);
GST_INFO_OBJECT (ivf, "Found stream: %" GST_PTR_FORMAT, caps);
GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);
gst_pad_set_caps (ivf->srcpad, caps);
gst_caps_unref (caps);
/* keep framerate in instance for convenience */
ivf->rate_num = rate_num;
ivf->rate_den = rate_den;
gst_pad_push_event (ivf->srcpad, gst_event_new_new_segment (FALSE, 1.0,
GST_FORMAT_TIME, 0, -1, 0));
/* move along */
ivf->state = GST_IVF_PARSE_DATA;
} else {
GST_LOG_OBJECT (ivf, "Header data not yet available.");
break;
}
/* fall through */
case GST_IVF_PARSE_DATA:
while (gst_adapter_available (ivf->adapter) > 12) {
const guint8 *data = gst_adapter_peek (ivf->adapter, 12);
guint32 frame_size = GST_READ_UINT32_LE (data);
guint64 frame_pts = GST_READ_UINT64_LE (data + 4);
GST_LOG_OBJECT (ivf,
"Read frame header: size %u, pts %" G_GUINT64_FORMAT, frame_size,
frame_pts);
if (gst_adapter_available (ivf->adapter) >= 12 + frame_size) {
GstBuffer *frame;
gst_adapter_flush (ivf->adapter, 12);
frame = gst_adapter_take_buffer (ivf->adapter, frame_size);
gst_buffer_set_caps (frame, GST_PAD_CAPS (ivf->srcpad));
GST_BUFFER_TIMESTAMP (frame) =
gst_util_uint64_scale_int (GST_SECOND * frame_pts, ivf->rate_den,
ivf->rate_num);
GST_BUFFER_DURATION (frame) =
gst_util_uint64_scale_int (GST_SECOND, ivf->rate_den,
ivf->rate_num);
//......... part of this code is omitted here .........
Example 3: gst_rtp_h263p_pay_flush
static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
guint avail;
GstBufferList *list = NULL;
GstBuffer *outbuf = NULL;
GstFlowReturn ret;
gboolean fragmented = FALSE;
avail = gst_adapter_available (rtph263ppay->adapter);
if (avail == 0)
return GST_FLOW_OK;
fragmented = FALSE;
/* This algorithm assumes the H263/+/++ encoder sends complete frames in each
* buffer */
/* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:
* This algorithm implements the Follow-on packets method for packetization.
* This assumes low packet loss network.
* With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:
* This algorithm separates large frames at synchronisation points (Segments)
* (See RFC 4629 section 6). It would be interesting to have a property such as network
* quality to select between both packetization methods */
/* TODO Add VRC support (See RFC 4629 section 5.2) */
while (avail > 0) {
guint towrite;
guint8 *payload;
gint header_len;
guint next_gop = 0;
gboolean found_gob = FALSE;
GstRTPBuffer rtp = { NULL };
GstBuffer *payload_buf;
if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
/* start after 1st gop possible */
/* Check if we have a GOB or an EOS / EOSSBS */
/* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */
next_gop =
gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
0x00008000, 0, avail);
if (next_gop == 0) {
GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
found_gob = TRUE;
}
/* Find next and cut the packet accordingly */
/* TODO we should get as many gobs as possible until MTU is reached, this
* code seems to just get one GOB per packet */
if (next_gop == 0 && avail > 3)
next_gop =
gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
0x00008000, 3, avail - 3);
GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at : %d", next_gop);
if (next_gop == -1)
next_gop = 0;
}
/* for picture start frames (non-fragmented), we need to remove the first
* two 0x00 bytes and set P=1 */
if (!fragmented || found_gob) {
gst_adapter_flush (rtph263ppay->adapter, 2);
avail -= 2;
}
header_len = 2;
towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
(GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));
if (next_gop > 0)
towrite = MIN (next_gop, towrite);
outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0);
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* last fragment gets the marker bit set */
gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);
payload = gst_rtp_buffer_get_payload (&rtp);
/* 0 1
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | RR |P|V| PLEN |PEBIT|
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
/* if fragmented or gop header , write p bit =1 */
payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
payload[1] = 0;
GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
gst_rtp_buffer_unmap (&rtp);
payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
gst_rtp_copy_meta (GST_ELEMENT_CAST (rtph263ppay), outbuf, payload_buf,
g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
outbuf = gst_buffer_append (outbuf, payload_buf);
avail -= towrite;
//......... part of this code is omitted here .........
Example 4: speex_dec_chain_parse_data
static GstFlowReturn
speex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,
GstClockTime timestamp, GstClockTime duration)
{
GstFlowReturn res = GST_FLOW_OK;
gint i, fpp;
guint size;
guint8 *data;
SpeexBits *bits;
if (!dec->frame_duration)
goto not_negotiated;
if (timestamp != -1) {
dec->segment.last_stop = timestamp;
} else {
timestamp = dec->segment.last_stop;
}
if (buf) {
data = GST_BUFFER_DATA (buf);
size = GST_BUFFER_SIZE (buf);
/* send data to the bitstream */
speex_bits_read_from (&dec->bits, (char *) data, size);
fpp = 0;
bits = &dec->bits;
GST_DEBUG_OBJECT (dec, "received buffer of size %u, fpp %d", size, fpp);
} else {
/* concealment data, pass NULL as the bits parameters */
GST_DEBUG_OBJECT (dec, "creating concealment data");
fpp = dec->header->frames_per_packet;
bits = NULL;
}
/* now decode each frame, catering for unknown number of them (e.g. rtp) */
for (i = 0; (!fpp || i < fpp) && (!bits || speex_bits_remaining (bits) > 0);
i++) {
GstBuffer *outbuf;
gint16 *out_data;
gint ret;
GST_LOG_OBJECT (dec, "decoding frame %d/%d", i, fpp);
res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,
GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header->nb_channels * 2,
GST_PAD_CAPS (dec->srcpad), &outbuf);
if (res != GST_FLOW_OK) {
GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
return res;
}
out_data = (gint16 *) GST_BUFFER_DATA (outbuf);
ret = speex_decode_int (dec->state, bits, out_data);
if (ret == -1) {
/* uh? end of stream */
GST_WARNING_OBJECT (dec, "Unexpected end of stream found");
gst_buffer_unref (outbuf);
outbuf = NULL;
break;
} else if (ret == -2) {
GST_WARNING_OBJECT (dec, "Decoding error: corrupted stream?");
gst_buffer_unref (outbuf);
outbuf = NULL;
break;
}
if (bits && speex_bits_remaining (bits) < 0) {
GST_WARNING_OBJECT (dec, "Decoding overflow: corrupted stream?");
gst_buffer_unref (outbuf);
outbuf = NULL;
break;
}
if (dec->header->nb_channels == 2)
speex_decode_stereo_int (out_data, dec->frame_size, dec->stereo);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = dec->frame_duration;
dec->segment.last_stop += dec->frame_duration;
timestamp = dec->segment.last_stop;
GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"
GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
GST_TIME_ARGS (dec->frame_duration));
res = gst_pad_push (dec->srcpad, outbuf);
if (res != GST_FLOW_OK) {
GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));
break;
}
}
return res;
//......... part of this code is omitted here .........
Example 5: gst_gdiscreencapsrc_create
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
GstBuffer *new_buf;
gint new_buf_size;
GstClock *clock;
GstClockTime buf_time, buf_dur;
guint64 frame_number;
if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||
!src->info.bmiHeader.biHeight)) {
GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
("format wasn't negotiated before create function"));
return GST_FLOW_NOT_NEGOTIATED;
}
new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
(-src->info.bmiHeader.biHeight);
GST_LOG_OBJECT (src,
"creating buffer of %d bytes with %dx%d image",
new_buf_size, (gint) src->info.bmiHeader.biWidth,
(gint) (-src->info.bmiHeader.biHeight));
new_buf = gst_buffer_new_and_alloc (new_buf_size);
clock = gst_element_get_clock (GST_ELEMENT (src));
if (clock != NULL) {
GstClockTime time, base_time;
/* Calculate sync time. */
time = gst_clock_get_time (clock);
base_time = gst_element_get_base_time (GST_ELEMENT (src));
buf_time = time - base_time;
if (src->rate_numerator) {
frame_number = gst_util_uint64_scale (buf_time,
src->rate_numerator, GST_SECOND * src->rate_denominator);
} else {
frame_number = -1;
}
} else {
buf_time = GST_CLOCK_TIME_NONE;
frame_number = -1;
}
if (frame_number != -1 && frame_number == src->frame_number) {
GstClockID id;
GstClockReturn ret;
/* Need to wait for the next frame */
frame_number += 1;
/* Figure out what the next frame time is */
buf_time = gst_util_uint64_scale (frame_number,
src->rate_denominator * GST_SECOND, src->rate_numerator);
id = gst_clock_new_single_shot_id (clock,
buf_time + gst_element_get_base_time (GST_ELEMENT (src)));
GST_OBJECT_LOCK (src);
src->clock_id = id;
GST_OBJECT_UNLOCK (src);
GST_DEBUG_OBJECT (src, "Waiting for next frame time %" G_GUINT64_FORMAT,
buf_time);
ret = gst_clock_id_wait (id, NULL);
GST_OBJECT_LOCK (src);
gst_clock_id_unref (id);
src->clock_id = NULL;
if (ret == GST_CLOCK_UNSCHEDULED) {
/* Got woken up by the unlock function */
GST_OBJECT_UNLOCK (src);
return GST_FLOW_FLUSHING;
}
GST_OBJECT_UNLOCK (src);
/* Duration is a complete 1/fps frame duration */
buf_dur =
gst_util_uint64_scale_int (GST_SECOND, src->rate_denominator,
src->rate_numerator);
} else if (frame_number != -1) {
GstClockTime next_buf_time;
GST_DEBUG_OBJECT (src, "No need to wait for next frame time %"
G_GUINT64_FORMAT " next frame = %" G_GINT64_FORMAT " prev = %"
G_GINT64_FORMAT, buf_time, frame_number, src->frame_number);
next_buf_time = gst_util_uint64_scale (frame_number + 1,
src->rate_denominator * GST_SECOND, src->rate_numerator);
/* Frame duration is from now until the next expected capture time */
buf_dur = next_buf_time - buf_time;
} else {
buf_dur = GST_CLOCK_TIME_NONE;
}
src->frame_number = frame_number;
GST_BUFFER_TIMESTAMP (new_buf) = buf_time;
GST_BUFFER_DURATION (new_buf) = buf_dur;
//......... part of this code is omitted here .........
Example 6: gst_wavpack_enc_chain
static GstFlowReturn
gst_wavpack_enc_chain (GstPad * pad, GstBuffer * buf)
{
GstWavpackEnc *enc = GST_WAVPACK_ENC (gst_pad_get_parent (pad));
uint32_t sample_count = GST_BUFFER_SIZE (buf) / 4;
GstFlowReturn ret;
/* reset the last returns to GST_FLOW_OK. This is only set to something else
* while WavpackPackSamples() or more specific gst_wavpack_enc_push_block()
* so not valid anymore */
enc->srcpad_last_return = enc->wvcsrcpad_last_return = GST_FLOW_OK;
GST_DEBUG ("got %u raw samples", sample_count);
/* check if we already have a valid WavpackContext, otherwise make one */
if (!enc->wp_context) {
/* create raw context */
enc->wp_context =
WavpackOpenFileOutput (gst_wavpack_enc_push_block, &enc->wv_id,
(enc->correction_mode > 0) ? &enc->wvc_id : NULL);
if (!enc->wp_context) {
GST_ELEMENT_ERROR (enc, LIBRARY, INIT, (NULL),
("error creating Wavpack context"));
gst_object_unref (enc);
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
/* set the WavpackConfig according to our parameters */
gst_wavpack_enc_set_wp_config (enc);
/* set the configuration to the context now that we know everything
* and initialize the encoder */
if (!WavpackSetConfiguration (enc->wp_context,
enc->wp_config, (uint32_t) (-1))
|| !WavpackPackInit (enc->wp_context)) {
GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL),
("error setting up wavpack encoding context"));
WavpackCloseFile (enc->wp_context);
gst_object_unref (enc);
gst_buffer_unref (buf);
return GST_FLOW_ERROR;
}
GST_DEBUG ("setup of encoding context successfull");
}
/* Save the timestamp of the first buffer. This will be later
* used as offset for all following buffers */
if (enc->timestamp_offset == GST_CLOCK_TIME_NONE) {
if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
enc->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
enc->next_ts = GST_BUFFER_TIMESTAMP (buf);
} else {
enc->timestamp_offset = 0;
enc->next_ts = 0;
}
}
/* Check if we have a continuous stream; if not, drop some samples or the buffer, or
 * insert some silence samples */
if (enc->next_ts != GST_CLOCK_TIME_NONE &&
GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
guint64 diff_bytes;
GST_WARNING_OBJECT (enc, "Buffer is older than previous "
"timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT
"), cannot handle. Clipping buffer.",
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
GST_TIME_ARGS (enc->next_ts));
diff_bytes =
GST_CLOCK_TIME_TO_FRAMES (diff, enc->samplerate) * enc->channels * 2;
if (diff_bytes >= GST_BUFFER_SIZE (buf)) {
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
buf = gst_buffer_make_metadata_writable (buf);
GST_BUFFER_DATA (buf) += diff_bytes;
GST_BUFFER_SIZE (buf) -= diff_bytes;
GST_BUFFER_TIMESTAMP (buf) += diff;
if (GST_BUFFER_DURATION_IS_VALID (buf))
GST_BUFFER_DURATION (buf) -= diff;
}
/* Allow a diff of at most 5 ms */
if (enc->next_ts != GST_CLOCK_TIME_NONE
&& GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&
GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > 5 * GST_MSECOND) {
GST_WARNING_OBJECT (enc,
"Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,
GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, 5 * GST_MSECOND);
WavpackFlushSamples (enc->wp_context);
enc->timestamp_offset += (GST_BUFFER_TIMESTAMP (buf) - enc->next_ts);
}
}
//......... part of this code is omitted here .........
Example 7: gst_rtp_celt_pay_flush_queued
static GstFlowReturn
gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
{
GstFlowReturn ret;
GstBuffer *buf, *outbuf;
guint8 *payload, *spayload;
guint payload_len;
GstClockTime duration;
GstRTPBuffer rtp = { NULL, };
payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
duration = rtpceltpay->qduration;
GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
payload_len, GST_TIME_ARGS (rtpceltpay->qduration));
/* get a big enough packet for the sizes + payloads */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
GST_BUFFER_DURATION (outbuf) = duration;
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* point to the payload for size headers and data */
spayload = gst_rtp_buffer_get_payload (&rtp);
payload = spayload + rtpceltpay->sbytes;
while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
guint size;
/* copy first timestamp to output */
if (GST_BUFFER_PTS (outbuf) == -1)
GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);
/* write the size to the header */
size = gst_buffer_get_size (buf);
while (size > 0xff) {
*spayload++ = 0xff;
size -= 0xff;
}
*spayload++ = size;
/* copy payload */
size = gst_buffer_get_size (buf);
gst_buffer_extract (buf, 0, payload, size);
payload += size;
gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpceltpay), outbuf, buf,
g_quark_from_static_string (GST_META_TAG_AUDIO_STR));
gst_buffer_unref (buf);
}
gst_rtp_buffer_unmap (&rtp);
/* we consumed it all */
rtpceltpay->bytes = 0;
rtpceltpay->sbytes = 0;
rtpceltpay->qduration = 0;
ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);
return ret;
}
Example 8: gst_rtp_mux_chain
static GstFlowReturn
gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
GstRTPMux *rtp_mux;
GstFlowReturn ret;
GstRTPMuxPadPrivate *padpriv;
gboolean drop;
gboolean changed = FALSE;
GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;
rtp_mux = GST_RTP_MUX (parent);
if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
GstCaps *current_caps = gst_pad_get_current_caps (pad);
if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
ret = GST_FLOW_NOT_NEGOTIATED;
gst_buffer_unref (buffer);
goto out;
}
gst_caps_unref (current_caps);
}
GST_OBJECT_LOCK (rtp_mux);
padpriv = gst_pad_get_element_private (pad);
if (!padpriv) {
GST_OBJECT_UNLOCK (rtp_mux);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_LINKED;
}
buffer = gst_buffer_make_writable (buffer);
if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
GST_OBJECT_UNLOCK (rtp_mux);
gst_buffer_unref (buffer);
GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
return GST_FLOW_ERROR;
}
drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);
gst_rtp_buffer_unmap (&rtpbuffer);
if (!drop) {
if (pad != rtp_mux->last_pad) {
changed = TRUE;
g_clear_object (&rtp_mux->last_pad);
rtp_mux->last_pad = g_object_ref (pad);
}
if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
GST_BUFFER_PTS_IS_VALID (buffer))
rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
GST_BUFFER_DURATION (buffer);
else
rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
}
GST_OBJECT_UNLOCK (rtp_mux);
if (changed)
gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);
if (drop) {
gst_buffer_unref (buffer);
ret = GST_FLOW_OK;
} else {
ret = gst_pad_push (rtp_mux->srcpad, buffer);
}
out:
return ret;
}
Example 9: gst_audio_segment_clip_clip_buffer
static GstFlowReturn
gst_audio_segment_clip_clip_buffer (GstSegmentClip * base, GstBuffer * buffer,
GstBuffer ** outbuf)
{
GstAudioSegmentClip *self = GST_AUDIO_SEGMENT_CLIP (base);
GstSegment *segment = &base->segment;
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
GstClockTime duration = GST_BUFFER_DURATION (buffer);
guint64 offset = GST_BUFFER_OFFSET (buffer);
guint64 offset_end = GST_BUFFER_OFFSET_END (buffer);
guint size = gst_buffer_get_size (buffer);
if (!self->rate || !self->framesize) {
GST_ERROR_OBJECT (self, "Not negotiated yet");
gst_buffer_unref (buffer);
return GST_FLOW_NOT_NEGOTIATED;
}
if (segment->format != GST_FORMAT_DEFAULT &&
segment->format != GST_FORMAT_TIME) {
GST_DEBUG_OBJECT (self, "Unsupported segment format %s",
gst_format_get_name (segment->format));
*outbuf = buffer;
return GST_FLOW_OK;
}
if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
GST_WARNING_OBJECT (self, "Buffer without valid timestamp");
*outbuf = buffer;
return GST_FLOW_OK;
}
*outbuf =
gst_audio_buffer_clip (buffer, segment, self->rate, self->framesize);
if (!*outbuf) {
GST_DEBUG_OBJECT (self, "Buffer outside the configured segment");
/* Now return unexpected if we're before/after the end */
if (segment->format == GST_FORMAT_TIME) {
if (segment->rate >= 0) {
if (segment->stop != -1 && timestamp >= segment->stop)
return GST_FLOW_EOS;
} else {
if (!GST_CLOCK_TIME_IS_VALID (duration))
duration =
gst_util_uint64_scale_int (size, GST_SECOND,
self->framesize * self->rate);
if (segment->start != -1 && timestamp + duration <= segment->start)
return GST_FLOW_EOS;
}
} else {
if (segment->rate >= 0) {
if (segment->stop != -1 && offset != -1 && offset >= segment->stop)
return GST_FLOW_EOS;
} else if (offset != -1 || offset_end != -1) {
if (offset_end == -1)
offset_end = offset + size / self->framesize;
if (segment->start != -1 && offset_end <= segment->start)
return GST_FLOW_EOS;
}
}
}
return GST_FLOW_OK;
}
Example 10: gst_musepackdec_loop
static void
gst_musepackdec_loop (GstPad * sinkpad)
{
GstMusepackDec *musepackdec;
GstFlowReturn flow;
GstBuffer *out;
#ifdef MPC_IS_OLD_API
guint32 update_acc, update_bits;
#else
mpc_frame_info frame;
mpc_status err;
#endif
gint num_samples, samplerate, bitspersample;
musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));
samplerate = g_atomic_int_get (&musepackdec->rate);
if (samplerate == 0) {
if (!gst_musepack_stream_init (musepackdec))
goto pause_task;
gst_musepackdec_send_newsegment (musepackdec);
samplerate = g_atomic_int_get (&musepackdec->rate);
}
bitspersample = g_atomic_int_get (&musepackdec->bps);
flow = gst_pad_alloc_buffer_and_set_caps (musepackdec->srcpad, -1,
MPC_DECODER_BUFFER_LENGTH * 4, GST_PAD_CAPS (musepackdec->srcpad), &out);
if (flow != GST_FLOW_OK) {
GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
goto pause_task;
}
#ifdef MPC_IS_OLD_API
num_samples = mpc_decoder_decode (musepackdec->d,
(MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out), &update_acc, &update_bits);
if (num_samples < 0) {
GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
goto pause_task;
} else if (num_samples == 0) {
goto eos_and_pause;
}
#else
frame.buffer = (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out);
err = mpc_demux_decode (musepackdec->d, &frame);
if (err != MPC_STATUS_OK) {
GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
goto pause_task;
} else if (frame.bits == -1) {
goto eos_and_pause;
}
num_samples = frame.samples;
#endif
GST_BUFFER_SIZE (out) = num_samples * bitspersample;
GST_BUFFER_OFFSET (out) = musepackdec->segment.last_stop;
GST_BUFFER_TIMESTAMP (out) =
gst_util_uint64_scale_int (musepackdec->segment.last_stop,
GST_SECOND, samplerate);
GST_BUFFER_DURATION (out) =
gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);
musepackdec->segment.last_stop += num_samples;
GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));
flow = gst_pad_push (musepackdec->srcpad, out);
if (flow != GST_FLOW_OK) {
GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
goto pause_task;
}
/* check if we're at the end of a configured segment */
if (musepackdec->segment.stop != -1 &&
musepackdec->segment.last_stop >= musepackdec->segment.stop) {
gint64 stop_time;
GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");
if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
goto eos_and_pause;
GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");
stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
GST_SECOND, samplerate);
gst_element_post_message (GST_ELEMENT (musepackdec),
gst_message_new_segment_done (GST_OBJECT (musepackdec),
GST_FORMAT_TIME, stop_time));
//......... part of this code is omitted here .........
Example 11: gst_ac3_parse_handle_frame
//......... part of this code is omitted here .........
/* Loop till we get one frame from each substream */
do {
framesize += frmsiz;
if (!gst_byte_reader_skip (&reader, frmsiz)
|| map.size < (framesize + 6)) {
more = TRUE;
break;
}
if (!gst_ac3_parse_frame_header (ac3parse, buf, framesize, &frmsiz,
NULL, NULL, NULL, &sid, &eac)) {
*skipsize = off + 2;
goto cleanup;
}
} while (sid);
}
/* We're now at the next frame, so no need to skip if resyncing */
frmsiz = 0;
}
if (lost_sync && !draining) {
guint16 word = 0;
GST_DEBUG_OBJECT (ac3parse, "resyncing; checking next frame syncword");
if (more || !gst_byte_reader_skip (&reader, frmsiz) ||
!gst_byte_reader_get_uint16_be (&reader, &word)) {
GST_DEBUG_OBJECT (ac3parse, "... but not sufficient data");
gst_base_parse_set_min_frame_size (parse, framesize + 8);
*skipsize = 0;
goto cleanup;
} else {
if (word != 0x0b77) {
GST_DEBUG_OBJECT (ac3parse, "0x%x not OK", word);
*skipsize = off + 2;
goto cleanup;
} else {
/* ok, got sync now, let's assume constant frame size */
gst_base_parse_set_min_frame_size (parse, framesize);
}
}
}
/* expect to have found a frame here */
g_assert (framesize);
ret = TRUE;
/* arrange for metadata setup */
if (G_UNLIKELY (sid)) {
/* dependent frame, no need to (ac)count for or consider further */
GST_LOG_OBJECT (parse, "sid: %d", sid);
frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
/* TODO maybe also mark as DELTA_UNIT,
* if that does not surprise baseparse elsewhere */
/* occupies same time space as previous base frame */
if (G_LIKELY (GST_BUFFER_TIMESTAMP (buf) >= GST_BUFFER_DURATION (buf)))
GST_BUFFER_TIMESTAMP (buf) -= GST_BUFFER_DURATION (buf);
/* only shortcut if we already arranged for caps */
if (G_LIKELY (ac3parse->sample_rate > 0))
goto cleanup;
}
if (G_UNLIKELY (ac3parse->sample_rate != rate || ac3parse->channels != chans
|| ac3parse->eac != eac)) {
GstCaps *caps = gst_caps_new_simple (eac ? "audio/x-eac3" : "audio/x-ac3",
"framed", G_TYPE_BOOLEAN, TRUE, "rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, chans, NULL);
gst_caps_set_simple (caps, "alignment", G_TYPE_STRING,
g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937 ?
"iec61937" : "frame", NULL);
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
gst_caps_unref (caps);
ac3parse->sample_rate = rate;
ac3parse->channels = chans;
ac3parse->eac = eac;
update_rate = TRUE;
}
if (G_UNLIKELY (ac3parse->blocks != blocks)) {
ac3parse->blocks = blocks;
update_rate = TRUE;
}
if (G_UNLIKELY (update_rate))
gst_base_parse_set_frame_rate (parse, rate, 256 * blocks, 2, 2);
cleanup:
gst_buffer_unmap (buf, &map);
if (ret && framesize <= map.size) {
res = gst_base_parse_finish_frame (parse, frame, framesize);
}
return res;
}
Example 12: gst_pad_probe_info_get_buffer
GstPadProbeReturn GstEnginePipeline::HandoffCallback(GstPad*,
GstPadProbeInfo* info,
gpointer self) {
GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);
GstBuffer* buf = gst_pad_probe_info_get_buffer(info);
QList<BufferConsumer*> consumers;
{
QMutexLocker l(&instance->buffer_consumers_mutex_);
consumers = instance->buffer_consumers_;
}
for (BufferConsumer* consumer : consumers) {
gst_buffer_ref(buf);
consumer->ConsumeBuffer(buf, instance->id());
}
// Calculate the end time of this buffer so we can stop playback if it's
// after the end time of this song.
if (instance->end_offset_nanosec_ > 0) {
quint64 start_time = GST_BUFFER_TIMESTAMP(buf) - instance->segment_start_;
quint64 duration = GST_BUFFER_DURATION(buf);
quint64 end_time = start_time + duration;
if (end_time > instance->end_offset_nanosec_) {
if (instance->has_next_valid_url()) {
if (instance->next_url_ == instance->url_ &&
instance->next_beginning_offset_nanosec_ ==
instance->end_offset_nanosec_) {
// The "next" song is actually the next segment of this file - so
// cheat and keep on playing, but just tell the Engine we've moved on.
instance->end_offset_nanosec_ = instance->next_end_offset_nanosec_;
instance->next_url_ = QUrl();
instance->next_beginning_offset_nanosec_ = 0;
instance->next_end_offset_nanosec_ = 0;
// GstEngine will try to seek to the start of the new section, but
// we're already there so ignore it.
instance->ignore_next_seek_ = true;
emit instance->EndOfStreamReached(instance->id(), true);
} else {
// We have a next song but we can't cheat, so move to it normally.
instance->TransitionToNext();
}
} else {
// There's no next song
emit instance->EndOfStreamReached(instance->id(), false);
}
}
}
if (instance->emit_track_ended_on_time_discontinuity_) {
if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT) ||
GST_BUFFER_OFFSET(buf) < instance->last_buffer_offset_) {
qLog(Debug) << "Buffer discontinuity - emitting EOS";
instance->emit_track_ended_on_time_discontinuity_ = false;
emit instance->EndOfStreamReached(instance->id(), true);
}
}
instance->last_buffer_offset_ = GST_BUFFER_OFFSET(buf);
return GST_PAD_PROBE_OK;
}
Example 13: gst_rtp_amr_pay_handle_buffer
static GstFlowReturn
gst_rtp_amr_pay_handle_buffer (GstBaseRTPPayload * basepayload,
GstBuffer * buffer)
{
GstRtpAMRPay *rtpamrpay;
GstFlowReturn ret;
guint size, payload_len;
GstBuffer *outbuf;
guint8 *payload, *data, *payload_amr;
GstClockTime timestamp, duration;
guint packet_len, mtu;
gint i, num_packets, num_nonempty_packets;
gint amr_len;
gint *frame_size;
rtpamrpay = GST_RTP_AMR_PAY (basepayload);
mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpamrpay);
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
/* setup frame size pointer */
if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)
frame_size = nb_frame_size;
else
frame_size = wb_frame_size;
GST_DEBUG_OBJECT (basepayload, "got %d bytes", size);
/* FIXME, only
* octet aligned, no interleaving, single channel, no CRC,
* no robust-sorting. To fix this you need to implement the downstream
* negotiation function. */
/* first count number of packets and total amr frame size */
amr_len = num_packets = num_nonempty_packets = 0;
for (i = 0; i < size; i++) {
guint8 FT;
gint fr_size;
FT = (data[i] & 0x78) >> 3;
fr_size = frame_size[FT];
GST_DEBUG_OBJECT (basepayload, "frame size %d", fr_size);
/* FIXME, we don't handle this yet.. */
if (fr_size <= 0)
goto wrong_size;
amr_len += fr_size;
num_nonempty_packets++;
num_packets++;
i += fr_size;
}
if (amr_len > size)
goto incomplete_frame;
/* we need one extra byte for the CMR, the ToC is in the input
* data */
payload_len = size + 1;
/* get packet len to check against MTU */
packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
if (packet_len > mtu)
goto too_big;
/* now alloc output buffer */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* copy timestamp */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
/* FIXME: when we do more than one AMR frame per packet, fix this */
if (duration != GST_CLOCK_TIME_NONE)
GST_BUFFER_DURATION (outbuf) = duration;
else {
GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;
}
if (GST_BUFFER_IS_DISCONT (buffer)) {
GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
gst_rtp_buffer_set_marker (outbuf, TRUE);
}
/* get payload, this is now writable */
payload = gst_rtp_buffer_get_payload (outbuf);
/* 0 1 2 3 4 5 6 7
* +-+-+-+-+-+-+-+-+
* | CMR |R|R|R|R|
* +-+-+-+-+-+-+-+-+
*/
payload[0] = 0xF0; /* CMR, no specific mode requested */
/* this is where we copy the AMR data, after num_packets FTs and the
* CMR. */
payload_amr = payload + num_packets + 1;
//......... part of this code is omitted here .........
Example 14: gst_amc_audio_dec_handle_frame
static GstFlowReturn
gst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf)
{
GstAmcAudioDec *self;
gint idx;
GstAmcBuffer *buf;
GstAmcBufferInfo buffer_info;
guint offset = 0;
GstClockTime timestamp, duration, timestamp_offset = 0;
GstMapInfo minfo;
memset (&minfo, 0, sizeof (minfo));
self = GST_AMC_AUDIO_DEC (decoder);
GST_DEBUG_OBJECT (self, "Handling frame");
/* Make sure to keep a reference to the input here,
* it can be unreffed from the other thread if
* finish_frame() is called */
if (inbuf)
inbuf = gst_buffer_ref (inbuf);
if (!self->started) {
GST_ERROR_OBJECT (self, "Codec not started yet");
if (inbuf)
gst_buffer_unref (inbuf);
return GST_FLOW_NOT_NEGOTIATED;
}
if (self->eos) {
GST_WARNING_OBJECT (self, "Got frame after EOS");
if (inbuf)
gst_buffer_unref (inbuf);
return GST_FLOW_EOS;
}
if (self->flushing)
goto flushing;
if (self->downstream_flow_ret != GST_FLOW_OK)
goto downstream_error;
if (!inbuf)
return gst_amc_audio_dec_drain (self);
timestamp = GST_BUFFER_PTS (inbuf);
duration = GST_BUFFER_DURATION (inbuf);
gst_buffer_map (inbuf, &minfo, GST_MAP_READ);
while (offset < minfo.size) {
/* Make sure to release the base class stream lock, otherwise
* _loop() can't call _finish_frame() and we might block forever
* because no input buffers are released */
GST_AUDIO_DECODER_STREAM_UNLOCK (self);
/* Wait at most 100ms here, some codecs don't fail dequeueing if
* the codec is flushing, causing deadlocks during shutdown */
idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000);
GST_AUDIO_DECODER_STREAM_LOCK (self);
if (idx < 0) {
if (self->flushing)
goto flushing;
switch (idx) {
case INFO_TRY_AGAIN_LATER:
GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
continue; /* next try */
break;
case G_MININT:
GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
goto dequeue_error;
default:
g_assert_not_reached ();
break;
}
continue;
}
if (idx >= self->n_input_buffers)
goto invalid_buffer_index;
if (self->flushing)
goto flushing;
if (self->downstream_flow_ret != GST_FLOW_OK) {
memset (&buffer_info, 0, sizeof (buffer_info));
gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info);
goto downstream_error;
}
/* Now handle the frame */
/* Copy the buffer content in chunks of size as requested
* by the port */
buf = &self->input_buffers[idx];
memset (&buffer_info, 0, sizeof (buffer_info));
buffer_info.offset = 0;
//......... part of this code is omitted here .........
Example 15: gst_wavpack_enc_push_block
static int
gst_wavpack_enc_push_block (void *id, void *data, int32_t count)
{
GstWavpackEncWriteID *wid = (GstWavpackEncWriteID *) id;
GstWavpackEnc *enc = GST_WAVPACK_ENC (wid->wavpack_enc);
GstFlowReturn *flow;
GstBuffer *buffer;
GstPad *pad;
guchar *block = (guchar *) data;
pad = (wid->correction) ? enc->wvcsrcpad : enc->srcpad;
flow =
(wid->correction) ? &enc->wvcsrcpad_last_return : &enc->
srcpad_last_return;
*flow = gst_pad_alloc_buffer_and_set_caps (pad, GST_BUFFER_OFFSET_NONE,
count, GST_PAD_CAPS (pad), &buffer);
if (*flow != GST_FLOW_OK) {
GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
return FALSE;
}
g_memmove (GST_BUFFER_DATA (buffer), block, count);
if (count > sizeof (WavpackHeader) && memcmp (block, "wvpk", 4) == 0) {
/* if it's a Wavpack block set buffer timestamp and duration, etc */
WavpackHeader wph;
GST_LOG_OBJECT (enc, "got %d bytes of encoded wavpack %sdata",
count, (wid->correction) ? "correction " : "");
gst_wavpack_read_header (&wph, block);
/* Only set when pushing the first buffer again, in that case
* we don't want to delay the buffer or push newsegment events
*/
if (!wid->passthrough) {
/* Only push complete blocks */
if (enc->pending_buffer == NULL) {
enc->pending_buffer = buffer;
enc->pending_offset = wph.block_index;
} else if (enc->pending_offset == wph.block_index) {
enc->pending_buffer = gst_buffer_join (enc->pending_buffer, buffer);
} else {
GST_ERROR ("Got incomplete block, dropping");
gst_buffer_unref (enc->pending_buffer);
enc->pending_buffer = buffer;
enc->pending_offset = wph.block_index;
}
if (!(wph.flags & FINAL_BLOCK))
return TRUE;
buffer = enc->pending_buffer;
enc->pending_buffer = NULL;
enc->pending_offset = 0;
/* if it's the first wavpack block, send a NEW_SEGMENT event */
if (wph.block_index == 0) {
gst_pad_push_event (pad,
gst_event_new_new_segment (FALSE,
1.0, GST_FORMAT_TIME, 0, GST_BUFFER_OFFSET_NONE, 0));
/* save header for later reference, so we can re-send it later on
* EOS with fixed up values for total sample count etc. */
if (enc->first_block == NULL && !wid->correction) {
enc->first_block =
g_memdup (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
enc->first_block_size = GST_BUFFER_SIZE (buffer);
}
}
}
/* set buffer timestamp, duration, offset, offset_end from
* the wavpack header */
GST_BUFFER_TIMESTAMP (buffer) = enc->timestamp_offset +
gst_util_uint64_scale_int (GST_SECOND, wph.block_index,
enc->samplerate);
GST_BUFFER_DURATION (buffer) =
gst_util_uint64_scale_int (GST_SECOND, wph.block_samples,
enc->samplerate);
GST_BUFFER_OFFSET (buffer) = wph.block_index;
GST_BUFFER_OFFSET_END (buffer) = wph.block_index + wph.block_samples;
} else {
/* if it's something else set no timestamp and duration on the buffer */
GST_DEBUG_OBJECT (enc, "got %d bytes of unknown data", count);
GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
}
/* push the buffer and forward errors */
GST_DEBUG_OBJECT (enc, "pushing buffer with %d bytes",
GST_BUFFER_SIZE (buffer));
*flow = gst_pad_push (pad, buffer);
if (*flow != GST_FLOW_OK) {
GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
//......... part of this code is omitted here .........