本文整理汇总了C++中GST_PAD_PARENT函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_PAD_PARENT函数的具体用法?C++ GST_PAD_PARENT怎么用?C++ GST_PAD_PARENT使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GST_PAD_PARENT函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: gst_live_adder_setcaps
/* the first caps we receive on any of the sinkpads will define the caps for all
 * the other sinkpads because we can only mix streams with the same caps.
 * */
static gboolean
gst_live_adder_setcaps (GstPad * pad, GstCaps * caps)
{
  GstLiveAdder *adder;
  GList *pads;
  GstStructure *structure;
  const char *media_type;

  adder = GST_LIVE_ADDER (GST_PAD_PARENT (pad));

  GST_LOG_OBJECT (adder, "setting caps on pad %p,%s to %" GST_PTR_FORMAT, pad,
      GST_PAD_NAME (pad), caps);

  /* FIXME, see if the other pads can accept the format. Also lock the
   * format on the other pads to this new format. */
  GST_OBJECT_LOCK (adder);
  /* force these caps on every other pad of the element so all incoming
   * streams end up in one common format */
  pads = GST_ELEMENT (adder)->pads;
  while (pads) {
    GstPad *otherpad = GST_PAD (pads->data);

    if (otherpad != pad)
      gst_caps_replace (&GST_PAD_CAPS (otherpad), caps);

    pads = g_list_next (pads);
  }

  /* parse caps now */
  structure = gst_caps_get_structure (caps, 0);
  media_type = gst_structure_get_name (structure);
  if (strcmp (media_type, "audio/x-raw-int") == 0) {
    GST_DEBUG_OBJECT (adder, "parse_caps sets adder to format int");
    adder->format = GST_LIVE_ADDER_FORMAT_INT;
    gst_structure_get_int (structure, "width", &adder->width);
    gst_structure_get_int (structure, "depth", &adder->depth);
    gst_structure_get_int (structure, "endianness", &adder->endianness);
    gst_structure_get_boolean (structure, "signed", &adder->is_signed);

    /* only samples in host byte order are supported */
    if (adder->endianness != G_BYTE_ORDER)
      goto not_supported;

    /* pick the mixing function matching sample width and signedness */
    switch (adder->width) {
      case 8:
        adder->func = (adder->is_signed ?
            (GstLiveAdderFunction) add_int8 : (GstLiveAdderFunction) add_uint8);
        break;
      case 16:
        adder->func = (adder->is_signed ?
            (GstLiveAdderFunction) add_int16 : (GstLiveAdderFunction)
            add_uint16);
        break;
      case 32:
        adder->func = (adder->is_signed ?
            (GstLiveAdderFunction) add_int32 : (GstLiveAdderFunction)
            add_uint32);
        break;
      default:
        goto not_supported;
    }
  } else if (strcmp (media_type, "audio/x-raw-float") == 0) {
    GST_DEBUG_OBJECT (adder, "parse_caps sets adder to format float");
    adder->format = GST_LIVE_ADDER_FORMAT_FLOAT;
    gst_structure_get_int (structure, "width", &adder->width);
    /* only single- and double-precision floats are supported */
    switch (adder->width) {
      case 32:
        adder->func = (GstLiveAdderFunction) add_float32;
        break;
      case 64:
        adder->func = (GstLiveAdderFunction) add_float64;
        break;
      default:
        goto not_supported;
    }
  } else {
    goto not_supported;
  }

  gst_structure_get_int (structure, "channels", &adder->channels);
  gst_structure_get_int (structure, "rate", &adder->rate);

  /* precalc bps: bytes per sample frame = sample bytes * channel count */
  adder->bps = (adder->width / 8) * adder->channels;

  GST_OBJECT_UNLOCK (adder);

  return TRUE;

  /* ERRORS */
not_supported:
  {
    GST_OBJECT_UNLOCK (adder);
    GST_DEBUG_OBJECT (adder, "unsupported format set as caps");
    return FALSE;
  }
}
示例2: theora_enc_chain
static GstFlowReturn
theora_enc_chain (GstPad * pad, GstBuffer * buffer)
{
GstTheoraEnc *enc;
ogg_packet op;
GstClockTime timestamp, duration, running_time;
GstFlowReturn ret;
gboolean force_keyframe;
enc = GST_THEORA_ENC (GST_PAD_PARENT (pad));
/* we keep track of two timelines.
* - The timestamps from the incomming buffers, which we copy to the outgoing
* encoded buffers as-is. We need to do this as we simply forward the
* newsegment events.
* - The running_time of the buffers, which we use to construct the granulepos
* in the packets.
*/
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
running_time =
gst_segment_to_running_time (&enc->segment, GST_FORMAT_TIME, timestamp);
if ((gint64) running_time < 0) {
GST_DEBUG_OBJECT (enc, "Dropping buffer, timestamp: %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
GST_OBJECT_LOCK (enc);
if (enc->bitrate_changed) {
long int bitrate = enc->video_bitrate;
th_encode_ctl (enc->encoder, TH_ENCCTL_SET_BITRATE, &bitrate,
sizeof (long int));
enc->bitrate_changed = FALSE;
}
if (enc->quality_changed) {
long int quality = enc->video_quality;
th_encode_ctl (enc->encoder, TH_ENCCTL_SET_QUALITY, &quality,
sizeof (long int));
enc->quality_changed = FALSE;
}
/* see if we need to schedule a keyframe */
force_keyframe = enc->force_keyframe;
enc->force_keyframe = FALSE;
GST_OBJECT_UNLOCK (enc);
if (force_keyframe) {
GstClockTime stream_time;
GstStructure *s;
stream_time = gst_segment_to_stream_time (&enc->segment,
GST_FORMAT_TIME, timestamp);
s = gst_structure_new ("GstForceKeyUnit",
"timestamp", G_TYPE_UINT64, timestamp,
"stream-time", G_TYPE_UINT64, stream_time,
"running-time", G_TYPE_UINT64, running_time, NULL);
theora_enc_force_keyframe (enc);
gst_pad_push_event (enc->srcpad,
gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s));
}
/* make sure we copy the discont flag to the next outgoing buffer when it's
* set on the incomming buffer */
if (GST_BUFFER_IS_DISCONT (buffer)) {
enc->next_discont = TRUE;
}
if (enc->packetno == 0) {
/* no packets written yet, setup headers */
GstCaps *caps;
GstBuffer *buf;
GSList *buffers = NULL;
int result;
enc->granulepos_offset = 0;
enc->timestamp_offset = 0;
GST_DEBUG_OBJECT (enc, "output headers");
/* Theora streams begin with three headers; the initial header (with
most of the codec setup parameters) which is mandated by the Ogg
bitstream spec. The second header holds any comment fields. The
third header holds the bitstream codebook. We merely need to
make the headers, then pass them to libtheora one at a time;
libtheora handles the additional Ogg bitstream constraints */
/* create the remaining theora headers */
th_comment_clear (&enc->comment);
th_comment_init (&enc->comment);
while ((result =
th_encode_flushheader (enc->encoder, &enc->comment, &op)) > 0) {
//.........这里部分代码省略.........
示例3: gst_deinterlace2_chain
static GstFlowReturn
gst_deinterlace2_chain (GstPad * pad, GstBuffer * buf)
{
GstDeinterlace2 *self = NULL;
GstClockTime timestamp;
GstFlowReturn ret = GST_FLOW_OK;
gint fields_required = 0;
gint cur_field_idx = 0;
self = GST_DEINTERLACE2 (GST_PAD_PARENT (pad));
gst_deinterlace2_push_history (self, buf);
buf = NULL;
fields_required = gst_deinterlace_method_get_fields_required (self->method);
/* Not enough fields in the history */
if (self->history_count < fields_required + 1) {
/* TODO: do bob or just forward frame */
GST_DEBUG ("HistoryCount=%d", self->history_count);
return GST_FLOW_OK;
}
while (self->history_count >= fields_required) {
if (self->fields == GST_DEINTERLACE2_ALL)
GST_DEBUG ("All fields");
if (self->fields == GST_DEINTERLACE2_TF)
GST_DEBUG ("Top fields");
if (self->fields == GST_DEINTERLACE2_BF)
GST_DEBUG ("Bottom fields");
cur_field_idx = self->history_count - fields_required;
if ((self->field_history[cur_field_idx].flags == PICTURE_INTERLACED_TOP
&& self->fields == GST_DEINTERLACE2_TF) ||
self->fields == GST_DEINTERLACE2_ALL) {
GST_DEBUG ("deinterlacing top field");
/* create new buffer */
ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
GST_BUFFER_OFFSET_NONE, self->frame_size,
GST_PAD_CAPS (self->srcpad), &self->out_buf);
if (ret != GST_FLOW_OK)
return ret;
/* do magic calculus */
gst_deinterlace_method_deinterlace_frame (self->method, self);
g_assert (self->history_count - 1 -
gst_deinterlace_method_get_latency (self->method) >= 0);
buf =
self->field_history[self->history_count - 1 -
gst_deinterlace_method_get_latency (self->method)].buf;
timestamp = GST_BUFFER_TIMESTAMP (buf);
gst_buffer_unref (gst_deinterlace2_pop_history (self));
GST_BUFFER_TIMESTAMP (self->out_buf) = timestamp;
if (self->fields == GST_DEINTERLACE2_ALL)
GST_BUFFER_DURATION (self->out_buf) = self->field_duration;
else
GST_BUFFER_DURATION (self->out_buf) = 2 * self->field_duration;
ret = gst_pad_push (self->srcpad, self->out_buf);
self->out_buf = NULL;
if (ret != GST_FLOW_OK)
return ret;
}
/* no calculation done: remove excess field */
else if (self->field_history[cur_field_idx].flags ==
PICTURE_INTERLACED_TOP && self->fields == GST_DEINTERLACE2_BF) {
GST_DEBUG ("Removing unused top field");
gst_buffer_unref (gst_deinterlace2_pop_history (self));
}
cur_field_idx = self->history_count - fields_required;
if (self->history_count < fields_required)
break;
/* deinterlace bottom_field */
if ((self->field_history[cur_field_idx].flags == PICTURE_INTERLACED_BOTTOM
&& self->fields == GST_DEINTERLACE2_BF) ||
self->fields == GST_DEINTERLACE2_ALL) {
GST_DEBUG ("deinterlacing bottom field");
/* create new buffer */
ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
GST_BUFFER_OFFSET_NONE, self->frame_size,
GST_PAD_CAPS (self->srcpad), &self->out_buf);
if (ret != GST_FLOW_OK)
return ret;
/* do magic calculus */
gst_deinterlace_method_deinterlace_frame (self->method, self);
g_assert (self->history_count - 1 -
gst_deinterlace_method_get_latency (self->method) >= 0);
buf =
self->field_history[self->history_count - 1 -
gst_deinterlace_method_get_latency (self->method)].buf;
//.........这里部分代码省略.........
示例4: gst_musepackdec_loop
static void
gst_musepackdec_loop (GstPad * sinkpad)
{
GstMusepackDec *musepackdec;
GstFlowReturn flow;
GstBuffer *out;
#ifdef MPC_IS_OLD_API
guint32 update_acc, update_bits;
#else
mpc_frame_info frame;
mpc_status err;
#endif
gint num_samples, samplerate, bitspersample;
musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));
samplerate = g_atomic_int_get (&musepackdec->rate);
if (samplerate == 0) {
if (!gst_musepack_stream_init (musepackdec))
goto pause_task;
gst_musepackdec_send_newsegment (musepackdec);
samplerate = g_atomic_int_get (&musepackdec->rate);
}
bitspersample = g_atomic_int_get (&musepackdec->bps);
flow = gst_pad_alloc_buffer_and_set_caps (musepackdec->srcpad, -1,
MPC_DECODER_BUFFER_LENGTH * 4, GST_PAD_CAPS (musepackdec->srcpad), &out);
if (flow != GST_FLOW_OK) {
GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
goto pause_task;
}
#ifdef MPC_IS_OLD_API
num_samples = mpc_decoder_decode (musepackdec->d,
(MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out), &update_acc, &update_bits);
if (num_samples < 0) {
GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
goto pause_task;
} else if (num_samples == 0) {
goto eos_and_pause;
}
#else
frame.buffer = (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out);
err = mpc_demux_decode (musepackdec->d, &frame);
if (err != MPC_STATUS_OK) {
GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
goto pause_task;
} else if (frame.bits == -1) {
goto eos_and_pause;
}
num_samples = frame.samples;
#endif
GST_BUFFER_SIZE (out) = num_samples * bitspersample;
GST_BUFFER_OFFSET (out) = musepackdec->segment.last_stop;
GST_BUFFER_TIMESTAMP (out) =
gst_util_uint64_scale_int (musepackdec->segment.last_stop,
GST_SECOND, samplerate);
GST_BUFFER_DURATION (out) =
gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);
musepackdec->segment.last_stop += num_samples;
GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));
flow = gst_pad_push (musepackdec->srcpad, out);
if (flow != GST_FLOW_OK) {
GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
goto pause_task;
}
/* check if we're at the end of a configured segment */
if (musepackdec->segment.stop != -1 &&
musepackdec->segment.last_stop >= musepackdec->segment.stop) {
gint64 stop_time;
GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");
if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
goto eos_and_pause;
GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");
stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
GST_SECOND, samplerate);
gst_element_post_message (GST_ELEMENT (musepackdec),
gst_message_new_segment_done (GST_OBJECT (musepackdec),
GST_FORMAT_TIME, stop_time));
//.........这里部分代码省略.........
示例5: gst_type_find_element_loop
static void
gst_type_find_element_loop (GstPad * pad)
{
GstTypeFindElement *typefind;
GstFlowReturn ret = GST_FLOW_OK;
typefind = GST_TYPE_FIND_ELEMENT (GST_PAD_PARENT (pad));
if (typefind->mode == MODE_TYPEFIND) {
GstPad *peer;
GstCaps *found_caps = NULL;
GstTypeFindProbability probability = GST_TYPE_FIND_NONE;
GST_DEBUG_OBJECT (typefind, "find type in pull mode");
peer = gst_pad_get_peer (pad);
if (peer) {
gint64 size;
gchar *ext;
if (!gst_pad_query_duration (peer, GST_FORMAT_BYTES, &size)) {
GST_WARNING_OBJECT (typefind, "Could not query upstream length!");
gst_object_unref (peer);
ret = GST_FLOW_ERROR;
goto pause;
}
/* the size if 0, we cannot continue */
if (size == 0) {
/* keep message in sync with message in sink event handler */
GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND,
(_("Stream contains no data.")), ("Can't typefind empty stream"));
gst_object_unref (peer);
ret = GST_FLOW_ERROR;
goto pause;
}
ext = gst_type_find_get_extension (typefind, pad);
found_caps =
gst_type_find_helper_get_range (GST_OBJECT_CAST (peer),
GST_OBJECT_PARENT (peer),
(GstTypeFindHelperGetRangeFunction) (GST_PAD_GETRANGEFUNC (peer)),
(guint64) size, ext, &probability);
g_free (ext);
GST_DEBUG ("Found caps %" GST_PTR_FORMAT, found_caps);
gst_object_unref (peer);
}
if (!found_caps || probability < typefind->min_probability) {
GST_DEBUG ("Trying to guess using extension");
gst_caps_replace (&found_caps, NULL);
found_caps =
gst_type_find_guess_by_extension (typefind, pad, &probability);
}
if (!found_caps || probability < typefind->min_probability) {
GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (NULL), (NULL));
gst_caps_replace (&found_caps, NULL);
ret = GST_FLOW_ERROR;
goto pause;
}
GST_DEBUG ("Emiting found caps %" GST_PTR_FORMAT, found_caps);
g_signal_emit (typefind, gst_type_find_element_signals[HAVE_TYPE],
0, probability, found_caps);
typefind->mode = MODE_NORMAL;
gst_caps_unref (found_caps);
} else if (typefind->mode == MODE_NORMAL) {
GstBuffer *outbuf = NULL;
if (typefind->need_stream_start) {
gchar *stream_id;
stream_id =
gst_pad_create_stream_id (typefind->src, GST_ELEMENT_CAST (typefind),
NULL);
GST_DEBUG_OBJECT (typefind, "Pushing STREAM_START");
gst_pad_push_event (typefind->src,
gst_event_new_stream_start (stream_id));
typefind->need_stream_start = FALSE;
g_free (stream_id);
}
if (typefind->need_segment) {
typefind->need_segment = FALSE;
gst_pad_push_event (typefind->src,
gst_event_new_segment (&typefind->segment));
}
/* Pull 4k blocks and send downstream */
ret = gst_pad_pull_range (typefind->sink, typefind->offset, 4096, &outbuf);
if (ret != GST_FLOW_OK)
goto pause;
typefind->offset += 4096;
//.........这里部分代码省略.........
示例6: gst_shape_wipe_video_sink_chain
/* Chain function for the video sink pad: blends the incoming video buffer
 * with the current mask buffer and pushes the result downstream.
 * Takes ownership of @buffer.  Returns GST_FLOW_NOT_NEGOTIATED before caps
 * are set, GST_FLOW_UNEXPECTED when no mask becomes available, or the flow
 * result of the allocation/blend/push otherwise. */
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;

  /* cannot blend anything until caps negotiation fixed the video format */
  if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
    return GST_FLOW_NOT_NEGOTIATED;

  /* sync controlled properties to the stream time of this buffer */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (self), timestamp);

  GST_DEBUG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %lf",
      GST_TIME_ARGS (timestamp), self->mask_position);

  /* wait until a mask buffer arrives on the mask pad; being woken with no
   * mask set is treated as end-of-stream (NOTE(review): presumably the cond
   * is signalled on shutdown/flush — confirm against the mask pad code) */
  g_mutex_lock (self->mask_mutex);
  if (!self->mask)
    g_cond_wait (self->mask_cond, self->mask_mutex);

  if (self->mask == NULL) {
    g_mutex_unlock (self->mask_mutex);
    gst_buffer_unref (buffer);
    return GST_FLOW_UNEXPECTED;
  } else {
    /* take our own reference so the shared mask can be replaced meanwhile */
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (self->mask_mutex);

  /* QoS: drop this frame if we are running too late */
  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer))) {
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream.
   */
  if (!gst_buffer_is_writable (buffer)) {
    ret =
        gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE,
        GST_BUFFER_SIZE (buffer), GST_PAD_CAPS (self->srcpad), &outbuf);
    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      gst_buffer_unref (buffer);
      gst_buffer_unref (mask);
      return ret;
    }
    gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  /* dispatch on negotiated video format and mask bit depth (8 or 16 bpp) */
  if (self->fmt == GST_VIDEO_FORMAT_AYUV && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_ayuv_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_AYUV)
    ret = gst_shape_wipe_blend_ayuv_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB)
    ret = gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA)
    ret = gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
  else
    g_assert_not_reached ();

  /* drop our references: mask always, and the input buffer only when we
   * blended into a separately allocated output buffer */
  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    gst_buffer_unref (outbuf);
    return ret;
  }

  ret = gst_pad_push (self->srcpad, outbuf);

  return ret;
}
示例7: gst_real_audio_dec_setcaps
static gboolean
gst_real_audio_dec_setcaps (GstPad * pad, GstCaps * caps)
{
GstRealAudioDec *dec = GST_REAL_AUDIO_DEC (GST_PAD_PARENT (pad));
GstStructure *s = gst_caps_get_structure (caps, 0);
gint version, flavor, channels, rate, leaf_size, packet_size, width, height;
guint16 res = 0;
RAInit data;
gboolean bres;
const GValue *v;
GstBuffer *buf = NULL;
const gchar *name = gst_structure_get_name (s);
if (!strcmp (name, "audio/x-sipro")) {
version = GST_REAL_AUDIO_DEC_VERSION_SIPR;
} else {
if (!gst_structure_get_int (s, "raversion", &version))
goto missing_keys;
}
if (!gst_structure_get_int (s, "flavor", &flavor) ||
!gst_structure_get_int (s, "channels", &channels) ||
!gst_structure_get_int (s, "width", &width) ||
!gst_structure_get_int (s, "rate", &rate) ||
!gst_structure_get_int (s, "height", &height) ||
!gst_structure_get_int (s, "leaf_size", &leaf_size) ||
!gst_structure_get_int (s, "packet_size", &packet_size))
goto missing_keys;
if ((v = gst_structure_get_value (s, "codec_data")))
buf = g_value_peek_pointer (v);
GST_LOG_OBJECT (dec, "opening code for version %d", version);
/* first close existing decoder */
close_library (dec, &dec->lib);
if (!open_library (dec, version, &dec->lib))
goto could_not_open;
/* we have the module, no initialize with the caps data */
data.samplerate = rate;
data.width = width;
data.channels = channels;
data.quality = 100;
data.leaf_size = leaf_size;
data.packet_size = packet_size;
data.datalen = buf ? GST_BUFFER_SIZE (buf) : 0;
data.data = buf ? GST_BUFFER_DATA (buf) : NULL;
if ((res = dec->lib.RAInitDecoder (dec->lib.context, &data))) {
GST_WARNING_OBJECT (dec, "RAInitDecoder() failed");
goto could_not_initialize;
}
if (dec->lib.RASetPwd) {
dec->lib.RASetPwd (dec->lib.context, dec->pwd ? dec->pwd : DEFAULT_PWD);
}
if ((res = dec->lib.RASetFlavor (dec->lib.context, flavor))) {
GST_WARNING_OBJECT (dec, "RASetFlavor(%d) failed", flavor);
goto could_not_initialize;
}
caps = gst_caps_new_simple ("audio/x-raw-int",
"endianness", G_TYPE_INT, G_BYTE_ORDER,
"width", G_TYPE_INT, width,
"depth", G_TYPE_INT, width,
"rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, channels, "signed", G_TYPE_BOOLEAN, TRUE, NULL);
bres = gst_pad_set_caps (GST_PAD (dec->src), caps);
gst_caps_unref (caps);
if (!bres)
goto could_not_set_caps;
dec->width = width;
dec->height = height;
dec->leaf_size = leaf_size;
GST_LOG_OBJECT (dec, "opened module");
return TRUE;
missing_keys:
{
GST_DEBUG_OBJECT (dec, "Could not find all necessary keys in structure.");
return FALSE;
}
could_not_open:
{
GST_DEBUG_OBJECT (dec, "Could not find decoder");
return FALSE;
}
could_not_initialize:
{
close_library (dec, &dec->lib);
GST_WARNING_OBJECT (dec, "Initialization of REAL driver failed (%i).", res);
return FALSE;
}
could_not_set_caps:
//.........这里部分代码省略.........
示例8: gst_image_freeze_src_loop
static void
gst_image_freeze_src_loop (GstPad * pad)
{
GstImageFreeze *self = GST_IMAGE_FREEZE (GST_PAD_PARENT (pad));
GstBuffer *buffer;
guint64 offset;
GstClockTime timestamp, timestamp_end;
guint64 cstart, cstop;
gboolean in_seg, eos;
GstFlowReturn flow_ret = GST_FLOW_OK;
g_mutex_lock (&self->lock);
if (!gst_pad_has_current_caps (self->srcpad)) {
GST_ERROR_OBJECT (pad, "Not negotiated yet");
flow_ret = GST_FLOW_NOT_NEGOTIATED;
g_mutex_unlock (&self->lock);
goto pause_task;
}
if (!self->buffer) {
GST_ERROR_OBJECT (pad, "Have no buffer yet");
flow_ret = GST_FLOW_ERROR;
g_mutex_unlock (&self->lock);
goto pause_task;
}
buffer = gst_buffer_ref (self->buffer);
buffer = gst_buffer_make_writable (buffer);
g_mutex_unlock (&self->lock);
if (self->need_segment) {
GstEvent *e;
GST_DEBUG_OBJECT (pad, "Pushing SEGMENT event: %" GST_SEGMENT_FORMAT,
&self->segment);
e = gst_event_new_segment (&self->segment);
g_mutex_lock (&self->lock);
if (self->segment.rate >= 0) {
self->offset =
gst_util_uint64_scale (self->segment.start, self->fps_n,
self->fps_d * GST_SECOND);
} else {
self->offset =
gst_util_uint64_scale (self->segment.stop, self->fps_n,
self->fps_d * GST_SECOND);
}
g_mutex_unlock (&self->lock);
self->need_segment = FALSE;
gst_pad_push_event (self->srcpad, e);
}
g_mutex_lock (&self->lock);
offset = self->offset;
if (self->fps_n != 0) {
timestamp =
gst_util_uint64_scale (offset, self->fps_d * GST_SECOND, self->fps_n);
timestamp_end =
gst_util_uint64_scale (offset + 1, self->fps_d * GST_SECOND,
self->fps_n);
} else {
timestamp = self->segment.start;
timestamp_end = GST_CLOCK_TIME_NONE;
}
eos = (self->fps_n == 0 && offset > 0) ||
(self->segment.rate >= 0 && self->segment.stop != -1
&& timestamp > self->segment.stop) || (self->segment.rate < 0
&& offset == 0) || (self->segment.rate < 0
&& self->segment.start != -1 && timestamp_end < self->segment.start);
if (self->fps_n == 0 && offset > 0)
in_seg = FALSE;
else
in_seg =
gst_segment_clip (&self->segment, GST_FORMAT_TIME, timestamp,
timestamp_end, &cstart, &cstop);
if (in_seg) {
self->segment.position = cstart;
if (self->segment.rate >= 0)
self->segment.position = cstop;
}
if (self->segment.rate >= 0)
self->offset++;
else
self->offset--;
g_mutex_unlock (&self->lock);
GST_DEBUG_OBJECT (pad, "Handling buffer with timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (in_seg) {
GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_PTS (buffer) = cstart;
GST_BUFFER_DURATION (buffer) = cstop - cstart;
GST_BUFFER_OFFSET (buffer) = offset;
//.........这里部分代码省略.........
示例9: gst_speed_convert
/* Convert @src_value from @src_format into *@dest_format, writing the result
 * to *@dest_value.  Supported formats: BYTES, DEFAULT (samples) and TIME.
 * Returns TRUE on success, FALSE when the conversion is unsupported or the
 * element state (sample size / rate) does not allow it yet. */
static gboolean
gst_speed_convert (GstPad * pad, GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  GstSpeed *filter;
  gboolean res = TRUE;

  /* identity conversion needs no element state at all */
  if (src_format == *dest_format) {
    *dest_value = src_value;
    return TRUE;
  }

  filter = GST_SPEED (GST_PAD_PARENT (pad));

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          /* bytes -> samples is undefined without a known sample size */
          if (filter->sample_size == 0) {
            res = FALSE;
            break;
          }
          *dest_value = src_value / filter->sample_size;
          break;
        case GST_FORMAT_TIME:
        {
          gint bytes_per_second = filter->sample_size * filter->rate;

          if (bytes_per_second == 0) {
            res = FALSE;
            break;
          }
          *dest_value = src_value * GST_SECOND / bytes_per_second;
          break;
        }
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * filter->sample_size;
          break;
        case GST_FORMAT_TIME:
          /* samples -> time needs a valid sample rate */
          if (filter->rate == 0) {
            res = FALSE;
            break;
          }
          *dest_value = src_value * GST_SECOND / filter->rate;
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * filter->sample_size * filter->rate /
              GST_SECOND;
          break;
        case GST_FORMAT_DEFAULT:
          *dest_value = src_value * filter->rate / GST_SECOND;
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      res = FALSE;
  }

  return res;
}
示例10: gst_mulawdec_chain
/* Decode one mu-law buffer into 16-bit linear PCM and push it downstream.
 *
 * @pad:    the sink pad the buffer arrived on.
 * @buffer: the incoming mu-law buffer; ownership is taken (unreffed on every
 *          path).
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED if caps were never
 * set (rate == 0), or the flow return of the failed allocation/push.
 */
static GstFlowReturn
gst_mulawdec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstMuLawDec *mulawdec;
  gint16 *linear_data;
  guint8 *mulaw_data;
  guint mulaw_size;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  mulawdec = GST_MULAWDEC (GST_PAD_PARENT (pad));

  /* rate is only set once caps have been negotiated */
  if (G_UNLIKELY (mulawdec->rate == 0))
    goto not_negotiated;

  mulaw_data = (guint8 *) GST_BUFFER_DATA (buffer);
  mulaw_size = GST_BUFFER_SIZE (buffer);

  /* each 8-bit mu-law sample expands to one 16-bit linear sample */
  ret =
      gst_pad_alloc_buffer_and_set_caps (mulawdec->srcpad,
      GST_BUFFER_OFFSET_NONE, mulaw_size * 2, GST_PAD_CAPS (mulawdec->srcpad),
      &outbuf);
  if (ret != GST_FLOW_OK)
    goto alloc_failed;

  linear_data = (gint16 *) GST_BUFFER_DATA (outbuf);

  /* copy discont flag */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
  /* FIX: check the *input* buffer's duration.  The old code tested the
   * freshly allocated outbuf, whose duration is never set and is therefore
   * always GST_CLOCK_TIME_NONE, so upstream's duration was never propagated
   * and we always recomputed it from the sample count. */
  if (GST_BUFFER_DURATION (buffer) == GST_CLOCK_TIME_NONE)
    GST_BUFFER_DURATION (outbuf) = gst_util_uint64_scale_int (GST_SECOND,
        mulaw_size * 2, 2 * mulawdec->rate * mulawdec->channels);
  else
    GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (mulawdec->srcpad));

  /* the actual mu-law -> linear conversion */
  mulaw_decode (mulaw_data, linear_data, mulaw_size);

  gst_buffer_unref (buffer);

  ret = gst_pad_push (mulawdec->srcpad, outbuf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (mulawdec, "no input format set: not-negotiated");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
alloc_failed:
  {
    GST_DEBUG_OBJECT (mulawdec, "pad alloc failed, flow: %s",
        gst_flow_get_name (ret));
    gst_buffer_unref (buffer);
    return ret;
  }
}
示例11: gst_midi_parse_loop
/* Streaming task for the sink pad: a small state machine that first pulls
 * the whole file into the adapter (LOAD), then parses it (PARSE) and
 * finally plays it back (PLAY).  On any failure the task is paused and,
 * where appropriate, EOS and/or an error message are emitted. */
static void
gst_midi_parse_loop (GstPad * sinkpad)
{
  GstMidiParse *midiparse = GST_MIDI_PARSE (GST_PAD_PARENT (sinkpad));
  GstFlowReturn ret;

  if (midiparse->state == GST_MIDI_PARSE_STATE_LOAD) {
    /* pull the next chunk of the file into the adapter */
    GstBuffer *chunk = NULL;

    GST_DEBUG_OBJECT (midiparse, "loading song");

    ret =
        gst_pad_pull_range (midiparse->sinkpad, midiparse->offset, -1, &chunk);

    if (ret == GST_FLOW_EOS) {
      /* whole file consumed, move on to parsing */
      GST_DEBUG_OBJECT (midiparse, "Song loaded");
      midiparse->state = GST_MIDI_PARSE_STATE_PARSE;
    } else if (ret != GST_FLOW_OK) {
      GST_ELEMENT_ERROR (midiparse, STREAM, DECODE, (NULL),
          ("Unable to read song"));
      goto pause;
    } else {
      GST_DEBUG_OBJECT (midiparse, "pushing buffer");
      gst_adapter_push (midiparse->adapter, chunk);
      midiparse->offset += gst_buffer_get_size (chunk);
    }
  } else if (midiparse->state == GST_MIDI_PARSE_STATE_PARSE) {
    ret = gst_midi_parse_parse_song (midiparse);
    if (ret != GST_FLOW_OK)
      goto pause;
    midiparse->state = GST_MIDI_PARSE_STATE_PLAY;
  } else if (midiparse->state == GST_MIDI_PARSE_STATE_PLAY) {
    ret = gst_midi_parse_do_play (midiparse);
    if (ret != GST_FLOW_OK)
      goto pause;
  }

  return;

  /* the task cannot continue: pause it and report why */
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    GstEvent *event;

    GST_DEBUG_OBJECT (midiparse, "pausing task, reason %s", reason);
    gst_pad_pause_task (sinkpad);

    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic */
      event = gst_event_new_eos ();
      gst_pad_push_event (midiparse->srcpad, event);
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      event = gst_event_new_eos ();
      /* for fatal errors we post an error message, post the error
       * first so the app knows about the error first. */
      GST_ELEMENT_ERROR (midiparse, STREAM, FAILED,
          ("Internal data flow error."),
          ("streaming task paused, reason %s (%d)", reason, ret));
      gst_pad_push_event (midiparse->srcpad, event);
    }
  }
}
示例12: gst_adder_sink_getcaps
/* we can only accept caps that we and downstream can handle.
 * if we have filtercaps set, use those to constrain the target caps.
 */
static GstCaps *
gst_adder_sink_getcaps (GstPad * pad, GstCaps * filter)
{
  GstAdder *adder;
  GstCaps *result, *peercaps, *current_caps, *filter_caps;
  GstStructure *s;
  gint i, n;

  adder = GST_ADDER (GST_PAD_PARENT (pad));

  GST_OBJECT_LOCK (adder);
  /* take filter: combine the element's filter-caps property with the
   * caller-supplied query filter; filter_caps always holds its own ref
   * from here on (or is NULL) */
  if ((filter_caps = adder->filter_caps)) {
    if (filter)
      filter_caps =
          gst_caps_intersect_full (filter, filter_caps,
          GST_CAPS_INTERSECT_FIRST);
    else
      gst_caps_ref (filter_caps);
  } else {
    filter_caps = filter ? gst_caps_ref (filter) : NULL;
  }
  GST_OBJECT_UNLOCK (adder);

  /* an empty intersection means nothing can be accepted at all */
  if (filter_caps && gst_caps_is_empty (filter_caps)) {
    GST_WARNING_OBJECT (pad, "Empty filter caps");
    return filter_caps;
  }

  /* get the downstream possible caps */
  peercaps = gst_pad_peer_query_caps (adder->srcpad, filter_caps);

  /* get the allowed caps on this sinkpad: the already negotiated caps if
   * any, otherwise the pad template caps (or ANY as a last resort) */
  GST_OBJECT_LOCK (adder);
  current_caps =
      adder->current_caps ? gst_caps_ref (adder->current_caps) : NULL;
  if (current_caps == NULL) {
    current_caps = gst_pad_get_pad_template_caps (pad);
    if (!current_caps)
      current_caps = gst_caps_new_any ();
  }
  GST_OBJECT_UNLOCK (adder);

  if (peercaps) {
    /* if the peer has caps, intersect */
    GST_DEBUG_OBJECT (adder, "intersecting peer and our caps");
    result =
        gst_caps_intersect_full (peercaps, current_caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (peercaps);
    gst_caps_unref (current_caps);
  } else {
    /* the peer has no caps (or there is no peer), just use the allowed caps
     * of this sinkpad. */
    /* restrict with filter-caps if any */
    if (filter_caps) {
      GST_DEBUG_OBJECT (adder, "no peer caps, using filtered caps");
      result =
          gst_caps_intersect_full (filter_caps, current_caps,
          GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (current_caps);
    } else {
      GST_DEBUG_OBJECT (adder, "no peer caps, using our caps");
      result = current_caps;
    }
  }

  /* drop the channel-mask field from any structure that only allows mono
   * or stereo, where the mask carries no information */
  result = gst_caps_make_writable (result);
  n = gst_caps_get_size (result);
  for (i = 0; i < n; i++) {
    GstStructure *sref;

    s = gst_caps_get_structure (result, i);
    sref = gst_structure_copy (s);
    gst_structure_set (sref, "channels", GST_TYPE_INT_RANGE, 0, 2, NULL);
    if (gst_structure_is_subset (s, sref)) {
      /* This field is irrelevant when in mono or stereo */
      gst_structure_remove_field (s, "channel-mask");
    }
    gst_structure_free (sref);
  }

  if (filter_caps)
    gst_caps_unref (filter_caps);

  GST_LOG_OBJECT (adder, "getting caps on pad %p,%s to %" GST_PTR_FORMAT, pad,
      GST_PAD_NAME (pad), result);

  return result;
}
示例13: gst_xvidenc_setcaps
static gboolean
gst_xvidenc_setcaps (GstPad * pad, GstCaps * vscaps)
{
GstXvidEnc *xvidenc;
GstStructure *structure;
gint w, h;
const GValue *fps, *par;
gint xvid_cs = -1;
xvidenc = GST_XVIDENC (GST_PAD_PARENT (pad));
/* if there's something old around, remove it */
if (xvidenc->handle) {
gst_xvidenc_flush_buffers (xvidenc, TRUE);
xvid_encore (xvidenc->handle, XVID_ENC_DESTROY, NULL, NULL);
xvidenc->handle = NULL;
}
structure = gst_caps_get_structure (vscaps, 0);
if (!gst_structure_get_int (structure, "width", &w) ||
!gst_structure_get_int (structure, "height", &h)) {
return FALSE;
}
fps = gst_structure_get_value (structure, "framerate");
if (fps == NULL || !GST_VALUE_HOLDS_FRACTION (fps)) {
GST_WARNING_OBJECT (pad, "no framerate specified, or not a GstFraction");
return FALSE;
}
/* optional par info */
par = gst_structure_get_value (structure, "pixel-aspect-ratio");
xvid_cs = gst_xvid_structure_to_csp (structure);
if (xvid_cs == -1) {
gchar *sstr;
sstr = gst_structure_to_string (structure);
GST_DEBUG_OBJECT (xvidenc, "Did not find xvid colourspace for caps %s",
sstr);
g_free (sstr);
return FALSE;
}
xvidenc->csp = xvid_cs;
xvidenc->width = w;
xvidenc->height = h;
xvidenc->fbase = gst_value_get_fraction_numerator (fps);
xvidenc->fincr = gst_value_get_fraction_denominator (fps);
if ((par != NULL) && GST_VALUE_HOLDS_FRACTION (par)) {
xvidenc->par_width = gst_value_get_fraction_numerator (par);
xvidenc->par_height = gst_value_get_fraction_denominator (par);
} else {
xvidenc->par_width = 1;
xvidenc->par_height = 1;
}
/* wipe xframe cache given possible change caps properties */
g_free (xvidenc->xframe_cache);
xvidenc->xframe_cache = NULL;
if (gst_xvidenc_setup (xvidenc)) {
gboolean ret = FALSE;
GstCaps *new_caps = NULL, *allowed_caps;
/* please downstream with preferred caps */
allowed_caps = gst_pad_get_allowed_caps (xvidenc->srcpad);
GST_DEBUG_OBJECT (xvidenc, "allowed caps: %" GST_PTR_FORMAT, allowed_caps);
if (allowed_caps && !gst_caps_is_empty (allowed_caps)) {
new_caps = gst_caps_copy_nth (allowed_caps, 0);
} else {
new_caps = gst_caps_new_simple ("video/x-xvid", NULL);
}
if (allowed_caps)
gst_caps_unref (allowed_caps);
gst_caps_set_simple (new_caps,
"width", G_TYPE_INT, w, "height", G_TYPE_INT, h,
"framerate", GST_TYPE_FRACTION, xvidenc->fbase, xvidenc->fincr,
"pixel-aspect-ratio", GST_TYPE_FRACTION,
xvidenc->par_width, xvidenc->par_height, NULL);
/* just to be sure */
gst_pad_fixate_caps (xvidenc->srcpad, new_caps);
if (xvidenc->used_profile != 0) {
switch (xvidenc->used_profile) {
case XVID_PROFILE_S_L0:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "0", NULL);
break;
case XVID_PROFILE_S_L1:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "1", NULL);
break;
case XVID_PROFILE_S_L2:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "2", NULL);
break;
/* ......... remainder of this example omitted ......... */
/* Example 14: gst_xvidenc_chain */
static GstFlowReturn
gst_xvidenc_chain (GstPad * pad, GstBuffer * buf)
{
GstXvidEnc *xvidenc = GST_XVIDENC (GST_PAD_PARENT (pad));
GstBuffer *outbuf;
xvid_enc_frame_t xframe;
const gint motion_presets[] = {
0, 0, 0, 0,
XVID_ME_HALFPELREFINE16,
XVID_ME_HALFPELREFINE16 | XVID_ME_ADVANCEDDIAMOND16,
XVID_ME_HALFPELREFINE16 | XVID_ME_EXTSEARCH16
| XVID_ME_HALFPELREFINE8 | XVID_ME_USESQUARES16
};
if (!xvidenc->handle) {
GST_ELEMENT_ERROR (xvidenc, CORE, NEGOTIATION, (NULL),
("format wasn't negotiated before chain function"));
gst_buffer_unref (buf);
return GST_FLOW_NOT_NEGOTIATED;
}
GST_DEBUG_OBJECT (xvidenc,
"Received buffer of time %" GST_TIME_FORMAT ", size %d",
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_BUFFER_SIZE (buf));
if (xvidenc->xframe_cache)
memcpy (&xframe, xvidenc->xframe_cache, sizeof (xframe));
else { /* need to build some inital xframe to be cached */
/* encode and so ... */
gst_xvid_init_struct (xframe);
if (xvidenc->par_width == xvidenc->par_height)
xframe.par = XVID_PAR_11_VGA;
else {
xframe.par = XVID_PAR_EXT;
xframe.par_width = xvidenc->par_width;
xframe.par_height = xvidenc->par_height;
}
/* handle options */
xframe.vol_flags |= xvidenc->quant_type;
xframe.vop_flags = XVID_VOP_HALFPEL;
xframe.motion = motion_presets[xvidenc->motion];
if (xvidenc->me_chroma) {
xframe.motion |= XVID_ME_CHROMA_PVOP;
xframe.motion |= XVID_ME_CHROMA_BVOP;
}
if (xvidenc->me_vhq >= 1) {
xframe.vop_flags |= XVID_VOP_MODEDECISION_RD;
}
if (xvidenc->me_vhq >= 2) {
xframe.motion |= XVID_ME_HALFPELREFINE16_RD;
xframe.motion |= XVID_ME_QUARTERPELREFINE16_RD;
}
if (xvidenc->me_vhq >= 3) {
xframe.motion |= XVID_ME_HALFPELREFINE8_RD;
xframe.motion |= XVID_ME_QUARTERPELREFINE8_RD;
xframe.motion |= XVID_ME_CHECKPREDICTION_RD;
}
if (xvidenc->me_vhq >= 4) {
xframe.motion |= XVID_ME_EXTSEARCH_RD;
}
/* no motion estimation, then only intra */
if (xvidenc->motion == 0) {
xframe.type = XVID_TYPE_IVOP;
} else {
xframe.type = XVID_TYPE_AUTO;
}
if (xvidenc->motion > 4) {
xframe.vop_flags |= XVID_VOP_INTER4V;
}
if (xvidenc->me_quarterpel) {
xframe.vol_flags |= XVID_VOL_QUARTERPEL;
xframe.motion |= XVID_ME_QUARTERPELREFINE16;
xframe.motion |= XVID_ME_QUARTERPELREFINE8;
}
if (xvidenc->gmc) {
xframe.vol_flags |= XVID_VOL_GMC;
xframe.motion |= XVID_ME_GME_REFINE;
}
if (xvidenc->interlaced) {
xframe.vol_flags |= XVID_VOL_INTERLACING;
}
if (xvidenc->trellis) {
xframe.vop_flags |= XVID_VOP_TRELLISQUANT;
}
if (xvidenc->hqacpred) {
xframe.vop_flags |= XVID_VOP_HQACPRED;
}
/* ......... remainder of this example omitted ......... */
/* Example 15: mfw_gst_vpuenc_init_encoder */
static int mfw_gst_vpuenc_init_encoder(GstPad *pad, enum v4l2_memory memory)
{
GstVPU_Enc *vpu_enc = MFW_GST_VPU_ENC(GST_PAD_PARENT(pad));
gchar *mime = "undef";
gint ret;
GstCaps *caps = NULL;
struct v4l2_format fmt;
int retval, i;
if (!vpu_enc->codecTypeProvided) {
GST_ERROR("Incomplete command line.\n");
GError *error = NULL;
GQuark domain = g_quark_from_string("mfw_vpuencoder");
error = g_error_new(domain, 10, "fatal error");
gst_element_post_message(GST_ELEMENT(vpu_enc),
gst_message_new_error
(GST_OBJECT(vpu_enc), error,
"Incomplete command line - codec type was not provided."));
return GST_FLOW_ERROR;
}
fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
fmt.fmt.pix.width = vpu_enc->width;
fmt.fmt.pix.height = vpu_enc->height;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420;
retval = ioctl(vpu_enc->vpu_fd, VIDIOC_S_FMT, &fmt);
if (retval) {
printf("VIDIOC_S_FMT failed: %s\n", strerror(errno));
return GST_FLOW_ERROR;
}
reqs.memory = memory;
retval = ioctl(vpu_enc->vpu_fd, VIDIOC_REQBUFS, &reqs);
if (retval) {
perror("VIDIOC_REQBUFS");
return GST_FLOW_ERROR;
}
retval = ioctl(vpu_enc->vpu_fd, VPU_IOC_CODEC, vpu_enc->codec);
if (retval) {
perror("VPU_IOC_CODEC");
return GST_FLOW_ERROR;
}
retval = ioctl(vpu_enc->vpu_fd, VPU_IOC_MJPEG_QUALITY, vpu_enc->mjpeg_quality);
if (retval) {
perror("VPU_IOC_MJPEG_QUALITY");
return GST_FLOW_ERROR;
}
for (i = 0; i < NUM_BUFFERS; i++) {
struct v4l2_buffer *buf = &vpu_enc->buf_v4l2[i];
buf->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
buf->memory = memory;
buf->index = i;
if (memory == V4L2_MEMORY_MMAP) {
retval = ioctl(vpu_enc->vpu_fd, VIDIOC_QUERYBUF, buf);
if (retval) {
GST_ERROR("VIDIOC_QUERYBUF failed: %s\n", strerror(errno));
return GST_FLOW_ERROR;
}
vpu_enc->buf_size[i] = buf->length;
vpu_enc->buf_data[i] = mmap(NULL, buf->length,
PROT_READ | PROT_WRITE, MAP_SHARED,
vpu_enc->vpu_fd, vpu_enc->buf_v4l2[i].m.offset);
}
}
switch (vpu_enc->codec) {
case STD_MPEG4:
mime = "video/mpeg";
break;
case STD_AVC:
mime = "video/x-h264";
break;
case STD_H263:
mime = "video/x-h263";
break;
case STD_MJPG:
mime = "image/jpeg";
break;
default:
return GST_FLOW_ERROR;
}
caps = gst_caps_new_simple(mime,
"mpegversion", G_TYPE_INT, 4,
"systemstream", G_TYPE_BOOLEAN, FALSE,
"height", G_TYPE_INT, vpu_enc->height,
"width", G_TYPE_INT, vpu_enc->width,
"framerate", GST_TYPE_FRACTION, (gint32) (vpu_enc->framerate * 1000),
1000, NULL);
gst_pad_set_caps(vpu_enc->srcpad, caps);
vpu_enc->init = TRUE;
return GST_FLOW_OK;
/* ......... remainder of this example omitted ......... */