本文整理汇总了C++中GST_ELEMENT_ERROR函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_ELEMENT_ERROR函数的具体用法?C++ GST_ELEMENT_ERROR怎么用?C++ GST_ELEMENT_ERROR使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GST_ELEMENT_ERROR函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: gst_amc_audio_dec_loop
//.........这里部分代码省略.........
goto flushing;
}
goto failed_release;
}
if (is_eos || flow_ret == GST_FLOW_EOS) {
GST_AUDIO_DECODER_STREAM_UNLOCK (self);
g_mutex_lock (&self->drain_lock);
if (self->draining) {
GST_DEBUG_OBJECT (self, "Drained");
self->draining = FALSE;
g_cond_broadcast (&self->drain_cond);
} else if (flow_ret == GST_FLOW_OK) {
GST_DEBUG_OBJECT (self, "Component signalled EOS");
flow_ret = GST_FLOW_EOS;
}
g_mutex_unlock (&self->drain_lock);
GST_AUDIO_DECODER_STREAM_LOCK (self);
} else {
GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));
}
self->downstream_flow_ret = flow_ret;
if (flow_ret != GST_FLOW_OK)
goto flow_error;
GST_AUDIO_DECODER_STREAM_UNLOCK (self);
return;
dequeue_error:
{
GST_ELEMENT_ERROR_FROM_ERROR (self, err);
gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_ERROR;
GST_AUDIO_DECODER_STREAM_UNLOCK (self);
g_mutex_lock (&self->drain_lock);
self->draining = FALSE;
g_cond_broadcast (&self->drain_cond);
g_mutex_unlock (&self->drain_lock);
return;
}
format_error:
{
if (err)
GST_ELEMENT_ERROR_FROM_ERROR (self, err);
else
GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
("Failed to handle format"));
gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_ERROR;
GST_AUDIO_DECODER_STREAM_UNLOCK (self);
g_mutex_lock (&self->drain_lock);
self->draining = FALSE;
g_cond_broadcast (&self->drain_cond);
g_mutex_unlock (&self->drain_lock);
return;
}
failed_release:
{
GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
示例2: gst_pipeline_change_state
//.........这里部分代码省略.........
if (clock)
gst_object_unref (clock);
if (start_time != GST_CLOCK_TIME_NONE && now != GST_CLOCK_TIME_NONE) {
GstClockTime new_base_time = now - start_time + delay;
GST_DEBUG_OBJECT (element,
"start_time=%" GST_TIME_FORMAT ", now=%" GST_TIME_FORMAT
", base_time %" GST_TIME_FORMAT,
GST_TIME_ARGS (start_time), GST_TIME_ARGS (now),
GST_TIME_ARGS (new_base_time));
gst_element_set_base_time (element, new_base_time);
} else {
GST_DEBUG_OBJECT (pipeline,
"NOT adjusting base_time because start_time is NONE");
}
} else {
GST_DEBUG_OBJECT (pipeline,
"NOT adjusting base_time because we selected one before");
}
if (cur_clock)
gst_object_unref (cur_clock);
break;
}
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
{
/* we take a start_time snapshot before calling the children state changes
* so that they know about when the pipeline PAUSED. */
pipeline_update_start_time (element);
break;
}
case GST_STATE_CHANGE_PAUSED_TO_READY:
case GST_STATE_CHANGE_READY_TO_NULL:
break;
}
result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
/* READY to PAUSED starts running_time from 0 */
reset_start_time (pipeline, 0);
break;
}
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
{
/* Take a new snapshot of the start_time after calling the state change on
* all children. This will be the running_time of the pipeline when we go
* back to PLAYING */
pipeline_update_start_time (element);
break;
}
case GST_STATE_CHANGE_PAUSED_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_NULL:
{
GstBus *bus;
gboolean auto_flush;
/* grab some stuff before we release the lock to flush out the bus */
GST_OBJECT_LOCK (element);
if ((bus = element->bus))
gst_object_ref (bus);
auto_flush = pipeline->priv->auto_flush_bus;
GST_OBJECT_UNLOCK (element);
if (bus) {
if (auto_flush) {
gst_bus_set_flushing (bus, TRUE);
} else {
GST_INFO_OBJECT (element, "not flushing bus, auto-flushing disabled");
}
gst_object_unref (bus);
}
break;
}
}
return result;
/* ERRORS */
invalid_clock:
{
/* we generate this error when the selected clock was not
* accepted by some element */
GST_ELEMENT_ERROR (pipeline, CORE, CLOCK,
(_("Selected clock cannot be used in pipeline.")),
("Pipeline cannot operate with selected clock"));
GST_DEBUG_OBJECT (pipeline,
"Pipeline cannot operate with selected clock %p", clock);
if (clock)
gst_object_unref (clock);
return GST_STATE_CHANGE_FAILURE;
}
}
示例3: gst_cmml_enc_parse_tag_head
/* Encode the CMML head tag and push the CMML stream headers downstream:
 * the ident header, the XML preamble and the serialized head tag.
 * Also sets the headers on the src pad caps.
 *
 * Errors are reported through enc->flow_return; on failure any header
 * buffers that were not pushed yet are released.  Requires that the
 * preamble has already been collected (enc->preamble != NULL).
 */
static void
gst_cmml_enc_parse_tag_head (GstCmmlEnc * enc, GstCmmlTagHead * head)
{
  GList *headers = NULL;
  GList *walk;
  guchar *head_string;
  GstCaps *caps;
  GstBuffer *ident_buf, *preamble_buf, *head_buf;
  GstBuffer *buffer;

  /* a head tag before the preamble is a stream error */
  if (enc->preamble == NULL)
    goto flow_unexpected;

  GST_INFO_OBJECT (enc, "parsing head tag");

  /* header 1: the CMML ident header */
  enc->flow_return = gst_cmml_enc_new_ident_header (enc, &ident_buf);
  if (enc->flow_return != GST_FLOW_OK)
    goto alloc_error;
  headers = g_list_append (headers, ident_buf);

  /* header 2: the XML preamble */
  enc->flow_return = gst_cmml_enc_new_buffer (enc,
      enc->preamble, strlen ((gchar *) enc->preamble), &preamble_buf);
  if (enc->flow_return != GST_FLOW_OK)
    goto alloc_error;
  headers = g_list_append (headers, preamble_buf);

  /* header 3: the head tag serialized to a string; the string is copied
   * into the buffer, so free it right after */
  head_string = gst_cmml_parser_tag_head_to_string (enc->parser, head);
  enc->flow_return = gst_cmml_enc_new_buffer (enc,
      head_string, strlen ((gchar *) head_string), &head_buf);
  g_free (head_string);
  if (enc->flow_return != GST_FLOW_OK)
    goto alloc_error;
  headers = g_list_append (headers, head_buf);

  /* advertise the three header buffers on the src caps */
  caps = gst_pad_get_caps (enc->srcpad);
  caps = gst_cmml_enc_set_header_on_caps (enc, caps,
      ident_buf, preamble_buf, head_buf);

  /* push the headers; each pushed link is deleted from the list first so
   * the error paths below only unref the headers not yet consumed */
  while (headers) {
    buffer = GST_BUFFER (headers->data);
    /* set granulepos 0 on headers */
    GST_BUFFER_OFFSET_END (buffer) = 0;
    gst_buffer_set_caps (buffer, caps);
    enc->flow_return = gst_cmml_enc_push (enc, buffer);
    headers = g_list_delete_link (headers, headers);
    if (enc->flow_return != GST_FLOW_OK)
      goto push_error;
  }

  gst_caps_unref (caps);
  enc->sent_headers = TRUE;
  return;

flow_unexpected:
  GST_ELEMENT_ERROR (enc, STREAM, ENCODE,
      (NULL), ("got head tag before preamble"));
  enc->flow_return = GST_FLOW_ERROR;
  return;
push_error:
  /* caps only exist on this path; alloc_error is reached before they
   * were created */
  gst_caps_unref (caps);
  /* fallthrough */
alloc_error:
  /* release the header buffers that were never pushed */
  for (walk = headers; walk; walk = walk->next)
    gst_buffer_unref (GST_BUFFER (walk->data));
  g_list_free (headers);
  return;
}
示例4: gst_visual_gl_change_state
static GstStateChangeReturn
gst_visual_gl_change_state (GstElement * element, GstStateChange transition)
{
GstVisualGL *visual = GST_VISUAL_GL (element);
GstStateChangeReturn ret;
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
GstElement *parent = GST_ELEMENT (gst_element_get_parent (visual));
GstStructure *structure = NULL;
GstQuery *query = NULL;
gboolean isPerformed = FALSE;
gchar *name;
if (!parent) {
GST_ELEMENT_ERROR (visual, CORE, STATE_CHANGE, (NULL),
("A parent bin is required"));
return FALSE;
}
name = gst_element_get_name (visual);
structure = gst_structure_new (name, NULL);
query = gst_query_new_application (GST_QUERY_CUSTOM, structure);
g_free (name);
isPerformed = gst_element_query (parent, query);
if (isPerformed) {
const GValue *id_value =
gst_structure_get_value (structure, "gstgldisplay");
if (G_VALUE_HOLDS_POINTER (id_value))
/* at least one gl element is after in our gl chain */
visual->display =
gst_object_ref (GST_GL_DISPLAY (g_value_get_pointer (id_value)));
else {
/* this gl filter is a sink in terms of the gl chain */
visual->display = gst_gl_display_new ();
gst_gl_display_create_context (visual->display, 0);
//TODO visual->external_gl_context);
}
gst_visual_gl_reset (visual);
visual->actor =
visual_actor_new (GST_VISUAL_GL_GET_CLASS (visual)->plugin->info->
plugname);
visual->video = visual_video_new ();
visual->audio = visual_audio_new ();
if (!visual->actor || !visual->video)
goto actor_setup_failed;
gst_gl_display_thread_add (visual->display,
(GstGLDisplayThreadFunc) actor_setup, visual);
if (visual->actor_setup_result != 0)
goto actor_setup_failed;
else
visual_actor_set_video (visual->actor, visual->video);
}
gst_query_unref (query);
gst_object_unref (GST_OBJECT (parent));
if (!isPerformed)
return GST_STATE_CHANGE_FAILURE;
}
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
{
if (visual->fbo) {
gst_gl_display_del_fbo (visual->display, visual->fbo,
visual->depthbuffer);
visual->fbo = 0;
visual->depthbuffer = 0;
}
if (visual->midtexture) {
gst_gl_display_del_texture (visual->display, visual->midtexture,
visual->width, visual->height);
visual->midtexture = 0;
}
if (visual->display) {
gst_object_unref (visual->display);
visual->display = NULL;
}
//.........这里部分代码省略.........
示例5: gst_rtp_ssrc_demux_rtcp_chain
/* Chain function for the RTCP sink pad: extract the sender SSRC from the
 * first (SR) packet of an incoming RTCP buffer and route the buffer to the
 * matching per-SSRC RTCP source pad, creating the pad if needed.
 *
 * Returns GST_FLOW_OK on success (or for dropped non-SR packets),
 * GST_FLOW_ERROR for invalid RTCP or pad-creation failure.  A downstream
 * push error is ignored when the SSRC pad was removed during the push.
 */
static GstFlowReturn
gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buf)
{
  GstFlowReturn ret;
  GstRtpSsrcDemux *demux;
  guint32 ssrc;
  GstRTCPPacket packet;
  GstRTCPBuffer rtcp = { NULL, };
  GstPad *srcpad;
  GstRtpSsrcDemuxPad *dpad;

  demux = GST_RTP_SSRC_DEMUX (parent);

  if (!gst_rtcp_buffer_validate (buf))
    goto invalid_rtcp;

  gst_rtcp_buffer_map (buf, GST_MAP_READ, &rtcp);
  if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {
    gst_rtcp_buffer_unmap (&rtcp);
    goto invalid_rtcp;
  }

  /* first packet must be SR or RR or else the validate would have failed */
  switch (gst_rtcp_packet_get_type (&packet)) {
    case GST_RTCP_TYPE_SR:
      /* get the ssrc so that we can route it to the right source pad */
      gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, NULL, NULL,
          NULL);
      break;
    default:
      /* FIX: unmap before bailing out -- the original jumped to
       * unexpected_rtcp with the buffer still mapped and then unreffed it,
       * leaking the mapping */
      gst_rtcp_buffer_unmap (&rtcp);
      goto unexpected_rtcp;
  }
  gst_rtcp_buffer_unmap (&rtcp);

  GST_DEBUG_OBJECT (demux, "received RTCP of SSRC %08x", ssrc);

  srcpad = find_or_create_demux_pad_for_ssrc (demux, ssrc, RTCP_PAD);
  if (srcpad == NULL)
    goto create_failed;

  /* push to srcpad */
  ret = gst_pad_push (srcpad, buf);

  if (ret != GST_FLOW_OK) {
    /* check if the ssrc still there, may have been removed */
    GST_PAD_LOCK (demux);
    dpad = find_demux_pad_for_ssrc (demux, ssrc);
    if (dpad == NULL || dpad->rtcp_pad != srcpad) {
      /* SSRC was removed during the push ... ignore the error */
      ret = GST_FLOW_OK;
    }
    GST_PAD_UNLOCK (demux);
  }

  gst_object_unref (srcpad);

  return ret;

  /* ERRORS */
invalid_rtcp:
  {
    /* this is fatal and should be filtered earlier */
    GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),
        ("Dropping invalid RTCP packet"));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
unexpected_rtcp:
  {
    GST_DEBUG_OBJECT (demux, "dropping unexpected RTCP packet");
    gst_buffer_unref (buf);
    return GST_FLOW_OK;
  }
create_failed:
  {
    GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),
        ("Could not create new pad"));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
示例6: gst_ffmpegvidenc_handle_frame
static GstFlowReturn
gst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame)
{
GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
GstBuffer *outbuf;
gint ret_size = 0, c;
GstVideoInfo *info = &ffmpegenc->input_state->info;
GstVideoFrame vframe;
if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))
ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;
if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ)) {
GST_ERROR_OBJECT (encoder, "Failed to map input buffer");
return GST_FLOW_ERROR;
}
/* Fill avpicture */
for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {
ffmpegenc->picture->data[c] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, c);
ffmpegenc->picture->linesize[c] =
GST_VIDEO_FRAME_COMP_STRIDE (&vframe, c);
} else {
ffmpegenc->picture->data[c] = NULL;
ffmpegenc->picture->linesize[c] = 0;
}
}
ffmpegenc->picture->pts =
gst_ffmpeg_time_gst_to_ff (frame->pts /
ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);
ffmpegenc_setup_working_buf (ffmpegenc);
ret_size = avcodec_encode_video (ffmpegenc->context,
ffmpegenc->working_buf, ffmpegenc->working_buf_size, ffmpegenc->picture);
gst_video_frame_unmap (&vframe);
if (ret_size < 0)
goto encode_fail;
/* Encoder needs more data */
if (!ret_size)
return GST_FLOW_OK;
/* save stats info if there is some as well as a stats file */
if (ffmpegenc->file && ffmpegenc->context->stats_out)
if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
(("Could not write to file \"%s\"."), ffmpegenc->filename),
GST_ERROR_SYSTEM);
gst_video_codec_frame_unref (frame);
/* Get oldest frame */
frame = gst_video_encoder_get_oldest_frame (encoder);
/* Allocate output buffer */
if (gst_video_encoder_allocate_output_frame (encoder, frame,
ret_size) != GST_FLOW_OK) {
gst_video_codec_frame_unref (frame);
goto alloc_fail;
}
outbuf = frame->output_buffer;
gst_buffer_fill (outbuf, 0, ffmpegenc->working_buf, ret_size);
/* buggy codec may not set coded_frame */
if (ffmpegenc->context->coded_frame) {
if (ffmpegenc->context->coded_frame->key_frame)
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
} else
GST_WARNING_OBJECT (ffmpegenc, "codec did not provide keyframe info");
/* Reset frame type */
if (ffmpegenc->picture->pict_type)
ffmpegenc->picture->pict_type = 0;
return gst_video_encoder_finish_frame (encoder, frame);
/* ERRORS */
encode_fail:
{
#ifndef GST_DISABLE_GST_DEBUG
GstFFMpegVidEncClass *oclass =
(GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
GST_ERROR_OBJECT (ffmpegenc,
"avenc_%s: failed to encode buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
return GST_FLOW_OK;
}
alloc_fail:
{
#ifndef GST_DISABLE_GST_DEBUG
GstFFMpegVidEncClass *oclass =
(GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
GST_ERROR_OBJECT (ffmpegenc,
//.........这里部分代码省略.........
示例7: gst_shout2send_start
/* GstBaseSink::start vmethod: create the libshout connection handle and
 * configure it from the element properties (protocol, host, port, password,
 * stream metadata, mount point, user and agent string).
 *
 * Returns TRUE on success.  On any shout_set_*() failure an element error
 * naming the offending property is posted and FALSE is returned.
 */
static gboolean
gst_shout2send_start (GstBaseSink * basesink)
{
  GstShout2send *sink = GST_SHOUT2SEND (basesink);
  const gchar *cur_prop;
  gshort proto = 3;             /* overwritten by the switch below */
  gchar *version_string;

  GST_DEBUG_OBJECT (sink, "starting");

  sink->conn = shout_new ();
  /* FIX: shout_new() allocates and can return NULL; the original passed an
   * unchecked handle to every shout_set_*() call below */
  if (sink->conn == NULL) {
    GST_ELEMENT_ERROR (sink, LIBRARY, INIT, (NULL),
        ("Could not create shout2 connection handle"));
    return FALSE;
  }

  /* map our protocol property onto the libshout constant */
  switch (sink->protocol) {
    case SHOUT2SEND_PROTOCOL_XAUDIOCAST:
      proto = SHOUT_PROTOCOL_XAUDIOCAST;
      break;
    case SHOUT2SEND_PROTOCOL_ICY:
      proto = SHOUT_PROTOCOL_ICY;
      break;
    case SHOUT2SEND_PROTOCOL_HTTP:
      proto = SHOUT_PROTOCOL_HTTP;
      break;
  }

  /* cur_prop tracks the property being set so set_failed can report it */
  cur_prop = "protocol";
  GST_DEBUG_OBJECT (sink, "setting protocol: %d", sink->protocol);
  if (shout_set_protocol (sink->conn, proto) != SHOUTERR_SUCCESS)
    goto set_failed;

  /* --- FIXME: shout requires an ip, and fails if it is given a host. */
  /* may want to put convert_to_ip(shout2send->ip) here */
  cur_prop = "ip";
  GST_DEBUG_OBJECT (sink, "setting ip: %s", sink->ip);
  if (shout_set_host (sink->conn, sink->ip) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "port";
  GST_DEBUG_OBJECT (sink, "setting port: %u", sink->port);
  if (shout_set_port (sink->conn, sink->port) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "password";
  GST_DEBUG_OBJECT (sink, "setting password: %s", sink->password);
  if (shout_set_password (sink->conn, sink->password) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "streamname";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->streamname);
  if (shout_set_name (sink->conn, sink->streamname) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "description";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->description);
  if (shout_set_description (sink->conn, sink->description) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "genre";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->genre);
  if (shout_set_genre (sink->conn, sink->genre) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "mount";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->mount);
  if (shout_set_mount (sink->conn, sink->mount) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "username";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, "source");
  if (shout_set_user (sink->conn, sink->username) != SHOUTERR_SUCCESS)
    goto set_failed;

  /* identify ourselves with the GStreamer version as user agent */
  version_string = gst_version_string ();
  cur_prop = "agent";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, version_string);
  if (shout_set_agent (sink->conn, version_string) != SHOUTERR_SUCCESS) {
    g_free (version_string);
    goto set_failed;
  }
  g_free (version_string);

  return TRUE;

/* ERROR */
set_failed:
  {
    GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
        ("Error setting %s: %s", cur_prop, shout_get_error (sink->conn)));
    return FALSE;
  }
}
示例8: gst_ffmpegvidenc_handle_frame
static GstFlowReturn
gst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame)
{
GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
GstBuffer *outbuf;
gint ret = 0, c;
GstVideoInfo *info = &ffmpegenc->input_state->info;
AVPacket *pkt;
int have_data = 0;
BufferInfo *buffer_info;
if (ffmpegenc->interlaced) {
ffmpegenc->picture->interlaced_frame = TRUE;
/* if this is not the case, a filter element should be used to swap fields */
ffmpegenc->picture->top_field_first =
GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
}
if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))
ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;
buffer_info = g_slice_new0 (BufferInfo);
buffer_info->buffer = gst_buffer_ref (frame->input_buffer);
if (!gst_video_frame_map (&buffer_info->vframe, info, frame->input_buffer,
GST_MAP_READ)) {
GST_ERROR_OBJECT (encoder, "Failed to map input buffer");
gst_buffer_unref (buffer_info->buffer);
g_slice_free (BufferInfo, buffer_info);
gst_video_codec_frame_unref (frame);
return GST_FLOW_ERROR;
}
/* Fill avpicture */
ffmpegenc->picture->buf[0] =
av_buffer_create (NULL, 0, buffer_info_free, buffer_info, 0);
for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {
ffmpegenc->picture->data[c] =
GST_VIDEO_FRAME_PLANE_DATA (&buffer_info->vframe, c);
ffmpegenc->picture->linesize[c] =
GST_VIDEO_FRAME_COMP_STRIDE (&buffer_info->vframe, c);
} else {
ffmpegenc->picture->data[c] = NULL;
ffmpegenc->picture->linesize[c] = 0;
}
}
ffmpegenc->picture->format = ffmpegenc->context->pix_fmt;
ffmpegenc->picture->width = GST_VIDEO_FRAME_WIDTH (&buffer_info->vframe);
ffmpegenc->picture->height = GST_VIDEO_FRAME_HEIGHT (&buffer_info->vframe);
ffmpegenc->picture->pts =
gst_ffmpeg_time_gst_to_ff (frame->pts /
ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);
have_data = 0;
pkt = g_slice_new0 (AVPacket);
ret =
avcodec_encode_video2 (ffmpegenc->context, pkt, ffmpegenc->picture,
&have_data);
av_frame_unref (ffmpegenc->picture);
if (ret < 0 || !have_data)
g_slice_free (AVPacket, pkt);
if (ret < 0)
goto encode_fail;
/* Encoder needs more data */
if (!have_data) {
gst_video_codec_frame_unref (frame);
return GST_FLOW_OK;
}
/* save stats info if there is some as well as a stats file */
if (ffmpegenc->file && ffmpegenc->context->stats_out)
if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
(("Could not write to file \"%s\"."), ffmpegenc->filename),
GST_ERROR_SYSTEM);
gst_video_codec_frame_unref (frame);
/* Get oldest frame */
frame = gst_video_encoder_get_oldest_frame (encoder);
outbuf =
gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,
pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket);
frame->output_buffer = outbuf;
if (pkt->flags & AV_PKT_FLAG_KEY)
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
else
GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
//.........这里部分代码省略.........
示例9: gst_dvbsrc_open_frontend
static gboolean
gst_dvbsrc_open_frontend (GstDvbSrc * object, gboolean writable)
{
struct dvb_frontend_info fe_info;
const char *adapter_desc = NULL;
gchar *frontend_dev;
GstStructure *adapter_structure;
char *adapter_name = NULL;
frontend_dev = g_strdup_printf ("/dev/dvb/adapter%d/frontend%d",
object->adapter_number, object->frontend_number);
GST_INFO_OBJECT (object, "Using frontend device: %s", frontend_dev);
/* open frontend */
if ((object->fd_frontend =
open (frontend_dev, writable ? O_RDWR : O_RDONLY)) < 0) {
switch (errno) {
case ENOENT:
GST_ELEMENT_ERROR (object, RESOURCE, NOT_FOUND,
(_("Device \"%s\" does not exist."), frontend_dev), (NULL));
break;
default:
GST_ELEMENT_ERROR (object, RESOURCE, OPEN_READ_WRITE,
(_("Could not open frontend device \"%s\"."), frontend_dev),
GST_ERROR_SYSTEM);
break;
}
close (object->fd_frontend);
g_free (frontend_dev);
return FALSE;
}
GST_DEBUG_OBJECT (object, "Device opened, querying information");
if (ioctl (object->fd_frontend, FE_GET_INFO, &fe_info) < 0) {
GST_ELEMENT_ERROR (object, RESOURCE, SETTINGS,
(_("Could not get settings from frontend device \"%s\"."),
frontend_dev), GST_ERROR_SYSTEM);
close (object->fd_frontend);
g_free (frontend_dev);
return FALSE;
}
GST_DEBUG_OBJECT (object, "Got information about adapter : %s", fe_info.name);
adapter_name = g_strdup (fe_info.name);
object->adapter_type = fe_info.type;
switch (object->adapter_type) {
case FE_QPSK:
adapter_desc = "DVB-S";
adapter_structure = gst_structure_new ("dvb-adapter",
"type", G_TYPE_STRING, adapter_desc,
"name", G_TYPE_STRING, adapter_name,
"auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
break;
case FE_QAM:
adapter_desc = "DVB-C";
adapter_structure = gst_structure_new ("dvb-adapter",
"type", G_TYPE_STRING, adapter_desc,
"name", G_TYPE_STRING, adapter_name,
"auto-inversion", G_TYPE_BOOLEAN,
fe_info.caps & FE_CAN_INVERSION_AUTO, "auto-qam", G_TYPE_BOOLEAN,
fe_info.caps & FE_CAN_QAM_AUTO, "auto-fec", G_TYPE_BOOLEAN,
fe_info.caps & FE_CAN_FEC_AUTO, NULL);
break;
case FE_OFDM:
adapter_desc = "DVB-T";
adapter_structure = gst_structure_new ("dvb-adapter",
"type", G_TYPE_STRING, adapter_desc,
"name", G_TYPE_STRING, adapter_name,
"auto-inversion", G_TYPE_BOOLEAN,
fe_info.caps & FE_CAN_INVERSION_AUTO, "auto-qam", G_TYPE_BOOLEAN,
fe_info.caps & FE_CAN_QAM_AUTO, "auto-transmission-mode",
G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_TRANSMISSION_MODE_AUTO,
"auto-guard-interval", G_TYPE_BOOLEAN,
fe_info.caps & FE_CAN_GUARD_INTERVAL_AUTO, "auto-hierarchy",
G_TYPE_BOOLEAN, fe_info.caps % FE_CAN_HIERARCHY_AUTO, "auto-fec",
G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
break;
case FE_ATSC:
adapter_desc = "ATSC";
adapter_structure = gst_structure_new ("dvb-adapter",
"type", G_TYPE_STRING, adapter_desc,
"name", G_TYPE_STRING, adapter_name, NULL);
break;
default:
g_error ("Unknown frontend type: %d", object->adapter_type);
adapter_structure = gst_structure_new ("dvb-adapter",
"type", G_TYPE_STRING, "unknown", NULL);
}
GST_INFO_OBJECT (object, "DVB card: %s ", adapter_name);
gst_element_post_message (GST_ELEMENT_CAST (object), gst_message_new_element
(GST_OBJECT (object), adapter_structure));
g_free (frontend_dev);
g_free (adapter_name);
return TRUE;
//.........这里部分代码省略.........
示例10: gst_dvbsrc_read_device
/* Read `size` bytes of MPEG-TS data from the DVR device into a newly
 * allocated buffer, polling with the configured timeout and posting a
 * "dvb-read-failure" element message on timeouts and short reads.
 *
 * Returns GST_FLOW_OK with *buffer set, GST_FLOW_FLUSHING when the poll is
 * interrupted by a stop (EBUSY), or GST_FLOW_ERROR on poll errors or when
 * the device is not open.
 */
static GstFlowReturn
gst_dvbsrc_read_device (GstDvbSrc * object, int size, GstBuffer ** buffer)
{
  gint count = 0;
  gint ret_val = 0;
  GstBuffer *buf;
  GstClockTime timeout = object->timeout * GST_USECOND;
  GstMapInfo map;

  /* FIX: check the device before allocating; the original allocated the
   * buffer first and leaked it on this early-return path */
  if (object->fd_dvr < 0)
    return GST_FLOW_ERROR;

  buf = gst_buffer_new_and_alloc (size);
  g_return_val_if_fail (GST_IS_BUFFER (buf), GST_FLOW_ERROR);

  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  /* accumulate reads until we have `size` bytes */
  while (count < size) {
    ret_val = gst_poll_wait (object->poll, timeout);
    GST_LOG_OBJECT (object, "select returned %d", ret_val);
    if (G_UNLIKELY (ret_val < 0)) {
      if (errno == EBUSY)
        goto stopped;           /* poll was flushed: we are shutting down */
      else if (errno == EINTR)
        continue;               /* interrupted by a signal: retry */
      else
        goto select_error;
    } else if (G_UNLIKELY (ret_val == 0)) {
      /* timeout, post element message */
      gst_element_post_message (GST_ELEMENT_CAST (object),
          gst_message_new_element (GST_OBJECT (object),
              gst_structure_new_empty ("dvb-read-failure")));
    } else {
      int nread = read (object->fd_dvr, map.data + count, size - count);

      if (G_UNLIKELY (nread < 0)) {
        GST_WARNING_OBJECT
            (object,
            "Unable to read from device: /dev/dvb/adapter%d/dvr%d (%d)",
            object->adapter_number, object->frontend_number, errno);
        gst_element_post_message (GST_ELEMENT_CAST (object),
            gst_message_new_element (GST_OBJECT (object),
                gst_structure_new_empty ("dvb-read-failure")));
      } else
        count = count + nread;
    }
  }

  gst_buffer_unmap (buf, &map);
  gst_buffer_resize (buf, 0, count);
  *buffer = buf;

  return GST_FLOW_OK;

stopped:
  {
    GST_DEBUG_OBJECT (object, "stop called");
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    return GST_FLOW_FLUSHING;
  }
select_error:
  {
    GST_ELEMENT_ERROR (object, RESOURCE, READ, (NULL),
        ("select error %d: %s (%d)", ret_val, g_strerror (errno), errno));
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
示例11: gst_gdiscreencapsrc_create
/* GstPushSrc::create vmethod: allocate an output buffer, grab one screen
 * capture into it and stamp timestamp/duration/offsets from the element
 * clock and the configured framerate.
 *
 * Returns GST_FLOW_NOT_NEGOTIATED before caps are set, GST_FLOW_UNEXPECTED
 * (EOS) after the single frame of a 0/1 framerate, or the result of the
 * pad buffer allocation.
 */
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  GstFlowReturn res;
  gint new_buf_size;
  GstClock *clock;
  GstClockTime time = GST_CLOCK_TIME_NONE;
  GstClockTime base_time;

  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||
          !src->info.bmiHeader.biHeight)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  } else if (G_UNLIKELY (src->rate_numerator == 0 && src->frames == 1)) {
    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->frames);
    return GST_FLOW_UNEXPECTED;
  }

  /* 24bpp rows padded to 4 bytes; biHeight is negated (top-down DIB
   * convention -- TODO confirm against the caps negotiation code) */
  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
      (-src->info.bmiHeader.biHeight);

  GST_LOG_OBJECT (src,
      "creating buffer of %d bytes with %dx%d image for frame %d",
      new_buf_size, (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight), (gint) src->frames);

  res =
      gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (src),
      GST_BUFFER_OFFSET_NONE, new_buf_size,
      GST_PAD_CAPS (GST_BASE_SRC_PAD (push_src)), &new_buf);
  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (src, "could not allocate buffer, reason %s",
        gst_flow_get_name (res));
    return res;
  }

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock) {
    /* Calculate sync time. */
    GstClockTime frame_time =
        gst_util_uint64_scale_int (src->frames * GST_SECOND,
        src->rate_denominator, src->rate_numerator);

    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    GST_BUFFER_TIMESTAMP (new_buf) = MAX (time - base_time, frame_time);
  } else {
    GST_BUFFER_TIMESTAMP (new_buf) = GST_CLOCK_TIME_NONE;
  }

  /* Do screen capture and put it into buffer... */
  gst_gdiscreencapsrc_screen_capture (src, new_buf);

  if (src->rate_numerator) {
    GST_BUFFER_DURATION (new_buf) =
        gst_util_uint64_scale_int (GST_SECOND,
        src->rate_denominator, src->rate_numerator);
    if (clock) {
      /* stretch the duration if the capture took longer than one frame */
      GST_BUFFER_DURATION (new_buf) =
          MAX (GST_BUFFER_DURATION (new_buf),
          gst_clock_get_time (clock) - time);
    }
  } else {
    /* NONE means forever */
    GST_BUFFER_DURATION (new_buf) = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_OFFSET (new_buf) = src->frames;
  src->frames++;
  GST_BUFFER_OFFSET_END (new_buf) = src->frames;

  /* FIX: the element may have no clock (handled above), but the original
   * called gst_object_unref (clock) unconditionally here */
  if (clock)
    gst_object_unref (clock);

  *buf = new_buf;
  return GST_FLOW_OK;
}
示例12: gst_multi_file_src_create
/* GstPushSrc::create vmethod: read the file at the current index of the
 * filename sequence and return its contents as a buffer.
 *
 * Index handling: the index is clamped up to start_index before the read,
 * incremented after a successful read, and wrapped back to start_index
 * once stop_index (if set, != -1) is reached.  A read failure before any
 * success is an error; after at least one success it means EOS, unless the
 * loop property is set, in which case we restart from start_index (EOS only
 * if that re-read fails too).
 *
 * NOTE(review): uses the GStreamer 0.10 buffer API
 * (GST_BUFFER_DATA/MALLOCDATA/SIZE, gst_buffer_set_caps); ownership of the
 * g_file_get_contents() data is transferred to the buffer via MALLOCDATA.
 */
static GstFlowReturn
gst_multi_file_src_create (GstPushSrc * src, GstBuffer ** buffer)
{
  GstMultiFileSrc *multifilesrc;
  gsize size;
  gchar *data;
  gchar *filename;
  GstBuffer *buf;
  gboolean ret;
  GError *error = NULL;

  multifilesrc = GST_MULTI_FILE_SRC (src);

  /* never start below the configured start index */
  if (multifilesrc->index < multifilesrc->start_index) {
    multifilesrc->index = multifilesrc->start_index;
  }

  filename = gst_multi_file_src_get_filename (multifilesrc);

  GST_DEBUG_OBJECT (multifilesrc, "reading from file \"%s\".", filename);

  ret = g_file_get_contents (filename, &data, &size, &error);
  if (!ret) {
    if (multifilesrc->successful_read) {
      /* If we've read at least one buffer successfully, not finding the
       * next file is EOS. */
      g_free (filename);
      if (error != NULL)
        g_error_free (error);

      if (multifilesrc->loop) {
        /* wrap around and retry once from the first file */
        error = NULL;
        multifilesrc->index = multifilesrc->start_index;

        filename = gst_multi_file_src_get_filename (multifilesrc);
        ret = g_file_get_contents (filename, &data, &size, &error);
        if (!ret) {
          g_free (filename);
          if (error != NULL)
            g_error_free (error);

          return GST_FLOW_UNEXPECTED;
        }
      } else {
        return GST_FLOW_UNEXPECTED;
      }
    } else {
      /* first read failed: report a real error */
      goto handle_error;
    }
  }

  multifilesrc->successful_read = TRUE;
  multifilesrc->index++;

  /* wrap the index once the (exclusive) stop index is reached */
  if (multifilesrc->stop_index != -1 &&
      multifilesrc->index >= multifilesrc->stop_index) {
    multifilesrc->index = multifilesrc->start_index;
  }

  /* hand the file contents to the buffer; MALLOCDATA makes the buffer
   * free `data` when it is destroyed */
  buf = gst_buffer_new ();
  GST_BUFFER_DATA (buf) = (unsigned char *) data;
  GST_BUFFER_MALLOCDATA (buf) = GST_BUFFER_DATA (buf);
  GST_BUFFER_SIZE (buf) = size;
  GST_BUFFER_OFFSET (buf) = multifilesrc->offset;
  GST_BUFFER_OFFSET_END (buf) = multifilesrc->offset + size;
  multifilesrc->offset += size;
  gst_buffer_set_caps (buf, multifilesrc->caps);

  GST_DEBUG_OBJECT (multifilesrc, "read file \"%s\".", filename);

  g_free (filename);
  *buffer = buf;
  return GST_FLOW_OK;

handle_error:
  {
    if (error != NULL) {
      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,
          ("Error while reading from file \"%s\".", filename),
          ("%s", error->message));
      g_error_free (error);
    } else {
      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,
          ("Error while reading from file \"%s\".", filename),
          ("%s", g_strerror (errno)));
    }
    g_free (filename);
    return GST_FLOW_ERROR;
  }
}
示例13: gst_pngdec_caps_create_and_set
static GstFlowReturn
gst_pngdec_caps_create_and_set (GstPngDec * pngdec)
{
GstFlowReturn ret = GST_FLOW_OK;
GstCaps *caps = NULL, *res = NULL;
GstPadTemplate *templ = NULL;
gint bpc = 0, color_type;
png_uint_32 width, height;
g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);
/* Get bits per channel */
bpc = png_get_bit_depth (pngdec->png, pngdec->info);
/* We don't handle 16 bits per color, strip down to 8 */
if (bpc == 16) {
GST_LOG_OBJECT (pngdec,
"this is a 16 bits per channel PNG image, strip down to 8 bits");
png_set_strip_16 (pngdec->png);
}
/* Get Color type */
color_type = png_get_color_type (pngdec->png, pngdec->info);
#if 0
/* We used to have this HACK to reverse the outgoing bytes, but the problem
* that originally required the hack seems to have been in ffmpegcolorspace's
* RGBA descriptions. It doesn't seem needed now that's fixed, but might
* still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */
if (color_type == PNG_COLOR_TYPE_RGB_ALPHA)
png_set_bgr (pngdec->png);
#endif
/* Gray scale converted to RGB and upscaled to 8 bits */
if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) ||
(color_type == PNG_COLOR_TYPE_GRAY)) {
GST_LOG_OBJECT (pngdec, "converting grayscale png to RGB");
png_set_gray_to_rgb (pngdec->png);
if (bpc < 8) { /* Convert to 8 bits */
GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits");
#if PNG_LIBPNG_VER < 10400
png_set_gray_1_2_4_to_8 (pngdec->png);
#else
png_set_expand_gray_1_2_4_to_8 (pngdec->png);
#endif
}
}
/* Palette converted to RGB */
if (color_type == PNG_COLOR_TYPE_PALETTE) {
GST_LOG_OBJECT (pngdec, "converting palette png to RGB");
png_set_palette_to_rgb (pngdec->png);
}
/* Update the info structure */
png_read_update_info (pngdec->png, pngdec->info);
/* Get IHDR header again after transformation settings */
png_get_IHDR (pngdec->png, pngdec->info, &width, &height,
&bpc, &pngdec->color_type, NULL, NULL, NULL);
pngdec->width = width;
pngdec->height = height;
GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", pngdec->width,
pngdec->height);
switch (pngdec->color_type) {
case PNG_COLOR_TYPE_RGB:
GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");
pngdec->bpp = 24;
break;
case PNG_COLOR_TYPE_RGB_ALPHA:
GST_LOG_OBJECT (pngdec, "we have an alpha channel, depth is 32 bits");
pngdec->bpp = 32;
break;
default:
GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),
("pngdec does not support this color type"));
ret = GST_FLOW_NOT_SUPPORTED;
goto beach;
}
caps = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, pngdec->width,
"height", G_TYPE_INT, pngdec->height,
"bpp", G_TYPE_INT, pngdec->bpp,
"framerate", GST_TYPE_FRACTION, pngdec->fps_n, pngdec->fps_d, NULL);
templ = gst_static_pad_template_get (&gst_pngdec_src_pad_template);
res = gst_caps_intersect (caps, gst_pad_template_get_caps (templ));
gst_caps_unref (caps);
gst_object_unref (templ);
if (!gst_pad_set_caps (pngdec->srcpad, res))
ret = GST_FLOW_NOT_NEGOTIATED;
//.........这里部分代码省略.........
示例14: gst_ffmpegvidenc_flush_buffers
static GstFlowReturn
gst_ffmpegvidenc_flush_buffers (GstFFMpegVidEnc * ffmpegenc, gboolean send)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstBuffer *outbuf;
  gint ret;
  AVPacket *pkt;
  int have_data = 0;

  /* Drain the codec of any pending (delayed) packets, pairing each packet
   * with the oldest queued frame.  When @send is TRUE the drained packets
   * are pushed downstream; otherwise the frames are just finished (dropped).
   * Returns the last flow return from gst_video_encoder_finish_frame(). */
  GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send);

  /* no need to empty codec if there is none */
  if (!ffmpegenc->opened)
    goto done;

  while ((frame =
          gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (ffmpegenc)))) {
    pkt = g_slice_new0 (AVPacket);
    have_data = 0;

    /* NULL input frame == flush request to the encoder */
    ret = avcodec_encode_video2 (ffmpegenc->context, pkt, NULL, &have_data);

    if (ret < 0) {              /* there should be something, notify and give up */
#ifndef GST_DISABLE_GST_DEBUG
      GstFFMpegVidEncClass *oclass =
          (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
      GST_WARNING_OBJECT (ffmpegenc,
          "avenc_%s: failed to flush buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
      g_slice_free (AVPacket, pkt);
      gst_video_codec_frame_unref (frame);
      break;
    }

    /* save stats info if there is some as well as a stats file */
    if (ffmpegenc->file && ffmpegenc->context->stats_out) {
      if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
        GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
            (("Could not write to file \"%s\"."), ffmpegenc->filename),
            GST_ERROR_SYSTEM);
    }

    if (send && have_data) {
      /* ownership of pkt (and its data) transfers to the wrapped buffer;
       * it is released by the gst_ffmpegvidenc_free_avpacket destroy notify */
      outbuf =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,
          pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket);
      frame->output_buffer = outbuf;

      if (pkt->flags & AV_PKT_FLAG_KEY)
        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
      else
        GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);

      flow_ret =
          gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);
    } else {
      /* not pushing this packet: release it here, matching the destroy
       * notify used above (was previously leaked on this path) */
      gst_ffmpegvidenc_free_avpacket (pkt);
      /* no frame attached, so will be skipped and removed from frame list */
      gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);
    }
  }

done:
  return flow_ret;
}
示例15: gst_pngdec_task
static void
gst_pngdec_task (GstPad * pad)
{
  GstPngDec *pngdec;
  /* 'buffer' and 'rows' are modified between setjmp() and a possible
   * longjmp() from libpng, so they must be volatile-qualified to keep
   * determinate values on the error path (C99 7.13.2.1) */
  GstBuffer *volatile buffer = NULL;
  png_bytep *volatile rows = NULL;
  size_t buffer_size = 0;
  gint i = 0;
  png_bytep inp;
  png_uint_32 rowbytes;
  GstFlowReturn ret = GST_FLOW_OK;

  pngdec = GST_PNGDEC (GST_OBJECT_PARENT (pad));

  GST_LOG_OBJECT (pngdec, "read frame");

  /* Let libpng come back here on error */
  if (setjmp (png_jmpbuf (pngdec->png))) {
    ret = GST_FLOW_ERROR;
    goto pause;
  }

  /* Set reading callback */
  png_set_read_fn (pngdec->png, pngdec, user_read_data);

  /* Read info */
  png_read_info (pngdec->png, pngdec->info);

  /* Generate the caps and configure */
  ret = gst_pngdec_caps_create_and_set (pngdec);
  if (ret != GST_FLOW_OK)
    goto pause;

  /* Allocate output buffer.  Guard against zero rowbytes (division below)
   * and 32-bit overflow of height * rounded-up rowbytes */
  rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);
  if (rowbytes == 0 || rowbytes > (G_MAXUINT32 - 3)
      || pngdec->height > G_MAXUINT32 / rowbytes) {
    ret = GST_FLOW_ERROR;
    goto pause;
  }
  rowbytes = GST_ROUND_UP_4 (rowbytes);
  buffer_size = pngdec->height * rowbytes;

  ret =
      gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad, GST_BUFFER_OFFSET_NONE,
      buffer_size, GST_PAD_CAPS (pngdec->srcpad), (GstBuffer **) & buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  /* Build the row-pointer table into the output buffer for libpng */
  rows = (png_bytep *) g_malloc (sizeof (png_bytep) * pngdec->height);
  inp = GST_BUFFER_DATA (buffer);
  for (i = 0; i < pngdec->height; i++) {
    rows[i] = inp;
    inp += rowbytes;
  }

  /* Read the actual picture; may longjmp() back to the setjmp above */
  png_read_image (pngdec->png, rows);
  g_free (rows);
  rows = NULL;

  /* Push the raw RGB frame; the pad takes ownership of the buffer
   * whether or not the push succeeds */
  ret = gst_pad_push (pngdec->srcpad, buffer);
  buffer = NULL;
  if (ret != GST_FLOW_OK)
    goto pause;

  /* And we are done */
  gst_pad_pause_task (pngdec->sinkpad);
  gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
  return;

pause:
  {
    /* Release whatever a libpng error may have left behind; these were
     * previously leaked when png_read_image() longjmp'd here */
    g_free (rows);
    if (buffer)
      gst_buffer_unref (buffer);
    GST_INFO_OBJECT (pngdec, "pausing task, reason %s",
        gst_flow_get_name (ret));
    gst_pad_pause_task (pngdec->sinkpad);
    if (ret == GST_FLOW_UNEXPECTED) {
      gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
      GST_ELEMENT_ERROR (pngdec, STREAM, FAILED,
          (_("Internal data stream error.")),
          ("stream stopped, reason %s", gst_flow_get_name (ret)));
      gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
    }
  }
}