本文整理汇总了C++中GST_ELEMENT_WARNING函数的典型用法代码示例。如果您正苦于以下问题：C++ GST_ELEMENT_WARNING函数的具体用法？C++ GST_ELEMENT_WARNING怎么用？C++ GST_ELEMENT_WARNING使用的例子？那么恭喜您，这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GST_ELEMENT_WARNING函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: theora_enc_write_multipass_cache
/* Flush the encoder's two-pass rate-control metrics to the multipass
 * cache file. When @begin is TRUE the file is rewound first (start of a
 * pass); @eos downgrades a seek failure at EOS to a warning.
 * Returns FALSE on seek/encode failure, TRUE otherwise. */
static gboolean
theora_enc_write_multipass_cache (GstTheoraEnc * enc, gboolean begin,
    gboolean eos)
{
  GError *err = NULL;
  GIOStatus stat = G_IO_STATUS_NORMAL;
  gint bytes_read = 0;
  gsize bytes_written = 0;
  gchar *buf;

  /* when starting a pass, rewind the cache file first */
  if (begin)
    stat = g_io_channel_seek_position (enc->multipass_cache_fd, 0, G_SEEK_SET,
        &err);

  /* drain all pending 2-pass data from the encoder into the cache file */
  if (stat != G_IO_STATUS_ERROR) {
    do {
      bytes_read =
          th_encode_ctl (enc->encoder, TH_ENCCTL_2PASS_OUT, &buf, sizeof (buf));
      if (bytes_read > 0)
        g_io_channel_write_chars (enc->multipass_cache_fd, buf, bytes_read,
            &bytes_written, NULL);
    } while (bytes_read > 0 && bytes_written > 0);
  }

  if (stat == G_IO_STATUS_ERROR || bytes_read < 0) {
    if (begin) {
      /* BUG FIX: err is only set when the seek itself failed. We can also
       * get here with bytes_read < 0 and a successful seek, in which case
       * err is NULL and the old code dereferenced err->message (crash). */
      const gchar *msg = err ? err->message : "unknown error";

      if (eos)
        GST_ELEMENT_WARNING (enc, RESOURCE, WRITE, (NULL),
            ("Failed to seek to beginning of multipass cache file: %s", msg));
      else
        GST_ELEMENT_ERROR (enc, RESOURCE, WRITE, (NULL),
            ("Failed to seek to beginning of multipass cache file: %s", msg));
    } else {
      GST_ELEMENT_ERROR (enc, RESOURCE, WRITE, (NULL),
          ("Failed to write multipass cache file"));
    }
    if (err)
      g_error_free (err);
    return FALSE;
  }
  return TRUE;
}
示例2: gst_rtp_g723_depay_process
/* Depayload one G.723 RTP packet: strip the RTP header and push the raw
 * payload downstream. Returns NULL (packet dropped, non-fatal) when the
 * payload is too small to be valid G.723. */
static GstBuffer *
gst_rtp_g723_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstRtpG723Depay *depay = GST_RTP_G723_DEPAY (depayload);
  GstBuffer *outbuf;
  gint payload_len = gst_rtp_buffer_get_payload_len (buf);

  /* a valid G.723 payload carries at least 4 bytes */
  if (payload_len < 4) {
    /* no fatal error: warn and drop the packet */
    GST_ELEMENT_WARNING (depay, STREAM, DECODE,
        (NULL), ("G723 RTP payload too small (%d)", payload_len));
    return NULL;
  }

  GST_LOG_OBJECT (depay, "payload len %d", payload_len);

  outbuf = gst_rtp_buffer_get_payload_buffer (buf);

  if (gst_rtp_buffer_get_marker (buf)) {
    /* marker bit starts talkspurt */
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
  }

  GST_LOG_OBJECT (depayload, "pushing buffer of size %d",
      GST_BUFFER_SIZE (outbuf));

  return outbuf;
}
示例3: gst_play_sink_convert_bin_add_conversion_element_factory
/* Create an element from @factory under @name and append it to the
 * conversion chain. Posts a missing-plugin message and warns when the
 * factory is unavailable. Returns the new element, or NULL on failure. */
GstElement *
gst_play_sink_convert_bin_add_conversion_element_factory (GstPlaySinkConvertBin
    * self, const char *factory, const char *name)
{
  GstElement *element = gst_element_factory_make (factory, name);

  if (element != NULL) {
    gst_play_sink_convert_bin_add_conversion_element (self, element);
    return element;
  }

  /* factory not installed: tell the application and keep going */
  gst_play_sink_convert_bin_post_missing_element_message (self, factory);
  GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
      (_("Missing element '%s' - check your GStreamer installation."),
          factory),
      (self->audio ? "audio rendering might fail" :
          "video rendering might fail"));
  return NULL;
}
示例4: gst_phoenixsrc_set_property
/* GObject property setter for phoenixsrc. */
void
gst_phoenixsrc_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstPhoenixSrc *src;

  g_return_if_fail (GST_IS_PHOENIX_SRC (object));
  src = GST_PHOENIX_SRC (object);

  switch (property_id) {
    case PROP_CAMERA_CONFIG_FILEPATH:
      /* replace any previously configured path */
      g_free (src->config_filepath);
      src->config_filepath = g_strdup (g_value_get_string (value));
      break;
    case PROP_NUM_CAPTURE_BUFFERS:
      if (!src->acq_started) {
        guint count = g_value_get_uint (value);

        src->num_capture_buffers = count;
        /* resize the per-buffer timestamp arrays to match the new count */
        g_free (src->frame_start_times);
        src->frame_start_times = g_new (guint64, count);
        g_free (src->frame_end_times);
        src->frame_end_times = g_new (guint64, count);
      } else {
        GST_ELEMENT_WARNING (src, RESOURCE, SETTINGS,
            ("Number of capture buffers cannot be changed after acquisition has started."),
            (NULL));
      }
      break;
    case PROP_BOARD:
      src->board = g_value_get_uint (value);
      break;
    case PROP_CHANNEL:
      src->channel = g_value_get_uint (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
示例5: gst_face_blur_transform_ip
/* In-place transform: blur every face detected in @img.
 * Always returns GST_FLOW_OK, even without a cascade, so the stream
 * keeps flowing (just unmodified). */
static GstFlowReturn
gst_face_blur_transform_ip (GstOpencvVideoFilter * transform,
    GstBuffer * buffer, IplImage * img)
{
  GstFaceBlur *filter = GST_FACE_BLUR (transform);
  CvSeq *faces;
  int i;

  if (!filter->cvCascade) {
    /* warn exactly once about a profile that failed to load */
    if (filter->profile != NULL
        && filter->sent_profile_load_failed_msg == FALSE) {
      GST_ELEMENT_WARNING (filter, RESOURCE, NOT_FOUND,
          ("Profile %s is missing.", filter->profile),
          ("missing faceblur profile file %s", filter->profile));
      filter->sent_profile_load_failed_msg = TRUE;
    }
    return GST_FLOW_OK;
  }

  /* detection runs on a grayscale copy of the frame */
  cvCvtColor (img, filter->cvGray, CV_RGB2GRAY);
  cvClearMemStorage (filter->cvStorage);
  faces =
      cvHaarDetectObjects (filter->cvGray, filter->cvCascade,
      filter->cvStorage, filter->scale_factor, filter->min_neighbors,
      filter->flags, cvSize (filter->min_size_width, filter->min_size_height)
#if (CV_MAJOR_VERSION >= 2) && (CV_MINOR_VERSION >= 2)
      /* OpenCV >= 2.2 takes an additional size argument here; presumably
         the max-size parameter — confirm against the installed API */
      , cvSize (filter->min_size_width + 2, filter->min_size_height + 2)
#endif
      );

  /* blur each detected rectangle in place (box blur, then gaussian) */
  for (i = 0; i < (faces ? faces->total : 0); i++) {
    CvRect *r = (CvRect *) cvGetSeqElem (faces, i);
    cvSetImageROI (img, *r);
    cvSmooth (img, img, CV_BLUR, 11, 11, 0, 0);
    cvSmooth (img, img, CV_GAUSSIAN, 11, 11, 0, 0);
    cvResetImageROI (img);
  }
  return GST_FLOW_OK;
}
示例6: gst_aggregator_query_latency
/* Answer a latency query: gather upstream latency from all sink pads,
 * clamp our timeout if it exceeds the live pipeline's max latency, then
 * report upstream latency plus our own timeout. Always returns TRUE. */
static gboolean
gst_aggregator_query_latency (GstAggregator * self, GstQuery * query)
{
  LatencyData data;
  data.min = 0;
  data.max = GST_CLOCK_TIME_NONE;
  data.live = FALSE;

  /* query upstream's latency */
  gst_aggregator_iterate_sinkpads (self,
      (GstAggregatorPadForeachFunc) _latency_query, &data);

  /* a timeout larger than the pipeline's max latency cannot be honoured
   * in a live pipeline, so limit it */
  if (data.live && GST_CLOCK_TIME_IS_VALID (self->timeout) &&
      self->timeout > data.max) {
    GST_ELEMENT_WARNING (self, CORE, NEGOTIATION,
        ("%s", "Timeout too big"),
        ("The requested timeout value is too big for the latency in the "
            "current pipeline. Limiting to %" G_GINT64_FORMAT, data.max));
    self->timeout = data.max;
  }

  /* store the pure upstream values BEFORE folding in our own timeout */
  self->priv->latency_live = data.live;
  self->priv->latency_min = data.min;
  self->priv->latency_max = data.max;

  /* add our own */
  if (GST_CLOCK_TIME_IS_VALID (self->timeout)) {
    if (GST_CLOCK_TIME_IS_VALID (data.min))
      data.min += self->timeout;
    if (GST_CLOCK_TIME_IS_VALID (data.max))
      data.max += self->timeout;
  }

  GST_DEBUG_OBJECT (self, "configured latency live:%s min:%" G_GINT64_FORMAT
      " max:%" G_GINT64_FORMAT, data.live ? "true" : "false", data.min,
      data.max);
  gst_query_set_latency (query, data.live, data.min, data.max);
  return TRUE;
}
示例7: gst_aggregator_set_timeout
/**
 * gst_aggregator_set_timeout:
 * @agg: a #GstAggregator
 * @timeout: the new timeout value.
 *
 * Configures how long a pad may wait for data to appear before being
 * considered unresponsive. For live pipelines the value is clamped to
 * the configured maximum latency.
 */
static void
gst_aggregator_set_timeout (GstAggregator * self, gint64 timeout)
{
  gboolean too_big;

  g_return_if_fail (GST_IS_AGGREGATOR (self));

  GST_OBJECT_LOCK (self);
  /* a timeout beyond the max latency of a live pipeline cannot be met */
  too_big = self->priv->latency_live && self->priv->latency_max != 0 &&
      GST_CLOCK_TIME_IS_VALID (timeout) && timeout > self->priv->latency_max;
  if (too_big) {
    GST_ELEMENT_WARNING (self, CORE, NEGOTIATION,
        ("%s", "Timeout too big"),
        ("The requested timeout value is too big for the latency in the "
            "current pipeline. Limiting to %" G_GINT64_FORMAT,
            self->priv->latency_max));
    timeout = self->priv->latency_max;
  }
  self->timeout = timeout;
  GST_OBJECT_UNLOCK (self);
}
示例8: gst_motiondetect_load_mask
/* We take ownership of mask here.
 * Loads @mask as a grayscale image and installs it (plus a cached
 * inverted copy) on @filter, freeing the previous mask/filename. */
static void
gst_motiondetect_load_mask (StbtMotionDetect * filter, char* mask)
{
  char *oldMaskFilename = NULL;
  IplImage *oldMaskImage = NULL, *newMaskImage = NULL;

  if (mask) {
    newMaskImage = cvLoadImage (mask, CV_LOAD_IMAGE_GRAYSCALE);
    if (!newMaskImage) {
      /* Unfortunately OpenCV doesn't seem to provide any way of finding out
         why the image load failed, so we can't be more specific than FAILED: */
      GST_ELEMENT_WARNING (filter, RESOURCE, FAILED,
          ("OpenCV failed to load mask image"),
          ("While attempting to load mask '%s'", mask));
      /* NOTE(review): cvLoadImage is not documented to set errno, so the
         g_strerror(errno) below may report a stale/unrelated error */
      GST_WARNING ("Couldn't load mask image: %s. error: %s",
          mask, g_strerror (errno));
      g_free (mask);
      mask = NULL;
    }
    /* NOTE(review): this runs even when the load failed (mask is now
       NULL) — looks intentional but verify against the callee */
    gst_motiondetect_check_mask_compability(filter);
  }

  /* swap in the new mask under the object lock; the old resources are
     released only after unlocking, keeping the critical section short */
  GST_OBJECT_LOCK(filter);
  oldMaskFilename = filter->mask;
  filter->mask = mask;
  oldMaskImage = filter->cvMaskImage;
  filter->cvMaskImage = newMaskImage;
  /* drop any previously cached inverted mask */
  if (filter->cvInvertedMaskImage) {
    cvReleaseImage (&filter->cvInvertedMaskImage);
    filter->cvInvertedMaskImage = NULL;
  }
  /* cache an inverted copy of the new mask */
  if (filter->cvMaskImage) {
    filter->cvInvertedMaskImage = cvCloneImage (filter->cvMaskImage);
    cvNot(filter->cvMaskImage, filter->cvInvertedMaskImage);
  }
  GST_OBJECT_UNLOCK(filter);

  cvReleaseImage (&oldMaskImage);
  g_free(oldMaskFilename);
}
示例9: gst_play_sink_convert_bin_add_identity
/* Ensure the bin contains a pass-through identity element, creating and
 * configuring it on first call. Warns (and posts a missing-plugin
 * message) if the identity factory is unavailable. */
void
gst_play_sink_convert_bin_add_identity (GstPlaySinkConvertBin * self)
{
  GstElement *identity;

  /* already created earlier: nothing to do */
  if (self->identity != NULL)
    return;

  identity = gst_element_factory_make ("identity", "identity");
  self->identity = identity;

  if (identity == NULL) {
    gst_play_sink_convert_bin_post_missing_element_message (self, "identity");
    GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
        (_("Missing element '%s' - check your GStreamer installation."),
            "identity"), (self->audio ?
            "audio rendering might fail" : "video rendering might fail")
        );
    return;
  }

  /* pure pass-through: no logging, no handoff signals */
  g_object_set (identity, "silent", TRUE, "signal-handoffs", FALSE, NULL);
  gst_bin_add (GST_BIN_CAST (self), identity);
}
示例10: gst_wavenc_change_state
/* State-change handler: resets the muxer on NULL->READY and warns on
 * PAUSED->READY if the stream was shut down without receiving EOS. */
static GstStateChangeReturn
gst_wavenc_change_state (GstElement * element, GstStateChange transition)
{
  GstWavEnc *wavenc = GST_WAVENC (element);
  GstStateChangeReturn result;

  if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
    /* reset all stream parameters for a fresh run */
    wavenc->format = 0;
    wavenc->channels = 0;
    wavenc->width = 0;
    wavenc->rate = 0;
    wavenc->length = 0;
    wavenc->sent_header = FALSE;
    /* nothing has been written yet, so the stream is trivially complete */
    wavenc->finished_properly = TRUE;
  }

  result = parent_class->change_state (element, transition);
  if (result != GST_STATE_CHANGE_SUCCESS)
    return result;

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY &&
      !wavenc->finished_properly) {
    GST_ELEMENT_WARNING (wavenc, STREAM, MUX,
        ("Wav stream not finished properly"),
        ("Wav stream not finished properly, no EOS received "
            "before shutdown"));
  }

  return result;
}
示例11: gst_identity_query
/* Query handler: delegates to the base class, then post-processes
 * LATENCY queries (records upstream latency, warns on impossible
 * min/max combinations when syncing). */
static gboolean
gst_identity_query (GstBaseTransform * base, GstPadDirection direction,
    GstQuery * query)
{
  GstIdentity *identity = GST_IDENTITY (base);
  gboolean live = FALSE;
  GstClockTime min = 0, max = 0;
  gboolean res;

  res = GST_BASE_TRANSFORM_CLASS (parent_class)->query (base, direction, query);

  /* only latency queries need extra handling */
  if (GST_QUERY_TYPE (query) != GST_QUERY_LATENCY)
    return res;

  if (res) {
    gst_query_parse_latency (query, &live, &min, &max);
    /* a max below min cannot be satisfied once we start syncing */
    if (identity->sync && max < min) {
      GST_ELEMENT_WARNING (base, CORE, CLOCK, (NULL),
          ("Impossible to configure latency before identity sync=true:"
              " max %" GST_TIME_FORMAT " < min %"
              GST_TIME_FORMAT ". Add queues or other buffering elements.",
              GST_TIME_ARGS (max), GST_TIME_ARGS (min)));
    }
  }

  /* Ignore the upstream latency if it is not live */
  GST_OBJECT_LOCK (identity);
  identity->upstream_latency = live ? min : 0;
  GST_OBJECT_UNLOCK (identity);

  /* syncing makes us effectively live as well */
  gst_query_set_latency (query, live || identity->sync, min, max);
  return TRUE;
}
示例12: gst_rtp_g722_depay_process
/* Depayload one G.722 RTP packet: map the RTP buffer, extract the raw
 * payload, and flag the start of a talk spurt. Returns NULL (packet
 * dropped, non-fatal) for empty payloads. */
static GstBuffer *
gst_rtp_g722_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpG722Depay *depay = GST_RTP_G722_DEPAY (depayload);
  GstRTPBuffer rtp = { NULL };
  GstBuffer *outbuf;
  gboolean marker;
  gint payload_len;

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  payload_len = gst_rtp_buffer_get_payload_len (&rtp);
  if (payload_len <= 0) {
    /* nothing to depayload: warn, unmap, drop */
    GST_ELEMENT_WARNING (depay, STREAM, DECODE,
        ("Empty Payload."), (NULL));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }

  GST_DEBUG_OBJECT (depay, "got payload of %d bytes", payload_len);

  outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
  marker = gst_rtp_buffer_get_marker (&rtp);
  gst_rtp_buffer_unmap (&rtp);

  if (marker && outbuf) {
    /* mark talk spurt with DISCONT */
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
  }

  return outbuf;
}
示例13: gst_v4l2_set_input
/* Select input @input on the device via VIDIOC_S_INPUT.
 * Returns TRUE on success, FALSE if the device is not open or the
 * ioctl fails (a warning is posted in the latter case). */
gboolean
gst_v4l2_set_input (GstV4l2Object * v4l2object, gint input)
{
  GST_DEBUG_OBJECT (v4l2object->element, "trying to set input to %d", input);

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_INPUT, &input) >= 0)
    return TRUE;

  /* ioctl failed: warn with system error details and report failure */
  GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
      (_("Failed to set input %d on device %s."),
          input, v4l2object->videodev), GST_ERROR_SYSTEM);
  return FALSE;
}
示例14: gst_v4l2_set_norm
/******************************************************
* gst_v4l2_set_norm()
* Set the norm of the current device
* return value: TRUE on success, FALSE on error
******************************************************/
gboolean
gst_v4l2_set_norm (GstV4l2Object * v4l2object, v4l2_std_id norm)
{
GST_DEBUG_OBJECT (v4l2object->element, "trying to set norm to %llx", norm);
if (!GST_V4L2_IS_OPEN (v4l2object))
return FALSE;
if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_STD, &norm) < 0)
goto std_failed;
return TRUE;
/* ERRORS */
std_failed:
{
GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
(_("Failed to set norm for device '%s'."),
v4l2object->videodev), GST_ERROR_SYSTEM);
return FALSE;
}
}
示例15: gst_decklink_video_sink_set_property
/* GObject property setter for the Decklink video sink. */
void
gst_decklink_video_sink_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);

  switch (property_id) {
    case PROP_MODE:
      self->mode = (GstDecklinkModeEnum) g_value_get_enum (value);
      break;
    case PROP_DEVICE_NUMBER:
      self->device_number = g_value_get_int (value);
      break;
    case PROP_VIDEO_FORMAT:{
      GstDecklinkVideoFormat fmt =
          (GstDecklinkVideoFormat) g_value_get_enum (value);

      self->video_format = fmt;
      /* only auto / 8- and 10-bit YUV / ARGB / BGRA are accepted here */
      if (fmt != GST_DECKLINK_VIDEO_FORMAT_AUTO &&
          fmt != GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV &&
          fmt != GST_DECKLINK_VIDEO_FORMAT_10BIT_YUV &&
          fmt != GST_DECKLINK_VIDEO_FORMAT_8BIT_ARGB &&
          fmt != GST_DECKLINK_VIDEO_FORMAT_8BIT_BGRA) {
        GST_ELEMENT_WARNING (GST_ELEMENT (self), CORE, NOT_IMPLEMENTED,
            ("Format %d not supported", self->video_format), (NULL));
      }
      break;
    }
    case PROP_TIMECODE_FORMAT:
      self->timecode_format =
          gst_decklink_timecode_format_from_enum ((GstDecklinkTimecodeFormat)
          g_value_get_enum (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}