This article collects and summarizes typical usage examples of the GST_WARNING function in C++. If you have been wondering what exactly GST_WARNING does in C++, how to use it, or where to find usage examples, the curated function examples here may provide the help you need.
A total of 15 GST_WARNING code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
Example 1: calculate_skew
/* For the clock skew we use a windowed low point averaging algorithm as can be
* found in Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation
* over Network Delays":
* http://www.grame.fr/Ressources/pub/TR-050601.pdf
* http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
*
* The idea is that the jitter is composed of:
*
* J = N + n
*
* N : a constant network delay.
* n : random added noise. The noise is concentrated around 0
*
* In the receiver we can track the elapsed time at the sender with:
*
* send_diff(i) = (Tsi - Ts0);
*
* Tsi : The time at the sender at packet i
* Ts0 : The time at the sender at the first packet
*
 * This is the difference between the RTP timestamp of the current packet and
 * that of the first received packet.
*
* At the receiver we have to deal with the jitter introduced by the network.
*
* recv_diff(i) = (Tri - Tr0)
*
* Tri : The time at the receiver at packet i
* Tr0 : The time at the receiver at the first packet
*
* Both of these values contain a jitter Ji, a jitter for packet i, so we can
* write:
*
 * recv_diff(i) = (Cri + D + ni) - (Cr0 + D + n0)
*
* Cri : The time of the clock at the receiver for packet i
* D + ni : The jitter when receiving packet i
*
 * We see that the network delay is irrelevant here as we can eliminate D:
*
 * recv_diff(i) = (Cri + ni) - (Cr0 + n0)
*
* The drift is now expressed as:
*
* Drift(i) = recv_diff(i) - send_diff(i);
*
* We now keep the W latest values of Drift and find the minimum (this is the
* one with the lowest network jitter and thus the one which is least affected
* by it). We average this lowest value to smooth out the resulting network skew.
*
* Both the window and the weighting used for averaging influence the accuracy
* of the drift estimation. Finding the correct parameters turns out to be a
* compromise between accuracy and inertia.
*
* We use a 2 second window or up to 512 data points, which is statistically big
* enough to catch spikes (FIXME, detect spikes).
* We also use a rather large weighting factor (125) to smoothly adapt. During
* startup, when filling the window, we use a parabolic weighting factor, the
* more the window is filled, the faster we move to the detected possible skew.
*
* Returns: @time adjusted with the clock skew.
*/
static GstClockTime
calculate_skew (RTPJitterBuffer * jbuf, guint32 rtptime, GstClockTime time,
guint32 clock_rate)
{
guint64 ext_rtptime;
guint64 send_diff, recv_diff;
gint64 delta;
gint64 old;
gint pos, i;
GstClockTime gstrtptime, out_time;
guint64 slope;
ext_rtptime = gst_rtp_buffer_ext_timestamp (&jbuf->ext_rtptime, rtptime);
gstrtptime = gst_util_uint64_scale_int (ext_rtptime, GST_SECOND, clock_rate);
/* keep track of the last extended rtptime */
jbuf->last_rtptime = ext_rtptime;
if (jbuf->clock_rate != clock_rate) {
if (jbuf->clock_rate == -1) {
GST_DEBUG ("Clock rate changed from %" G_GUINT32_FORMAT " to %"
G_GUINT32_FORMAT, jbuf->clock_rate, clock_rate);
} else {
GST_WARNING ("Clock rate changed from %" G_GUINT32_FORMAT " to %"
G_GUINT32_FORMAT, jbuf->clock_rate, clock_rate);
}
jbuf->base_time = -1;
jbuf->base_rtptime = -1;
jbuf->clock_rate = clock_rate;
jbuf->prev_out_time = -1;
jbuf->prev_send_diff = -1;
}
/* first time, lock on to time and gstrtptime */
if (G_UNLIKELY (jbuf->base_time == -1)) {
jbuf->base_time = time;
jbuf->prev_out_time = -1;
//......... part of the code is omitted here .........
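The windowed low-point averaging described in the comment above fits in a few lines. The following is a simplified, standalone sketch of the idea (the SkewSketch struct and its field names are ours for illustration, and the parabolic startup weighting is omitted); it is not the actual rtpjitterbuffer code:

/* Sketch only; needs glib.h (gint64, MIN) and string.h (memmove). */
#define DRIFT_WINDOW 512
#define SKEW_WEIGHT 125

typedef struct {
  gint64 drift_window[DRIFT_WINDOW]; /* the W latest drift measurements */
  guint filled;                      /* number of valid slots */
  gint64 skew;                       /* smoothed low-point skew estimate */
} SkewSketch;

static void
skew_sketch_update (SkewSketch * s, gint64 send_diff, gint64 recv_diff)
{
  gint64 drift = recv_diff - send_diff;
  gint64 low;
  guint i;

  /* slide the window and append the newest drift value */
  if (s->filled == DRIFT_WINDOW)
    memmove (s->drift_window, s->drift_window + 1,
        (DRIFT_WINDOW - 1) * sizeof (gint64));
  else
    s->filled++;
  s->drift_window[s->filled - 1] = drift;

  /* the window minimum is the sample least affected by network jitter */
  low = s->drift_window[0];
  for (i = 1; i < s->filled; i++)
    low = MIN (low, s->drift_window[i]);

  /* move the estimate slowly toward the low point (weighting factor 125) */
  s->skew = (low + (SKEW_WEIGHT - 1) * s->skew) / SKEW_WEIGHT;
}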
Example 2: gst_base_video_decoder_finish_frame
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame)
{
GstBaseVideoDecoderClass *base_video_decoder_class;
GstBuffer *src_buffer;
GST_DEBUG ("finish frame");
base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT, frame->is_sync_point,
GST_TIME_ARGS (frame->presentation_timestamp));
if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp),
GST_TIME_ARGS (frame->presentation_timestamp -
base_video_decoder->segment.start));
base_video_decoder->timestamp_offset = frame->presentation_timestamp;
base_video_decoder->field_index = 0;
} else {
/* This case is for one initial timestamp and no others, e.g.,
* filesrc ! decoder ! xvimagesink */
GST_WARNING ("sync timestamp didn't change, ignoring");
frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
}
} else {
if (frame->is_sync_point) {
GST_WARNING ("sync point doesn't have timestamp");
if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
GST_WARNING
("No base timestamp. Assuming frames start at segment start");
base_video_decoder->timestamp_offset =
base_video_decoder->segment.start;
base_video_decoder->field_index = 0;
}
}
}
frame->field_index = base_video_decoder->field_index;
base_video_decoder->field_index += frame->n_fields;
if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
frame->presentation_timestamp =
gst_base_video_decoder_get_field_timestamp (base_video_decoder,
frame->field_index);
frame->presentation_duration = GST_CLOCK_TIME_NONE;
frame->decode_timestamp =
gst_base_video_decoder_get_timestamp (base_video_decoder,
frame->decode_frame_number);
}
if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
frame->presentation_duration =
gst_base_video_decoder_get_field_duration (base_video_decoder,
frame->n_fields);
}
if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
GST_WARNING ("decreasing timestamp (%" GST_TIME_FORMAT " < %"
GST_TIME_FORMAT ")", GST_TIME_ARGS (frame->presentation_timestamp),
GST_TIME_ARGS (base_video_decoder->last_timestamp));
}
}
base_video_decoder->last_timestamp = frame->presentation_timestamp;
GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif
int tff = base_video_decoder->state.top_field_first;
if (frame->field_index & 1) {
tff ^= 1;
}
if (tff) {
GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
} else {
GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
}
GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
if (frame->n_fields == 3) {
GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      /* single field: mark the buffer as such (the flag was cleared above) */
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
}
}
if (base_video_decoder->discont) {
  /* first buffer after a discontinuity: mark it as such */
  GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DISCONT);
base_video_decoder->discont = FALSE;
//......... part of the code is omitted here .........
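gst_base_video_decoder_get_field_timestamp() is not part of this excerpt. Conceptually it extrapolates a timestamp for a given field from timestamp_offset and the frame rate; the following is a hedged sketch of what such a helper could look like, not the real implementation:

/* Hypothetical sketch: derive a field's timestamp from the segment base.
 * Assumes state.fps_n/fps_d hold the frame rate and that a frame carries
 * two fields, so one field lasts fps_d / (2 * fps_n) seconds. */
static GstClockTime
field_timestamp_sketch (GstBaseVideoDecoder * dec, int field_index)
{
  if (dec->state.fps_n == 0)
    return GST_CLOCK_TIME_NONE;

  return dec->timestamp_offset +
      gst_util_uint64_scale (field_index,
      (guint64) dec->state.fps_d * GST_SECOND, 2 * dec->state.fps_n);
}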
Example 3: setup_recoder_pipeline
static gboolean
setup_recoder_pipeline (GstSmartEncoder * smart_encoder)
{
GstPad *tmppad;
GstCaps *caps;
/* Fast path */
if (G_UNLIKELY (smart_encoder->encoder))
return TRUE;
GST_DEBUG ("Creating internal decoder and encoder");
/* Create decoder/encoder */
caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
smart_encoder->decoder = get_decoder (caps);
if (G_UNLIKELY (smart_encoder->decoder == NULL))
goto no_decoder;
gst_caps_unref (caps);
gst_element_set_bus (smart_encoder->decoder, GST_ELEMENT_BUS (smart_encoder));
caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
smart_encoder->encoder = get_encoder (caps);
if (G_UNLIKELY (smart_encoder->encoder == NULL))
goto no_encoder;
gst_caps_unref (caps);
gst_element_set_bus (smart_encoder->encoder, GST_ELEMENT_BUS (smart_encoder));
GST_DEBUG ("Creating internal pads");
/* Create internal pads */
/* Source pad which we'll use to feed data to decoders */
smart_encoder->internal_srcpad = gst_pad_new ("internal_src", GST_PAD_SRC);
g_object_set_qdata ((GObject *) smart_encoder->internal_srcpad,
INTERNAL_ELEMENT, smart_encoder);
gst_pad_set_active (smart_encoder->internal_srcpad, TRUE);
/* Sink pad which will get the buffers from the encoder.
* Note: We don't need an event function since we'll be discarding all
* of them. */
smart_encoder->internal_sinkpad = gst_pad_new ("internal_sink", GST_PAD_SINK);
g_object_set_qdata ((GObject *) smart_encoder->internal_sinkpad,
INTERNAL_ELEMENT, smart_encoder);
gst_pad_set_chain_function (smart_encoder->internal_sinkpad, internal_chain);
gst_pad_set_active (smart_encoder->internal_sinkpad, TRUE);
GST_DEBUG ("Linking pads to elements");
/* Link everything */
tmppad = gst_element_get_static_pad (smart_encoder->encoder, "src");
if (GST_PAD_LINK_FAILED (gst_pad_link (tmppad,
smart_encoder->internal_sinkpad)))
goto sinkpad_link_fail;
gst_object_unref (tmppad);
if (!gst_element_link (smart_encoder->decoder, smart_encoder->encoder))
goto encoder_decoder_link_fail;
tmppad = gst_element_get_static_pad (smart_encoder->decoder, "sink");
if (GST_PAD_LINK_FAILED (gst_pad_link (smart_encoder->internal_srcpad,
tmppad)))
goto srcpad_link_fail;
gst_object_unref (tmppad);
GST_DEBUG ("Done creating internal elements/pads");
return TRUE;
no_decoder:
{
GST_WARNING ("Couldn't find a decoder for %" GST_PTR_FORMAT, caps);
gst_caps_unref (caps);
return FALSE;
}
no_encoder:
{
GST_WARNING ("Couldn't find an encoder for %" GST_PTR_FORMAT, caps);
gst_caps_unref (caps);
return FALSE;
}
srcpad_link_fail:
{
gst_object_unref (tmppad);
GST_WARNING ("Couldn't link internal srcpad to decoder");
return FALSE;
}
sinkpad_link_fail:
{
gst_object_unref (tmppad);
GST_WARNING ("Couldn't link encoder to internal sinkpad");
return FALSE;
}
encoder_decoder_link_fail:
{
GST_WARNING ("Couldn't link decoder to encoder");
return FALSE;
//......... part of the code is omitted here .........
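internal_chain(), installed on internal_sinkpad above, is not shown in this excerpt. Below is a minimal hedged sketch of what such a chain function could do: retrieve the owning element from the pad's qdata and collect the re-encoded buffer (the pending_buffers field is an assumption, not the real gstsmartencoder member):

/* Hypothetical chain function for the internal sink pad: buffers coming out
 * of the internal encoder land here and are queued until the element pushes
 * the re-encoded GOP from its public srcpad. */
static GstFlowReturn
internal_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstSmartEncoder *smart_encoder =
      g_object_get_qdata ((GObject *) pad, INTERNAL_ELEMENT);

  smart_encoder->pending_buffers =
      g_list_append (smart_encoder->pending_buffers, buf);

  return GST_FLOW_OK;
}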
Example 4: GST_MEMDUMP
//
// Expected synchronisation from caller. This method is not thread-safe!
//
bool DiscretixSession::dxdrmProcessKey(Uint8Array* key, RefPtr<Uint8Array>& nextMessage, unsigned short& errorCode, unsigned long& systemCode)
{
GST_MEMDUMP("response received :", key->data(), key->byteLength());
bool isAckRequired;
HDxResponseResult responseResult = nullptr;
EDxDrmStatus status = DX_ERROR_CONTENT_NOT_RECOGNIZED;
errorCode = 0;
if (m_state == PHASE_INITIAL) {
// Server replied to our license request
status = DxDrmStream_ProcessLicenseResponse(m_DxDrmStream, key->data(), key->byteLength(), &responseResult, &isAckRequired);
if (status == DX_SUCCESS) {
// Create a deep copy of the key.
m_key = key->buffer();
m_state = (isAckRequired ? PHASE_ACKNOWLEDGE : PHASE_PROVISIONED);
GST_DEBUG("Acknowledgement required: %s", isAckRequired ? "yes" : "no");
}
} else if (m_state == PHASE_ACKNOWLEDGE) {
// Server replied to our license response acknowledge
status = DxDrmClient_ProcessServerResponse(key->data(), key->byteLength(), DX_RESPONSE_LICENSE_ACK, &responseResult, &isAckRequired);
if (status == DX_SUCCESS) {
// Create a deep copy of the key.
m_key = key->buffer();
m_state = (isAckRequired ? PHASE_ACKNOWLEDGE : PHASE_PROVISIONED);
if (m_state == PHASE_ACKNOWLEDGE)
GST_WARNING("Acknowledging an Ack. Strange situation.");
}
} else
GST_WARNING("Unexpected call. We are already provisioned");
if (status != DX_SUCCESS) {
GST_ERROR("failed processing license response (status: %d)", status);
errorCode = MediaKeyError::MEDIA_KEYERR_CLIENT;
} else if (m_state == PHASE_PROVISIONED) {
status = DxDrmStream_SetIntent(m_DxDrmStream, DX_INTENT_AUTO_PLAY, DX_AUTO_NO_UI);
if (status != DX_SUCCESS)
GST_ERROR("opening stream failed because there are no rights (license) to play the content (status: %d)", status);
else {
GST_INFO("playback rights found");
/* starting consumption of the file - notifying the drm that the file is being used */
status = DxDrmFile_HandleConsumptionEvent(m_DxDrmStream, DX_EVENT_START);
if (status != DX_SUCCESS)
GST_ERROR("Content consumption failed");
else {
GST_INFO("Stream was opened and is ready for playback");
m_ready = true;
}
}
} else if (m_state == PHASE_ACKNOWLEDGE) {
uint32_t challengeLength = MAX_CHALLENGE_LEN;
unsigned char* challenge = static_cast<unsigned char*>(g_malloc0(challengeLength));
status = DxDrmClient_GetLicenseAcq_GenerateAck(&responseResult, challenge, &challengeLength);
if (status != DX_SUCCESS)
GST_ERROR("failed generating license ack challenge (status: %d, response result %p)", status, responseResult);
GST_MEMDUMP("generated license ack request :", challenge, challengeLength);
nextMessage = Uint8Array::create(challenge, challengeLength);
g_free(challenge);
}
systemCode = status;
return (status == DX_SUCCESS);
}
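The method walks a small three-phase state machine: PHASE_INITIAL (license request sent, response not yet processed), PHASE_ACKNOWLEDGE (license installed but the server expects an acknowledgement round-trip), and PHASE_PROVISIONED (rights available, the stream can be opened). The enum below only summarizes the shape implied by the code above; the actual declaration lives elsewhere in DiscretixSession:

// Assumed session phases, named after their uses in dxdrmProcessKey().
enum SessionPhase {
    PHASE_INITIAL,     // license request issued, response not yet processed
    PHASE_ACKNOWLEDGE, // license processed, server still expects an ack
    PHASE_PROVISIONED  // rights installed, playback may start
};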
Example 5: gst_base_video_decoder_sink_event
static gboolean
gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
{
GstBaseVideoDecoder *base_video_decoder;
GstBaseVideoDecoderClass *base_video_decoder_class;
gboolean ret = FALSE;
base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
{
if (!base_video_decoder->packetized) {
GstFlowReturn flow_ret;
do {
flow_ret =
base_video_decoder_class->parse_data (base_video_decoder, TRUE);
} while (flow_ret == GST_FLOW_OK);
}
if (base_video_decoder_class->finish) {
base_video_decoder_class->finish (base_video_decoder);
}
ret =
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
event);
}
break;
case GST_EVENT_NEWSEGMENT:
{
gboolean update;
double rate;
double applied_rate;
GstFormat format;
gint64 start;
gint64 stop;
gint64 position;
GstSegment *segment = &base_video_decoder->segment;
gst_event_parse_new_segment_full (event, &update, &rate,
&applied_rate, &format, &start, &stop, &position);
if (format != GST_FORMAT_TIME)
goto newseg_wrong_format;
if (!update) {
gst_base_video_decoder_reset (base_video_decoder);
}
base_video_decoder->timestamp_offset = start;
gst_segment_set_newsegment_full (segment,
update, rate, applied_rate, format, start, stop, position);
base_video_decoder->have_segment = TRUE;
GST_WARNING ("new segment: format %d rate %g start %" GST_TIME_FORMAT
" stop %" GST_TIME_FORMAT
" position %" GST_TIME_FORMAT
" update %d",
format, rate,
GST_TIME_ARGS (segment->start),
GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update);
ret =
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
event);
}
break;
case GST_EVENT_FLUSH_STOP:{
GST_OBJECT_LOCK (base_video_decoder);
base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
base_video_decoder->proportion = 0.5;
GST_OBJECT_UNLOCK (base_video_decoder);
      /* no break: fall through to the default case so the event is pushed */
    }
default:
/* FIXME this changes the order of events */
ret =
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
event);
break;
}
done:
gst_object_unref (base_video_decoder);
return ret;
newseg_wrong_format:
{
GST_DEBUG_OBJECT (base_video_decoder, "received non TIME newsegment");
gst_event_unref (event);
goto done;
}
}
Example 6: format_info_get_desc
/* returns static descriptions and dynamic ones (such as video/x-raw-yuv),
* or NULL if caps aren't known at all */
static gchar *
format_info_get_desc (const FormatInfo * info, const GstCaps * caps)
{
const GstStructure *s;
g_assert (info != NULL);
if (info->desc != NULL)
return g_strdup (_(info->desc));
s = gst_caps_get_structure (caps, 0);
if (strcmp (info->type, "video/x-raw-yuv") == 0) {
const gchar *ret = NULL;
guint32 fourcc = 0;
gst_structure_get_fourcc (s, "format", &fourcc);
switch (fourcc) {
case GST_MAKE_FOURCC ('I', '4', '2', '0'):
ret = _("Uncompressed planar YUV 4:2:0");
break;
case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
ret = _("Uncompressed planar YVU 4:2:0");
break;
case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
ret = _("Uncompressed packed YUV 4:2:2");
break;
case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
ret = _("Uncompressed packed YUV 4:1:0");
break;
case GST_MAKE_FOURCC ('Y', 'V', 'U', '9'):
ret = _("Uncompressed packed YVU 4:1:0");
break;
case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
ret = _("Uncompressed packed YUV 4:2:2");
break;
case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
ret = _("Uncompressed packed YUV 4:1:1");
break;
case GST_MAKE_FOURCC ('I', 'Y', 'U', '2'):
ret = _("Uncompressed packed YUV 4:4:4");
break;
case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
ret = _("Uncompressed planar YUV 4:2:2");
break;
case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
ret = _("Uncompressed planar YUV 4:1:1");
break;
case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
ret = _("Uncompressed black and white Y-plane");
break;
default:
ret = _("Uncompressed YUV");
break;
}
return g_strdup (ret);
} else if (strcmp (info->type, "video/x-raw-rgb") == 0) {
const gchar *rgb_str;
gint depth = 0;
gst_structure_get_int (s, "depth", &depth);
rgb_str = gst_structure_has_field (s, "alpha_mask") ? "RGBA" : "RGB";
if (gst_structure_has_field (s, "paletted_data")) {
return g_strdup_printf (_("Uncompressed palettized %d-bit %s"), depth,
rgb_str);
} else {
return g_strdup_printf ("Uncompressed %d-bit %s", depth, rgb_str);
}
} else if (strcmp (info->type, "video/x-h263") == 0) {
const gchar *variant, *ret;
variant = gst_structure_get_string (s, "variant");
if (variant == NULL)
ret = "H.263";
else if (strcmp (variant, "itu") == 0)
ret = "ITU H.26n"; /* why not ITU H.263? (tpm) */
else if (strcmp (variant, "lead") == 0)
ret = "Lead H.263";
else if (strcmp (variant, "microsoft") == 0)
ret = "Microsoft H.263";
else if (strcmp (variant, "vdolive") == 0)
ret = "VDOLive";
else if (strcmp (variant, "vivo") == 0)
ret = "Vivo H.263";
else if (strcmp (variant, "xirlink") == 0)
ret = "Xirlink H.263";
else {
GST_WARNING ("Unknown H263 variant '%s'", variant);
ret = "H.263";
}
return g_strdup (ret);
} else if (strcmp (info->type, "video/x-h264") == 0) {
const gchar *variant, *ret;
variant = gst_structure_get_string (s, "variant");
if (variant == NULL)
ret = "H.264";
//......... part of the code is omitted here .........
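The case labels above work because GST_MAKE_FOURCC packs four ASCII characters into a guint32, first character in the lowest byte, so the result can be compared directly against the value parsed out of the caps. Its definition in GStreamer core is essentially:

/* 'I','4','2','0' becomes 0x30323449 regardless of platform. */
#define GST_MAKE_FOURCC(a,b,c,d) \
    ((guint32)((a) | ((b) << 8) | ((c) << 16) | ((d) << 24)))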
Example 7: init_devices
static gpointer
init_devices (gpointer data)
{
IDeckLinkIterator *iterator;
IDeckLink *decklink = NULL;
HRESULT ret;
int i;
#ifdef _MSC_VER
// Start COM thread for Windows
g_mutex_lock (&com_init_lock);
/* create the COM initialization thread */
g_thread_create ((GThreadFunc) gst_decklink_com_thread, NULL, FALSE, NULL);
/* wait until the COM thread signals that COM has been initialized */
g_cond_wait (&com_init_cond, &com_init_lock);
g_mutex_unlock (&com_init_lock);
#endif /* _MSC_VER */
iterator = CreateDeckLinkIteratorInstance ();
if (iterator == NULL) {
GST_ERROR ("no driver");
return NULL;
}
i = 0;
ret = iterator->Next (&decklink);
while (ret == S_OK) {
ret = decklink->QueryInterface (IID_IDeckLinkInput,
(void **) &devices[i].input.input);
if (ret != S_OK) {
GST_WARNING ("selected device does not have input interface");
} else {
devices[i].input.device = decklink;
devices[i].input.clock = gst_decklink_clock_new ("GstDecklinkInputClock");
GST_DECKLINK_CLOCK_CAST (devices[i].input.clock)->input =
&devices[i].input;
devices[i].input.input->SetCallback (new GStreamerDecklinkInputCallback
    (&devices[i].input));
}
ret = decklink->QueryInterface (IID_IDeckLinkOutput,
(void **) &devices[i].output.output);
if (ret != S_OK) {
GST_WARNING ("selected device does not have output interface");
} else {
devices[i].output.device = decklink;
devices[i].output.clock =
gst_decklink_clock_new ("GstDecklinkOutputClock");
GST_DECKLINK_CLOCK_CAST (devices[i].output.clock)->output =
&devices[i].output;
}
ret = decklink->QueryInterface (IID_IDeckLinkConfiguration,
(void **) &devices[i].input.config);
if (ret != S_OK) {
GST_WARNING ("selected device does not have config interface");
}
ret = decklink->QueryInterface (IID_IDeckLinkAttributes,
(void **) &devices[i].input.attributes);
if (ret != S_OK) {
GST_WARNING ("selected device does not have attributes interface");
}
ret = iterator->Next (&decklink);
i++;
if (i == 10) {
GST_WARNING ("this hardware has more then 10 devices");
break;
}
}
n_devices = i;
iterator->Release ();
return NULL;
}
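The devices table filled in above is a fixed array of at most 10 entries whose element type is not shown in this excerpt. A hedged sketch of the shape implied by the accesses in init_devices() (field names taken from the code; the real declaration differs in detail):

/* Assumed element type of the static device table. */
typedef struct {
  struct {
    IDeckLink *device;
    IDeckLinkInput *input;
    IDeckLinkConfiguration *config;
    IDeckLinkAttributes *attributes;
    GstClock *clock;
  } input;
  struct {
    IDeckLink *device;
    IDeckLinkOutput *output;
    GstClock *clock;
  } output;
} Device;

static Device devices[10]; /* hence the warning above about more than 10 */
static int n_devices;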
Example 8: gst_ximagesink_check_xshm_calls
/* This function checks that it is actually possible to create an image
   using XShm */
gboolean
gst_ximagesink_check_xshm_calls (GstXImageSink * ximagesink,
GstXContext * xcontext)
{
XImage *ximage;
XShmSegmentInfo SHMInfo;
size_t size;
int (*handler) (Display *, XErrorEvent *);
gboolean result = FALSE;
gboolean did_attach = FALSE;
g_return_val_if_fail (xcontext != NULL, FALSE);
/* Sync to ensure any older errors are already processed */
XSync (xcontext->disp, FALSE);
/* Set defaults so we don't free these later unnecessarily */
SHMInfo.shmaddr = ((void *) -1);
SHMInfo.shmid = -1;
/* Setting an error handler to catch failure */
error_caught = FALSE;
handler = XSetErrorHandler (gst_ximagesink_handle_xerror);
/* Trying to create a 1x1 ximage */
GST_DEBUG ("XShmCreateImage of 1x1");
ximage = XShmCreateImage (xcontext->disp, xcontext->visual,
xcontext->depth, ZPixmap, NULL, &SHMInfo, 1, 1);
/* Might cause an error, sync to ensure it is noticed */
XSync (xcontext->disp, FALSE);
if (!ximage || error_caught) {
GST_WARNING ("could not XShmCreateImage a 1x1 image");
goto beach;
}
size = ximage->height * ximage->bytes_per_line;
SHMInfo.shmid = shmget (IPC_PRIVATE, size, IPC_CREAT | 0777);
if (SHMInfo.shmid == -1) {
GST_WARNING ("could not get shared memory of %" G_GSIZE_FORMAT " bytes",
size);
goto beach;
}
SHMInfo.shmaddr = shmat (SHMInfo.shmid, NULL, 0);
if (SHMInfo.shmaddr == ((void *) -1)) {
GST_WARNING ("Failed to shmat: %s", g_strerror (errno));
/* Clean up the shared memory segment */
shmctl (SHMInfo.shmid, IPC_RMID, NULL);
goto beach;
}
ximage->data = SHMInfo.shmaddr;
SHMInfo.readOnly = FALSE;
if (XShmAttach (xcontext->disp, &SHMInfo) == 0) {
GST_WARNING ("Failed to XShmAttach");
/* Clean up the shared memory segment */
shmctl (SHMInfo.shmid, IPC_RMID, NULL);
goto beach;
}
/* Sync to ensure we see any errors we caused */
XSync (xcontext->disp, FALSE);
/* Delete the shared memory segment as soon as everyone is attached.
* This way, it will be deleted as soon as we detach later, and not
* leaked if we crash. */
shmctl (SHMInfo.shmid, IPC_RMID, NULL);
if (!error_caught) {
GST_DEBUG ("XServer ShmAttached to 0x%x, id 0x%lx", SHMInfo.shmid,
SHMInfo.shmseg);
did_attach = TRUE;
/* store whether we succeeded in result */
result = TRUE;
} else {
GST_WARNING ("MIT-SHM extension check failed at XShmAttach. "
"Not using shared memory.");
}
beach:
/* Sync to ensure we swallow any errors we caused and reset error_caught */
XSync (xcontext->disp, FALSE);
error_caught = FALSE;
XSetErrorHandler (handler);
if (did_attach) {
GST_DEBUG ("XServer ShmDetaching from 0x%x id 0x%lx",
SHMInfo.shmid, SHMInfo.shmseg);
XShmDetach (xcontext->disp, &SHMInfo);
XSync (xcontext->disp, FALSE);
}
if (SHMInfo.shmaddr != ((void *) -1))
shmdt (SHMInfo.shmaddr);
//......... part of the code is omitted here .........
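gst_ximagesink_handle_xerror and the error_caught flag used above are the standard X11 technique for probing an extension: install an error handler that records the failure instead of letting Xlib abort the process, XSync, then inspect the flag. A sketch of what such a handler typically looks like:

static gboolean error_caught = FALSE;

/* Record that an X error happened so the XShm probe can fail gracefully
 * instead of terminating the process. */
static int
gst_ximagesink_handle_xerror (Display * display, XErrorEvent * event)
{
  char msg[1024];

  XGetErrorText (display, event->error_code, msg, sizeof (msg));
  GST_DEBUG ("ximagesink triggered an XError: %s", msg);
  error_caught = TRUE;
  return 0;
}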
Example 9: gst_vdp_mpeg4_dec_handle_configuration
static gboolean
gst_vdp_mpeg4_dec_handle_configuration (GstVdpMpeg4Dec * mpeg4_dec,
GstMpeg4Frame * mpeg4_frame)
{
Mpeg4VisualObjectSequence vos;
Mpeg4VisualObject vo;
Mpeg4VideoObjectLayer vol;
GstVideoState state;
guint8 profile_indication;
VdpDecoderProfile profile;
GstFlowReturn ret;
if (mpeg4_dec->is_configured)
return GST_FLOW_OK;
if (!mpeg4_frame->vos_buf || !mpeg4_frame->vo_buf || !mpeg4_frame->vol_buf)
goto skip_frame;
if (!mpeg4_util_parse_VOS (mpeg4_frame->vos_buf, &vos))
goto skip_frame;
if (!mpeg4_util_parse_VO (mpeg4_frame->vo_buf, &vo))
goto skip_frame;
if (!mpeg4_util_parse_VOL (mpeg4_frame->vol_buf, &vo, &vol))
goto skip_frame;
state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (mpeg4_dec));
state.width = vol.width;
state.height = vol.height;
if (vol.fixed_vop_rate) {
state.fps_n = vol.vop_time_increment_resolution;
state.fps_d = vol.fixed_vop_time_increment;
}
state.par_n = vol.par_n;
state.par_d = vol.par_d;
gst_base_video_decoder_set_state (GST_BASE_VIDEO_DECODER (mpeg4_dec), state);
profile_indication = vos.profile_and_level_indication >> 4;
switch (profile_indication) {
case 0x0:
profile = VDP_DECODER_PROFILE_MPEG4_PART2_SP;
break;
case 0xf:
profile = VDP_DECODER_PROFILE_MPEG4_PART2_ASP;
break;
default:
goto unsupported_profile;
}
ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg4_dec), profile, 2);
if (ret != GST_FLOW_OK)
return ret;
mpeg4_dec->vol = vol;
mpeg4_dec->is_configured = TRUE;
return GST_FLOW_OK;
skip_frame:
GST_WARNING ("Skipping frame since we're not configured yet");
gst_base_video_decoder_skip_frame (GST_BASE_VIDEO_DECODER (mpeg4_dec),
GST_VIDEO_FRAME (mpeg4_frame));
return GST_FLOW_CUSTOM_ERROR;
unsupported_profile:
GST_ELEMENT_ERROR (mpeg4_dec, STREAM, WRONG_TYPE,
("vdpaumpeg4dec doesn't support this streams profile"),
("profile_and_level_indication: %d", vos.profile_and_level_indication));
return GST_FLOW_ERROR;
}
Example 10: decode_buffer
//......... part of the code is omitted here .........
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
}
ofs += seg.size;
/* Decode scan, if complete */
if (seg.marker == GST_JPEG_MARKER_EOI && scan_seg.header_size > 0) {
scan_seg.data_size = seg.offset - scan_seg.data_offset;
scan_seg.is_valid = TRUE;
}
if (scan_seg.is_valid) {
status = decode_scan(
decoder,
buf + scan_seg.header_offset,
scan_seg.header_size,
buf + scan_seg.data_offset,
scan_seg.data_size
);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
break;
memset(&scan_seg, 0, sizeof(scan_seg));
}
append_ecs = TRUE;
switch (seg.marker) {
case GST_JPEG_MARKER_SOI:
priv->has_quant_table = FALSE;
priv->has_huf_table = FALSE;
priv->mcu_restart = 0;
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
break;
case GST_JPEG_MARKER_EOI:
if (decode_current_picture(decoder)) {
/* Get out of the loop, trailing data is not needed */
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
goto end;
}
status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
break;
case GST_JPEG_MARKER_DHT:
status = decode_huffman_table(decoder, buf + seg.offset, seg.size);
break;
case GST_JPEG_MARKER_DQT:
status = decode_quant_table(decoder, buf + seg.offset, seg.size);
break;
case GST_JPEG_MARKER_DRI:
status = decode_restart_interval(decoder, buf + seg.offset, seg.size);
break;
case GST_JPEG_MARKER_DAC:
GST_ERROR("unsupported arithmetic coding mode");
status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
break;
case GST_JPEG_MARKER_SOS:
scan_seg.header_offset = seg.offset;
scan_seg.header_size = seg.size;
scan_seg.data_offset = seg.offset + seg.size;
scan_seg.data_size = 0;
append_ecs = FALSE;
break;
default:
/* Restart marker */
if (seg.marker >= GST_JPEG_MARKER_RST_MIN &&
seg.marker <= GST_JPEG_MARKER_RST_MAX) {
append_ecs = FALSE;
break;
}
/* Frame header */
if (seg.marker >= GST_JPEG_MARKER_SOF_MIN &&
seg.marker <= GST_JPEG_MARKER_SOF_MAX) {
status = decode_picture(
decoder,
seg.marker,
buf + seg.offset, seg.size,
pts
);
break;
}
/* Application segments */
if (seg.marker >= GST_JPEG_MARKER_APP_MIN &&
seg.marker <= GST_JPEG_MARKER_APP_MAX) {
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
break;
}
GST_WARNING("unsupported marker (0x%02x)", seg.marker);
status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
break;
}
/* Append entropy coded segments */
if (append_ecs)
scan_seg.data_size = seg.offset - scan_seg.data_offset;
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
break;
}
end:
return status;
}
Example 11: gst_aiur_stream_cache_add_buffer
void
gst_aiur_stream_cache_add_buffer (GstAiurStreamCache * cache,
GstBuffer * buffer)
{
guint64 size;
gint trycnt = 0;
if ((cache == NULL) || (buffer == NULL))
goto bail;
g_mutex_lock (cache->mutex);
size = GST_BUFFER_SIZE (buffer);
if ((cache->seeking) || (size == 0)) {
g_mutex_unlock (cache->mutex);
goto bail;
}
if (cache->ignore_size) {
/* drop part or total buffer */
if (cache->ignore_size >= size) {
cache->ignore_size -= size;
g_mutex_unlock (cache->mutex);
goto bail;
} else {
GST_BUFFER_DATA (buffer) += (cache->ignore_size);
GST_BUFFER_SIZE (buffer) -= (cache->ignore_size);
size = GST_BUFFER_SIZE (buffer);
cache->ignore_size = 0;
}
//g_print("cache offset %lld\n", cache->offset);
}
gst_adapter_push (cache->adapter, buffer);
g_cond_signal (cache->produce_cond);
buffer = NULL;
if (cache->threshold_max) {
#if 0
if (cache->threshold_max < size + cache->threshold_pre) {
cache->threshold_max = size + cache->threshold_pre;
}
#endif
while ((gst_adapter_available (cache->adapter) > cache->threshold_max)
&& (cache->closed == FALSE)) {
if (((++trycnt) & 0x1f) == 0x0) {
GST_WARNING ("wait push try %d SIZE %d %lld", trycnt,
gst_adapter_available (cache->adapter), cache->threshold_max);
}
WAIT_COND_TIMEOUT (cache->consume_cond, cache->mutex, 1000000);
}
if (cache->seeking) {
g_mutex_unlock (cache->mutex);
goto bail;
}
}
g_mutex_unlock (cache->mutex);
return;
bail:
if (buffer) {
gst_buffer_unref (buffer);
}
}
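WAIT_COND_TIMEOUT is not defined in this excerpt. Presumably it wraps a timed wait on the GCond so the blocked producer re-checks the cache state at least once per timeout (every second here). A hedged sketch matching the pointer-based g_mutex_lock()/g_cond_signal() calls above:

/* Assumed helper: wait on @cond for at most @timeout microseconds while
 * @mutex is held; returns on signal or timeout so the caller can
 * re-evaluate its loop condition. */
#define WAIT_COND_TIMEOUT(cond, mutex, timeout)  \
  do {                                           \
    GTimeVal end;                                \
    g_get_current_time (&end);                   \
    g_time_val_add (&end, (glong) (timeout));    \
    g_cond_timed_wait ((cond), (mutex), &end);   \
  } while (0)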
Example 12: get_video_recv_info
static gboolean
get_video_recv_info (KmsRembLocal * rl,
guint64 * bitrate, guint * fraction_lost, guint64 * packets_rcv_interval)
{
GValueArray *arr = NULL;
GValue *val;
guint i;
gboolean ret = FALSE;
if (!KMS_REMB_BASE (rl)->rtpsess) {
GST_WARNING ("Session object does not exist");
return ret;
}
g_object_get (KMS_REMB_BASE (rl)->rtpsess, "sources", &arr, NULL);
if (arr == NULL) {
GST_WARNING ("Sources array not found");
return ret;
}
for (i = 0; i < arr->n_values; i++) {
GObject *source;
guint ssrc;
GstStructure *s;
val = g_value_array_get_nth (arr, i);
source = g_value_get_object (val);
g_object_get (source, "ssrc", &ssrc, "stats", &s, NULL);
GST_TRACE_OBJECT (source, "source ssrc: %u", ssrc);
GST_TRACE_OBJECT (KMS_REMB_BASE (rl)->rtpsess, "stats: %" GST_PTR_FORMAT,
s);
if (ssrc == rl->remote_ssrc) {
GstClockTime current_time;
guint64 octets_received, packets_received;
if (!gst_structure_get_uint64 (s, "bitrate", bitrate)) {
break;
}
if (!gst_structure_get_uint64 (s, "octets-received", &octets_received)) {
break;
}
if (!gst_structure_get_uint (s, "sent-rb-fractionlost", fraction_lost)) {
break;
}
if (!gst_structure_get_uint64 (s, "packets-received", &packets_received)) {
break;
}
current_time = kms_utils_get_time_nsecs ();
if (rl->last_time != 0) {
GstClockTime elapsed = current_time - rl->last_time;
guint64 bytes_handled = octets_received - rl->last_octets_received;
*bitrate =
gst_util_uint64_scale (bytes_handled, 8 * GST_SECOND, elapsed);
GST_TRACE_OBJECT (KMS_REMB_BASE (rl)->rtpsess,
"Elapsed %" G_GUINT64_FORMAT " bytes %" G_GUINT64_FORMAT ", rate %"
G_GUINT64_FORMAT, elapsed, bytes_handled, *bitrate);
}
rl->last_time = current_time;
rl->last_octets_received = octets_received;
*packets_rcv_interval = packets_received - rl->last_packets_received;
rl->last_packets_received = packets_received;
ret = TRUE;
}
gst_structure_free (s);
if (ret) {
break;
}
}
g_value_array_free (arr);
return ret;
}
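The interval bitrate computed above replaces the session's built-in "bitrate" stat with one measured between two invocations: bytes handled, converted to bits and scaled to one second. A quick worked example of that gst_util_uint64_scale() call:

/* 25000 bytes received over an elapsed interval of 100 ms: */
guint64 bytes_handled = 25000;
GstClockTime elapsed = 100 * GST_MSECOND; /* 100000000 ns */
guint64 bitrate =
    gst_util_uint64_scale (bytes_handled, 8 * GST_SECOND, elapsed);
/* bitrate = 25000 * 8 * GST_SECOND / elapsed = 2000000 bps (2 Mbit/s) */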
Example 13: on_sending_rtcp
static void
on_sending_rtcp (GObject * sess, GstBuffer * buffer, gboolean is_early,
gboolean * do_not_supress)
{
KmsRembLocal *rl;
KmsRTCPPSFBAFBREMBPacket remb_packet;
GstRTCPBuffer rtcp = { NULL, };
GstRTCPPacket packet;
guint packet_ssrc;
rl = g_object_get_data (sess, KMS_REMB_LOCAL);
if (!rl) {
GST_WARNING ("Invalid RembLocal");
return;
}
if (is_early) {
return;
}
if (!gst_rtcp_buffer_map (buffer, GST_MAP_READWRITE, &rtcp)) {
GST_WARNING_OBJECT (sess, "Cannot map buffer to RTCP");
return;
}
if (!gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_PSFB, &packet)) {
GST_WARNING_OBJECT (sess, "Cannot add RTCP packet");
goto end;
}
if (!kms_remb_local_update (rl)) {
goto end;
}
remb_packet.bitrate = rl->remb;
if (rl->event_manager != NULL) {
guint remb_local_max;
remb_local_max = kms_utils_remb_event_manager_get_min (rl->event_manager);
if (remb_local_max > 0) {
GST_TRACE_OBJECT (sess, "REMB local max: %" G_GUINT32_FORMAT,
remb_local_max);
remb_packet.bitrate = MIN (remb_local_max, rl->remb);
}
}
if (rl->min_bw > 0) {
remb_packet.bitrate = MAX (remb_packet.bitrate, rl->min_bw * 1000);
} else {
remb_packet.bitrate = MAX (remb_packet.bitrate, REMB_MIN);
}
remb_packet.n_ssrcs = 1;
remb_packet.ssrcs[0] = rl->remote_ssrc;
g_object_get (sess, "internal-ssrc", &packet_ssrc, NULL);
if (!kms_rtcp_psfb_afb_remb_marshall_packet (&packet, &remb_packet,
packet_ssrc)) {
gst_rtcp_packet_remove (&packet);
}
GST_TRACE_OBJECT (sess, "Sending REMB (bitrate: %" G_GUINT32_FORMAT
", ssrc: %" G_GUINT32_FORMAT ")", remb_packet.bitrate, rl->remote_ssrc);
kms_remb_base_update_stats (KMS_REMB_BASE (rl), rl->remote_ssrc,
remb_packet.bitrate);
end:
gst_rtcp_buffer_unmap (&rtcp);
}
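KmsRTCPPSFBAFBREMBPacket models the REMB (Receiver Estimated Maximum Bitrate) feedback message from draft-alvestrand-rmcat-remb: one bitrate estimate plus the list of media SSRCs it applies to. A hedged sketch of the struct implied by its uses above (the array bound is our assumption):

typedef struct {
  guint32 bitrate;   /* estimated maximum bitrate, in bps */
  guint8 n_ssrcs;    /* how many entries of @ssrcs are valid */
  guint32 ssrcs[32]; /* media SSRCs the estimate applies to (assumed bound) */
} KmsRTCPPSFBAFBREMBPacket;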
Example 14: rtp_jitter_buffer_insert
/**
* rtp_jitter_buffer_insert:
* @jbuf: an #RTPJitterBuffer
* @buf: a buffer
* @time: a running_time when this buffer was received in nanoseconds
* @clock_rate: the clock-rate of the payload of @buf
* @max_delay: the maximum lateness of @buf
* @tail: TRUE when the tail element changed.
*
* Inserts @buf into the packet queue of @jbuf. The sequence number of the
 * packet will be used to sort the packets. This function takes ownership of
* @buf when the function returns %TRUE.
* @buf should have writable metadata when calling this function.
*
 * Returns: %FALSE if a packet with the same seqnum already existed.
*/
gboolean
rtp_jitter_buffer_insert (RTPJitterBuffer * jbuf, GstBuffer * buf,
GstClockTime time, guint32 clock_rate, gboolean * tail, gint * percent)
{
GList *list;
guint32 rtptime;
guint16 seqnum;
GstRTPBuffer rtp = {NULL};
g_return_val_if_fail (jbuf != NULL, FALSE);
g_return_val_if_fail (buf != NULL, FALSE);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
seqnum = gst_rtp_buffer_get_seq (&rtp);
/* loop the list to skip strictly smaller seqnum buffers */
for (list = jbuf->packets->head; list; list = g_list_next (list)) {
guint16 qseq;
gint gap;
GstRTPBuffer rtpb = {NULL};
gst_rtp_buffer_map (GST_BUFFER_CAST (list->data), GST_MAP_READ, &rtpb);
qseq = gst_rtp_buffer_get_seq (&rtpb);
gst_rtp_buffer_unmap (&rtpb);
/* compare the new seqnum to the one in the buffer */
gap = gst_rtp_buffer_compare_seqnum (seqnum, qseq);
/* we hit a packet with the same seqnum, notify a duplicate */
if (G_UNLIKELY (gap == 0))
goto duplicate;
/* seqnum > qseq, we can stop looking */
if (G_LIKELY (gap < 0))
break;
}
rtptime = gst_rtp_buffer_get_timestamp (&rtp);
/* rtp time jumps are checked for during skew calculation, but bypassed
* in other mode, so mind those here and reset jb if needed.
* Only reset if valid input time, which is likely for UDP input
* where we expect this might happen due to async thread effects
* (in seek and state change cycles), but not so much for TCP input */
if (GST_CLOCK_TIME_IS_VALID (time) &&
jbuf->mode != RTP_JITTER_BUFFER_MODE_SLAVE &&
jbuf->base_time != -1 && jbuf->last_rtptime != -1) {
GstClockTime ext_rtptime = jbuf->ext_rtptime;
ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
if (ext_rtptime > jbuf->last_rtptime + 3 * clock_rate ||
ext_rtptime + 3 * clock_rate < jbuf->last_rtptime) {
/* reset even if we don't have valid incoming time;
* still better than producing possibly very bogus output timestamp */
GST_WARNING ("rtp delta too big, reset skew");
rtp_jitter_buffer_reset_skew (jbuf);
}
}
switch (jbuf->mode) {
case RTP_JITTER_BUFFER_MODE_NONE:
case RTP_JITTER_BUFFER_MODE_BUFFER:
/* send 0 as the first timestamp and -1 for the other ones. This will
 * interpolate them from the RTP timestamps with a 0 origin. In buffering
* mode we will adjust the outgoing timestamps according to the amount of
* time we spent buffering. */
if (jbuf->base_time == -1)
time = 0;
else
time = -1;
break;
case RTP_JITTER_BUFFER_MODE_SLAVE:
default:
break;
}
/* do skew calculation by measuring the difference between rtptime and the
* receive time, this function will retimestamp @buf with the skew corrected
* running time. */
time = calculate_skew (jbuf, rtptime, time, clock_rate);
GST_BUFFER_TIMESTAMP (buf) = time;
/* It's more likely that the packet was inserted in the front of the buffer */
if (G_LIKELY (list))
g_queue_insert_before (jbuf->packets, list, buf);
//......... part of the code is omitted here .........
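gst_rtp_buffer_compare_seqnum() is wraparound-safe: it returns a positive gap when the second seqnum is ahead of the first and a negative one when it is behind, even across the 16-bit boundary. The equivalent logic (not the library source) is a signed 16-bit subtraction:

/* > 0 when s2 is ahead of s1, < 0 when behind, 0 when equal.
 * E.g. s1 = 65535, s2 = 0 yields +1 even though s2 is numerically smaller. */
static gint
compare_seqnum_sketch (guint16 s1, guint16 s2)
{
  return (gint) (gint16) (guint16) (s2 - s1);
}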
Example 15: gst_amlvdec_sink_event
static gboolean
gst_amlvdec_sink_event (GstVideoDecoder * dec, GstEvent * event)
{
gboolean ret = TRUE;
GstAmlVdec *amlvdec = GST_AMLVDEC(dec);
GST_ERROR_OBJECT (amlvdec, "Got %s event on sink pad", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
/* case GST_EVENT_NEWSEGMENT:
{
gboolean update;
GstFormat format;
gdouble rate, arate;
gint64 start, stop, time;
stop_eos_task (amlvdec);
gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format, &start, &stop, &time);
if (format != GST_FORMAT_TIME)
goto newseg_wrong_format;
amlvdec_forward_process(amlvdec, update, rate, format, start, stop, time);
gst_segment_set_newsegment_full (&amlvdec->segment, update, rate, arate, format, start, stop, time);
GST_DEBUG_OBJECT (amlvdec,"Pushing newseg rate %g, applied rate %g, format %d, start %"
G_GINT64_FORMAT ", stop %" G_GINT64_FORMAT ", pos %" G_GINT64_FORMAT,
rate, arate, format, start, stop, time);
ret = gst_pad_push_event (amlvdec->srcpad, event);
break;
}*/
case GST_EVENT_FLUSH_START:
if(amlvdec->codec_init_ok){
set_black_policy(0);
}
ret = TRUE;
break;
case GST_EVENT_FLUSH_STOP:
{
stop_eos_task (amlvdec);
if(amlvdec->codec_init_ok){
gint res = -1;
res = codec_reset(amlvdec->pcodec);
if (res < 0) {
GST_ERROR("reset vcodec failed, res= %x\n", res);
return FALSE;
}
amlvdec->is_headerfeed = FALSE;
}
GST_WARNING("vformat:%d\n", amlvdec->pcodec->video_type);
break;
}
case GST_EVENT_EOS:
GST_WARNING("get GST_EVENT_EOS,check for video end\n");
if(amlvdec->codec_init_ok) {
start_eos_task(amlvdec);
amlvdec->is_eos = TRUE;
}
ret = TRUE;
break;
default:
break;
}
done:
ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (dec, event);
return ret;
}