本文整理汇总了C++中GST_ERROR_OBJECT函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_ERROR_OBJECT函数的具体用法?C++ GST_ERROR_OBJECT怎么用?C++ GST_ERROR_OBJECT使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GST_ERROR_OBJECT函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: output_loop
//.........这里部分代码省略.........
memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen);
if (self->use_timestamps)
{
GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp,
GST_SECOND,
OMX_TICKS_PER_SECOND);
}
if (self->share_output_buffer)
{
GST_WARNING_OBJECT (self, "couldn't zero-copy");
/* If pAppPrivate is NULL, it means it was a dummy
* allocation, free it. */
if (!omx_buffer->pAppPrivate)
{
g_free (omx_buffer->pBuffer);
omx_buffer->pBuffer = NULL;
}
}
ret = push_buffer (self, buf);
}
else
{
GST_WARNING_OBJECT (self, "couldn't allocate buffer of size %" G_GUINT32_FORMAT,
omx_buffer->nFilledLen);
}
}
}
else
{
GST_WARNING_OBJECT (self, "empty buffer");
}
if (G_UNLIKELY (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS))
{
GST_DEBUG_OBJECT (self, "got eos");
gst_pad_push_event (self->srcpad, gst_event_new_eos ());
ret = GST_FLOW_UNEXPECTED;
goto leave;
}
if (self->share_output_buffer &&
!omx_buffer->pBuffer &&
omx_buffer->nOffset == 0)
{
GstBuffer *buf;
GstFlowReturn result;
GST_LOG_OBJECT (self, "allocate buffer");
result = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
GST_BUFFER_OFFSET_NONE,
omx_buffer->nAllocLen,
GST_PAD_CAPS (self->srcpad),
&buf);
if (G_LIKELY (result == GST_FLOW_OK))
{
gst_buffer_ref (buf);
omx_buffer->pAppPrivate = buf;
omx_buffer->pBuffer = GST_BUFFER_DATA (buf);
omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);
}
else
{
GST_WARNING_OBJECT (self, "could not pad allocate buffer, using malloc");
omx_buffer->pBuffer = g_malloc (omx_buffer->nAllocLen);
}
}
if (self->share_output_buffer &&
!omx_buffer->pBuffer)
{
GST_ERROR_OBJECT (self, "no input buffer to share");
}
omx_buffer->nFilledLen = 0;
GST_LOG_OBJECT (self, "release_buffer");
g_omx_port_release_buffer (out_port, omx_buffer);
}
leave:
self->last_pad_push_return = ret;
if (gomx->omx_error != OMX_ErrorNone)
ret = GST_FLOW_ERROR;
if (ret != GST_FLOW_OK)
{
GST_INFO_OBJECT (self, "pause task, reason: %s",
gst_flow_get_name (ret));
gst_pad_pause_task (self->srcpad);
}
GST_LOG_OBJECT (self, "end");
gst_object_unref (self);
}
示例2: gst_omx_audio_dec_loop
static void
gst_omx_audio_dec_loop (GstOMXAudioDec * self)
{
GstOMXPort *port = self->dec_out_port;
GstOMXBuffer *buf = NULL;
GstFlowReturn flow_ret = GST_FLOW_OK;
GstOMXAcquireBufferReturn acq_return;
OMX_ERRORTYPE err;
acq_return = gst_omx_port_acquire_buffer (port, &buf);
if (acq_return == GST_OMX_ACQUIRE_BUFFER_ERROR) {
goto component_error;
} else if (acq_return == GST_OMX_ACQUIRE_BUFFER_FLUSHING) {
goto flushing;
} else if (acq_return == GST_OMX_ACQUIRE_BUFFER_EOS) {
goto eos;
}
if (!gst_pad_has_current_caps (GST_AUDIO_DECODER_SRC_PAD (self)) ||
acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
OMX_PARAM_PORTDEFINITIONTYPE port_def;
OMX_AUDIO_PARAM_PCMMODETYPE pcm_param;
GstAudioChannelPosition omx_position[OMX_AUDIO_MAXCHANNELS];
GstOMXAudioDecClass *klass = GST_OMX_AUDIO_DEC_GET_CLASS (self);
gint i;
GST_DEBUG_OBJECT (self, "Port settings have changed, updating caps");
/* Reallocate all buffers */
if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE
&& gst_omx_port_is_enabled (port)) {
err = gst_omx_port_set_enabled (port, FALSE);
if (err != OMX_ErrorNone)
goto reconfigure_error;
err = gst_omx_port_wait_buffers_released (port, 5 * GST_SECOND);
if (err != OMX_ErrorNone)
goto reconfigure_error;
err = gst_omx_port_deallocate_buffers (port);
if (err != OMX_ErrorNone)
goto reconfigure_error;
err = gst_omx_port_wait_enabled (port, 1 * GST_SECOND);
if (err != OMX_ErrorNone)
goto reconfigure_error;
}
/* Just update caps */
GST_AUDIO_DECODER_STREAM_LOCK (self);
gst_omx_port_get_port_definition (port, &port_def);
g_assert (port_def.format.audio.eEncoding == OMX_AUDIO_CodingPCM);
GST_OMX_INIT_STRUCT (&pcm_param);
pcm_param.nPortIndex = self->dec_out_port->index;
err =
gst_omx_component_get_parameter (self->dec, OMX_IndexParamAudioPcm,
&pcm_param);
if (err != OMX_ErrorNone) {
GST_ERROR_OBJECT (self, "Failed to get PCM parameters: %s (0x%08x)",
gst_omx_error_to_string (err), err);
goto caps_failed;
}
g_assert (pcm_param.ePCMMode == OMX_AUDIO_PCMModeLinear);
g_assert (pcm_param.bInterleaved == OMX_TRUE);
gst_audio_info_init (&self->info);
for (i = 0; i < pcm_param.nChannels; i++) {
switch (pcm_param.eChannelMapping[i]) {
case OMX_AUDIO_ChannelLF:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
break;
case OMX_AUDIO_ChannelRF:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
break;
case OMX_AUDIO_ChannelCF:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
break;
case OMX_AUDIO_ChannelLS:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT;
break;
case OMX_AUDIO_ChannelRS:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT;
break;
case OMX_AUDIO_ChannelLFE:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_LFE1;
break;
case OMX_AUDIO_ChannelCS:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_CENTER;
break;
case OMX_AUDIO_ChannelLR:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
break;
case OMX_AUDIO_ChannelRR:
omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
break;
case OMX_AUDIO_ChannelNone:
//.........这里部分代码省略.........
示例3: gst_decklink_video_src_set_caps
/* Set new caps on the DeckLink video source.
 *
 * If the pad already has caps, a change is only accepted in automatic mode
 * detection mode (the hardware will re-detect the format); otherwise the
 * new caps are rejected. For fresh caps this configures the physical input
 * connection, enables hardware format auto-detection when requested and
 * supported, enables video input on the card and publishes the selected
 * mode under the input lock.
 *
 * Returns TRUE on success / accepted caps, FALSE otherwise.
 */
static gboolean
gst_decklink_video_src_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstCaps *current_caps;
  const GstDecklinkMode *mode;
  BMDVideoInputFlags flags;
  HRESULT ret;

  GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);

  if ((current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc)))) {
    /* FIX: log the caps the pad currently has (current_caps); previously
     * this logged the new caps again, which was already done above. */
    GST_DEBUG_OBJECT (self, "Pad already has caps %" GST_PTR_FORMAT,
        current_caps);

    if (!gst_caps_is_equal (caps, current_caps)) {
      GST_DEBUG_OBJECT (self, "New caps, reconfiguring");
      gst_caps_unref (current_caps);
      /* Only auto mode can renegotiate: the hardware re-detects the format */
      if (self->mode == GST_DECKLINK_MODE_AUTO) {
        return TRUE;
      } else {
        return FALSE;
      }
    } else {
      gst_caps_unref (current_caps);
      return TRUE;
    }
  }

  if (!gst_video_info_from_caps (&self->info, caps))
    return FALSE;

  /* Select the physical input connection if one was explicitly configured */
  if (self->input->config && self->connection != GST_DECKLINK_CONNECTION_AUTO) {
    ret = self->input->config->SetInt (bmdDeckLinkConfigVideoInputConnection,
        gst_decklink_get_connection (self->connection));
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self, "Failed to set configuration (input source)");
      return FALSE;
    }

    if (self->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
      ret = self->input->config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
          bmdAnalogVideoFlagCompositeSetup75);
      if (ret != S_OK) {
        GST_ERROR_OBJECT (self,
            "Failed to set configuration (composite setup)");
        return FALSE;
      }
    }
  }

  flags = bmdVideoInputFlagDefault;
  if (self->mode == GST_DECKLINK_MODE_AUTO) {
    bool autoDetection = false;

    /* Auto mode requires hardware input-format detection; probe for it */
    if (self->input->attributes) {
      ret =
          self->input->
          attributes->GetFlag (BMDDeckLinkSupportsInputFormatDetection,
          &autoDetection);
      if (ret != S_OK) {
        GST_ERROR_OBJECT (self, "Failed to get attribute (autodetection)");
        return FALSE;
      }
      if (autoDetection)
        flags |= bmdVideoInputEnableFormatDetection;
    }
    if (!autoDetection) {
      GST_ERROR_OBJECT (self, "Failed to activate auto-detection");
      return FALSE;
    }
  }

  mode = gst_decklink_get_mode (self->mode);
  g_assert (mode != NULL);

  ret = self->input->input->EnableVideoInput (mode->mode,
      bmdFormat8BitYUV, flags);
  if (ret != S_OK) {
    GST_WARNING_OBJECT (self, "Failed to enable video input");
    return FALSE;
  }

  /* Publish the new mode and kick off streaming under the input lock */
  g_mutex_lock (&self->input->lock);
  self->input->mode = mode;
  self->input->video_enabled = TRUE;
  if (self->input->start_streams)
    self->input->start_streams (self->input->videosrc);
  g_mutex_unlock (&self->input->lock);

  return TRUE;
}
示例4: gst_net_client_clock_start
/* Start the network client clock.
 *
 * Opens a UDP socket, resolves the configured server address and spawns the
 * background thread that exchanges time packets with the server. On any
 * failure, every resource acquired up to that point is released and FALSE
 * is returned; on success TRUE is returned and priv->socket, priv->servaddr,
 * priv->cancel and priv->thread are owned by the clock.
 */
static gboolean
gst_net_client_clock_start (GstNetClientClock * self)
{
  GSocketAddress *servaddr;
  GSocketAddress *myaddr;
  GInetAddress *inetaddr;
  GSocket *socket;
  GError *error = NULL;

  g_return_val_if_fail (self->priv->address != NULL, FALSE);
  g_return_val_if_fail (self->priv->servaddr == NULL, FALSE);

  socket = g_socket_new (G_SOCKET_FAMILY_IPV4, G_SOCKET_TYPE_DATAGRAM,
      G_SOCKET_PROTOCOL_UDP, &error);
  if (socket == NULL)
    goto no_socket;

  /* check address we're bound to, mostly for debugging purposes */
  myaddr = g_socket_get_local_address (socket, &error);
  if (myaddr == NULL)
    goto getsockname_error;

  /* FIX: the port is a guint16 (promoted to int in varargs); use %d
   * instead of the mismatched %hd conversion. */
  GST_DEBUG_OBJECT (self, "socket opened on UDP port %d",
      g_inet_socket_address_get_port (G_INET_SOCKET_ADDRESS (myaddr)));
  g_object_unref (myaddr);

  /* create target address */
  inetaddr = g_inet_address_new_from_string (self->priv->address);
  if (inetaddr == NULL)
    goto bad_address;

  servaddr = g_inet_socket_address_new (inetaddr, self->priv->port);
  g_object_unref (inetaddr);

  g_assert (servaddr != NULL);

  GST_DEBUG_OBJECT (self, "will communicate with %s:%d", self->priv->address,
      self->priv->port);

  self->priv->cancel = g_cancellable_new ();
  self->priv->socket = socket;
  self->priv->servaddr = G_SOCKET_ADDRESS (servaddr);

  self->priv->thread = g_thread_try_new ("GstNetClientClock",
      gst_net_client_clock_thread, self, &error);

  if (error != NULL)
    goto no_thread;

  return TRUE;

  /* ERRORS */
no_socket:
  {
    GST_ERROR_OBJECT (self, "socket_new() failed: %s", error->message);
    g_error_free (error);
    return FALSE;
  }
getsockname_error:
  {
    GST_ERROR_OBJECT (self, "get_local_address() failed: %s", error->message);
    g_error_free (error);
    g_object_unref (socket);
    return FALSE;
  }
bad_address:
  {
    GST_ERROR_OBJECT (self, "inet_address_new_from_string('%s') failed",
        self->priv->address);
    g_object_unref (socket);
    return FALSE;
  }
no_thread:
  {
    GST_ERROR_OBJECT (self, "could not create thread: %s", error->message);
    /* FIX: the cancellable created above was leaked on this error path */
    g_object_unref (self->priv->cancel);
    self->priv->cancel = NULL;
    g_object_unref (self->priv->servaddr);
    self->priv->servaddr = NULL;
    g_object_unref (self->priv->socket);
    self->priv->socket = NULL;
    g_error_free (error);
    return FALSE;
  }
}
示例5: gst_vp8_enc_finish
/* Drain the VP8 encoder at end-of-stream.
 *
 * Flushes libvpx by encoding a NULL image, then iterates over every pending
 * output packet:
 *  - in first-pass (two-pass) mode, STATS packets are appended to the
 *    in-memory cache and the oldest pending frame is finished with an empty
 *    preroll buffer;
 *  - compressed frame packets are copied into GstBuffers; invisible
 *    (alt-ref) frames are queued on the per-frame coder hook, visible ones
 *    are attached to the oldest pending frame and pushed;
 *  - any other packet kind is logged and skipped.
 * Finally, in first-pass mode the accumulated stats are written to the
 * multipass cache file.
 *
 * Returns FALSE only if the flush call into libvpx itself fails.
 */
static gboolean
gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
  GstVP8Enc *encoder;
  GstVideoFrame *frame;
  int flags = 0;
  vpx_codec_err_t status;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt;

  GST_DEBUG_OBJECT (base_video_encoder, "finish");

  encoder = GST_VP8_ENC (base_video_encoder);

  /* Encoding a NULL image tells libvpx to flush all queued packets */
  status =
      vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames, 1, flags,
      0);
  if (status != 0) {
    GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,
        gst_vpx_error_name (status));
    return FALSE;
  }

  /* Walk every packet produced by the flush */
  pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  while (pkt != NULL) {
    GstBuffer *buffer;
    GstVP8EncCoderHook *hook;
    gboolean invisible, keyframe;

    GST_DEBUG_OBJECT (encoder, "packet %u type %d", (guint) pkt->data.frame.sz,
        pkt->kind);

    if (pkt->kind == VPX_CODEC_STATS_PKT
        && encoder->multipass_mode == VPX_RC_FIRST_PASS) {
      /* First pass: accumulate stats; the frame only carries an empty
       * preroll buffer downstream. */
      GST_LOG_OBJECT (encoder, "handling STATS packet");

      g_byte_array_append (encoder->first_pass_cache_content,
          pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);

      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
      if (frame != NULL) {
        buffer = gst_buffer_new ();
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
        frame->src_buffer = buffer;
        gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      }

      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    } else if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) {
      GST_LOG_OBJECT (encoder, "non frame pkt: %d", pkt->kind);
      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    }

    invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;
    keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;

    /* Every frame packet must have a matching pending frame */
    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
    g_assert (frame != NULL);
    hook = frame->coder_hook;

    /* Copy compressed data out of libvpx's internal packet buffer */
    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);
    frame->is_sync_point = frame->is_sync_point || keyframe;

    /* The raw input image is no longer needed once it has been encoded */
    if (hook->image)
      g_slice_free (vpx_image_t, hook->image);
    hook->image = NULL;

    if (invisible) {
      /* Invisible (alt-ref) frames are queued and emitted together with
       * the next visible frame. */
      hook->invisible = g_list_append (hook->invisible, buffer);
    } else {
      frame->src_buffer = buffer;
      gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      frame = NULL;
    }

    pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  }

  /* First pass finished: persist collected stats for the second pass */
  if (encoder->multipass_mode == VPX_RC_FIRST_PASS
      && encoder->multipass_cache_file) {
    GError *err = NULL;
    if (!g_file_set_contents (encoder->multipass_cache_file,
        (const gchar *) encoder->first_pass_cache_content->data,
        encoder->first_pass_cache_content->len, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, WRITE, (NULL),
          ("Failed to write multipass cache file: %s", err->message));
      g_error_free (err);
    }
  }

  return TRUE;
}
示例6: gst_rsvg_decode_image
static GstFlowReturn
gst_rsvg_decode_image (GstRsvgDec * rsvg, GstBuffer * buffer,
GstVideoCodecFrame * frame)
{
GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);
GstFlowReturn ret = GST_FLOW_OK;
cairo_t *cr;
cairo_surface_t *surface;
RsvgHandle *handle;
GError *error = NULL;
RsvgDimensionData dimension;
gdouble scalex, scaley;
GstMapInfo minfo;
GstVideoFrame vframe;
GstVideoCodecState *output_state;
GST_LOG_OBJECT (rsvg, "parsing svg");
if (!gst_buffer_map (buffer, &minfo, GST_MAP_READ)) {
GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");
return GST_FLOW_ERROR;
}
handle = rsvg_handle_new_from_data (minfo.data, minfo.size, &error);
if (!handle) {
GST_ERROR_OBJECT (rsvg, "Failed to parse SVG image: %s", error->message);
g_error_free (error);
return GST_FLOW_ERROR;
}
rsvg_handle_get_dimensions (handle, &dimension);
output_state = gst_video_decoder_get_output_state (decoder);
if ((output_state == NULL)
|| GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width
|| GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {
/* Create the output state */
gst_video_decoder_set_output_state (decoder, GST_RSVG_VIDEO_FORMAT,
dimension.width, dimension.height, rsvg->input_state);
if (output_state)
gst_video_codec_state_unref (output_state);
output_state = gst_video_decoder_get_output_state (decoder);
}
ret = gst_video_decoder_allocate_output_frame (decoder, frame);
if (ret != GST_FLOW_OK) {
g_object_unref (handle);
GST_ERROR_OBJECT (rsvg, "Buffer allocation failed %s",
gst_flow_get_name (ret));
return ret;
}
GST_LOG_OBJECT (rsvg, "render image at %d x %d",
GST_VIDEO_INFO_HEIGHT (&output_state->info),
GST_VIDEO_INFO_WIDTH (&output_state->info));
if (!gst_video_frame_map (&vframe,
&gst_video_decoder_get_output_state (decoder)->info,
frame->output_buffer, GST_MAP_READWRITE)) {
GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");
return GST_FLOW_ERROR;
}
surface =
cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (&vframe,
0), CAIRO_FORMAT_ARGB32, GST_VIDEO_FRAME_WIDTH (&vframe),
GST_VIDEO_FRAME_HEIGHT (&vframe), GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,
0));
cr = cairo_create (surface);
cairo_set_operator (cr, CAIRO_OPERATOR_CLEAR);
cairo_set_source_rgba (cr, 1.0, 1.0, 1.0, 0.0);
cairo_paint (cr);
cairo_set_operator (cr, CAIRO_OPERATOR_OVER);
cairo_set_source_rgba (cr, 0.0, 0.0, 0.0, 1.0);
scalex = scaley = 1.0;
if (GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width) {
scalex =
((gdouble) GST_VIDEO_INFO_WIDTH (&output_state->info)) /
((gdouble) dimension.width);
}
if (GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {
scaley =
((gdouble) GST_VIDEO_INFO_HEIGHT (&output_state->info)) /
((gdouble) dimension.height);
}
cairo_scale (cr, scalex, scaley);
rsvg_handle_render_cairo (handle, cr);
g_object_unref (handle);
cairo_destroy (cr);
cairo_surface_destroy (surface);
/* Now unpremultiply Cairo's ARGB to match GStreamer's */
gst_rsvg_decode_unpremultiply (GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0),
GST_VIDEO_FRAME_WIDTH (&vframe), GST_VIDEO_FRAME_HEIGHT (&vframe));
gst_video_codec_state_unref (output_state);
//.........这里部分代码省略.........
示例7: gst_egl_adaptation_init_egl_display
/* Initialize the EGL display for the adaptation context.
 *
 * Posts a need-context message so the application can supply an EGLDisplay;
 * if none was provided (ctx->set_display unset), the default EGL display is
 * opened and advertised via a have-context message. EGL is then initialized
 * and the version checked against GST_EGLGLESSINK_EGL_MIN_VERSION.
 *
 * Returns TRUE on success, FALSE on any failure.
 */
gboolean
gst_egl_adaptation_init_egl_display (GstEglAdaptationContext * ctx)
{
  GstMessage *msg;
  EGLDisplay display;

  GST_DEBUG_OBJECT (ctx->element, "Enter EGL initial configuration");

  if (!platform_wrapper_init ()) {
    GST_ERROR_OBJECT (ctx->element, "Couldn't init EGL platform wrapper");
    goto HANDLE_ERROR;
  }

  /* Ask the application/pipeline for an externally provided EGLDisplay */
  msg =
      gst_message_new_need_context (GST_OBJECT_CAST (ctx->element),
      GST_EGL_DISPLAY_CONTEXT_TYPE);
  gst_element_post_message (GST_ELEMENT_CAST (ctx->element), msg);

  GST_OBJECT_LOCK (ctx->element);
  if (!ctx->set_display) {
    GstContext *context;

    GST_OBJECT_UNLOCK (ctx->element);

    /* No external display: open the default one ourselves */
    display = eglGetDisplay (EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY) {
      GST_ERROR_OBJECT (ctx->element, "Could not get EGL display connection");
      goto HANDLE_ERROR;        /* No EGL error is set by eglGetDisplay() */
    }
    ctx->display = gst_egl_display_new (display, (GDestroyNotify) eglTerminate);

    context = gst_context_new_egl_display (ctx->display, FALSE);
    msg = gst_message_new_have_context (GST_OBJECT (ctx->element), context);
    gst_element_post_message (GST_ELEMENT_CAST (ctx->element), msg);
  } else {
    /* FIX: this branch previously fell through (and returned) with the
     * object lock still held, leaving the element permanently locked.
     * Release the lock taken above. */
    GST_OBJECT_UNLOCK (ctx->element);
  }

  if (!eglInitialize (gst_egl_display_get (ctx->display),
          &ctx->eglglesctx->egl_major, &ctx->eglglesctx->egl_minor)) {
    got_egl_error ("eglInitialize");
    GST_ERROR_OBJECT (ctx->element, "Could not init EGL display connection");
    goto HANDLE_EGL_ERROR;
  }

  /* Check against required EGL version
   * XXX: Need to review the version requirement in terms of the needed API
   */
  if (ctx->eglglesctx->egl_major < GST_EGLGLESSINK_EGL_MIN_VERSION) {
    GST_ERROR_OBJECT (ctx->element, "EGL v%d needed, but you only have v%d.%d",
        GST_EGLGLESSINK_EGL_MIN_VERSION, ctx->eglglesctx->egl_major,
        ctx->eglglesctx->egl_minor);
    goto HANDLE_ERROR;
  }

  GST_INFO_OBJECT (ctx->element, "System reports supported EGL version v%d.%d",
      ctx->eglglesctx->egl_major, ctx->eglglesctx->egl_minor);

  eglBindAPI (EGL_OPENGL_ES_API);

  return TRUE;

  /* Errors */
HANDLE_EGL_ERROR:
  GST_ERROR_OBJECT (ctx->element, "EGL call returned error %x", eglGetError ());
HANDLE_ERROR:
  GST_ERROR_OBJECT (ctx->element, "Couldn't setup window/surface from handle");
  return FALSE;
}
示例8: gst_structure_empty_new
//.........这里部分代码省略.........
if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_8) {
g_value_set_int(value, 8);
gst_value_list_prepend_value(list, value);
}
if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_4) {
g_value_set_int(value, 4);
gst_value_list_prepend_value(list, value);
}
g_value_unset(value);
if (list) {
gst_structure_set_value(structure, "blocks", list);
g_free(list);
list = NULL;
}
/* allocation */
g_value_init(value, G_TYPE_STRING);
list = g_value_init(g_new0(GValue,1), GST_TYPE_LIST);
if (sbc->allocation_method & BT_A2DP_ALLOCATION_LOUDNESS) {
g_value_set_static_string(value, "loudness");
gst_value_list_prepend_value(list, value);
}
if (sbc->allocation_method & BT_A2DP_ALLOCATION_SNR) {
g_value_set_static_string(value, "snr");
gst_value_list_prepend_value(list, value);
}
g_value_unset(value);
if (list) {
gst_structure_set_value(structure, "allocation", list);
g_free(list);
list = NULL;
}
/* rate */
g_value_init(value, G_TYPE_INT);
list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
if (sbc->frequency & BT_SBC_SAMPLING_FREQ_48000) {
g_value_set_int(value, 48000);
gst_value_list_prepend_value(list, value);
}
if (sbc->frequency & BT_SBC_SAMPLING_FREQ_44100) {
g_value_set_int(value, 44100);
gst_value_list_prepend_value(list, value);
}
if (sbc->frequency & BT_SBC_SAMPLING_FREQ_32000) {
g_value_set_int(value, 32000);
gst_value_list_prepend_value(list, value);
}
if (sbc->frequency & BT_SBC_SAMPLING_FREQ_16000) {
g_value_set_int(value, 16000);
gst_value_list_prepend_value(list, value);
}
g_value_unset(value);
if (list) {
gst_structure_set_value(structure, "rate", list);
g_free(list);
list = NULL;
}
/* bitpool */
value = g_value_init(value, GST_TYPE_INT_RANGE);
gst_value_set_int_range(value,
MIN(sbc->min_bitpool, TEMPLATE_MAX_BITPOOL),
MIN(sbc->max_bitpool, TEMPLATE_MAX_BITPOOL));
gst_structure_set_value(structure, "bitpool", value);
g_value_unset(value);
/* channels */
mono = FALSE;
stereo = FALSE;
if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)
mono = TRUE;
if ((sbc->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||
(sbc->channel_mode &
BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||
(sbc->channel_mode &
BT_A2DP_CHANNEL_MODE_JOINT_STEREO))
stereo = TRUE;
if (mono && stereo) {
g_value_init(value, GST_TYPE_INT_RANGE);
gst_value_set_int_range(value, 1, 2);
} else {
g_value_init(value, G_TYPE_INT);
if (mono)
g_value_set_int(value, 1);
else if (stereo)
g_value_set_int(value, 2);
else {
GST_ERROR_OBJECT(self,
"Unexpected number of channels");
g_value_set_int(value, 0);
}
}
gst_structure_set_value(structure, "channels", value);
g_free(value);
return structure;
}
示例9: GST_LOG_OBJECT
//.........这里部分代码省略.........
gst_structure_set_value(structure, "mpegversion", list);
g_free(list);
/* layer */
GST_LOG_OBJECT(self, "setting mpeg layer");
list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
if (mpeg->layer & BT_MPEG_LAYER_1) {
g_value_set_int(value, 1);
gst_value_list_prepend_value(list, value);
valid_layer = TRUE;
}
if (mpeg->layer & BT_MPEG_LAYER_2) {
g_value_set_int(value, 2);
gst_value_list_prepend_value(list, value);
valid_layer = TRUE;
}
if (mpeg->layer & BT_MPEG_LAYER_3) {
g_value_set_int(value, 3);
gst_value_list_prepend_value(list, value);
valid_layer = TRUE;
}
if (list) {
gst_structure_set_value(structure, "layer", list);
g_free(list);
list = NULL;
}
if (!valid_layer) {
gst_structure_free(structure);
g_free(value);
return NULL;
}
/* rate */
GST_LOG_OBJECT(self, "setting mpeg rate");
list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_48000) {
g_value_set_int(value, 48000);
gst_value_list_prepend_value(list, value);
}
if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_44100) {
g_value_set_int(value, 44100);
gst_value_list_prepend_value(list, value);
}
if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_32000) {
g_value_set_int(value, 32000);
gst_value_list_prepend_value(list, value);
}
if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_24000) {
g_value_set_int(value, 24000);
gst_value_list_prepend_value(list, value);
}
if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_22050) {
g_value_set_int(value, 22050);
gst_value_list_prepend_value(list, value);
}
if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_16000) {
g_value_set_int(value, 16000);
gst_value_list_prepend_value(list, value);
}
g_value_unset(value);
if (list) {
gst_structure_set_value(structure, "rate", list);
g_free(list);
list = NULL;
}
/* channels */
GST_LOG_OBJECT(self, "setting mpeg channels");
mono = FALSE;
stereo = FALSE;
if (mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)
mono = TRUE;
if ((mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||
(mpeg->channel_mode &
BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||
(mpeg->channel_mode &
BT_A2DP_CHANNEL_MODE_JOINT_STEREO))
stereo = TRUE;
if (mono && stereo) {
g_value_init(value, GST_TYPE_INT_RANGE);
gst_value_set_int_range(value, 1, 2);
} else {
g_value_init(value, G_TYPE_INT);
if (mono)
g_value_set_int(value, 1);
else if (stereo)
g_value_set_int(value, 2);
else {
GST_ERROR_OBJECT(self,
"Unexpected number of channels");
g_value_set_int(value, 0);
}
}
gst_structure_set_value(structure, "channels", value);
g_free(value);
return structure;
}
示例10: gst_avdtp_sink_init_sbc_pkt_conf
/* Translate negotiated SBC caps into a bt_set_configuration codec payload.
 *
 * Reads rate, mode, allocation, subbands, blocks and bitpool from the first
 * caps structure and maps them onto the device's SBC capability record,
 * which is then copied into @pkt. Returns FALSE on any unexpected or
 * invalid caps field, leaving @pkt untouched.
 */
static gboolean gst_avdtp_sink_init_sbc_pkt_conf(GstAvdtpSink *sink,
					GstCaps *caps,
					sbc_capabilities_t *pkt)
{
	sbc_capabilities_t *cfg;
	const GValue *value = NULL;
	const char *pref, *name;
	gint rate, subbands, blocks;
	GstStructure *structure = gst_caps_get_structure(caps, 0);

	cfg = (void *) gst_avdtp_find_caps(sink, BT_A2DP_SBC_SINK);
	/* FIX: gst_avdtp_find_caps() can return NULL when the device exposes
	 * no SBC sink endpoint; the fields below were dereferenced without a
	 * check, crashing on such devices. */
	if (cfg == NULL) {
		GST_ERROR_OBJECT(sink, "Couldn't find SBC capabilities");
		return FALSE;
	}

	name = gst_structure_get_name(structure);

	if (!(IS_SBC(name))) {
		GST_ERROR_OBJECT(sink, "Unexpected format %s, "
				"was expecting sbc", name);
		return FALSE;
	}

	value = gst_structure_get_value(structure, "rate");
	rate = g_value_get_int(value);
	if (rate == 44100)
		cfg->frequency = BT_SBC_SAMPLING_FREQ_44100;
	else if (rate == 48000)
		cfg->frequency = BT_SBC_SAMPLING_FREQ_48000;
	else if (rate == 32000)
		cfg->frequency = BT_SBC_SAMPLING_FREQ_32000;
	else if (rate == 16000)
		cfg->frequency = BT_SBC_SAMPLING_FREQ_16000;
	else {
		GST_ERROR_OBJECT(sink, "Invalid rate while setting caps");
		return FALSE;
	}

	value = gst_structure_get_value(structure, "mode");
	pref = g_value_get_string(value);
	if (strcmp(pref, "mono") == 0)
		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_MONO;
	else if (strcmp(pref, "dual") == 0)
		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL;
	else if (strcmp(pref, "stereo") == 0)
		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_STEREO;
	else if (strcmp(pref, "joint") == 0)
		cfg->channel_mode = BT_A2DP_CHANNEL_MODE_JOINT_STEREO;
	else {
		GST_ERROR_OBJECT(sink, "Invalid mode %s", pref);
		return FALSE;
	}

	value = gst_structure_get_value(structure, "allocation");
	pref = g_value_get_string(value);
	if (strcmp(pref, "loudness") == 0)
		cfg->allocation_method = BT_A2DP_ALLOCATION_LOUDNESS;
	else if (strcmp(pref, "snr") == 0)
		cfg->allocation_method = BT_A2DP_ALLOCATION_SNR;
	else {
		GST_ERROR_OBJECT(sink, "Invalid allocation: %s", pref);
		return FALSE;
	}

	value = gst_structure_get_value(structure, "subbands");
	subbands = g_value_get_int(value);
	if (subbands == 8)
		cfg->subbands = BT_A2DP_SUBBANDS_8;
	else if (subbands == 4)
		cfg->subbands = BT_A2DP_SUBBANDS_4;
	else {
		GST_ERROR_OBJECT(sink, "Invalid subbands %d", subbands);
		return FALSE;
	}

	value = gst_structure_get_value(structure, "blocks");
	blocks = g_value_get_int(value);
	if (blocks == 16)
		cfg->block_length = BT_A2DP_BLOCK_LENGTH_16;
	else if (blocks == 12)
		cfg->block_length = BT_A2DP_BLOCK_LENGTH_12;
	else if (blocks == 8)
		cfg->block_length = BT_A2DP_BLOCK_LENGTH_8;
	else if (blocks == 4)
		cfg->block_length = BT_A2DP_BLOCK_LENGTH_4;
	else {
		GST_ERROR_OBJECT(sink, "Invalid blocks %d", blocks);
		return FALSE;
	}

	/* Fixed caps at this point: min and max bitpool are identical */
	value = gst_structure_get_value(structure, "bitpool");
	cfg->max_bitpool = cfg->min_bitpool = g_value_get_int(value);

	memcpy(pkt, cfg, sizeof(*pkt));

	return TRUE;
}
示例11: gst_avdtp_sink_conf_recv_stream_fd
/* Receive and prepare the A2DP stream file descriptor.
 *
 * Obtains the stream fd from the audio service socket, then temporarily
 * switches the stream GIOChannel to non-blocking mode so any stale data
 * left in the pipe can be drained, and switches it back to blocking mode
 * afterwards. Failure to toggle the flags is logged but non-fatal.
 *
 * Returns FALSE if the fd could not be received or no stream channel is
 * available, TRUE otherwise.
 */
static gboolean gst_avdtp_sink_conf_recv_stream_fd(
					GstAvdtpSink *self)
{
	struct bluetooth_data *data = self->data;
	gint ret;
	GIOError err;
	GError *gerr = NULL;
	GIOStatus status;
	GIOFlags flags;
	gsize read;

	ret = gst_avdtp_sink_bluetooth_recvmsg_fd(self);
	if (ret < 0)
		return FALSE;

	if (!self->stream) {
		GST_ERROR_OBJECT(self, "Error while configuring device: "
				"could not acquire audio socket");
		return FALSE;
	}

	/* set stream socket to nonblock */
	GST_LOG_OBJECT(self, "setting stream socket to nonblock");
	flags = g_io_channel_get_flags(self->stream);
	flags |= G_IO_FLAG_NONBLOCK;
	status = g_io_channel_set_flags(self->stream, flags, &gerr);
	if (status != G_IO_STATUS_NORMAL) {
		/* NOTE(review): gerr is neither freed nor reset here before
		 * being reused for the second set_flags call below -- if both
		 * calls fail this leaks/overwrites a set GError; confirm and
		 * fix upstream. */
		if (gerr)
			GST_WARNING_OBJECT(self, "Error while "
				"setting server socket to nonblock: "
				"%s", gerr->message);
		else
			GST_WARNING_OBJECT(self, "Error while "
					"setting server "
					"socket to nonblock");
	}

	/* It is possible there is some outstanding
	data in the pipe - we have to empty it */
	GST_LOG_OBJECT(self, "emptying stream pipe");
	/* NOTE: g_io_channel_read() is deprecated GLib API; non-blocking
	 * reads stop at the first error or empty read. */
	while (1) {
		err = g_io_channel_read(self->stream, data->buffer,
				(gsize) data->link_mtu,
					&read);
		if (err != G_IO_ERROR_NONE || read <= 0)
			break;
	}

	/* set stream socket to block */
	GST_LOG_OBJECT(self, "setting stream socket to block");
	flags = g_io_channel_get_flags(self->stream);
	flags &= ~G_IO_FLAG_NONBLOCK;
	status = g_io_channel_set_flags(self->stream, flags, &gerr);
	if (status != G_IO_STATUS_NORMAL) {
		if (gerr)
			GST_WARNING_OBJECT(self, "Error while "
				"setting server socket to block:"
				"%s", gerr->message);
		else
			GST_WARNING_OBJECT(self, "Error while "
					"setting server "
					"socket to block");
	}

	/* assumes data->buffer is a fixed-size array member (sizeof yields
	 * the array size, not a pointer size) -- TODO confirm against the
	 * bluetooth_data struct definition */
	memset(data->buffer, 0, sizeof(data->buffer));

	return TRUE;
}
示例12: gst_avdtp_sink_configure
/* Negotiate and apply the codec configuration on the Bluetooth audio
 * service.
 *
 * Sends a BT_OPEN request for the endpoint matching the caps (SBC or
 * MPEG-1/2 audio), then builds a BT_SET_CONFIGURATION request from the
 * caps, sends it and stores the resulting link MTU in self->data.
 *
 * Returns FALSE on any caps-parsing or IPC error, TRUE on success.
 */
static gboolean gst_avdtp_sink_configure(GstAvdtpSink *self,
			GstCaps *caps)
{
	gchar buf[BT_SUGGESTED_BUFFER_SIZE];
	struct bt_open_req *open_req = (void *) buf;
	struct bt_open_rsp *open_rsp = (void *) buf;
	struct bt_set_configuration_req *req = (void *) buf;
	struct bt_set_configuration_rsp *rsp = (void *) buf;
	gboolean ret;
	GIOError io_error;
	gchar *temp;
	GstStructure *structure;
	codec_capabilities_t *codec = NULL;

	temp = gst_caps_to_string(caps);
	GST_DEBUG_OBJECT(self, "configuring device with caps: %s", temp);
	g_free(temp);

	structure = gst_caps_get_structure(caps, 0);

	if (gst_structure_has_name(structure, "audio/x-sbc"))
		codec = (void *) gst_avdtp_find_caps(self, BT_A2DP_SBC_SINK);
	else if (gst_structure_has_name(structure, "audio/mpeg"))
		codec = (void *) gst_avdtp_find_caps(self, BT_A2DP_MPEG12_SINK);

	if (codec == NULL) {
		GST_ERROR_OBJECT(self, "Couldn't parse caps "
				"to packet configuration");
		return FALSE;
	}

	memset(req, 0, BT_SUGGESTED_BUFFER_SIZE);
	open_req->h.type = BT_REQUEST;
	open_req->h.name = BT_OPEN;
	open_req->h.length = sizeof(*open_req);

	/* NOTE(review): assumes destination holds at least 18 bytes
	 * ("XX:XX:XX:XX:XX:XX" + NUL); strncpy leaves it unterminated if
	 * self->device is 18+ chars -- confirm against struct bt_open_req */
	strncpy(open_req->destination, self->device, 18);
	open_req->seid = codec->seid;
	open_req->lock = BT_WRITE_LOCK;

	io_error = gst_avdtp_sink_audioservice_send(self, &open_req->h);
	if (io_error != G_IO_ERROR_NONE) {
		/* FIX: typo "ocurred" -> "occurred" in error message */
		GST_ERROR_OBJECT(self, "Error occurred while sending "
					"open packet");
		return FALSE;
	}

	open_rsp->h.length = sizeof(*open_rsp);
	io_error = gst_avdtp_sink_audioservice_expect(self,
					&open_rsp->h, BT_OPEN);
	if (io_error != G_IO_ERROR_NONE) {
		GST_ERROR_OBJECT(self, "Error while receiving device "
					"confirmation");
		return FALSE;
	}

	memset(req, 0, sizeof(buf));
	req->h.type = BT_REQUEST;
	req->h.name = BT_SET_CONFIGURATION;
	req->h.length = sizeof(*req);

	/* Fill the codec-specific part of the request from the caps */
	if (codec->type == BT_A2DP_SBC_SINK)
		ret = gst_avdtp_sink_init_sbc_pkt_conf(self, caps,
				(void *) &req->codec);
	else
		ret = gst_avdtp_sink_init_mp3_pkt_conf(self, caps,
				(void *) &req->codec);

	if (!ret) {
		GST_ERROR_OBJECT(self, "Couldn't parse caps "
				"to packet configuration");
		return FALSE;
	}

	req->h.length += req->codec.length - sizeof(req->codec);
	io_error = gst_avdtp_sink_audioservice_send(self, &req->h);
	if (io_error != G_IO_ERROR_NONE) {
		/* FIX: typos "ocurred"/"configurarion" in error message */
		GST_ERROR_OBJECT(self, "Error occurred while sending "
					"configuration packet");
		return FALSE;
	}

	rsp->h.length = sizeof(*rsp);
	io_error = gst_avdtp_sink_audioservice_expect(self,
					&rsp->h, BT_SET_CONFIGURATION);
	if (io_error != G_IO_ERROR_NONE) {
		GST_ERROR_OBJECT(self, "Error while receiving device "
					"confirmation");
		return FALSE;
	}

	self->data->link_mtu = rsp->link_mtu;

	return TRUE;
}
示例13: gst_v4l2_video_dec_handle_frame
//.........这里部分代码省略.........
gst_video_codec_state_unref (output_state);
if (!gst_video_decoder_negotiate (decoder)) {
if (GST_PAD_IS_FLUSHING (decoder->srcpad))
goto flushing;
else
goto not_negotiated;
}
/* Ensure our internal pool is activated */
if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
TRUE))
goto activate_failed;
}
if (g_atomic_int_get (&self->processing) == FALSE) {
/* It's possible that the processing thread stopped due to an error */
if (self->output_flow != GST_FLOW_OK &&
self->output_flow != GST_FLOW_FLUSHING) {
GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
ret = self->output_flow;
goto drop;
}
GST_DEBUG_OBJECT (self, "Starting decoding thread");
/* Start the processing task, when it quits, the task will disable input
* processing to unlock input if draining, or prevent potential block */
g_atomic_int_set (&self->processing, TRUE);
if (!gst_pad_start_task (decoder->srcpad,
(GstTaskFunction) gst_v4l2_video_dec_loop, self,
(GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
goto start_task_failed;
}
if (frame->input_buffer) {
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
ret =
gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
pool), &frame->input_buffer);
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
if (ret == GST_FLOW_FLUSHING) {
if (g_atomic_int_get (&self->processing) == FALSE)
ret = self->output_flow;
goto drop;
} else if (ret != GST_FLOW_OK) {
goto process_failed;
}
/* No need to keep input arround */
gst_buffer_replace (&frame->input_buffer, NULL);
}
gst_video_codec_frame_unref (frame);
return ret;
/* ERRORS */
not_negotiated:
{
GST_ERROR_OBJECT (self, "not negotiated");
ret = GST_FLOW_NOT_NEGOTIATED;
goto drop;
}
activate_failed:
{
GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
(_("Failed to allocate required memory.")),
("Buffer pool activation failed"));
ret = GST_FLOW_ERROR;
goto drop;
}
flushing:
{
ret = GST_FLOW_FLUSHING;
goto drop;
}
start_task_failed:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
(_("Failed to start decoding thread.")), (NULL));
g_atomic_int_set (&self->processing, FALSE);
ret = GST_FLOW_ERROR;
goto drop;
}
process_failed:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
(_("Failed to process frame.")),
("Maybe be due to not enough memory or failing driver"));
ret = GST_FLOW_ERROR;
goto drop;
}
drop:
{
gst_video_decoder_drop_frame (decoder, frame);
return ret;
}
}
示例14: pad_chain
static GstFlowReturn
pad_chain (GstPad *pad,
GstBuffer *buf)
{
GOmxCore *gomx;
GOmxPort *in_port;
GstOmxBaseFilter *self;
GstFlowReturn ret = GST_FLOW_OK;
self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad));
gomx = self->gomx;
GST_LOG_OBJECT (self, "begin");
GST_LOG_OBJECT (self, "gst_buffer: size=%u", GST_BUFFER_SIZE (buf));
GST_LOG_OBJECT (self, "state: %d", gomx->omx_state);
if (G_UNLIKELY (gomx->omx_state == OMX_StateLoaded))
{
g_mutex_lock (self->ready_lock);
GST_INFO_OBJECT (self, "omx: prepare");
/** @todo this should probably go after doing preparations. */
if (self->omx_setup)
{
self->omx_setup (self);
}
setup_ports (self);
g_omx_core_prepare (self->gomx);
if (gomx->omx_state == OMX_StateIdle)
{
self->ready = TRUE;
gst_pad_start_task (self->srcpad, output_loop, self->srcpad);
}
g_mutex_unlock (self->ready_lock);
if (gomx->omx_state != OMX_StateIdle)
goto out_flushing;
}
in_port = self->in_port;
if (G_LIKELY (in_port->enabled))
{
guint buffer_offset = 0;
if (G_UNLIKELY (gomx->omx_state == OMX_StateIdle))
{
GST_INFO_OBJECT (self, "omx: play");
g_omx_core_start (gomx);
if (gomx->omx_state != OMX_StateExecuting)
goto out_flushing;
/* send buffer with codec data flag */
/** @todo move to util */
if (self->codec_data)
{
OMX_BUFFERHEADERTYPE *omx_buffer;
GST_LOG_OBJECT (self, "request buffer");
omx_buffer = g_omx_port_request_buffer (in_port);
if (G_LIKELY (omx_buffer))
{
omx_buffer->nFlags |= 0x00000080; /* codec data flag */
omx_buffer->nFilledLen = GST_BUFFER_SIZE (self->codec_data);
memcpy (omx_buffer->pBuffer + omx_buffer->nOffset, GST_BUFFER_DATA (self->codec_data), omx_buffer->nFilledLen);
GST_LOG_OBJECT (self, "release_buffer");
g_omx_port_release_buffer (in_port, omx_buffer);
}
}
}
if (G_UNLIKELY (gomx->omx_state != OMX_StateExecuting))
{
GST_ERROR_OBJECT (self, "Whoa! very wrong");
}
while (G_LIKELY (buffer_offset < GST_BUFFER_SIZE (buf)))
{
OMX_BUFFERHEADERTYPE *omx_buffer;
if (self->last_pad_push_return != GST_FLOW_OK ||
!(gomx->omx_state == OMX_StateExecuting ||
gomx->omx_state == OMX_StatePause))
{
goto out_flushing;
}
GST_LOG_OBJECT (self, "request buffer");
omx_buffer = g_omx_port_request_buffer (in_port);
//.........这里部分代码省略.........
示例15: gst_inter_audio_src_create
/* GstBaseSrc::create vfunc for interaudiosrc.
 *
 * Pulls up to one audio period worth of samples out of the shared
 * surface's adapter (filled by the matching interaudiosink) and hands
 * them downstream, padding with silence when the adapter runs dry.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED when the
 * surface's caps cannot be applied to the source pad.  Ownership of
 * the created buffer is transferred to the caller via @buf.
 */
static GstFlowReturn
gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
  GstCaps *caps;
  GstBuffer *buffer;
  guint n, bpf;
  guint64 period_time;
  guint64 period_samples;
  GST_DEBUG_OBJECT (interaudiosrc, "create");
  buffer = NULL;
  caps = NULL;
  /* The surface is shared with the sink element; all accesses to its
   * adapter and audio_info must happen under its mutex. */
  g_mutex_lock (&interaudiosrc->surface->mutex);
  if (interaudiosrc->surface->audio_info.finfo) {
    if (!gst_audio_info_is_equal (&interaudiosrc->surface->audio_info,
            &interaudiosrc->info)) {
      /* Caps changed upstream: remember the new caps to set after we
       * drop the lock, and fold the samples produced so far into the
       * timestamp offset so timestamps stay continuous across the
       * format change. */
      caps = gst_audio_info_to_caps (&interaudiosrc->surface->audio_info);
      interaudiosrc->timestamp_offset +=
          gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,
          interaudiosrc->info.rate);
      interaudiosrc->n_samples = 0;
    }
  }
  bpf = interaudiosrc->surface->audio_info.bpf;
  period_time = interaudiosrc->surface->audio_period_time;
  period_samples =
      gst_util_uint64_scale (period_time, interaudiosrc->info.rate, GST_SECOND);
  /* Number of whole frames available in the adapter; bpf == 0 means the
   * sink has not negotiated yet, so treat the adapter as empty. */
  if (bpf > 0)
    n = gst_adapter_available (interaudiosrc->surface->audio_adapter) / bpf;
  else
    n = 0;
  /* Emit at most one period per create() call. */
  if (n > period_samples)
    n = period_samples;
  if (n > 0) {
    buffer = gst_adapter_take_buffer (interaudiosrc->surface->audio_adapter,
        n * bpf);
  } else {
    /* Nothing buffered: produce an empty GAP buffer that will be padded
     * with a full period of silence below. */
    buffer = gst_buffer_new ();
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_GAP);
  }
  g_mutex_unlock (&interaudiosrc->surface->mutex);
  if (caps) {
    gboolean ret = gst_base_src_set_caps (src, caps);
    if (!ret) {
      /* Log while we still hold our reference: unreffing first and then
       * printing caps via GST_PTR_FORMAT would dereference freed memory. */
      GST_ERROR_OBJECT (src, "Failed to set caps %" GST_PTR_FORMAT, caps);
      gst_caps_unref (caps);
      if (buffer)
        gst_buffer_unref (buffer);
      return GST_FLOW_NOT_NEGOTIATED;
    }
    gst_caps_unref (caps);
  }
  buffer = gst_buffer_make_writable (buffer);
  /* From here on use our own (possibly just renegotiated) info. */
  bpf = interaudiosrc->info.bpf;
  if (n < period_samples) {
    GstMapInfo map;
    GstMemory *mem;
    GST_DEBUG_OBJECT (interaudiosrc,
        "creating %" G_GUINT64_FORMAT " samples of silence",
        period_samples - n);
    mem = gst_allocator_alloc (NULL, (period_samples - n) * bpf, NULL);
    if (gst_memory_map (mem, &map, GST_MAP_WRITE)) {
      gst_audio_format_fill_silence (interaudiosrc->info.finfo, map.data,
          map.size);
      gst_memory_unmap (mem, &map);
    }
    /* Silence is prepended so real samples land at the end of the period,
     * matching the latency behaviour of the original implementation. */
    gst_buffer_prepend_memory (buffer, mem);
  }
  /* Every output buffer covers exactly one period. */
  n = period_samples;
  GST_BUFFER_OFFSET (buffer) = interaudiosrc->n_samples;
  GST_BUFFER_OFFSET_END (buffer) = interaudiosrc->n_samples + n;
  GST_BUFFER_TIMESTAMP (buffer) = interaudiosrc->timestamp_offset +
      gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,
      interaudiosrc->info.rate);
  GST_DEBUG_OBJECT (interaudiosrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  /* Duration is computed as (end timestamp - start timestamp) to avoid
   * accumulating rounding error over successive buffers. */
  GST_BUFFER_DURATION (buffer) = interaudiosrc->timestamp_offset +
      gst_util_uint64_scale (interaudiosrc->n_samples + n, GST_SECOND,
      interaudiosrc->info.rate) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (interaudiosrc->n_samples == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  interaudiosrc->n_samples += n;
  *buf = buffer;
  return GST_FLOW_OK;
}