本文整理汇总了C++中gst_sample_get_buffer函数的典型用法代码示例。如果您正苦于以下问题:C++ gst_sample_get_buffer函数的具体用法?C++ gst_sample_get_buffer怎么用?C++ gst_sample_get_buffer使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了gst_sample_get_buffer函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: TRACE_MEDIA_MESSAGE
// Pushes one media sample into the appsrc that feeds the given track.
// The sample is looked up by track id under the WebKitMediaSrc object lock;
// samples without a GstBuffer are silently dropped.
void PlaybackPipeline::enqueueSample(PassRefPtr<MediaSample> prsample)
{
    // Hold a strong reference for the duration of the call.
    RefPtr<MediaSample> mediaSample = prsample;
    AtomicString trackId = mediaSample->trackID();

    TRACE_MEDIA_MESSAGE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT, trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(), mediaSample->presentationSize().width(), mediaSample->presentationSize().height(), GST_TIME_ARGS(floatToGstClockTime(mediaSample->presentationTime().toDouble())));

    ASSERT(WTF::isMainThread());

    // Resolve the per-track appsrc while holding the source lock.
    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);
    if (!stream) {
        WARN_MEDIA_MESSAGE("No stream!");
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }
    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    GStreamerMediaSample* gstMediaSample = static_cast<GStreamerMediaSample*>(mediaSample.get());
    if (!gstMediaSample->sample() || !gst_sample_get_buffer(gstMediaSample->sample()))
        return;

    GstSample* gstSample = gst_sample_ref(gstMediaSample->sample());
    GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstSample), GST_BUFFER_FLAG_DECODE_ONLY);
    push_sample(GST_APP_SRC(appsrc), gstSample);
    // gst_app_src_push_sample() uses transfer-none for gstsample
    gst_sample_unref(gstSample);
}
示例2: appsink_new_sample_cb
/* The appsink has received a sample.
 *
 * Pulls the encoded sample, maps its buffer read-only and hands the raw
 * H.264 bytes to the packetizer. Always returns GST_FLOW_OK so the pipeline
 * keeps running even when a sample cannot be processed.
 */
static GstFlowReturn appsink_new_sample_cb(GstAppSink *sink,
    gpointer user_data)
{
    struct videnc_state *st = user_data;
    GstSample *sample;
    GstBuffer *buffer;
    GstMapInfo info;

    /* Retrieve the sample */
    sample = gst_app_sink_pull_sample(sink);
    if (!sample)
        return GST_FLOW_OK;

    /* gst_sample_get_buffer() may return NULL, and gst_buffer_map() can
     * fail; in either case info.data would be invalid, so guard both. */
    buffer = gst_sample_get_buffer(sample);
    if (buffer && gst_buffer_map(buffer, &info, (GstMapFlags)(GST_MAP_READ))) {
        gst_video_h264_packetize(info.data, info.size, st->encoder.pktsize,
            st->pkth, st->arg);
        gst_buffer_unmap(buffer, &info);
    }

    gst_sample_unref(sample);

    return GST_FLOW_OK;
}
示例3: gst_sample_unref
/*!
 * \brief OpenIMAJCapGStreamer::nextFrame
 * \return true when a fresh sample carrying a buffer was pulled.
 * Grabs a sample from the pipeline, awaiting consumation by getImage.
 * The pipeline is started if it was not running yet.
 */
bool OpenIMAJCapGStreamer::nextFrame()
{
    if (!pipeline)
        return false;

    // Make sure the pipeline is actually running before pulling.
    if (!this->isPipelinePlaying())
        this->startPipeline();

    // Nothing more can be pulled once the stream has ended.
    if (gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;

    // Release the sample held from the previous grab before replacing it.
    if (sample)
        gst_sample_unref(sample);

    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if (!sample)
        return false;

    // Borrow the buffer from the sample; the sample keeps it alive.
    buffer = gst_sample_get_buffer(sample);
    return buffer != nullptr;
}
示例4: gst_app_sink_pull_buffer
// Publishes the most recent frame from the GStreamer appsink into 'pixels'.
// In streaming mode the appsink callback fills backPixels and this method
// swaps it in under 'mutex'; in frame-by-frame mode the buffer/sample is
// pulled synchronously from the appsink right here.
// NOTE: this definition is truncated in the visible source (it ends inside
// an else-branch); the tail is outside this view.
void ofGstVideoUtils::update(){
if (isLoaded()){
if(!isFrameByFrame()){
// Streaming path: swap in the pixels the callback prepared.
mutex.lock();
bHavePixelsChanged = bBackPixelsChanged;
if (bHavePixelsChanged){
bBackPixelsChanged=false;
pixels.swap(backPixels);
#if GST_VERSION_MAJOR==0
// GStreamer 0.10 hands out GstBuffer; drop the one kept last frame.
if(prevBuffer) gst_buffer_unref (prevBuffer);
#else
// GStreamer 1.x: prevBuffer actually holds a GstSample here.
if(prevBuffer) gst_sample_unref (prevBuffer);
#endif
prevBuffer = buffer;
}
mutex.unlock();
}else{
#if GST_VERSION_MAJOR==0
GstBuffer *buffer;
//get the buffer from appsink
if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));
if(buffer){
if(pixels.isAllocated()){
if(prevBuffer) gst_buffer_unref (prevBuffer);
// Wrap the buffer memory without copying; prevBuffer keeps it alive.
pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
prevBuffer = buffer;
bHavePixelsChanged=true;
}
}
}
#else
GstBuffer *buffer;
GstSample * sample;
//get the buffer from appsink
if(isPaused()){
sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
}else{
sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
}
// NOTE(review): pull_preroll/pull_sample can return NULL (EOS/flush);
// gst_sample_get_buffer(NULL) would then warn — confirm callers avoid
// reaching this path in that state.
buffer = gst_sample_get_buffer(sample);
if(buffer){
if(pixels.isAllocated()){
if(prevBuffer) gst_sample_unref (prevBuffer);
gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
//TODO: stride = mapinfo.size / height;
pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
// Keep the whole sample (not just the buffer) alive until next update.
prevBuffer = sample;
bHavePixelsChanged=true;
gst_buffer_unmap(buffer,&mapinfo);
}
}
}
#endif
}else{
示例5: lock
// Publishes the most recent frame into 'pixels'. Streaming mode swaps the
// back pixel buffer (and, with OF_USE_GST_GL, the back texture) under the
// scoped lock; frame-by-frame mode pulls directly from the appsink.
// NOTE: this definition is truncated in the visible source (it ends inside
// an else-branch); the tail is outside this view.
void ofGstVideoUtils::update(){
if (isLoaded()){
if(!isFrameByFrame()){
ofScopedLock lock(mutex);
bHavePixelsChanged = bBackPixelsChanged;
if (bHavePixelsChanged){
bBackPixelsChanged=false;
swap(pixels,backPixels);
#ifdef OF_USE_GST_GL
// Promote the freshly filled back texture to the front texture.
if(backTexture.isAllocated()){
frontTexture.getTextureData() = backTexture.getTextureData();
frontTexture.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
frontTexture.setTextureWrap(GL_CLAMP_TO_EDGE,GL_CLAMP_TO_EDGE);
}
#endif
// Without a deep copy, keep the producer's sample alive via frontBuffer.
if(!copyPixels){
frontBuffer = backBuffer;
}
}
}else{
#if GST_VERSION_MAJOR==0
GstBuffer *buffer;
//get the buffer from appsink
if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));
if(buffer){
if(pixels.isAllocated()){
pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
// The shared_ptr adopts the appsink reference and unrefs it later.
prevBuffer = shared_ptr<GstBuffer>(buffer,gst_buffer_unref);;
bHavePixelsChanged=true;
}
}
}
#else
GstBuffer * buffer;
GstSample * sample;
//get the buffer from appsink
if(isPaused()){
sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
}else{
sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
}
// NOTE(review): the pulled sample can be NULL (EOS/flush);
// gst_sample_get_buffer(NULL) would warn — confirm callers avoid that state.
buffer = gst_sample_get_buffer(sample);
if(buffer){
if(pixels.isAllocated()){
gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
//TODO: stride = mapinfo.size / height;
pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
// Keep the whole sample alive while pixels alias its mapped memory.
backBuffer = shared_ptr<GstSample>(sample,gst_sample_unref);
bHavePixelsChanged=true;
gst_buffer_unmap(buffer,&mapinfo);
}
}
}
#endif
}else{
示例6: gst_buffer_unref
/*!
 * \brief CvCapture_GStreamer::grabFrame
 * \return true when a frame buffer was grabbed, false on EOS or failure.
 * Grabs a sample from the pipeline, awaiting consumation by retreiveFrame.
 * The pipeline is started if it was not running yet
 */
bool CvCapture_GStreamer::grabFrame()
{
if(!pipeline)
return false;

// start the pipeline if it was not in playing state yet
if(!this->isPipelinePlaying())
this->startPipeline();

// bail out if EOS
if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
return false;

#if GST_VERSION_MAJOR == 0
// GStreamer 0.10: the appsink hands out buffers directly; release the
// previously held one before pulling the next.
if(buffer)
gst_buffer_unref(buffer);
buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
#else
// GStreamer 1.x: pull a sample, keep it alive in the member 'sample',
// and borrow its buffer (the sample owns it).
if(sample)
gst_sample_unref(sample);
sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
if(!sample)
return false;
buffer = gst_sample_get_buffer(sample);
#endif
if(!buffer)
return false;
return true;
}
示例7: post_recv_sample
/* Test helper: forwards every sample received by the appsink to the HTTP
 * endpoint under test via its "push-buffer" action signal, and verifies the
 * endpoint is operating in POST mode. */
static GstFlowReturn
post_recv_sample (GstElement * appsink, gpointer user_data)
{
  GstSample *sample = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buffer;

  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_ERROR;

  buffer = gst_sample_get_buffer (sample);
  if (buffer != NULL) {
    g_signal_emit_by_name (httpep, "push-buffer", buffer, &ret);
    if (ret != GST_FLOW_OK) {
      /* something wrong */
      GST_ERROR ("Could not send buffer to httpep %s. Ret code %d",
          GST_ELEMENT_NAME (httpep), ret);
    }

    /* The endpoint must be serving an HTTP POST. */
    g_object_get (G_OBJECT (httpep), "http-method", &method, NULL);
    ck_assert_int_eq (method, KMS_HTTP_ENDPOINT_METHOD_POST);
  }

  gst_sample_unref (sample);

  return ret;
}
示例8: guard
// Hands the caller a pointer to the pixel data of the most recently captured
// sample, transferring ownership of that sample from currentBuffer to
// retrievedBuffer (both are class members written by the appsink callback).
// Returns false only when the newly retrieved sample carries no buffer.
bool GstAppSinkPipeline::GetLatestFrameBuffer(void** frameBuffer)
{
bool retrieving = false;
// Serialize against the appsink callback that publishes currentBuffer.
boost::lock_guard<boost::mutex> guard(bufferMutex);
if (retrievedBuffer == 0)
{
if (currentBuffer != 0)
{
// Claim the newest sample; the callback may now publish the next one.
retrievedBuffer = currentBuffer;
currentBuffer = 0;
retrieving = true;
}
}
if (retrieving)
{
GstBuffer* buffer;
GstMapInfo map;
buffer = gst_sample_get_buffer (retrievedBuffer);
if (buffer)
{
gst_buffer_map (buffer, &map, GST_MAP_READ);
(*frameBuffer) = map.data;
// NOTE(review): the buffer is unmapped before the caller ever reads
// *frameBuffer. map.data remains usable only for plain system-memory
// buffers — confirm that holds for every pipeline feeding this class,
// or keep the mapping open until the sample is released.
gst_buffer_unmap (buffer, &map);
}
else return false;
}
return true;
}
示例9: totem_gst_tag_list_get_cover
/**
 * totem_gst_tag_list_get_cover:
 * @tag_list: a #GstTagList
 *
 * Extracts the cover art from @tag_list, falling back to the stream's
 * preview image when no proper cover is present.
 *
 * Returns: (transfer full): the cover as a #GdkPixbuf, or %NULL if no
 * usable image was found.
 */
GdkPixbuf *
totem_gst_tag_list_get_cover (GstTagList *tag_list)
{
  GstSample *cover_sample;

  /* This function returns a pointer, so the failure value must be NULL,
   * not FALSE (a gboolean). */
  g_return_val_if_fail (tag_list != NULL, NULL);

  cover_sample = totem_gst_tag_list_get_cover_real (tag_list);
  /* Fallback to preview */
  if (!cover_sample) {
    gst_tag_list_get_sample_index (tag_list, GST_TAG_PREVIEW_IMAGE, 0,
                                   &cover_sample);
  }

  if (cover_sample) {
    GstBuffer *buffer;
    GdkPixbuf *pixbuf = NULL;

    /* A sample may legally carry no buffer; guard before decoding. */
    buffer = gst_sample_get_buffer (cover_sample);
    if (buffer != NULL)
      pixbuf = totem_gst_buffer_to_pixbuf (buffer);
    gst_sample_unref (cover_sample);
    return pixbuf;
  }

  return NULL;
}
示例10: on_new_preroll
/* Preroll callback: pulls a sample from the appsink, maps its first memory
 * block read-only, and forwards the raw bytes to the registered output
 * callback. Always returns GST_FLOW_OK. */
GstFlowReturn on_new_preroll(GstAppSink *appsink, gpointer user_data) {
    GstSample* sample = NULL;
    GstBuffer* buffer;
    GstMemory* memory;
    GstMapInfo info;
    GstClockTime clocktime;

    g_debug("on_new_preroll ");
    sample = gst_app_sink_pull_sample (appsink);
    if (sample) {
        g_debug("pulled sample\n");
        /* A sample can carry no buffer; bail out instead of crashing. */
        buffer = gst_sample_get_buffer(sample);
        if (buffer) {
            clocktime = GST_BUFFER_PTS(buffer);
            (void)clocktime; /* PTS currently unused; kept for debugging */
            memory = gst_buffer_get_memory(buffer, 0);
            if (memory) {
                /* gst_memory_map() can fail; only touch info on success. */
                if (gst_memory_map(memory, &info, GST_MAP_READ)) {
                    /*
                    You can access raw memory at info.data
                    */
                    if(app.output_callback)
                        app.output_callback(info.data, info.size);
                    //fwrite(info.data, 1, info.size, app.outfile);
                    gst_memory_unmap(memory, &info);
                }
                gst_memory_unref(memory);
            }
        }
        gst_sample_unref(sample);
    }
    return GST_FLOW_OK;
}
示例11: convert_frame_need_data_callback
/* need-data callback of the temporary conversion pipeline's appsrc: pushes
 * the single input frame exactly once, then disconnects itself so a later
 * need-data emission cannot push again. All work happens under the
 * context mutex to coordinate with cancellation/finish. */
static void
convert_frame_need_data_callback (GstElement * src, guint size,
GstVideoConvertSampleContext * context)
{
GstFlowReturn ret = GST_FLOW_ERROR;
GError *error;
GstBuffer *buffer;

g_mutex_lock (&context->mutex);
// The conversion may already have finished (or been cancelled) before the
// appsrc asked for data; in that case there is nothing left to push.
if (context->finished)
goto done;

buffer = gst_sample_get_buffer (context->sample);
g_signal_emit_by_name (src, "push-buffer", buffer, &ret);
// The input sample is consumed exactly once; drop our reference and clear
// the pointer so the finish path does not unref it again.
gst_sample_unref (context->sample);
context->sample = NULL;

if (ret != GST_FLOW_OK) {
GST_ERROR ("Could not push video frame: %s", gst_flow_get_name (ret));
error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
"Could not push video frame: %s", gst_flow_get_name (ret));
// NOTE: convert_frame_finish() is assumed to take ownership of 'error'
// — defined outside this view.
convert_frame_finish (context, NULL, error);
}

// Prevent any further need-data emission from re-entering this callback.
g_signal_handlers_disconnect_by_func (src, convert_frame_need_data_callback,
context);

done:
g_mutex_unlock (&context->mutex);
}
示例12: frame_handler
/* Receives one decoded sample, lazily captures the stream's image parameters
 * from the sample caps on first use, and copies the frame bytes into the
 * receiver. Always returns GST_FLOW_OK. */
GstFlowReturn frame_handler(GstSample * sample, GStreamerFramesReceiver * pClass)
{
    GstBuffer * buffer = gst_sample_get_buffer(sample);
    GstMapInfo info;

    /* The sample may carry no buffer, and gst_buffer_map() can fail; skip
     * the frame rather than read through an invalid mapping. */
    if (!buffer || !gst_buffer_map(buffer, &info, GST_MAP_READ))
        return GST_FLOW_OK;

    if (pClass)
    {
        /* First frame: record width/height/pixel format from the caps. */
        if (pClass -> InputFrameWidth() == 0)
        {
            int width, height;
            PixelFormat pixelFormat;
            GstCaps *caps = gst_sample_get_caps(sample);
            ExtractImageParams(caps, width, height, pixelFormat);
            pClass -> InputFrameWidth() = width;
            pClass -> InputFrameHeight() = height;
            pClass -> InputPixelFormat() = pixelFormat;
        }
        pClass -> CopyFrameData(info.data, info.size);
    }
    gst_buffer_unmap (buffer, &info);
    return GST_FLOW_OK;
}
示例13: gst_app_sink_pull_sample
/* appsink "new-sample" callback: pulls the sample and publishes it as a
 * QImage on the Capture instance, guarded by m_mutex. */
GstFlowReturn Capture::newSample(GstAppSink* sink, gpointer gSelf)
{
    //g_print("New sample...");
    GstSample* sample = NULL;
    GstBuffer* sampleBuffer = NULL;
    GstMapInfo bufferInfo;
    Capture* self = static_cast<Capture* >(gSelf);

    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if(sample != NULL)
    {
        sampleBuffer = gst_sample_get_buffer(sample);
        /* Guard both a buffer-less sample and a failed map. */
        if(sampleBuffer != NULL && gst_buffer_map(sampleBuffer, &bufferInfo, GST_MAP_READ))
        {
            //gsize sz = gst_buffer_get_size(sampleBuffer);
            //g_print("%lu\n",sz);
            self->m_mutex.lock();
            // QImage(data, ...) only wraps the caller's memory, which becomes
            // invalid after unmap/unref below — take a deep copy so m_image
            // remains valid after this callback returns.
            // FIXME: width/height are hard-coded; derive them from the caps.
            self->m_image = QImage(bufferInfo.data, 600, 400, QImage::Format_Mono).copy();
            self->m_mutex.unlock();
            gst_buffer_unmap(sampleBuffer, &bufferInfo);
        }
        gst_sample_unref(sample);
    }
    return GST_FLOW_OK;
}
示例14: new_sample_callback
/* appsink callback: appends every encoded buffer to the shared-memory ring
 * cache, updating GOP bookkeeping at each random access point, optionally
 * relaying the buffer over UDP, and signalling m3u8 segment completion
 * through the message queue. */
static GstFlowReturn new_sample_callback (GstAppSink * sink, gpointer user_data)
{
    GstBuffer *buffer;
    GstSample *sample;
    Encoder *encoder = (Encoder *)user_data;

    /* Record liveness for the watchdog. */
    *(encoder->output->heartbeat) = gst_clock_get_time (encoder->system_clock);

    sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
    /* pull-sample returns NULL on EOS/flush; nothing to cache then. */
    if (sample == NULL)
        return GST_FLOW_OK;
    /* A sample without a buffer would crash gst_buffer_get_size() below. */
    buffer = gst_sample_get_buffer (sample);
    if (buffer == NULL) {
        gst_sample_unref (sample);
        return GST_FLOW_OK;
    }

    sem_wait (encoder->output->semaphore);
    (*(encoder->output->total_count)) += gst_buffer_get_size (buffer);

    /* update head_addr, free enough memory for current buffer. */
    while (cache_free (encoder) < gst_buffer_get_size (buffer) + 12) { /* timestamp + gop size = 12 */
        move_head (encoder);
    }

    if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
        /*
         * random access point found.
         * write previous gop size to 4 bytes reservation,
         * write current gop timestamp,
         * reserve 4 bytes for size of current gop,
         */
        if (encoder->mqdes == -1) {
            /* no m3u8 output */
            move_last_rap (encoder, buffer);
        } else if (GST_BUFFER_PTS (buffer) == encoder->last_running_time) {
            gchar *msg;

            move_last_rap (encoder, buffer);
            /* Tell the m3u8 muxer how long the finished segment was. */
            msg = g_strdup_printf ("%lu", encoder->last_segment_duration);
            if (mq_send (encoder->mqdes, msg, strlen (msg), 1) == -1) {
                GST_ERROR ("mq_send error: %s", g_strerror (errno));
            }
            g_free (msg);
            encoder->last_running_time = GST_CLOCK_TIME_NONE;
        }
    }

    /* udpstreaming? */
    if (encoder->udpstreaming) {
        udp_streaming (encoder, buffer);
    }

    /*
     * copy buffer to cache.
     * update tail_addr
     */
    copy_buffer (encoder, buffer);
    sem_post (encoder->output->semaphore);
    gst_sample_unref (sample);

    return GST_FLOW_OK;
}
示例15: qDebug
/* appsink callback: copies the new sample's bytes into a QByteArray and
 * emits it to listeners. Always returns GST_FLOW_OK so the pipeline keeps
 * flowing even when a sample cannot be processed. */
GstFlowReturn VideoSender::newBufferCB(GstAppSink *sink, gpointer user_data)
{
    qDebug() << "In" << __FUNCTION__;

    VideoSender *vs = static_cast<VideoSender *>(user_data);

    // Get new video sample
    GstSample *sample = gst_app_sink_pull_sample(sink);
    if (sample == NULL) {
        qWarning("%s: Failed to get new sample", __FUNCTION__);
        return GST_FLOW_OK;
    }

    // FIXME: zero copy?
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    QByteArray *data = NULL;
    // A sample can carry no buffer; mapping NULL would assert inside GStreamer.
    if (buffer != NULL && gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        // Copy the data to QByteArray
        // (presumably emitVideo takes ownership of 'data' — confirm receiver
        // deletes it, otherwise this leaks one QByteArray per frame)
        data = new QByteArray((char *)map.data, map.size);
        vs->emitVideo(data);
        gst_buffer_unmap(buffer, &map);
    } else {
        qWarning("Error with gst_buffer_map");
    }
    gst_sample_unref(sample);

    return GST_FLOW_OK;
}