This article collects typical usage examples of GST_PIPELINE in C++ (strictly speaking, GST_PIPELINE is a GStreamer type-cast macro that converts a GstElement* or GObject* into a GstPipeline*). If you have been wondering how GST_PIPELINE is used in C++, how to call it, or where to find real-world examples, the hand-picked code samples below may help.
The following presents 15 code examples of GST_PIPELINE, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
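Before the project examples, here is a minimal, self-contained sketch of the pattern they all share. It is not taken from any of the projects below; the pipeline name is illustrative only, and error handling is omitted.

/* Minimal sketch (illustrative only): GST_PIPELINE() is the type-checked
 * cast macro that turns the GstElement* returned by gst_pipeline_new()
 * into a GstPipeline*, as required by pipeline-specific calls such as
 * gst_pipeline_get_bus(). */
#include <gst/gst.h>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);

    GstElement *pipeline = gst_pipeline_new("example-pipeline");

    /* Cast to GstPipeline* to access the pipeline's message bus. */
    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));

    /* ... add elements, attach a bus watch, set the state to PLAYING ... */

    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}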
Example 1: ASSERT
void MediaPlayerPrivate::createGSTPlayBin(String url)
{
    ASSERT(!m_playBin);
    m_playBin = gst_element_factory_make("playbin2", "play");

    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_playBin));
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message", G_CALLBACK(mediaPlayerPrivateMessageCallback), this);
    gst_object_unref(bus);

    g_object_set(G_OBJECT(m_playBin), "uri", url.utf8().data(),
        "volume", static_cast<double>(m_player->volume()), NULL);

    m_videoSink = webkit_video_sink_new();
    g_object_ref_sink(m_videoSink);
    g_object_set(m_playBin, "video-sink", m_videoSink, NULL);

    g_signal_connect(m_videoSink, "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);
}
Example 2: MediaObjectImpl
MediaPipelineImpl::MediaPipelineImpl (const boost::property_tree::ptree &config)
    : MediaObjectImpl (config)
{
    GstClock *clock;

    pipeline = gst_pipeline_new (NULL);

    if (pipeline == NULL) {
        throw KurentoException (MEDIA_OBJECT_NOT_AVAILABLE,
            "Cannot create gstreamer pipeline");
    }

    clock = gst_system_clock_obtain ();
    gst_pipeline_use_clock (GST_PIPELINE (pipeline), clock);
    g_object_unref (clock);

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    busMessageHandler = 0;
}
Example 3: tsmf_platform_register_handler
int tsmf_platform_register_handler(TSMFGstreamerDecoder* decoder)
{
    GstBus* bus;

    if (!decoder)
        return -1;

    if (!decoder->pipe)
        return -1;

    bus = gst_pipeline_get_bus(GST_PIPELINE(decoder->pipe));

    if (!bus)
    {
        WLog_ERR(TAG, "gst_pipeline_get_bus failed!");
        return 1;
    }

    return 0;
}
Example 4: GST_START_TEST
GST_END_TEST
GST_START_TEST (request_audio_src_pad_pending)
{
    GstElement *dummysrc;
    gchar *padname = NULL;
    GstBus *bus;

    loop = g_main_loop_new (NULL, TRUE);
    pipeline = gst_pipeline_new (__FUNCTION__);
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

    gst_bus_add_signal_watch (bus);
    g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

    dummysrc = gst_element_factory_make ("dummysrc", NULL);
    g_signal_connect (dummysrc, "pad-added", G_CALLBACK (pad_added_delayed),
        &padname);

    gst_bin_add (GST_BIN (pipeline), dummysrc);
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* request src pad using action */
    g_signal_emit_by_name (dummysrc, "request-new-pad",
        KMS_ELEMENT_PAD_TYPE_AUDIO, NULL, GST_PAD_SRC, &padname);
    fail_if (padname == NULL);

    GST_DEBUG ("Pad name %s", padname);

    g_object_set (G_OBJECT (dummysrc), "audio", TRUE, NULL);
    g_free (padname);

    g_timeout_add_seconds (4, print_timedout_pipeline, NULL);
    g_main_loop_run (loop);

    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_bus_remove_signal_watch (bus);
    g_object_unref (bus);
    g_object_unref (pipeline);
    g_main_loop_unref (loop);
}
Example 5: QAbstractListModel
QDeclarativeVideoEditor::QDeclarativeVideoEditor(QObject *parent) :
    QAbstractListModel(parent), m_position(0), m_positionTimer(this), m_rendering(false), m_size(0),
    m_width(0), m_height(0), m_fpsn(0), m_fpsd(0)
{
    QHash<int, QByteArray> roles;
    roles.insert( 33 , "uri" );
    roles.insert( 34 , "fileName" );
    roles.insert( 35 , "inPoint" );
    roles.insert( 36 , "duration" );
    setRoleNames(roles);

    connect(&m_positionTimer, SIGNAL(timeout()), SLOT(updatePosition()));

    m_timeline = ges_timeline_new_audio_video();
    m_timelineLayer = (GESTimelineLayer*) ges_simple_timeline_layer_new();
    ges_timeline_add_layer(m_timeline, m_timelineLayer);
    m_pipeline = ges_timeline_pipeline_new();

    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (bus, bus_call, this);
    gst_object_unref (bus);

    /*
     * gst-dsp encoders seem to not proxy downstream caps correctly; this can make
     * GES fail to render some projects. We override the default getcaps on our own.
     */
    g_signal_connect(m_pipeline, "element-added", (GCallback) gstcapstricks_pipeline_element_added, NULL);

    ges_timeline_pipeline_add_timeline (m_pipeline, m_timeline);

    m_vsink = gst_element_factory_make ("omapxvsink", "previewvsink");
    ges_timeline_pipeline_preview_set_video_sink (m_pipeline, m_vsink);
    gst_x_overlay_set_render_rectangle (GST_X_OVERLAY (m_vsink),
        171, 0,
        512, 288);

    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
    gst_element_set_state ((GstElement*) m_pipeline, GST_STATE_PAUSED);

    m_duration = GST_CLOCK_TIME_NONE;
    m_progress = 0.0;
}
Example 6: gst_message_parse_error
void RgAnalyser::HandleErrorMsg (GstMessage *msg)
{
    GError *gerror = nullptr;
    gchar *debug = nullptr;
    gst_message_parse_error (msg, &gerror, &debug);

    const auto& msgStr = QString::fromUtf8 (gerror->message);
    const auto& debugStr = QString::fromUtf8 (debug);
    const auto code = gerror->code;
    const auto domain = gerror->domain;
    g_error_free (gerror);
    g_free (debug);

    qWarning () << Q_FUNC_INFO
            << domain
            << code
            << msgStr
            << debugStr;

    if (IsDraining_)
        return;

    IsDraining_ = true;
    const auto bus = gst_pipeline_get_bus (GST_PIPELINE (Pipeline_));
    while (const auto msg = gst_bus_timed_pop (bus, 0.01 * GST_SECOND))
        handleMessage (std::shared_ptr<GstMessage> (msg, gst_message_unref));
    IsDraining_ = false;

    gst_element_set_state (Pipeline_, GST_STATE_NULL);
    PopThread_->Resume ();

    const auto trackInfoPos = std::find_if (Result_.Tracks_.begin (), Result_.Tracks_.end (),
            [this] (const TrackRgResult& info) { return info.TrackPath_ == CurrentPath_; });
    if (trackInfoPos == Result_.Tracks_.end ())
        Result_.Tracks_.append ({ CurrentPath_, 0, 0 });

    CheckFinish ();
}
Example 7: main
int main(int argc, char** argv)
{
    GMainLoop *loop;
    GstElement *play;

    gst_init(&argc, &argv);

    play = gst_element_factory_make("playbin2", "play");
    //play->set_properties("volume", 10);
    loop = g_main_loop_new(NULL, FALSE);

    g_object_set(G_OBJECT(play), "uri", argv[1], NULL);
    gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(play)), bus_cb, loop);

    g_print("playing......\n");
    gst_element_set_state(play, GST_STATE_PLAYING);

    g_print("start g_main_loop_run\n");
    g_main_loop_run(loop);
    g_print("g_main_loop_run return\n");

    gst_element_set_state(play, GST_STATE_NULL);
    return 0;
}
Example 8: gst_pipeline_get_bus
void CrowdDetectorFilterImpl::postConstructor ()
{
    GstBus *bus;
    std::shared_ptr<MediaPipelineImpl> pipe;

    FilterImpl::postConstructor ();

    pipe = std::dynamic_pointer_cast<MediaPipelineImpl> (getMediaPipeline() );

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipe->getPipeline() ) );
    bus_handler_id = register_signal_handler (G_OBJECT (bus),
        "message",
        std::function <void (GstElement *, GstMessage *) >
            (std::bind (&CrowdDetectorFilterImpl::busMessage, this,
                std::placeholders::_2) ),
        std::dynamic_pointer_cast<CrowdDetectorFilterImpl>
            (shared_from_this() ) );

    g_object_unref (bus);
}
Example 9: gst_pipeline_get_property
static void
gst_pipeline_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
    GstPipeline *pipeline = GST_PIPELINE (object);

    switch (prop_id) {
        case PROP_DELAY:
            g_value_set_uint64 (value, gst_pipeline_get_delay (pipeline));
            break;
        case PROP_AUTO_FLUSH_BUS:
            g_value_set_boolean (value, gst_pipeline_get_auto_flush_bus (pipeline));
            break;
        case PROP_LATENCY:
            g_value_set_uint64 (value, gst_pipeline_get_latency (pipeline));
            break;
        default:
            G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
            break;
    }
}
Example 10: gst_play_file
static void
gst_play_file(const char *filename)
{
    GMainLoop *loop;
    GstElement *pipeline;
    GstBus *bus;
    GstElement *source, *parser, *sink;

    loop = g_main_loop_new(NULL, TRUE);
    pipeline = gst_pipeline_new("audio-player");

    source = gst_element_factory_make("filesrc", "source");
    parser = gst_element_factory_make("wavparse", "parser");
    sink = gst_element_factory_make("alsasink", "output");

    g_object_set(G_OBJECT(source), "location", filename, NULL);

    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    gst_bus_add_watch(bus, bus_watch, loop);
    g_object_unref(bus);

    gst_bin_add_many(GST_BIN(pipeline), source, parser, sink, NULL);
    g_signal_connect(parser, "pad-added", G_CALLBACK(add_pad), sink);

    if (!gst_element_link(source, parser)) {
        g_warning("link source to parser failed");
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    printf("Start playing...\n");
    g_main_loop_run(loop);
    printf("Playing stopped!!!\n");

    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_object_unref(pipeline);
}
Example 11: gstreamer_determine_video_dimensions
void gstreamer_determine_video_dimensions(const char *uri, int *video_width,
    int *video_height) {
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);

    char *playbin_launch_str = malloc(strlen(uri) + 64);
    sprintf(playbin_launch_str, PLAYBIN_STR
        " uri=%s audio-sink=fakesink video-sink=fakesink", uri);
    GError *error2 = NULL;
    GstElement *playbin = gst_parse_launch(playbin_launch_str, &error2);
    if (error2) {
        printf("Error: Could not create gstreamer pipeline for identification.\n");
        printf("Parse error: %s\n", error2->message);
        exit(1);
    }

    playbin_pipeline = playbin;
    bus_quit_on_playing = TRUE;
    GstBus *playbin_bus = gst_pipeline_get_bus(GST_PIPELINE(playbin));
    guint type_find_bus_watch_id = gst_bus_add_watch(playbin_bus, bus_callback, loop);
    gst_object_unref(playbin_bus);

    gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_READY);
    gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PLAYING);
    g_main_loop_run(loop);
    gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PAUSED);

    GstPad *pad = gst_pad_new("", GST_PAD_UNKNOWN);
    g_signal_emit_by_name(playbin, "get-video-pad", 0, &pad, NULL);
    GstCaps *caps = gst_pad_get_current_caps(pad);
    *video_width = g_value_get_int(gst_structure_get_value(
        gst_caps_get_structure(caps, 0), "width"));
    *video_height = g_value_get_int(gst_structure_get_value(
        gst_caps_get_structure(caps, 0), "height"));
    g_object_unref(pad);

    gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(playbin));
    g_source_remove(type_find_bus_watch_id);
    g_main_loop_unref(loop);
}
Example 12: g_signal_handler_disconnect
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    if (m_repaintHandler) {
        g_signal_handler_disconnect(m_videoSink.get(), m_repaintHandler);
        m_repaintHandler = 0;
    }

    g_mutex_clear(&m_sampleMutex);

    m_player = 0;

    if (m_volumeSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler);
        m_volumeSignalHandler = 0;
    }

    if (m_muteSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler);
        m_muteSignalHandler = 0;
    }

#if USE(GSTREAMER_GL)
    g_cond_clear(&m_drawCondition);
    g_mutex_clear(&m_drawMutex);
#endif

    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateNeedContextMessageCallback), this);
        gst_bus_disable_sync_message_emission(bus.get());
        m_pipeline.clear();
    }

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif
}
Example 13: gst_pipeline_get_bus
eServiceMP3Record::~eServiceMP3Record()
{
    if (m_recording_pipeline)
    {
        // disconnect sync handler callback
        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_recording_pipeline));
#if GST_VERSION_MAJOR < 1
        gst_bus_set_sync_handler(bus, NULL, NULL);
#else
        gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
#endif
        gst_object_unref(bus);
    }

    if (m_state > stateIdle)
        stop();

    if (m_recording_pipeline)
    {
        gst_object_unref(GST_OBJECT(m_recording_pipeline));
    }
}
Example 14: gst_init
//BUILDER COMMENT. DO NOT REMOVE. auxcode begin
void Music::init(const string newName, AL::ALPtr<AL::ALBroker> parentBroker) {
    Component::init(newName, parentBroker);

#ifdef WEBOTS
    return;
#endif

    // init GStreamer
    gst_init (NULL, NULL);
    loop = g_main_loop_new (NULL, FALSE);

    // set up
    play = gst_element_factory_make ("playbin2", "play");
    bus = gst_pipeline_get_bus (GST_PIPELINE (play));

    // set state
    gst_element_set_state (play, GST_STATE_READY);

    isSetFileMp3 = false;
    isPlayPress = false;
    isStopPress = false;
}
Example 15: introbin_set_pad_offset
gboolean introbin_set_pad_offset(CustomData *data)
{
    gint64 pos2;
    pos2 = gst_element_get_base_time(data->pipeline);

    GstClock *clock;
    clock = gst_pipeline_get_clock(GST_PIPELINE(data->pipeline));
    GstClockTime clock_time;
    clock_time = gst_clock_get_time(clock);
    gst_object_unref(clock);

    g_print("Pipeline times: base_time=%lld clock_time=%lld\n",
        pos2, clock_time);

    GstElement *dec = gst_bin_get_by_name(GST_BIN(data->introbin), "introdec");
    GstPad *src_pad1, *src_pad2;

    src_pad1 = gst_element_get_static_pad(GST_ELEMENT(dec), "src_0");
    gst_pad_set_offset(src_pad1, clock_time - pos2);
    gst_object_unref(src_pad1);

    src_pad2 = gst_element_get_static_pad(GST_ELEMENT(dec), "src_1");
    gst_pad_set_offset(src_pad2, clock_time - pos2);
    gst_object_unref(src_pad2);

    return TRUE;
}