

Java Gst.main Method Code Examples

This article collects typical usage examples of the Java method org.gstreamer.Gst.main. If you are wondering what Gst.main does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the containing class, org.gstreamer.Gst.


The following shows 15 code examples of the Gst.main method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Java code examples.
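Before the examples, here is a minimal sketch of the pattern they all share, distilled from the code below (the class name GstMainSketch is only for illustration): Gst.init() lets GStreamer consume the command-line flags it recognizes, a Pipeline is built and set to play, Gst.main() blocks running the GStreamer main loop until Gst.quit() is called, and the pipeline is then stopped so its elements reach State.NULL before disposal.

import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Gst;
import org.gstreamer.Pipeline;

public class GstMainSketch {
    public static void main(String[] args) {
        // Let GStreamer consume any flags it recognizes and return the rest.
        args = Gst.init("GstMainSketch", args);

        // Build a trivial pipeline: fakesrc -> fakesink.
        Pipeline pipe = new Pipeline("GstMainSketch");
        Element src = ElementFactory.make("fakesrc", "src");
        Element sink = ElementFactory.make("fakesink", "sink");
        pipe.addMany(src, sink);
        src.link(sink);

        pipe.play();
        Gst.main();   // blocks until Gst.quit() is called elsewhere
        pipe.stop();  // gstreamer requires elements to be in State.NULL before disposal
    }
}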

Example 1: main

import org.gstreamer.Gst; // import the package/class this method depends on
public static void main(String[] args) {
    args = Gst.init("Dynamic Pad Test", args);
    /* create elements */
    Pipeline pipeline = new Pipeline("my_pipeline");
    Element source = ElementFactory.make("filesrc", "source");
    source.set("location", args[0]);
    Element demux = ElementFactory.make("oggdemux", "demuxer");
    
    /* you would normally check that the elements were created properly */
    
    /* put together a pipeline */
    pipeline.addMany(source, demux);
    Element.linkPads(source, "src", demux, "sink");
    
    /* listen for newly created pads */
    demux.connect(new Element.PAD_ADDED() {
        public void padAdded(Element element, Pad pad) {
           System.out.println("New Pad " + pad.getName() + " was created");
        }
    });
    
    /* start the pipeline */
    pipeline.play();
    Gst.main();
}
 
Developer: gstreamer-java, Project: gstreamer1.x-java, Lines: 26, Source: DynamicPadTest.java

Example 2: main

import org.gstreamer.Gst; // import the package/class this method depends on
public static void main(String[] args) {
    //
    // Initialize the gstreamer framework, and let it interpret any command
    // line flags it is interested in.
    //
    args = Gst.init("SimplePipeline", args);
    
    Pipeline pipe = new Pipeline("SimplePipeline");
    Element src = ElementFactory.make("fakesrc", "Source");
    Element sink = ElementFactory.make("fakesink", "Destination");
    
    
    // Add the elements to the Bin
    pipe.addMany(src, sink);
    
    // Link fakesrc to fakesink so data can flow
    src.link(sink);
    
    // Start the pipeline playing
    pipe.play();
    Gst.main();
    pipe.stop();
}
 
Developer: gstreamer-java, Project: gstreamer1.x-java, Lines: 24, Source: SimplePipeline.java

Example 3: main

import org.gstreamer.Gst; // import the package/class this method depends on
public static void main(String[] args) {
    //
    // Initialize the gstreamer framework, and let it interpret any command
    // line flags it is interested in.
    //
    args = Gst.init("DoubleQuit", args);
    
    for (int i = 0; i < 2; ++i) {
        Pipeline pipe = makePipe();
        Gst.getScheduledExecutorService().schedule(new Runnable() {

            public void run() {
                Gst.quit();
            }
        }, 1, TimeUnit.SECONDS);
        // Start the pipeline playing
        pipe.play();
        System.out.println("Running main loop " + i);
        Gst.main();
        // Clean up (gstreamer requires elements to be in State.NULL before disposal)
        pipe.stop();
    }
}
 
Developer: gstreamer-java, Project: gstreamer1.x-java, Lines: 24, Source: DoubleQuit.java
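Example 3 above quits the main loop from a timer. A common variant is to call Gst.quit() from a bus callback when the stream ends, so that Gst.main() returns exactly at end-of-stream. The sketch below is an assumption-laden illustration: it presumes this binding exposes a Bus.EOS listener with an endOfStream(GstObject) callback (only Bus.TAG is demonstrated on this page, in Example 10), and it reuses Pipeline.launch() from Example 6.

import org.gstreamer.Bus;
import org.gstreamer.Gst;
import org.gstreamer.GstObject;
import org.gstreamer.Pipeline;

public class QuitOnEos {
    public static void main(String[] args) {
        args = Gst.init("QuitOnEos", args);
        // A finite test source so the pipeline actually reaches end-of-stream.
        final Pipeline pipe = Pipeline.launch("videotestsrc num-buffers=100 ! fakesink");

        // Assumption: a Bus.EOS listener exists, mirroring the Bus.TAG listener in Example 10.
        pipe.getBus().connect(new Bus.EOS() {
            public void endOfStream(GstObject source) {
                Gst.quit();  // unblock Gst.main() below once the stream ends
            }
        });

        pipe.play();
        Gst.main();  // returns after the EOS handler calls Gst.quit()
        pipe.stop();
    }
}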

Example 4: main

import org.gstreamer.Gst; // import the package/class this method depends on
public static void main(String[] args) {
    args = Gst.init("TypeFind Test", args);
    /* create elements */
    Pipeline pipeline = new Pipeline("my_pipeline");
    Element source = ElementFactory.make("filesrc", "source");
    source.set("location", args[0]);
    TypeFind typefind = new TypeFind("typefinder");
    
    /* you would normally check that the elements were created properly */
    
    /* put together a pipeline */
    pipeline.addMany(source, typefind);
    Element.linkMany(source, typefind);
    
    /* listen for types found */
    typefind.connect(new TypeFind.HAVE_TYPE() {

        public void typeFound(Element elem, int probability, Caps caps) {
            System.out.printf("New type found: probability=%d caps=%s\n",
                    probability, caps.toString());
        }
    });
    
    /* start the pipeline */
    pipeline.play();
    
    Gst.main();
}
 
Developer: gstreamer-java, Project: gstreamer1.x-java, Lines: 29, Source: TypeFindTest.java

Example 5: main

import org.gstreamer.Gst; // import the package/class this method depends on
public static void main(String[] args) {
    //
    // Initialize the gstreamer framework, and let it interpret any command
    // line flags it is interested in.
    //
    args = Gst.init("AudioPlayer", args);
    
    if (args.length < 1) {
        System.out.println("Usage: AudioPlayer <file to play>");
        System.exit(1);
    }
    //
    // Create a PlayBin to play the media file.  A PlayBin is a Pipeline that
    // creates all the needed elements and automatically links them together.
    //
    PlayBin playbin = new PlayBin("AudioPlayer");
    
    // Make sure a video window does not appear.
    playbin.setVideoSink(ElementFactory.make("fakesink", "videosink"));
    
    // Set the file to play
    playbin.setInputFile(new File(args[0]));
    
    // Start the pipeline playing
    playbin.play();
    Gst.main();
    
    // Clean up (gstreamer requires elements to be in State.NULL before disposal)
    playbin.stop();
}
 
Developer: gstreamer-java, Project: gstreamer1.x-java, Lines: 31, Source: AudioPlayer.java

Example 6: main

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Launches a pipeline from a command-line pipeline description.
 * You can find examples of the command-line pipeline syntax in the manual page
 * for the GStreamer <code>gst-launch</code> utility.
 *
 * For example: <pre>
 * java org.gstreamer.example.PipelineLauncher videotestsrc ! autovideosink
 * </pre>
 *
 * @param args pipeline definition
 */
public static void main(String[] args) {
    //
    // Initialize the gstreamer framework, and let it interpret any command
    // line flags it is interested in.
    //
    args = Gst.init("PipelineLauncher", args);

    if (args.length == 0) {
        args = new String[]{"videotestsrc", "!", "autovideosink"};
    }

    StringBuilder sb = new StringBuilder();

    for (String s: args) {
        sb.append(" ");
        sb.append(s);
    }
    
    Pipeline pipe = Pipeline.launch(sb.substring(1));

    pipe.play();

    Gst.main();

    pipe.stop();
}
 
Developer: gstreamer-java, Project: gstreamer1.x-java, Lines: 38, Source: PipelineLauncher.java

Example 7: StreamingEncodingH263WebCam

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Streams the video coming from the webcam, encoding it as H263.
 */
private void StreamingEncodingH263WebCam() {

	Gst.init("GStreamer", new String[0]);

	final Pipeline pipe = Pipeline.launch("v4l2src name=v4l2src  ! textoverlay name=textoverlay ! ffenc_h263 name=ffenc_h263 ! rtph263ppay name=rtph263ppay ! udpsink name=udpsinkVideo");
	ref_pipeline = pipe;


	pipe.getElementByName("v4l2src").set("device","/dev/video0");
	pipe.getElementByName("textoverlay").set("text","EMISIÓN DESDE KMC SERVER");
	pipe.getElementByName("ffenc_h263").set("gop-size","0");
	pipe.getElementByName("udpsinkVideo").set("host", getIpMC());
	pipe.getElementByName("udpsinkVideo").set("port", getPortVideo());
	pipe.getElementByName("udpsinkVideo").set("sync", "false");
	pipe.getElementByName("udpsinkVideo").set("async", "false");


	final Thread t = new Thread(
			new Runnable()
			{
				public void run()
				{ 
					pipe.setState(State.PLAYING);
					Gst.main();     
					pipe.setState(State.NULL);
				}
			});
	t.start();
	ref_thread=t;
}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 34, Source: Media.java

Example 8: StreamingEncodingH263Screenshot

import org.gstreamer.Gst; // import the package/class this method depends on
private void StreamingEncodingH263Screenshot() {

		Gst.init("GStreamer", new String[0]);

		final Pipeline pipe = Pipeline.launch("ximagesrc name=ximagesrc ! autovideoconvert ! ffenc_h263 name=ffenc_h263 ! video/x-h263, width=704, height=576, framerate=25/1 ! rtph263ppay name=rtph263ppay ! udpsink name=udpsinkVideo");
		ref_pipeline = pipe;

		pipe.getElementByName("ximagesrc").set("show-pointer",false);
		pipe.getElementByName("ffenc_h263").set("gop-size",0);
		pipe.getElementByName("udpsinkVideo").set("host", getIpMC());
		pipe.getElementByName("udpsinkVideo").set("port", getPortVideo());
		pipe.getElementByName("udpsinkVideo").set("sync", "false");
		pipe.getElementByName("udpsinkVideo").set("async", "false");


		final Thread t = new Thread(
				new Runnable()
				{
					public void run()
					{ 
						pipe.setState(State.PLAYING);
						Gst.main();     
						pipe.setState(State.NULL);
					}
				});
		t.start();
		ref_thread=t;


	}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 31, Source: Media.java

Example 9: PlaySdp

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Takes as an argument a URI pointing to the SessionDescription file to play.
 * Example: URI uriSDP = new URI("file:///home/laggc/Escritorio/borrar.sdp");
 * A JFrame will be created in which the media is played.
 * @param uriSDP
 *        URI of the SDP to play.
 */
public static void PlaySdp(URI uriSDP)
{
	Gst.init("PLAY SDP", new String[0]);

	final PlayBin2 playbin = new PlayBin2("PlaySDP");

	playbin.setURI(uriSDP);

	SwingUtilities.invokeLater(new Runnable() {

		public void run() {
			VideoComponent videoComponent = new VideoComponent();
			playbin.setVideoSink(videoComponent.getElement());

			JFrame frame = new JFrame("Player");
			frame.getContentPane().add(videoComponent, BorderLayout.CENTER);
			frame.setPreferredSize(new Dimension(640, 480));
			frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
			frame.pack();
			frame.setVisible(true);
			frame.setLocationRelativeTo(null);
			playbin.setState(State.PLAYING);       
		}
	});

	Gst.main();
	playbin.setState(State.NULL);	
}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 37, Source: SimpleClient.java

Example 10: main

import org.gstreamer.Gst; // import the package/class this method depends on
public static void main(String[] args) {
    //
    // Initialize the gstreamer framework, and let it interpret any command
    // line flags it is interested in.
    //
    args = Gst.init("AudioPlayerMetadata", args);
    
    if (args.length < 1) {
        System.out.println("Usage: AudioPlayer <file to play>");
        System.exit(1);
    }
    //
    // Create a PlayBin to play the media file.  A PlayBin is a Pipeline that
    // creates all the needed elements and automatically links them together.
    //
    PlayBin playbin = new PlayBin("AudioPlayer");
    
    // Make sure a video window does not appear.
    playbin.setVideoSink(ElementFactory.make("fakesink", "videosink"));
    
    // Set the file to play
    playbin.setInputFile(new File(args[0]));
    
    // Listen for metadata (tags)
    playbin.getBus().connect(new Bus.TAG() {

        public void tagsFound(GstObject source, TagList tagList) {
            for (String tagName : tagList.getTagNames()) {
                // Each tag can have multiple values, so print them all.
                for (Object tagData : tagList.getValues(tagName)) {
                    System.out.printf("[%s]=%s\n", tagName, tagData);
                }
            }
        }
    });
    
    // Start the pipeline playing
    playbin.play();
    Gst.main();
    
    // Clean up (gstreamer requires elements to be in State.NULL before disposal)
    playbin.stop();
}
 
Developer: gstreamer-java, Project: gstreamer1.x-java, Lines: 44, Source: AudioPlayerMetadata.java

Example 11: CalculateDuration

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Computes the duration of the media.
 * @return the duration in milliseconds
 */
private long CalculateDuration() {

	long duration;

	Gst.init("GStreamer", new String[0]);

	final Pipeline pipe = Pipeline.launch("filesrc name=filesrc ! decodebin2 ! fakesink");
	ref_pipeline = pipe;

	pipe.getElementByName("filesrc").set("location",getPath());

	final Thread t = new Thread(
			new Runnable()
			{
				public void run()
				{ 
					pipe.setState(State.PLAYING);
					Gst.main();     
					pipe.setState(State.NULL);
				}
			});
	t.start();


	while(pipe.getState()!=State.PAUSED){
		try {
			Thread.sleep(10);
		} catch (InterruptedException e) {}
		pipe.setState(State.PAUSED);
	}

	ClockTime time= pipe.queryDuration();
	duration = time.toMillis();

	t.interrupt();

	logger.info("Calculated duration: " + time.getMinutes() + ":"+time.getSeconds());

	return duration;
}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 45, Source: Media.java
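Example 11 above starts playback in a background thread and runs Gst.main() solely to query the media duration. A lighter variant, sketched below, prerolls the pipeline to PAUSED in the calling thread and skips the main loop entirely; it assumes (as the polling loop above already does) that reaching PAUSED is sufficient for queryDuration() to succeed. Every call used here (Pipeline.launch, setState, getState, queryDuration) appears elsewhere on this page; the class name and file path are placeholders.

import org.gstreamer.ClockTime;
import org.gstreamer.Gst;
import org.gstreamer.Pipeline;
import org.gstreamer.State;

public class DurationProbe {
    // Returns the duration of the given media file in milliseconds.
    static long probeDurationMillis(String path) throws InterruptedException {
        Gst.init("DurationProbe", new String[0]);
        Pipeline pipe = Pipeline.launch("filesrc name=filesrc ! decodebin2 ! fakesink");
        pipe.getElementByName("filesrc").set("location", path);

        // Preroll to PAUSED; no Gst.main() loop is needed just to query the duration.
        pipe.setState(State.PAUSED);
        while (pipe.getState() != State.PAUSED) {
            Thread.sleep(10);
        }

        ClockTime time = pipe.queryDuration();
        pipe.setState(State.NULL);
        return time.toMillis();
    }

    public static void main(String[] args) throws InterruptedException {
        System.out.println(probeDurationMillis("/path/to/media.file") + " ms");
    }
}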

Example 12: StreamingEncodingH264VideoAudio

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Streams video+audio, encoding to H264.
 */
private void StreamingEncodingH264VideoAudio() {

	Gst.init("GStreamer", new String[0]);

	// Create the GStreamer pipeline
	final Pipeline pipe = Pipeline.launch("filesrc name=filesrc ! decodebin name=dec dec. ! queue2 ! x264enc name=x264enc  ! rtph264pay name=rtph264pay ! udpsink name=udpsinkVideo dec. ! queue2 ! audioresample ! audioconvert ! mulawenc ! rtppcmupay  ! udpsink name=udpsinkAudio");
	ref_pipeline = pipe;

	// Set the parameters for the pipeline elements
	pipe.getElementByName("filesrc").set("location",getPath());
	pipe.getElementByName("x264enc").set("ref",refEncoding);
	pipe.getElementByName("x264enc").set("bitrate",bitRateEncoding);
	pipe.getElementByName("rtph264pay").set("config-interval",config_interval);
	pipe.getElementByName("rtph264pay").set("mtu",mtu);
	pipe.getElementByName("udpsinkVideo").set("host", getIpMC());
	pipe.getElementByName("udpsinkVideo").set("port", getPortVideo());
	pipe.getElementByName("udpsinkVideo").set("sync", "true");
	pipe.getElementByName("udpsinkVideo").set("async", "true");
	pipe.getElementByName("udpsinkAudio").set("host", getIpMC());
	pipe.getElementByName("udpsinkAudio").set("port", getPortAudio());
	pipe.getElementByName("udpsinkAudio").set("sync", "true");
	pipe.getElementByName("udpsinkAudio").set("async", "true");


	/* Create a thread to play the video; the main thread then waits so it can
	 * release the GStreamer pipeline afterwards. Execution is not cut short here
	 * because Streaming() had already created a thread for this streaming.
	 */
	final Thread t = new Thread(
			new Runnable()
			{
				public void run()
				{ 
					pipe.setState(State.PLAYING);
					Gst.main();     
					pipe.setState(State.NULL);
				}
			});
	t.start();
	ref_thread=t;


	/* The main thread sleeps for the duration of the video and then
	 * releases its resources */
	try {
		logger.info("START PLAYING");
		Thread.sleep(getDuration());
	} catch (InterruptedException e) {
		logger.info("Interrupted exception:");
		e.printStackTrace();
	}

	logger.info("Reproducción terminada. Borrando streaming:" + getTitle());
	sendTeardown();
	ServerRTSP.INSTANCE.deleteMedia(getTitle());
}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 61, Source: Media.java

Example 13: StreamingEncodingH264Video

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Streams video, encoding to H264.
 */
private void StreamingEncodingH264Video() {

	Gst.init("GStreamer", new String[0]);

	final Pipeline pipe = Pipeline.launch("filesrc name=filesrc ! decodebin name=dec dec. ! queue2 ! x264enc name=x264enc  ! rtph264pay name=rtph264pay ! udpsink name=udpsinkVideo");
	ref_pipeline = pipe;

	pipe.getElementByName("filesrc").set("location",getPath());
	pipe.getElementByName("x264enc").set("ref",refEncoding);
	pipe.getElementByName("x264enc").set("bitrate",bitRateEncoding);
	pipe.getElementByName("rtph264pay").set("config-interval",config_interval);
	pipe.getElementByName("rtph264pay").set("mtu",mtu);
	pipe.getElementByName("udpsinkVideo").set("host", getIpMC());
	pipe.getElementByName("udpsinkVideo").set("port", getPortVideo());
	pipe.getElementByName("udpsinkVideo").set("sync", "true");
	pipe.getElementByName("udpsinkVideo").set("async", "true");


	final Thread t = new Thread(
			new Runnable()
			{
				public void run()
				{ 
					pipe.setState(State.PLAYING);
					Gst.main();     
					pipe.setState(State.NULL);
				}
			});

	t.start();
	ref_thread=t;

	/* The main thread sleeps for the duration of the video and then
	 * releases its resources */
	try {
		logger.info("START PLAYING");
		Thread.sleep(getDuration());
	} catch (InterruptedException e) {
		logger.info("Interrupted exception:");
		e.printStackTrace();
	}

	logger.info("Reproducción terminada. Borrando streaming:" + getTitle());
	sendTeardown();
	ServerRTSP.INSTANCE.deleteMedia(getTitle());
}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 50, Source: Media.java

Example 14: StreamingH264Video

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Streams video directly from a file already
 * encoded in H264.
 */
private void StreamingH264Video() {
	Gst.init("GStreamer", new String[0]);

	final Pipeline pipe = Pipeline.launch("filesrc name=filesrc ! qtdemux name=demux demux.video_00 ! queue2 ! h264parse  ! rtph264pay name=rtph264pay ! udpsink name=udpsinkVideo");
	ref_pipeline = pipe;

	pipe.getElementByName("filesrc").set("location",getPath());
	pipe.getElementByName("rtph264pay").set("config-interval",config_interval);
	pipe.getElementByName("rtph264pay").set("mtu",mtu);
	pipe.getElementByName("udpsinkVideo").set("host", getIpMC());
	pipe.getElementByName("udpsinkVideo").set("port", getPortVideo());
	pipe.getElementByName("udpsinkVideo").set("sync", "true");
	pipe.getElementByName("udpsinkVideo").set("async", "true");


	final Thread t = new Thread(
			new Runnable()
			{
				public void run()
				{ 
					pipe.setState(State.PLAYING);
					Gst.main();     
					pipe.setState(State.NULL);
				}
			});
	t.start();
	ref_thread=t;


	/* The main thread sleeps for the duration of the video and then
	 * releases its resources */
	try {
		logger.info("START PLAYING");
		Thread.sleep(getDuration());
	} catch (InterruptedException e) {
		logger.info("Interrupted exception:");
		e.printStackTrace();
	}

	logger.info("Reproducción terminada. Borrando streaming:" + getTitle());
	sendTeardown();
	ServerRTSP.INSTANCE.deleteMedia(getTitle());

}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 49, Source: Media.java

Example 15: StreamingH264VideoAudio

import org.gstreamer.Gst; // import the package/class this method depends on
/**
 * Streams video+audio directly from a file already
 * encoded in H264.
 */
private void StreamingH264VideoAudio() {
	Gst.init("GStreamer", new String[0]);

	final Pipeline pipe = Pipeline.launch("filesrc name=filesrc ! qtdemux name=demux demux.video_00 ! queue2 ! h264parse  ! rtph264pay name=rtph264pay ! udpsink name=udpsinkVideo demux.audio_00 ! decodebin2 ! queue2 ! audioresample ! audioconvert ! mulawenc ! rtppcmupay  ! udpsink name=udpsinkAudio");
	ref_pipeline = pipe;

	pipe.getElementByName("filesrc").set("location",getPath());
	pipe.getElementByName("rtph264pay").set("config-interval",config_interval);
	pipe.getElementByName("rtph264pay").set("mtu",mtu);
	pipe.getElementByName("udpsinkVideo").set("host", getIpMC());
	pipe.getElementByName("udpsinkVideo").set("port", getPortVideo());
	pipe.getElementByName("udpsinkAudio").set("host", getIpMC());
	pipe.getElementByName("udpsinkAudio").set("port", getPortAudio());

	final Thread t = new Thread(
			new Runnable()
			{
				public void run()
				{ 
					pipe.setState(State.PLAYING);
					Gst.main();     
					pipe.setState(State.NULL);
				}
			});
	t.start();
	ref_thread=t;

	/* The main thread sleeps for the duration of the video and then
	 * releases its resources */
	try {
		logger.info("START PLAYING");
		Thread.sleep(getDuration());

	} catch (InterruptedException e) {
		logger.info("Interrupted exception:");
		e.printStackTrace();
	}

	logger.info("Reproducción terminada. Borrando streaming:" + getTitle());
	sendTeardown();
	ServerRTSP.INSTANCE.deleteMedia(getTitle());

}
 
Developer: laggc, Project: rtsp_multicast_pfc, Lines: 48, Source: Media.java


Note: The org.gstreamer.Gst.main method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the License of the corresponding project; do not reproduce without permission.