本文整理汇总了Java中org.kurento.client.PlayerEndpoint.addEndOfStreamListener方法的典型用法代码示例。如果您正苦于以下问题:Java PlayerEndpoint.addEndOfStreamListener方法的具体用法?Java PlayerEndpoint.addEndOfStreamListener怎么用?Java PlayerEndpoint.addEndOfStreamListener使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.kurento.client.PlayerEndpoint
的用法示例。
在下文中一共展示了PlayerEndpoint.addEndOfStreamListener方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: main
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
public static void main(String[] args) throws IOException,
    URISyntaxException, InterruptedException {
  // Connect to the Kurento Media Server (WebSocket control channel).
  final KurentoClient kurento = KurentoClient
      .create("ws://localhost:8888/kurento");
  // Media pipeline: player -> face-overlay filter -> HTTP GET endpoint.
  final MediaPipeline pipeline = kurento.createMediaPipeline();
  PlayerEndpoint player = new PlayerEndpoint.Builder(pipeline,
      "http://files.kurento.org/video/fiwarecut.mp4").build();
  FaceOverlayFilter filter = new FaceOverlayFilter.Builder(pipeline)
      .build();
  // Offsets and sizes are fractions relative to the detected face box.
  filter.setOverlayedImage(
      "http://files.kurento.org/imgs/mario-wings.png", -0.2F, -1.1F,
      1.6F, 1.6F);
  HttpGetEndpoint http = new HttpGetEndpoint.Builder(pipeline).build();
  // Connecting media elements
  player.connect(filter);
  filter.connect(http);
  // On end of stream, release server-side resources before exiting.
  // (Fix: the original called System.exit(0) without releasing the
  // pipeline or destroying the client, leaking KMS resources.)
  player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      System.out.println("The playing has finished");
      pipeline.release();
      kurento.destroy();
      System.exit(0);
    }
  });
  // Playing media and opening the default desktop browser
  player.play();
  String videoUrl = http.getUrl();
  Desktop.getDesktop().browse(new URI(videoUrl));
  // Guard time to wait for the EndOfStream event subscribed above.
  Thread.sleep(60000);
}
示例2: testEventWithoutTag
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testEventWithoutTag() throws Exception {
  // Tags are added to the player but setSendTagsInEvents is left at its
  // default (false), so the EndOfStream event must carry NO tags.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch eventReceived = new CountDownLatch(1);
  PlayerEndpoint player =
      new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm")
          .build();
  player.addTag("test_1", "value_1");
  player.addTag("test_2", "value_2");
  player.addTag("test_3", "value_3");
  player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      // Only count down when the event arrived with an empty tag list.
      List<Tag> tags = event.getTags();
      if (tags.isEmpty()) {
        eventReceived.countDown();
      }
    }
  });
  player.play();
  // Guard time to reproduce the whole video
  if (!eventReceived.await(TIMEOUT, TimeUnit.SECONDS)) {
    Assert.fail("Event not received");
  }
  // Fix: release the media pipeline (the original test leaked it;
  // sibling tests in this suite release theirs).
  mp.release();
}
示例3: launchBrowser
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
/**
 * Plays {@code playerEp} into {@code webRtcEp} (browser in receive-only
 * mode), optionally recording via {@code recorderEp}, and asserts media
 * arrival, expected color, EOS reception and total play time.
 */
private void launchBrowser(WebRtcEndpoint webRtcEp, PlayerEndpoint playerEp,
    RecorderEndpoint recorderEp) throws InterruptedException {
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  // Fix: subscribe to EndOfStream BEFORE starting playback. The original
  // registered the listener after play(), which can miss an EOS fired
  // before the subscription completes.
  final CountDownLatch eosLatch = new CountDownLatch(1);
  playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });
  playerEp.play();
  if (recorderEp != null) {
    recorderEp.record();
  }
  // Assertions
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  Assert.assertTrue("The color of the video should be black",
      getPage().similarColor(Color.BLACK));
  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)",
      getPage().compare(PLAYTIME, currentTime));
}
示例4: testDispatcherPlayer
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testDispatcherPlayer() throws Exception {
  // Pipeline: two players feed a Dispatcher hub; one hub port is routed
  // to the WebRTC endpoint that the browser renders.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint redPlayer =
      new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm")
          .build();
  PlayerEndpoint bluePlayer = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/10sec/blue.webm").build();
  WebRtcEndpoint webRtc = new WebRtcEndpoint.Builder(mp).build();
  Dispatcher dispatcher = new Dispatcher.Builder(mp).build();
  HubPort redPort = new HubPort.Builder(dispatcher).build();
  HubPort outPort = new HubPort.Builder(dispatcher).build();
  HubPort bluePort = new HubPort.Builder(dispatcher).build();
  redPlayer.connect(redPort);
  bluePlayer.connect(bluePort);
  outPort.connect(webRtc);
  // Initially route the red stream to the output port.
  dispatcher.connect(redPort, outPort);
  // EOS of the blue (second) player marks the end of the media.
  final CountDownLatch eosLatch = new CountDownLatch(1);
  bluePlayer.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });
  // Start the browser in receive-only mode and play the red stream.
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtc, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  redPlayer.play();
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  Assert.assertTrue("The color of the video should be red", getPage().similarColor(Color.RED));
  Thread.sleep(5000);
  // Switch the dispatcher over to the blue stream and verify the change.
  bluePlayer.play();
  dispatcher.connect(bluePort, outPort);
  Assert.assertTrue("The color of the video should be blue", getPage().similarColor(Color.BLUE));
  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)",
      getPage().compare(PLAYTIME, currentTime));
  // Release Media Pipeline
  mp.release();
}
示例5: testEventTag
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testEventTag() throws Exception {
  // With setSendTagsInEvents(true) the EndOfStream event must carry every
  // tag previously added to the player; the latch counts one per tag.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch eventReceived = new CountDownLatch(TAG_SIZE);
  PlayerEndpoint player =
      new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm")
          .build();
  player.addTag("test_1", "value_1");
  player.addTag("test_2", "value_2");
  player.addTag("test_3", "value_3");
  player.setSendTagsInEvents(true);
  player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      // Idiom fix: use equals() instead of compareTo(...) == 0 to check
      // string equality.
      List<Tag> tags = event.getTags();
      for (Tag tag : tags) {
        if ("test_1".equals(tag.getKey()) && "value_1".equals(tag.getValue())) {
          eventReceived.countDown();
        } else if ("test_2".equals(tag.getKey()) && "value_2".equals(tag.getValue())) {
          eventReceived.countDown();
        } else if ("test_3".equals(tag.getKey()) && "value_3".equals(tag.getValue())) {
          eventReceived.countDown();
        }
      }
    }
  });
  player.play();
  // Guard time to reproduce the whole video
  if (!eventReceived.await(TIMEOUT, TimeUnit.SECONDS)) {
    Assert.fail("Event not received");
  }
  // Fix: release the media pipeline (the original test leaked it;
  // sibling tests in this suite release theirs).
  mp.release();
}
示例6: testRepositoryRecorder
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testRepositoryRecorder() throws Exception {
// Latch released when the recorder's async stopAndWait() completes,
// whether it succeeds or fails.
final CountDownLatch recorderLatch = new CountDownLatch(1);
// Media Pipeline: the player feeds both a WebRTC endpoint (browser
// playback) and a recorder writing to a repository-backed recorder URL.
MediaPipeline mp = kurentoClient.createMediaPipeline();
PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp,
"http://" + getTestFilesHttpPath() + "/video/10sec/ball.webm").build();
WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build();
RepositoryItem repositoryItem = repository.createRepositoryItem();
RepositoryHttpRecorder recorder = repositoryItem.createRepositoryHttpRecorder();
RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recorder.getURL()).build();
playerEp.connect(webRtcEp1);
playerEp.connect(recorderEp);
// Latch released when the player signals end of stream.
final CountDownLatch eosLatch = new CountDownLatch(1);
playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
@Override
public void onEvent(EndOfStreamEvent event) {
eosLatch.countDown();
}
});
// Test execution #1. Play the video while it is recorded
launchBrowser(webRtcEp1, playerEp, recorderEp);
// Wait for EOS
Assert.assertTrue("Not received EOS event in player",
eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
// Stop the recorder asynchronously; both callbacks release the latch so
// the test cannot hang even if stopping fails.
recorderEp.stopAndWait(new Continuation<Void>() {
@Override
public void onSuccess(Void result) throws Exception {
recorderLatch.countDown();
}
@Override
public void onError(Throwable cause) throws Exception {
recorderLatch.countDown();
}
});
Assert.assertTrue("Not stop properly",
recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
// Release the pipeline and give the repository a moment to settle
// (presumably to finish flushing the recorded item — TODO confirm).
mp.release();
Thread.sleep(500);
}
示例7: doTest
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
/**
 * Checks that no EndOfStream event is delivered once the player has been
 * stopped or released (depending on {@code playerOperation}).
 */
public void doTest(PlayerOperation playerOperation) throws Exception {
  // Test data
  final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/format/small.webm";
  final int guardTimeSeconds = 10;
  // Pipeline: player -> WebRTC endpoint rendered by the browser.
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  PlayerEndpoint player = new PlayerEndpoint.Builder(pipeline, mediaUrl).build();
  WebRtcEndpoint webRtc = new WebRtcEndpoint.Builder(pipeline).build();
  player.connect(webRtc);
  // Flag flipped by the EOS listener; must remain false after stop/release.
  final boolean[] eosReceived = { false };
  player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      log.error("EOS event received: {} {}", event.getType(), event.getTimestamp());
      eosReceived[0] = true;
    }
  });
  // WebRTC in receive-only mode
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtc, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  player.play();
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  // Stop or release the stream, then wait a guard time.
  if (playerOperation == PlayerOperation.STOP) {
    player.stop();
  } else if (playerOperation == PlayerOperation.RELEASE) {
    player.release();
  }
  Thread.sleep(TimeUnit.SECONDS.toMillis(guardTimeSeconds));
  // EOS must NOT have arrived once the player was stopped/released.
  Assert.assertFalse("EOS event has been received. "
      + "This should not be happenning because the stream has been stopped", eosReceived[0]);
  // Release Media Pipeline
  pipeline.release();
}
示例8: testPlayerFaceOverlay
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testPlayerFaceOverlay() throws Exception {
// Test data
final int playTimeSeconds = 30;
final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/filter/fiwarecut.mp4";
// Expected color of the overlaid red square at the given pixel.
final Color expectedColor = Color.RED;
final int xExpectedColor = 420;
final int yExpectedColor = 45;
final String imgOverlayUrl = "http://" + getTestFilesHttpPath() + "/img/red-square.png";
// Overlay offsets/sizes are fractions relative to the detected face box.
final float offsetXPercent = -0.2F;
final float offsetYPercent = -1.2F;
final float widthPercent = 1.6F;
final float heightPercent = 1.6F;
// Media Pipeline: player -> face overlay filter -> WebRTC endpoint.
MediaPipeline mp = kurentoClient.createMediaPipeline();
PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build();
WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
FaceOverlayFilter filter = new FaceOverlayFilter.Builder(mp).build();
filter.setOverlayedImage(imgOverlayUrl, offsetXPercent, offsetYPercent, widthPercent,
heightPercent);
playerEp.connect(filter);
filter.connect(webRtcEp);
// Latch released when the player signals end of stream.
final CountDownLatch eosLatch = new CountDownLatch(1);
playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
@Override
public void onEvent(EndOfStreamEvent event) {
eosLatch.countDown();
}
});
// Test execution: the browser receives the filtered stream (RCV_ONLY).
getPage().subscribeEvents("playing");
getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
playerEp.play();
// Assertions
Assert.assertTrue("Not received media (timeout waiting playing event)",
getPage().waitForEvent("playing"));
// The overlay must paint the expected color at the probe coordinates.
Assert.assertTrue(
"Color at coordinates " + xExpectedColor + "," + yExpectedColor + " must be "
+ expectedColor,
getPage().similarColorAt(expectedColor, xExpectedColor, yExpectedColor));
Assert.assertTrue("Not received EOS event in player",
eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
// Play time should roughly match the media duration.
double currentTime = getPage().getCurrentTime();
Assert.assertTrue(
"Error in play time (expected: " + playTimeSeconds + " sec, real: " + currentTime + " sec)",
getPage().compare(playTimeSeconds, currentTime));
// Release Media Pipeline
mp.release();
}
示例9: doTest
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
/**
 * Records a played video with the given media profile, then plays the
 * recording back in a second pipeline and verifies it with the same
 * browser checks (codecs, color, play time — see launchBrowser overload).
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
String expectedAudioCodec, String extension) throws Exception {
// Media Pipeline #1: player feeds both the browser and the recorder.
MediaPipeline mp = kurentoClient.createMediaPipeline();
PlayerEndpoint playerEp =
new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build();
WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build();
String recordingFile = getRecordUrl(extension);
RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
.withMediaProfile(mediaProfileSpecType).build();
playerEp.connect(webRtcEp1);
playerEp.connect(recorderEp);
// Latch released when the player signals end of stream.
final CountDownLatch eosLatch = new CountDownLatch(1);
playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
@Override
public void onEvent(EndOfStreamEvent event) {
eosLatch.countDown();
}
});
// Test execution #1. Play the video while it is recorded
launchBrowser(mp, webRtcEp1, playerEp, recorderEp, expectedVideoCodec, expectedAudioCodec,
recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME);
// Wait for EOS
Assert.assertTrue("No EOS event", eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
// Release Media Pipeline #1
mp.release();
// Reloading browser
getPage().reload();
// Media Pipeline #2: play back the file recorded in phase #1.
MediaPipeline mp2 = kurentoClient.createMediaPipeline();
PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp2, recordingFile).build();
WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build();
playerEp2.connect(webRtcEp2);
// Playing the recording
launchBrowser(null, webRtcEp2, playerEp2, null, expectedVideoCodec, expectedAudioCodec,
recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME);
// Release Media Pipeline #2
mp2.release();
// Mark the test as successful (presumably read by suite teardown —
// TODO confirm against the enclosing class).
success = true;
}
示例10: checkRecordingFile
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
/**
 * Plays a previously recorded file through a fresh media pipeline and
 * verifies media arrival, expected colors, audio continuity, EOS, play
 * time, codecs and duration of the recording.
 */
protected void checkRecordingFile(String recordingFile, String browserName,
Color[] expectedColors, long playTime, String expectedVideoCodec, String expectedAudioCodec)
throws InterruptedException {
// Checking continuity of the audio
Timer gettingStats = new Timer();
final CountDownLatch errorContinuityAudiolatch = new CountDownLatch(1);
// Block until the recording actually exists before trying to play it.
waitForFileExists(recordingFile);
MediaPipeline mp = kurentoClient.createMediaPipeline();
PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, recordingFile).build();
WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
playerEp.connect(webRtcEp);
// Playing the recording
WebRtcTestPage checkPage = getPage(browserName);
// Double the threshold time — presumably playback of recordings starts
// more slowly than live media; TODO confirm.
checkPage.setThresholdTime(checkPage.getThresholdTime() * 2);
checkPage.subscribeEvents("playing");
checkPage.initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
// Latch released when the player signals end of stream.
final CountDownLatch eosLatch = new CountDownLatch(1);
playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
@Override
public void onEvent(EndOfStreamEvent event) {
eosLatch.countDown();
}
});
playerEp.play();
// Assertions in recording
final String messageAppend = "[played file with media pipeline]";
Assert.assertTrue(
"Not received media in the recording (timeout waiting playing event) " + messageAppend,
checkPage.waitForEvent("playing"));
// Sample inbound stats every 200 ms (after a 100 ms delay); the task
// trips errorContinuityAudiolatch when it detects an audio gap — see
// the assertion message below and CheckAudioTimerTask.
checkPage.activatePeerConnectionInboundStats("webRtcPeer.peerConnection");
gettingStats.schedule(new CheckAudioTimerTask(errorContinuityAudiolatch, checkPage), 100, 200);
for (Color color : expectedColors) {
Assert.assertTrue("The color of the recorded video should be " + color + " " + messageAppend,
checkPage.similarColorAt(color, 50, 50));
}
Assert.assertTrue("Not received EOS event in player",
eosLatch.await(checkPage.getTimeout(), TimeUnit.SECONDS));
gettingStats.cancel();
double currentTime = checkPage.getCurrentTime();
Assert.assertTrue("Error in play time in the recorded video (expected: " + playTime
+ " sec, real: " + currentTime + " sec) " + messageAppend,
checkPage.compare(playTime, currentTime));
// Count still at 1 means the audio checker never detected a stall.
Assert.assertTrue("Check audio. There were more than 2 seconds without receiving packets",
errorContinuityAudiolatch.getCount() == 1);
AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec);
AssertMedia.assertDuration(recordingFile, TimeUnit.SECONDS.toMillis(playTime),
TimeUnit.SECONDS.toMillis(checkPage.getThresholdTime()));
mp.release();
}
示例11: testFaceOverlayFilter
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
/**
 * Verifies that a {@link FaceOverlayFilter} can be created in the KMS and
 * wired after a {@link PlayerEndpoint} that feeds it media. The test
 * depends on the player working correctly and firing its EndOfStream
 * event.
 *
 * @throws InterruptedException
 */
@Test
public void testFaceOverlayFilter() throws InterruptedException {
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(pipeline, URL_POINTER_DETECTOR).build();
  playerEp.connect(overlayFilter);
  // Block until the player reports end of stream.
  AsyncEventManager<EndOfStreamEvent> eosManager =
      new AsyncEventManager<>("EndOfStream event");
  playerEp.addEndOfStreamListener(eosManager.getMediaEventListener());
  playerEp.play();
  eosManager.waitForResult();
  // Tear down the server-side element.
  playerEp.stop();
  playerEp.release();
}