本文整理汇总了Java中org.kurento.client.PlayerEndpoint.play方法的典型用法代码示例。如果您正苦于以下问题:Java PlayerEndpoint.play方法的具体用法?Java PlayerEndpoint.play怎么用?Java PlayerEndpoint.play使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.kurento.client.PlayerEndpoint
的用法示例。
在下文中一共展示了PlayerEndpoint.play方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testRtpEndpointSimulatingAndroidSdp
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testRtpEndpointSimulatingAndroidSdp() throws InterruptedException {
  // Source video and an RTP sink, both on the shared test pipeline.
  PlayerEndpoint player = new PlayerEndpoint.Builder(pipeline, URL_BARCODES).build();
  RtpEndpoint rtpEndpoint = new RtpEndpoint.Builder(pipeline).build();

  // SDP offer mimicking a typical Android handset: video-only, recvonly,
  // offering H.264 / MP4V-ES / H.263-1998 payloads.
  String requestSdp = String.join("\r\n",
      "v=0",
      "o=- 12345 12345 IN IP4 95.125.31.136",
      "s=-",
      "c=IN IP4 95.125.31.136",
      "t=0 0",
      "m=video 52126 RTP/AVP 96 97 98",
      "a=rtpmap:96 H264/90000",
      "a=rtpmap:97 MP4V-ES/90000",
      "a=rtpmap:98 H263-1998/90000",
      "a=recvonly",
      "b=AS:384") + "\r\n";

  rtpEndpoint.processOffer(requestSdp);
  player.connect(rtpEndpoint, MediaType.VIDEO);
  player.play();

  // Let a little media flow before the test tears everything down.
  Thread.sleep(2000);
}
示例2: main
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
public static void main(String[] args) throws IOException,
    URISyntaxException, InterruptedException {
  // Connect to the Kurento Media Server.
  KurentoClient kurento = KurentoClient.create("ws://localhost:8888/kurento");

  // Build the media pipeline: player -> face-overlay filter -> HTTP GET endpoint.
  MediaPipeline pipeline = kurento.createMediaPipeline();
  PlayerEndpoint player = new PlayerEndpoint.Builder(pipeline,
      "http://files.kurento.org/video/fiwarecut.mp4").build();
  FaceOverlayFilter filter = new FaceOverlayFilter.Builder(pipeline).build();
  filter.setOverlayedImage(
      "http://files.kurento.org/imgs/mario-wings.png", -0.2F, -1.1F, 1.6F, 1.6F);
  HttpGetEndpoint http = new HttpGetEndpoint.Builder(pipeline).build();
  player.connect(filter);
  filter.connect(http);

  // Terminate the demo when the clip finishes playing.
  player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      System.out.println("The playing has finished");
      System.exit(0);
    }
  });

  // Start playback and open the resulting HTTP URL in the default desktop browser.
  player.play();
  String videoUrl = http.getUrl();
  Desktop.getDesktop().browse(new URI(videoUrl));

  // Give the EndOfStream listener (subscribed above) up to a minute to fire.
  Thread.sleep(60000);
}
示例3: testEventWithoutTag
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testEventWithoutTag() throws Exception {
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch eventReceived = new CountDownLatch(1);

  // Player carrying three tags but WITHOUT enabling sendTagsInEvents,
  // so emitted events are expected to arrive with an empty tag list.
  PlayerEndpoint player = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm").build();
  player.addTag("test_1", "value_1");
  player.addTag("test_2", "value_2");
  player.addTag("test_3", "value_3");

  player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      // Release the latch only when the EOS event carries no tags.
      List<Tag> tags = event.getTags();
      if (tags.size() == 0) {
        eventReceived.countDown();
      }
    }
  });

  player.play();

  // Guard time long enough for the whole video to be reproduced.
  if (!eventReceived.await(TIMEOUT, TimeUnit.SECONDS)) {
    Assert.fail("Event not received");
  }
}
示例4: testPlayerMultiplePause
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testPlayerMultiplePause() throws Exception {
  // Test parameters
  final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/60sec/red.webm";
  final Color expectedColor = Color.RED;
  final int playTimeSeconds = 2;
  final int pauseTimeSeconds = 2;
  final int numPauses = 30;

  // Media pipeline: player -> WebRTC endpoint (browser is receive-only).
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  playerEp.connect(webRtcEp);

  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  playerEp.play();
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  // Repeatedly pause and resume, checking the frame color on each round.
  for (int round = 0; round < numPauses; round++) {
    Assert.assertTrue("The color of the video should be " + expectedColor,
        getPage().similarColor(expectedColor));

    // Pause, hold, then resume and play for a while.
    playerEp.pause();
    Thread.sleep(TimeUnit.SECONDS.toMillis(pauseTimeSeconds));
    playerEp.play();
    Thread.sleep(TimeUnit.SECONDS.toMillis(playTimeSeconds));
  }

  // Release Media Pipeline
  mp.release();
}
示例5: test
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void test() throws InterruptedException {
  // Pipeline with a player reading a local file from disk; the test passes
  // as long as the player does NOT report an error within the guard time.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  String videoPath = "file://" + getTestFilesDiskPath() + "/video/filter/barcodes.webm";
  PlayerEndpoint p = new PlayerEndpoint.Builder(mp, videoPath).build();

  // The latch is released only if an error event arrives.
  final CountDownLatch latch = new CountDownLatch(1);
  p.addErrorListener(new EventListener<ErrorEvent>() {
    @Override
    public void onEvent(ErrorEvent event) {
      // Fixed log-message typo: "Error un player" -> "Error in player".
      log.warn("Error in player: " + event.getDescription());
      latch.countDown();
    }
  });
  p.play();

  // An error event within 5 seconds fails the test.
  if (latch.await(5, TimeUnit.SECONDS)) {
    fail("Player error");
  }

  // Release Media Pipeline
  mp.release();
}
示例6: transactionTest
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void transactionTest() throws InterruptedException, ExecutionException {
  // Pipeline built outside of any transaction.
  MediaPipeline pipeline = kurentoClient.createMediaPipeline();
  PlayerEndpoint player = new PlayerEndpoint.Builder(pipeline,
      "http://" + getTestFilesHttpPath() + "/video/format/small.webm")
          .useEncodedMedia().build();
  HttpPostEndpoint httpEndpoint = new HttpPostEndpoint.Builder(pipeline).build();
  player.connect(httpEndpoint);
  String url = httpEndpoint.getUrl();

  // Explicit transaction: start playback, fetch the URL and release atomically.
  Transaction tx = pipeline.beginTransaction();
  player.play(tx);
  TFuture<String> fUrl = httpEndpoint.getUrl(tx);
  pipeline.release(tx);
  tx.commit();

  // The URL read inside the transaction must match the one read before it.
  assertThat(url, is(fUrl.get()));
}
示例7: creationInTransaction
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void creationInTransaction() throws InterruptedException, ExecutionException {
  // Build the whole pipeline inside a first transaction (tx1).
  Transaction tx1 = kurentoClient.beginTransaction();
  MediaPipeline pipeline = kurentoClient.createMediaPipeline(tx1);
  PlayerEndpoint player = new PlayerEndpoint.Builder(pipeline,
      "http://" + getTestFilesHttpPath() + "/video/format/small.webm")
          .useEncodedMedia().build(tx1);
  HttpPostEndpoint httpEndpoint = new HttpPostEndpoint.Builder(pipeline).build(tx1);
  player.connect(tx1, httpEndpoint);
  TFuture<String> url1 = httpEndpoint.getUrl(tx1);
  tx1.commit();

  // Second explicit transaction (tx2): play, read the URL again, release.
  Transaction tx2 = pipeline.beginTransaction();
  player.play(tx2);
  TFuture<String> url2 = httpEndpoint.getUrl(tx2);
  pipeline.release(tx2);
  tx2.commit();

  // The URL must be stable across both transactions.
  assertThat(url1.get(), is(url2.get()));
}
示例8: testDispatcherPlayer
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testDispatcherPlayer() throws Exception {
  // Pipeline: two players feeding a Dispatcher hub; one WebRTC output port.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint redPlayer = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm").build();
  PlayerEndpoint bluePlayer = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/10sec/blue.webm").build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  Dispatcher dispatcher = new Dispatcher.Builder(mp).build();
  HubPort redPort = new HubPort.Builder(dispatcher).build();
  HubPort outputPort = new HubPort.Builder(dispatcher).build();
  HubPort bluePort = new HubPort.Builder(dispatcher).build();
  redPlayer.connect(redPort);
  bluePlayer.connect(bluePort);
  outputPort.connect(webRtcEp);
  dispatcher.connect(redPort, outputPort);

  // End of the second (blue) clip marks the end of the test.
  final CountDownLatch eosLatch = new CountDownLatch(1);
  bluePlayer.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });

  // Test execution
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  redPlayer.play();

  // First the red source must reach the browser...
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  Assert.assertTrue("The color of the video should be red", getPage().similarColor(Color.RED));
  Thread.sleep(5000);

  // ...then the dispatcher is switched to the blue source.
  bluePlayer.play();
  dispatcher.connect(bluePort, outputPort);
  Assert.assertTrue("The color of the video should be blue", getPage().similarColor(Color.BLUE));
  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  // Total observed play time should be close to the expected duration.
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)",
      getPage().compare(PLAYTIME, currentTime));

  // Release Media Pipeline
  mp.release();
}
示例9: testAlphaBlendingPlayer
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testAlphaBlendingPlayer() throws Exception {
  // Pipeline: three colored players blended by an AlphaBlending hub into one
  // stream, rendered through a receive-only WebRTC endpoint.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerRed =
      new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/30sec/red.webm")
          .build();
  PlayerEndpoint playerGreen = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/30sec/green.webm").build();
  PlayerEndpoint playerBlue = new PlayerEndpoint.Builder(mp,
      "http://" + getTestFilesHttpPath() + "/video/30sec/blue.webm").build();
  AlphaBlending alphaBlending = new AlphaBlending.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(alphaBlending).build();
  HubPort hubPort2 = new HubPort.Builder(alphaBlending).build();
  HubPort hubPort3 = new HubPort.Builder(alphaBlending).build();
  playerRed.connect(hubPort1);
  playerGreen.connect(hubPort2);
  playerBlue.connect(hubPort3);
  HubPort hubPort4 = new HubPort.Builder(alphaBlending).build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  hubPort4.connect(webRtcEp);

  // Red is the master (full frame); green overlaid at the top-left corner,
  // blue overlaid around the center of the frame.
  alphaBlending.setMaster(hubPort1, 1);
  alphaBlending.setPortProperties(0F, 0F, 8, 0.2F, 0.2F, hubPort2);
  alphaBlending.setPortProperties(0.4F, 0.4F, 7, 0.2F, 0.2F, hubPort3);

  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.VIDEO_ONLY, WebRtcMode.RCV_ONLY);
  playerRed.play();
  playerGreen.play();
  playerBlue.play();
  Thread.sleep(2000);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  Thread.sleep(2000);

  // Assertions. Messages fixed to match the asserted colors: the top-left
  // overlay is GREEN (hubPort2), not blue as the old message claimed.
  Assert.assertTrue("Upper left part of the video must be green",
      getPage().similarColorAt(Color.GREEN, 0, 0));
  Assert.assertTrue("Lower right part of the video must be red",
      getPage().similarColorAt(Color.RED, 315, 235));
  Assert.assertTrue("Center of the video must be blue",
      getPage().similarColorAt(Color.BLUE, 160, 120));

  // Move the blue overlay to the lower-right corner and re-check.
  // alphaBlending.setMaster(hubPort3, 1);
  alphaBlending.setPortProperties(0.8F, 0.8F, 7, 0.2F, 0.2F, hubPort3);
  Assert.assertTrue("Lower right part of the video must be blue",
      getPage().similarColorAt(Color.BLUE, 315, 235));
  Assert.assertTrue("Center of the video must be red",
      getPage().similarColorAt(Color.RED, 160, 120));
  Thread.sleep(PLAYTIME * 1000);
}
示例10: testEventTag
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testEventTag() throws Exception {
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  // One latch count per expected tag (TAG_SIZE tags must all be seen).
  final CountDownLatch eventReceived = new CountDownLatch(TAG_SIZE);

  // Player carrying three tags WITH sendTagsInEvents enabled, so the EOS
  // event is expected to deliver every tag back.
  PlayerEndpoint player =
      new PlayerEndpoint.Builder(mp, "http://" + getTestFilesHttpPath() + "/video/10sec/red.webm")
          .build();
  player.addTag("test_1", "value_1");
  player.addTag("test_2", "value_2");
  player.addTag("test_3", "value_3");
  player.setSendTagsInEvents(true);

  player.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      // Count down once per tag whose key/value pair matches what was set.
      // Idiom fix: use equals() for string equality instead of compareTo()==0.
      List<Tag> tags = event.getTags();
      for (Tag tag : tags) {
        if ("test_1".equals(tag.getKey())) {
          if ("value_1".equals(tag.getValue())) {
            eventReceived.countDown();
          }
        } else if ("test_2".equals(tag.getKey())) {
          if ("value_2".equals(tag.getValue())) {
            eventReceived.countDown();
          }
        } else if ("test_3".equals(tag.getKey())) {
          if ("value_3".equals(tag.getValue())) {
            eventReceived.countDown();
          }
        }
      }
    }
  });

  player.play();

  // Guard time to reproduce the whole video
  if (!eventReceived.await(TIMEOUT, TimeUnit.SECONDS)) {
    Assert.fail("Event not received");
  }
}
示例11: doTest
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
public void doTest(PlayerOperation playerOperation) throws Exception {
  // Verifies that no EndOfStream event is fired after the player is stopped
  // or released mid-stream (depending on playerOperation).
  // Test data
  final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/format/small.webm";
  final int guardTimeSeconds = 10;

  // Media Pipeline: player -> WebRTC endpoint (browser receive-only).
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  playerEp.connect(webRtcEp);

  // Subscription to EOS event. A one-element array lets the anonymous
  // listener write a flag readable after the guard time.
  final boolean[] eos = new boolean[1];
  eos[0] = false;
  playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      log.error("EOS event received: {} {}", event.getType(), event.getTimestamp());
      eos[0] = true;
    }
  });

  // WebRTC in receive-only mode
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  playerEp.play();
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  // Stop or release the stream, then wait the guard time.
  switch (playerOperation) {
    case STOP:
      playerEp.stop();
      break;
    case RELEASE:
      playerEp.release();
      break;
  }
  Thread.sleep(TimeUnit.SECONDS.toMillis(guardTimeSeconds));

  // Verify that the EOS event has not been received (message typos fixed:
  // "has not being received" -> "has been received", "happenning" -> "happening").
  Assert.assertFalse("EOS event has been received. "
      + "This should not be happening because the stream has been stopped", eos[0]);

  // Release Media Pipeline
  mp.release();
}
示例12: testPlayerFaceOverlay
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testPlayerFaceOverlay() throws Exception {
  // Test parameters: overlay a red square on the detected face and verify
  // its color shows up at the expected frame coordinates.
  final int playTimeSeconds = 30;
  final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/filter/fiwarecut.mp4";
  final Color expectedColor = Color.RED;
  final int xExpectedColor = 420;
  final int yExpectedColor = 45;
  final String imgOverlayUrl = "http://" + getTestFilesHttpPath() + "/img/red-square.png";
  final float offsetXPercent = -0.2F;
  final float offsetYPercent = -1.2F;
  final float widthPercent = 1.6F;
  final float heightPercent = 1.6F;

  // Pipeline: player -> face-overlay filter -> WebRTC endpoint.
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  FaceOverlayFilter filter = new FaceOverlayFilter.Builder(mp).build();
  filter.setOverlayedImage(imgOverlayUrl, offsetXPercent, offsetYPercent, widthPercent,
      heightPercent);
  playerEp.connect(filter);
  filter.connect(webRtcEp);

  // Latch released on end of stream.
  final CountDownLatch eosLatch = new CountDownLatch(1);
  playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });

  // Test execution
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  playerEp.play();

  // Assertions: media arrives, overlay color is present, EOS fires, and the
  // observed play time matches the clip length.
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  Assert.assertTrue(
      "Color at coordinates " + xExpectedColor + "," + yExpectedColor + " must be "
          + expectedColor,
      getPage().similarColorAt(expectedColor, xExpectedColor, yExpectedColor));
  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + playTimeSeconds + " sec, real: " + currentTime + " sec)",
      getPage().compare(playTimeSeconds, currentTime));

  // Release Media Pipeline
  mp.release();
}
示例13: testPlayerSwitch
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
@Test
public void testPlayerSwitch() throws Exception {
// Sequentially connects five different players (varied containers/codecs plus
// one RTSP source) to a single WebRTC endpoint, giving each PLAYTIME/N_PLAYER
// seconds, then checks media arrived and the total play time is as expected.
// Media Pipeline
MediaPipeline mp = kurentoClient.createMediaPipeline();
PlayerEndpoint playerRed = new PlayerEndpoint.Builder(mp,
"http://" + getTestFilesHttpPath() + "/video/format/chrome.mp4").build();
WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
// Test execution
getPage().subscribeEvents("playing");
getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
// red
playerRed.connect(webRtcEndpoint);
playerRed.play();
// NOTE(review): "playing" was already subscribed above — this second
// subscription looks redundant; confirm whether it is intentional.
getPage().subscribeEvents("playing");
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
PlayerEndpoint playerGreen = new PlayerEndpoint.Builder(mp,
"http://" + getTestFilesHttpPath() + "/video/format/fiware.mkv").build();
// green
playerGreen.connect(webRtcEndpoint);
playerGreen.play();
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// blue
PlayerEndpoint playerBlue = new PlayerEndpoint.Builder(mp,
"http://" + getTestFilesHttpPath() + "/video/format/sintel.webm").build();
playerBlue.connect(webRtcEndpoint);
playerBlue.play();
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// ball
PlayerEndpoint playerBall = new PlayerEndpoint.Builder(mp,
"http://" + getTestFilesHttpPath() + "/video/format/rabbit.mov").build();
playerBall.connect(webRtcEndpoint);
playerBall.play();
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// rtsp (live source from a public Axis camera)
PlayerEndpoint playerRtsp =
new PlayerEndpoint.Builder(mp, "rtsp://195.55.223.100/axis-media/media.amp").build();
playerRtsp.connect(webRtcEndpoint);
playerRtsp.play();
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// Assertions: media was received and the total observed play time is within
// tolerance of PLAYTIME.
Assert.assertTrue("Not received media (timeout waiting playing event)",
getPage().waitForEvent("playing"));
double currentTime = getPage().getCurrentTime();
Assert.assertTrue(
"Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)",
getPage().compare(PLAYTIME, currentTime));
// Release Media Pipeline
mp.release();
}
示例14: doTestWithPlayer
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
// Records a stream that alternates WebRTC -> player -> WebRTC through a
// PassThrough element, then verifies the recording (codecs, colors, duration).
// Timing of each connection is measured to adjust the expected play time.
// Media Pipeline #1
getPage(BROWSER2).close();
MediaPipeline mp = kurentoClient.createMediaPipeline();
final CountDownLatch errorPipelinelatch = new CountDownLatch(1);
// Any pipeline error stores its description and releases the latch; the
// final assertion checks the latch count is still 1 (no error occurred).
mp.addErrorListener(new EventListener<ErrorEvent>() {
@Override
public void onEvent(ErrorEvent event) {
msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
errorPipelinelatch.countDown();
}
});
WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrlPlayer).build();
String recordingFile = getRecordUrl(extension);
RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
.withMediaProfile(mediaProfileSpecType).build();
// The recorder is fed through a PassThrough so sources can be swapped
// without reconnecting the recorder itself.
PassThrough passThrough = new PassThrough.Builder(mp).build();
passThrough.connect(recorderEp);
// Test execution
getPage(BROWSER1).subscribeLocalEvents("playing");
long startWebrtc = System.currentTimeMillis();
getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
// Phase 1: browser WebRTC feeds the recorder.
webRtcEpRed.connect(passThrough);
recorderEp.record();
Assert.assertTrue("Not received media (timeout waiting playing event)",
getPage(BROWSER1).waitForEvent("playing"));
long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// Phase 2: switch the source to the player endpoint.
startWebrtc = System.currentTimeMillis();
playerEp.play();
playerEp.connect(passThrough);
long playerEpConnectionTime = System.currentTimeMillis() - startWebrtc;
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// Phase 3: switch back to the WebRTC source.
webRtcEpRed.connect(passThrough);
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// Release Media Pipeline #1
saveGstreamerDot(mp);
// stopAndWait is asynchronous: the latch is released on success OR error so
// the test never blocks past the timeout.
final CountDownLatch recorderLatch = new CountDownLatch(1);
recorderEp.stopAndWait(new Continuation<Void>() {
@Override
public void onSuccess(Void result) throws Exception {
recorderLatch.countDown();
}
@Override
public void onError(Throwable cause) throws Exception {
recorderLatch.countDown();
}
});
Assert.assertTrue("Not stop properly",
recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
mp.release();
// Count still 1 means no pipeline error event ever fired.
Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);
// Expected play time includes the measured connection delays.
final long playtime = PLAYTIME
+ TimeUnit.MILLISECONDS.toSeconds((2 * webrtcRedConnectionTime) + playerEpConnectionTime);
checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
expectedAudioCodec);
success = true;
}
开发者ID:Kurento,项目名称:kurento-java,代码行数:80,代码来源:RecorderSwitchWebRtcWebRtcPlayerWithPassThroughTest.java
示例15: doTestWithPlayer
import org.kurento.client.PlayerEndpoint; //导入方法依赖的package包/类
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
// Records a stream that alternates WebRTC -> player -> WebRTC, connecting
// each source DIRECTLY to the recorder (no PassThrough), then verifies the
// recording (codecs, colors, duration). Connection times are measured to
// adjust the expected play time.
// Media Pipeline #1
getPage(BROWSER2).close();
MediaPipeline mp = kurentoClient.createMediaPipeline();
final CountDownLatch errorPipelinelatch = new CountDownLatch(1);
// Any pipeline error stores its description and releases the latch; the
// final assertion checks the latch count is still 1 (no error occurred).
mp.addErrorListener(new EventListener<ErrorEvent>() {
@Override
public void onEvent(ErrorEvent event) {
msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
errorPipelinelatch.countDown();
}
});
WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrlPlayer).build();
String recordingFile = getRecordUrl(extension);
RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
.withMediaProfile(mediaProfileSpecType).build();
// Test execution
getPage(BROWSER1).subscribeLocalEvents("playing");
long startWebrtc = System.currentTimeMillis();
getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
// Phase 1: browser WebRTC feeds the recorder directly.
webRtcEpRed.connect(recorderEp);
recorderEp.record();
Assert.assertTrue("Not received media (timeout waiting playing event)",
getPage(BROWSER1).waitForEvent("playing"));
long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// Phase 2: switch the recorder's source to the player endpoint.
startWebrtc = System.currentTimeMillis();
playerEp.play();
playerEp.connect(recorderEp);
long playerEpConnectionTime = System.currentTimeMillis() - startWebrtc;
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// Phase 3: switch back to the WebRTC source.
webRtcEpRed.connect(recorderEp);
Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);
// Release Media Pipeline #1
saveGstreamerDot(mp);
// stopAndWait is asynchronous: the latch is released on success OR error so
// the test never blocks past the timeout.
final CountDownLatch recorderLatch = new CountDownLatch(1);
recorderEp.stopAndWait(new Continuation<Void>() {
@Override
public void onSuccess(Void result) throws Exception {
recorderLatch.countDown();
}
@Override
public void onError(Throwable cause) throws Exception {
recorderLatch.countDown();
}
});
Assert.assertTrue("Not stop properly",
recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
mp.release();
// Count still 1 means no pipeline error event ever fired.
Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);
// Expected play time includes the measured connection delays.
final long playtime = PLAYTIME
+ TimeUnit.MILLISECONDS.toSeconds((2 * webrtcRedConnectionTime) + playerEpConnectionTime);
checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
expectedAudioCodec);
success = true;
}