This article collects typical usage examples of the Java method org.kurento.client.WebRtcEndpoint.connect. If you have been wondering what WebRtcEndpoint.connect does, how it is used, and what real-world code calls look like, the curated examples below should help. You can also explore further usage examples of the containing class, org.kurento.client.WebRtcEndpoint.
The following presents 15 code examples of the WebRtcEndpoint.connect method, sorted by popularity by default.
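Before the examples, here is a minimal sketch of the pattern they all share (the names are hypothetical; the enclosing class and method, the KurentoClient instance named kurento, and a running Kurento Media Server are assumed rather than shown). connect links the output of one media element to the input of another within the same MediaPipeline, either for all media or restricted to a single MediaType:

import org.kurento.client.KurentoClient;
import org.kurento.client.MediaPipeline;
import org.kurento.client.MediaType;
import org.kurento.client.WebRtcEndpoint;

// 'kurento' is assumed to be an already-connected KurentoClient instance.
MediaPipeline pipeline = kurento.createMediaPipeline();
WebRtcEndpoint source = new WebRtcEndpoint.Builder(pipeline).build();
WebRtcEndpoint sink = new WebRtcEndpoint.Builder(pipeline).build();

source.connect(sink);                  // connect audio and video
source.connect(sink, MediaType.AUDIO); // connect only the audio track

pipeline.release(); // free the server-side resources when done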
Example 1: connectAccordingToProfile
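This helper, which appears to come from a Kurento recording tutorial, feeds a RecorderEndpoint track by track: the MediaType overload of connect attaches only the audio and/or video stream that the requested recording profile calls for.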
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
private void connectAccordingToProfile(WebRtcEndpoint webRtcEndpoint, RecorderEndpoint recorder,
    MediaProfileSpecType profile) {
  switch (profile) {
    case WEBM:
      webRtcEndpoint.connect(recorder, MediaType.AUDIO);
      webRtcEndpoint.connect(recorder, MediaType.VIDEO);
      break;
    case WEBM_AUDIO_ONLY:
      webRtcEndpoint.connect(recorder, MediaType.AUDIO);
      break;
    case WEBM_VIDEO_ONLY:
      webRtcEndpoint.connect(recorder, MediaType.VIDEO);
      break;
    default:
      throw new UnsupportedOperationException("Unsupported profile for this tutorial: " + profile);
  }
}
Example 2: start
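A WebSocket handler, apparently from the Kurento magic-mirror tutorial: the browser's stream is routed through a FaceOverlayFilter and back, so connect is called once in each direction between the endpoint and the filter.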
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
private void start(WebSocketSession session, JsonObject jsonMessage) {
  try {
    // Media Logic (Media Pipeline and Elements)
    MediaPipeline pipeline = kurento.createMediaPipeline();
    pipelines.put(session.getId(), pipeline);

    WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
    FaceOverlayFilter faceOverlayFilter = new FaceOverlayFilter.Builder(pipeline).build();
    faceOverlayFilter.setOverlayedImage("http://files.kurento.org/imgs/mario-wings.png",
        -0.35F, -1.2F, 1.6F, 1.6F);

    webRtcEndpoint.connect(faceOverlayFilter);
    faceOverlayFilter.connect(webRtcEndpoint);

    // SDP negotiation (offer and answer)
    String sdpOffer = jsonMessage.get("sdpOffer").getAsString();
    String sdpAnswer = webRtcEndpoint.processOffer(sdpOffer);

    // Sending response back to client
    JsonObject response = new JsonObject();
    response.addProperty("id", "startResponse");
    response.addProperty("sdpAnswer", sdpAnswer);
    session.sendMessage(new TextMessage(response.toString()));
  } catch (Throwable t) {
    sendError(session, t.getMessage());
  }
}
Example 3: processRequest
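A Spring MVC handler showing the simplest possible topology: the endpoint is connected to itself, so the browser receives its own stream back in loopback.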
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@RequestMapping(value = "/helloworld", method = RequestMethod.POST)
private String processRequest(@RequestBody String sdpOffer) throws IOException {
  // Media Logic
  MediaPipeline pipeline = kurento.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
  webRtcEndpoint.connect(webRtcEndpoint);

  // SDP negotiation (offer and answer)
  String responseSdp = webRtcEndpoint.processOffer(sdpOffer);
  return responseSdp;
}
Example 4: testWebRtcStabilityLoopback
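A stability test that keeps a single endpoint in loopback while a LatencyController measures end-to-end latency between the local and remote video tags.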
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testWebRtcStabilityLoopback() throws Exception {
  final int playTime = Integer.parseInt(
      System.getProperty("test.webrtcstability.playtime", String.valueOf(DEFAULT_PLAYTIME)));

  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
  webRtcEndpoint.connect(webRtcEndpoint);

  // WebRTC
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.VIDEO_ONLY, WebRtcMode.SEND_RCV);

  // Latency assessment
  LatencyController cs = new LatencyController("WebRTC in loopback");
  getPage().activateLatencyControl(VideoTagType.LOCAL.getId(), VideoTagType.REMOTE.getId());
  cs.checkLatency(playTime, TimeUnit.MINUTES, getPage());

  // Release Media Pipeline
  mp.release();

  // Draw latency results (PNG chart and CSV file)
  cs.drawChart(getDefaultOutputFile(".png"), 500, 270);
  cs.writeCsv(getDefaultOutputFile(".csv"));
  cs.logLatencyErrorrs();
}
Example 5: testWebRtcStabilityBack2Back
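A back-to-back variant of the previous test: two endpoints in the same pipeline are cross-connected, one browser sends while another receives, and latency is measured between the two.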
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testWebRtcStabilityBack2Back() throws Exception {
  final int playTime = Integer.parseInt(System
      .getProperty("test.webrtc.stability.back2back.playtime", String.valueOf(DEFAULT_PLAYTIME)));

  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint1 = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEndpoint2 = new WebRtcEndpoint.Builder(mp).build();
  webRtcEndpoint1.connect(webRtcEndpoint2);
  webRtcEndpoint2.connect(webRtcEndpoint1);

  // Latency control
  LatencyController cs = new LatencyController("WebRTC latency control");

  // WebRTC
  getPresenter().subscribeLocalEvents("playing");
  getPresenter().initWebRtc(webRtcEndpoint1, WebRtcChannel.VIDEO_ONLY, WebRtcMode.SEND_ONLY);
  getViewer().subscribeEvents("playing");
  getViewer().initWebRtc(webRtcEndpoint2, WebRtcChannel.VIDEO_ONLY, WebRtcMode.RCV_ONLY);

  // Latency assessment
  cs.checkLatency(playTime, TimeUnit.MINUTES, getPresenter(), getViewer());

  // Release Media Pipeline
  mp.release();

  // Draw latency results (PNG chart and CSV file)
  cs.drawChart(getDefaultOutputFile(".png"), 500, 270);
  cs.writeCsv(getDefaultOutputFile(".csv"));
  cs.logLatencyErrorrs();
}
Example 6: testDispatcherPlayer
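Two endpoints built with useDataChannels() are cross-connected, and the test verifies that data-channel messages sent from each browser arrive at the other.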
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testDispatcherPlayer() throws Exception {
  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).useDataChannels().build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp).useDataChannels().build();
  webRtcEp.connect(webRtcEp2);
  webRtcEp2.connect(webRtcEp);

  // Test execution
  getPage(0).initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY, true);
  getPage(1).initWebRtc(webRtcEp2, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY, true);
  Thread.sleep(8000);

  for (int i = 0; i < TIMES; i++) {
    String messageSentBrowser0 = "Data sent from the browser0. Message" + i;
    String messageSentBrowser1 = "Data sent from the browser1. Message" + i;
    getPage(0).sendDataByDataChannel(messageSentBrowser0);
    getPage(1).sendDataByDataChannel(messageSentBrowser1);
    Assert.assertTrue("The message should be: " + messageSentBrowser1,
        getPage(0).compareDataChannelMessage(messageSentBrowser1));
    Assert.assertTrue("The message should be: " + messageSentBrowser0,
        getPage(1).compareDataChannelMessage(messageSentBrowser0));
  }

  // Release Media Pipeline
  mp.release();
}
Example 7: testWebRtcFaceOverlay
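A functional test of the face-overlay topology: endpoint and filter are connected in both directions, and assertions cover both the play time and the video color.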
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testWebRtcFaceOverlay() throws InterruptedException {
  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
  FaceOverlayFilter faceOverlayFilter = new FaceOverlayFilter.Builder(mp).build();
  webRtcEndpoint.connect(faceOverlayFilter);
  faceOverlayFilter.connect(webRtcEndpoint);

  // Start WebRTC and wait for playing event
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  // Guard time to play the video
  int playTime = Integer.parseInt(
      System.getProperty("test.webrtcfaceoverlay.playtime", String.valueOf(DEFAULT_PLAYTIME)));
  waitSeconds(playTime);

  // Assertions
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + playTime + " sec, real: " + currentTime + " sec)",
      getPage().compare(playTime, currentTime));
  Assert.assertTrue("The color of the video should be green",
      getPage().similarColor(CHROME_VIDEOTEST_COLOR));

  // Release Media Pipeline
  mp.release();
}
Example 8: doTest
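A parameterized loopback test that, besides play time, optionally asserts video color and perceived audio quality (PESQ MOS) when an audio URL is supplied.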
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
public void doTest(BrowserType browserType, String videoPath, String audioUrl, Color color)
    throws InterruptedException {
  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
  webRtcEndpoint.connect(webRtcEndpoint);

  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  // Wait until event playing in the remote stream
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  // Guard time to play the video
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME));

  // Assert play time
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue("Error in play time of player (expected: " + PLAYTIME + " sec, real: "
      + currentTime + " sec)", getPage().compare(PLAYTIME, currentTime));

  // Assert color
  if (color != null) {
    Assert.assertTrue("The color of the video should be " + color,
        getPage().similarColor(color));
  }

  // Assert audio quality
  if (audioUrl != null) {
    float realPesqMos = Ffmpeg.getPesqMos(audioUrl, AUDIO_SAMPLE_RATE);
    Assert.assertTrue("Bad perceived audio quality: PESQ MOS too low (expected=" + MIN_PESQ_MOS
        + ", real=" + realPesqMos + ")", realPesqMos >= MIN_PESQ_MOS);
  }

  // Release Media Pipeline
  mp.release();
}
Example 9: createSdpResponseForUser
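From a group-call application (the code resembles kurento-room): a sending endpoint is created lazily per remote peer and connected to that peer's receiving endpoint, with explicit handling of the race where two threads create the endpoint simultaneously and of a receiving endpoint that has already been released.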
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
private String createSdpResponseForUser(RoomParticipant sender, String sdpOffer) {
  WebRtcEndpoint receivingEndpoint = sender.getReceivingEndpoint();
  if (receivingEndpoint == null) {
    log.warn("PARTICIPANT {}: Trying to connect to a user without a receiving endpoint "
        + "(it seems it is not yet fully connected)", this.name);
    return null;
  }

  if (sender.getName().equals(name)) {
    // FIXME: Use another message type for receiving sdp offer
    log.debug("PARTICIPANT {}: configuring loopback", this.name);
    return receivingEndpoint.processOffer(sdpOffer);
  }

  if (sendingEndpoints.get(sender.getName()) != null) {
    log.warn("PARTICIPANT {}: There is already a sending endpoint to user {} "
        + "when trying to create another one", this.name, sender.getName());
    return null;
  }

  log.debug("PARTICIPANT {}: Creating a sending endpoint to user {}", this.name,
      sender.getName());
  WebRtcEndpoint sendingEndpoint = new WebRtcEndpoint.Builder(pipeline).build();
  WebRtcEndpoint oldSendingEndpoint =
      sendingEndpoints.putIfAbsent(sender.getName(), sendingEndpoint);
  if (oldSendingEndpoint != null) {
    log.warn(
        "PARTICIPANT {}: 2 threads have simultaneously created a sending endpoint for user {}",
        this.name, sender.getName());
    return null;
  }
  log.debug("PARTICIPANT {}: Created sending endpoint for user {}", this.name, sender.getName());

  try {
    receivingEndpoint = sender.getReceivingEndpoint();
    if (receivingEndpoint != null) {
      receivingEndpoint.connect(sendingEndpoint);
      return sendingEndpoint.processOffer(sdpOffer);
    }
  } catch (KurentoServerException e) {
    // TODO Check object status when KurentoClient sets this info in the object
    if (e.getCode() == 40101) {
      log.warn("Receiving endpoint was released when trying to connect a sending endpoint to it",
          e);
    } else {
      log.error("Exception connecting receiving endpoint to sending endpoint", e);
      sendingEndpoint.release(new Continuation<Void>() {
        @Override
        public void onSuccess(Void result) throws Exception {
        }

        @Override
        public void onError(Throwable cause) throws Exception {
          log.error("Exception releasing WebRtcEndpoint", cause);
        }
      });
    }
    sendingEndpoints.remove(sender.getName());
    releaseEndpoint(sender.getName(), sendingEndpoint);
  }
  return null;
}
Example 10: testAlphaBlendingWebRtc
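Three senders are connected to HubPorts of an AlphaBlending hub and a fourth endpoint receives the blended output; the test asserts the color layout before and after one port is moved.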
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testAlphaBlendingWebRtc() throws Exception {
  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();

  AlphaBlending alphaBlending = new AlphaBlending.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(alphaBlending).build();
  HubPort hubPort2 = new HubPort.Builder(alphaBlending).build();
  HubPort hubPort3 = new HubPort.Builder(alphaBlending).build();

  webRtcEpRed.connect(hubPort1);
  webRtcEpGreen.connect(hubPort2);
  webRtcEpBlue.connect(hubPort3);

  WebRtcEndpoint webRtcEpAlphaBlending = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort4 = new HubPort.Builder(alphaBlending).build();
  hubPort4.connect(webRtcEpAlphaBlending);

  alphaBlending.setMaster(hubPort1, 1);
  alphaBlending.setPortProperties(0F, 0F, 8, 0.2F, 0.2F, hubPort2);
  alphaBlending.setPortProperties(0.4F, 0.4F, 7, 0.2F, 0.2F, hubPort3);

  getPage(BROWSER1).subscribeLocalEvents("playing");
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER2).subscribeLocalEvents("playing");
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER3).subscribeLocalEvents("playing");
  getPage(BROWSER3).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER4).subscribeEvents("playing");
  getPage(BROWSER4).initWebRtc(webRtcEpAlphaBlending, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.RCV_ONLY);

  // Assertions
  Assert.assertTrue("Upper left part of the video must be green",
      getPage(BROWSER4).similarColorAt(Color.GREEN, 0, 0));
  Assert.assertTrue("Lower right part of the video must be red",
      getPage(BROWSER4).similarColorAt(Color.RED, 315, 235));
  Assert.assertTrue("Center of the video must be blue",
      getPage(BROWSER4).similarColorAt(Color.BLUE, 160, 120));

  // alphaBlending.setMaster(hubPort3, 1);
  alphaBlending.setPortProperties(0.8F, 0.8F, 7, 0.2F, 0.2F, hubPort3);
  Assert.assertTrue("Lower right part of the video must be blue",
      getPage(BROWSER4).similarColorAt(Color.BLUE, 315, 235));
  Assert.assertTrue("Center of the video must be red",
      getPage(BROWSER4).similarColorAt(Color.RED, 160, 120));

  Thread.sleep(PLAYTIME * 1000);
}
Example 11: testWebRtcStabilityRtpH264
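A WebRTC-to-RTP interoperability test: the browser's stream goes out through one RtpEndpoint, crosses a manually negotiated RTP session whose SDP offer is mangled to remove certain codecs, and comes back to the browser through the WebRtcEndpoint.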
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testWebRtcStabilityRtpH264() throws Exception {
  final int playTime =
      Integer.parseInt(System.getProperty("test.webrtc.stability.switch.webrtc2rtp.playtime",
          String.valueOf(DEFAULT_PLAYTIME)));

  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
  RtpEndpoint rtpEndpoint1 = new RtpEndpoint.Builder(mp).build();
  RtpEndpoint rtpEndpoint2 = new RtpEndpoint.Builder(mp).build();
  webRtcEndpoint.connect(rtpEndpoint1);
  rtpEndpoint2.connect(webRtcEndpoint);

  // RTP session (rtpEndpoint1 --> rtpEndpoint2)
  String sdpOffer = rtpEndpoint1.generateOffer();
  log.debug("SDP offer in rtpEndpoint1\n{}", sdpOffer);

  // SDP mangling
  sdpOffer = SdpUtils.mangleSdp(sdpOffer, REMOVE_CODECS);
  log.debug("SDP offer in rtpEndpoint1 after mangling\n{}", sdpOffer);

  String sdpAnswer1 = rtpEndpoint2.processOffer(sdpOffer);
  log.debug("SDP answer in rtpEndpoint2\n{}", sdpAnswer1);
  String sdpAnswer2 = rtpEndpoint1.processAnswer(sdpAnswer1);
  log.debug("SDP answer in rtpEndpoint1\n{}", sdpAnswer2);

  // Latency controller
  LatencyController cs = new LatencyController();

  // WebRTC
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.VIDEO_ONLY, WebRtcMode.SEND_RCV);

  // Assertion: wait to playing event in browser
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  // Latency assessment
  getPage().activateLatencyControl(VideoTagType.LOCAL.getId(), VideoTagType.REMOTE.getId());
  cs.checkLatency(playTime, TimeUnit.MINUTES, getPage());

  // Release Media Pipeline
  mp.release();

  // Draw latency results (PNG chart and CSV file)
  cs.drawChart(getDefaultOutputFile(".png"), 500, 270);
  cs.writeCsv(getDefaultOutputFile(".csv"));
  cs.logLatencyErrorrs();
}
Example 12: doTestWithPlayer
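A recorder test that switches sources on the fly: connect is first called from the WebRtcEndpoint to the RecorderEndpoint, then from a PlayerEndpoint, then from the WebRtcEndpoint again, and the resulting file is checked for codecs, colors, and duration.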
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
public void doTestWithPlayer(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension, String mediaUrlPlayer) throws Exception {
  // Media Pipeline #1
  getPage(BROWSER2).close();
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);
  mp.addErrorListener(new EventListener<ErrorEvent>() {
    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });

  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrlPlayer).build();
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  webRtcEpRed.connect(recorderEp);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();
  playerEp.play();
  playerEp.connect(recorderEp);
  long playerEpConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  webRtcEpRed.connect(recorderEp);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  final long playtime = PLAYTIME
      + TimeUnit.MILLISECONDS.toSeconds((2 * webrtcRedConnectionTime) + playerEpConnectionTime);
  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
Example 13: doTest
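Records a loopback WebRTC session to a URI that deliberately contains repeated path separators, then asserts the codecs of the resulting file once it exists on disk.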
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  String multiSlashes = File.separator + File.separator + File.separator;
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();

  String recordingFile = getRecordUrl(extension).replace(getSimpleTestName(),
      new Date().getTime() + File.separator + getSimpleTestName());
  String recordingFileWithMultiSlashes = recordingFile.replace(File.separator, multiSlashes);
  log.debug("The path with multiple slashes is {} ", recordingFileWithMultiSlashes);

  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFileWithMultiSlashes)
      .withMediaProfile(mediaProfileSpecType).build();
  webRtcEp.connect(webRtcEp);
  webRtcEp.connect(recorderEp);

  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);
  recorderEp.record();

  // Wait until event playing in the remote stream
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  Thread.sleep(SECONDS.toMillis(PLAYTIME));

  recorderEp.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  // Wait until file exists
  waitForFileExists(recordingFile);
  AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec);
  mp.release();
}
Example 14: testCompositeRecorder
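Four senders feed a Composite hub, three of them contributing audio only via the MediaType.AUDIO overload; the mixed output is recorded, and a second pipeline plays the recording back for verification.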
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testCompositeRecorder() throws Exception {
  // MediaPipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  Composite composite = new Composite.Builder(mp).build();

  HubPort hubPort1 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpRed.connect(hubPort1);

  HubPort hubPort2 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpGreen.connect(hubPort2, MediaType.AUDIO);

  HubPort hubPort3 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpBlue.connect(hubPort3, MediaType.AUDIO);

  HubPort hubPort4 = new HubPort.Builder(composite).build();
  WebRtcEndpoint webRtcEpWhite = new WebRtcEndpoint.Builder(mp).build();
  webRtcEpWhite.connect(hubPort4, MediaType.AUDIO);

  String recordingFile = getDefaultOutputFile(EXTENSION_WEBM);
  RecorderEndpoint recorderEp =
      new RecorderEndpoint.Builder(mp, Protocol.FILE + recordingFile).build();
  HubPort hubPort5 = new HubPort.Builder(composite).build();
  hubPort5.connect(recorderEp);

  // WebRTC browsers
  getPage(BROWSER2).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER3).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER4).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER5).initWebRtc(webRtcEpWhite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  recorderEp.record();
  Thread.sleep(PLAYTIME * 1000);

  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER1).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Media Pipeline #2
  MediaPipeline mp2 = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp2 =
      new PlayerEndpoint.Builder(mp2, Protocol.FILE + recordingFile).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build();
  playerEp2.connect(webRtcEp2);

  // Playing the recorded file
  launchBrowser(mp2, webRtcEp2, playerEp2, null, EXPECTED_VIDEO_CODEC_WEBM,
      EXPECTED_AUDIO_CODEC_WEBM, recordingFile, Color.RED, 0, 0, PLAYTIME);

  // Release Media Pipeline #2
  mp2.release();
  success = true;
}
Example 15: testCompositeWebRtc
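Four senders are composited into a single mixed stream that a fifth browser receives; after the quadrant colors are asserted, a GStreamerFilter (videobalance) is inserted between one sender and its HubPort to turn that quadrant gray.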
import org.kurento.client.WebRtcEndpoint; // import the package/class this method depends on
@Test
public void testCompositeWebRtc() throws Exception {
  // Media Pipeline
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();

  Composite composite = new Composite.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(composite).build();
  HubPort hubPort2 = new HubPort.Builder(composite).build();
  HubPort hubPort3 = new HubPort.Builder(composite).build();

  webRtcEpRed.connect(hubPort1);
  webRtcEpGreen.connect(hubPort2);
  webRtcEpBlue.connect(hubPort3);

  WebRtcEndpoint webRtcEpWhite = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort4 = new HubPort.Builder(composite).build();
  webRtcEpWhite.connect(hubPort4);

  WebRtcEndpoint webRtcEpComposite = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort5 = new HubPort.Builder(composite).build();
  hubPort5.connect(webRtcEpComposite);

  // WebRTC browsers
  getPage(BROWSER2).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER3).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER4).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER5).initWebRtc(webRtcEpWhite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER1).subscribeEvents("playing");
  getPage(BROWSER1).initWebRtc(webRtcEpComposite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.RCV_ONLY);

  // Assertions
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  Assert.assertTrue("Upper left part of the video must be red",
      getPage(BROWSER1).similarColorAt(Color.RED, 0, 0));
  Assert.assertTrue("Upper right part of the video must be green",
      getPage(BROWSER1).similarColorAt(Color.GREEN, 450, 0));
  Assert.assertTrue("Lower left part of the video must be blue",
      getPage(BROWSER1).similarColorAt(Color.BLUE, 0, 450));
  Assert.assertTrue("Lower right part of the video must be white",
      getPage(BROWSER1).similarColorAt(Color.WHITE, 450, 450));

  // Finally, a black & white filter is connected to one WebRTC
  GStreamerFilter bwFilter =
      new GStreamerFilter.Builder(mp, "videobalance saturation=0.0").build();
  webRtcEpRed.connect(bwFilter);
  bwFilter.connect(hubPort1);
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME));
  Assert.assertTrue("When connecting the filter, the upper left part of the video must be gray",
      getPage(BROWSER1).similarColorAt(new Color(75, 75, 75), 0, 0));

  // Release Media Pipeline
  mp.release();
}