This article collects typical usage examples of the Java method org.webrtc.VideoTrack.addRenderer. If you are unsure what VideoTrack.addRenderer does, how to call it, or want to see it used in practice, the curated code examples below should help. You can also explore the enclosing class, org.webrtc.VideoTrack, for more context.
Seven VideoTrack.addRenderer code examples are shown below, sorted by popularity by default.
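Before the examples, here is the basic attach/detach pattern they all share. This is a minimal sketch against the legacy org.webrtc Android API (the generation that still exposes VideoTrack.addRenderer; newer releases replaced it with addSink), and the videoTrack and remoteView names are placeholders rather than identifiers from any example below:

    // Attach: wrap a VideoRenderer.Callbacks implementation (e.g. a
    // SurfaceViewRenderer) in a VideoRenderer and hand it to the track.
    VideoRenderer renderer = new VideoRenderer(remoteView);
    videoTrack.addRenderer(renderer);    // frames start flowing to remoteView
    // Detach: remove the renderer before tearing down the view, then
    // release the native wrapper.
    videoTrack.removeRenderer(renderer);
    renderer.dispose();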
Example 1: addLocalStreams

import org.webrtc.VideoTrack; // import the class the method depends on

private void addLocalStreams(Context context) {
    AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);
    localStream = peerConnectionFactory.createLocalMediaStream("ARDAMS");
    if (!audioOnly) {
        VideoCapturer capturer = getVideoCapturer();
        MediaConstraints videoConstraints = new MediaConstraints();
        videoSource = peerConnectionFactory.createVideoSource(capturer, videoConstraints);
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        localStream.addTrack(videoTrack);
    }
    localStream.addTrack(peerConnectionFactory.createAudioTrack("ARDAMSa0",
            peerConnectionFactory.createAudioSource(new MediaConstraints())));
    peerConnection.addStream(localStream);
}
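Two details worth noting in this example: the audio route is configured up front (in-call mode when a wired headset is attached, speakerphone otherwise), and the renderer is attached to the local track before the stream is handed to the PeerConnection, so the self-view preview starts as soon as the capturer delivers frames.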
Example 2: onAddStream

import org.webrtc.VideoTrack; // import the class the method depends on

@Override
public void onAddStream(MediaStream stream) {
    Log.w(TAG, "onAddStream:" + stream);
    for (AudioTrack audioTrack : stream.audioTracks) {
        audioTrack.setEnabled(true);
    }
    if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
        VideoTrack videoTrack = stream.videoTracks.getFirst();
        videoTrack.setEnabled(true);
        videoTrack.addRenderer(new VideoRenderer(remoteRenderer));
    }
}
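In this generation of the Android bindings, stream.videoTracks is a LinkedList, which is why getFirst() is available; the size() == 1 check keeps the callback safe for audio-only streams and skips streams carrying an unexpected number of video tracks.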
Example 3: run

import org.webrtc.VideoTrack; // import the class the method depends on

public void run() {
    Log.d(TAG, "Attaching VideoRenderer to remote stream (" + remoteStream + ")");
    // Check if the remote stream has a video track
    if (remoteStream.videoTracks.size() == 1) {
        // Get the video track
        VideoTrack remoteVideoTrack = remoteStream.videoTracks.get(0);
        // Enable the video track only if video rendering is enabled
        remoteVideoTrack.setEnabled(renderVideo);
        // If this render surface is already attached to a track, detach it first
        VideoRenderer videoRenderer = remoteVideoRenderers.get(remoteRender);
        if (videoRenderer != null) {
            MediaStream mediaStream = remoteVideoMediaStreams.get(videoRenderer);
            if (mediaStream != null) {
                VideoTrack videoTrack = remoteVideoTracks.get(mediaStream);
                if (videoTrack != null) {
                    videoTrack.removeRenderer(videoRenderer);
                }
            }
        }
        VideoRenderer newVideoRenderer = new VideoRenderer(remoteRender);
        remoteVideoTrack.addRenderer(newVideoRenderer);
        remoteVideoRenderers.put(remoteRender, newVideoRenderer);
        remoteVideoMediaStreams.put(newVideoRenderer, remoteStream);
        remoteVideoTracks.put(remoteStream, remoteVideoTrack);
        Log.d(TAG, "Attached.");
    }
}
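The three maps (render surface to VideoRenderer, VideoRenderer to MediaStream, MediaStream to VideoTrack) implement a renderer swap: if the target surface was previously attached to another stream's track, the stale VideoRenderer is removed from that track before a fresh one is created and attached, so a single Callbacks surface never feeds from two tracks at once.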
Example 4: onCreate

import org.webrtc.VideoTrack; // import the class the method depends on

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    // Initialize the PeerConnectionFactory globals.
    // Params are context, initAudio, initVideo and videoCodecHwAcceleration.
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);
    // Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);
    // Now create a VideoCapturer instance; its callback methods are available if needed.
    VideoCapturer videoCapturerAndroid = createVideoCapturer();
    // Create MediaConstraints - useful for specifying video and audio constraints.
    MediaConstraints constraints = new MediaConstraints();
    // Create a VideoSource instance
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    // Create an AudioSource instance
    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);
    // Start capturing video from the camera: width, height and fps.
    videoCapturerAndroid.startCapture(1000, 1000, 30);
    // Create the surface renderer, init it and add the renderer to the track.
    SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
    videoView.setMirror(true);
    EglBase rootEglBase = EglBase.create();
    videoView.init(rootEglBase.getEglBaseContext(), null);
    localVideoTrack.addRenderer(new VideoRenderer(videoView));
}
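One thing this activity omits is the matching teardown. Below is a minimal sketch of it, under the assumption that the capturer, track, renderer, and view created in onCreate were saved as fields; the field names (localVideoTrack, localRenderer, videoView, videoCapturerAndroid) are placeholders, not part of the original example:

    @Override
    protected void onDestroy() {
        // Detach the renderer before releasing the view.
        localVideoTrack.removeRenderer(localRenderer);
        videoView.release();
        try {
            videoCapturerAndroid.stopCapture(); // blocks until the camera stops
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        super.onDestroy();
    }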
Example 5: onCreate

import org.webrtc.VideoTrack; // import the class the method depends on

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);
    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context
    peerConnectionFactory = new PeerConnectionFactory();
    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);
    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);
    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);
    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);
    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);
    // Set up the GL renderer GUI and attach renderers to the tracks.
    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
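The four integers passed to VideoRendererGui.createGui are x, y, width, and height expressed as percentages of the enclosing GLSurfaceView, so otherPeerRenderer (0, 0, 100, 100) fills the whole view while the local preview (50, 50, 50, 50) occupies the bottom-right quadrant; the final boolean requests mirroring.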
Example 6: onIceServers

import org.webrtc.VideoTrack; // import the class the method depends on

@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
    factory = new PeerConnectionFactory();
    MediaConstraints pcConstraints = appRtcClient.pcConstraints();
    pcConstraints.optional.add(
            new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
    pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);
    createDataChannelToRegressionTestBug2302(pc); // See method comment.
    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    // Logging.enableTracing(
    //     "logcat:",
    //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
    //     Logging.Severity.LS_SENSITIVE);
    {
        final PeerConnection finalPC = pc;
        final Runnable repeatedStatsLogger = new Runnable() {
            public void run() {
                synchronized (quit[0]) {
                    if (quit[0]) {
                        return;
                    }
                    final Runnable runnableThis = this;
                    if (hudView.getVisibility() == View.INVISIBLE) {
                        vsv.postDelayed(runnableThis, 1000);
                        return;
                    }
                    boolean success = finalPC.getStats(new StatsObserver() {
                        public void onComplete(final StatsReport[] reports) {
                            runOnUiThread(new Runnable() {
                                public void run() {
                                    updateHUD(reports);
                                }
                            });
                            for (StatsReport report : reports) {
                                Log.d(TAG, "Stats: " + report.toString());
                            }
                            vsv.postDelayed(runnableThis, 1000);
                        }
                    }, null);
                    if (!success) {
                        throw new RuntimeException("getStats() returned false!");
                    }
                }
            }
        };
        vsv.postDelayed(repeatedStatsLogger, 1000);
    }
    {
        logAndToast("Creating local video source...");
        MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
        if (appRtcClient.videoConstraints() != null) {
            VideoCapturer capturer = getVideoCapturer();
            videoSource = factory.createVideoSource(capturer, appRtcClient.videoConstraints());
            VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
            videoTrack.addRenderer(new VideoRenderer(localRender));
            lMS.addTrack(videoTrack);
        }
        if (appRtcClient.audioConstraints() != null) {
            lMS.addTrack(factory.createAudioTrack(
                    "ARDAMSa0",
                    factory.createAudioSource(appRtcClient.audioConstraints())));
        }
        pc.addStream(lMS, new MediaConstraints());
    }
    logAndToast("Waiting for ICE candidates...");
}
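The first scoped block sets up a self-re-posting stats loop: getStats() is polled once a second, the reports are mirrored into the HUD on the UI thread, and the synchronized quit[0] flag gives the activity's teardown path a way to stop the loop without racing the pending postDelayed callbacks. Polling is skipped entirely while the HUD view is invisible.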
Example 7: onIceServers

import org.webrtc.VideoTrack; // import the class the method depends on

@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
    factory = new PeerConnectionFactory();
    pc = factory.createPeerConnection(
            iceServers, appRtcClient.pcConstraints(), pcObserver);
    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    // Logging.enableTracing(
    //     "logcat:",
    //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
    //     Logging.Severity.LS_SENSITIVE);
    {
        final PeerConnection finalPC = pc;
        final Runnable repeatedStatsLogger = new Runnable() {
            public void run() {
                synchronized (quit[0]) {
                    if (quit[0]) {
                        return;
                    }
                    final Runnable runnableThis = this;
                    boolean success = finalPC.getStats(new StatsObserver() {
                        public void onComplete(StatsReport[] reports) {
                            for (StatsReport report : reports) {
                                Log.d(TAG, "Stats: " + report.toString());
                            }
                            vsv.postDelayed(runnableThis, 10000);
                        }
                    }, null);
                    if (!success) {
                        throw new RuntimeException("getStats() returned false!");
                    }
                }
            }
        };
        vsv.postDelayed(repeatedStatsLogger, 10000);
    }
    {
        logAndToast("Creating local video source...");
        MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
        if (appRtcClient.videoConstraints() != null) {
            VideoCapturer capturer = getVideoCapturer();
            videoSource = factory.createVideoSource(capturer, appRtcClient.videoConstraints());
            VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
            videoTrack.addRenderer(new VideoRenderer(new VideoCallbacks(
                    vsv, VideoStreamsView.Endpoint.LOCAL)));
            lMS.addTrack(videoTrack);
        }
        lMS.addTrack(factory.createAudioTrack("ARDAMSa0"));
        pc.addStream(lMS, new MediaConstraints());
    }
    logAndToast("Waiting for ICE candidates...");
}
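Two signs of the early libjingle API era in this last example: createAudioTrack is called with only a track ID and no explicit AudioSource (unlike the previous example), and addStream still takes a MediaConstraints argument. Both forms were dropped in later revisions of the Android bindings.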