本文整理汇总了Java中org.webrtc.PeerConnectionFactory.initializeAndroidGlobals方法的典型用法代码示例。如果您正苦于以下问题:Java PeerConnectionFactory.initializeAndroidGlobals方法的具体用法?Java PeerConnectionFactory.initializeAndroidGlobals怎么用?Java PeerConnectionFactory.initializeAndroidGlobals使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.webrtc.PeerConnectionFactory
的用法示例。
在下文中一共展示了PeerConnectionFactory.initializeAndroidGlobals方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: onCreate
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Application entry point: runs once when the process is created.
 * NOTE(review): the initialize* calls look order-sensitive (logging and
 * dependency injection before the managers that presumably use them) —
 * confirm before reordering.
 */
@Override
public void onCreate() {
super.onCreate();
initializeRandomNumberFix();
initializeLogging();
initializeDependencyInjection();
initializeJobManager();
initializeExpiringMessageManager();
initializeGcmCheck();
initializeSignedPreKeyCheck();
initializePeriodicTasks();
initializeCircumvention();
// WebRTC Android globals are only set up on API 11+ (Honeycomb and newer).
// Args: context, initializeAudio, initializeVideo, videoCodecHwAcceleration.
if (Build.VERSION.SDK_INT >= 11) {
PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);
}
}
示例2: initializeWebRtc
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * One-time WebRTC audio configuration for this process (API 11+ only).
 * Applies per-device audio workarounds before initializing the Android
 * globals (context, audio=true, video=true, hwAcceleration=true).
 *
 * Note: the original used double-brace initialization ({@code new HashSet<>(){{...}}}),
 * which creates an anonymous inner class holding a hidden reference to the
 * enclosing instance; replaced with plain local sets.
 */
private void initializeWebRtc() {
    // Models whose hardware acoustic echo canceler is known to misbehave;
    // for these we switch to WebRTC's software-based AEC instead.
    Set<String> hardwareAecBlacklist = new HashSet<>();
    hardwareAecBlacklist.add("Pixel");
    hardwareAecBlacklist.add("Pixel XL");

    // Only these models are allowed to use OpenSL ES audio; every other
    // device gets blacklisted from OpenSL ES usage below.
    Set<String> openSlEsWhitelist = new HashSet<>();
    openSlEsWhitelist.add("Pixel");
    openSlEsWhitelist.add("Pixel XL");

    if (Build.VERSION.SDK_INT >= 11) {
        if (hardwareAecBlacklist.contains(Build.MODEL)) {
            WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
        }
        if (!openSlEsWhitelist.contains(Build.MODEL)) {
            WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true);
        }
        PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);
    }
}
示例3: WebRTC
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Builds the WebRTC wrapper: initializes the Android globals, assembles the
 * ICE server list from {@code Config}, creates the factory and the peer
 * connection, and hooks the task's message handler.
 */
WebRTC(WebRTCTask task, MainActivity activity) {
    this.task = task;
    this.activity = activity;

    // Initialize Android globals first; second flag per the workaround in
    // https://bugs.chromium.org/p/webrtc/issues/detail?id=3416
    PeerConnectionFactory.initializeAndroidGlobals(activity, false);

    // ICE servers: always STUN, and TURN only when configured.
    final List<PeerConnection.IceServer> servers = new ArrayList<>();
    servers.add(new org.webrtc.PeerConnection.IceServer("stun:" + Config.STUN_SERVER));
    if (Config.TURN_SERVER != null) {
        servers.add(new org.webrtc.PeerConnection.IceServer("turn:" + Config.TURN_SERVER,
                Config.TURN_USER, Config.TURN_PASS));
    }

    // Factory -> constraints -> peer connection, in that order.
    final PeerConnectionFactory.Options factoryOptions = new PeerConnectionFactory.Options();
    this.factory = new PeerConnectionFactory(factoryOptions);
    this.constraints = new MediaConstraints();
    this.pc = this.factory.createPeerConnection(servers, constraints, new PeerConnectionObserver());

    // Deliver incoming task messages to our handler.
    this.task.setMessageHandler(new TaskMessageHandler());
}
示例4: WebRtcClient
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Creates a signaling client: initializes WebRTC globals, connects to the
 * socket.io host, and seeds the default ICE servers and media constraints.
 */
public WebRtcClient(RtcListener listener, String host, PeerConnectionClient.PeerConnectionParameters params) {
    mListener = listener;
    pcParams = params;

    // Globals: audio + video enabled; HW acceleration per caller params.
    // NOTE(review): passing the listener where a Context is expected —
    // presumably the listener is also a Context; confirm.
    PeerConnectionFactory.initializeAndroidGlobals(listener, true, true,
            params.videoCodecHwAcceleration);
    factory = new PeerConnectionFactory();

    // Wire up the socket.io signaling channel.
    MessageHandler handler = new MessageHandler();
    try {
        client = IO.socket(host);
    } catch (URISyntaxException e) {
        e.printStackTrace();
    }
    client.on("id", handler.onId);
    client.on("message", handler.onMessage);
    client.connect();

    // Public STUN servers used for ICE candidate gathering.
    iceServers.add(new PeerConnection.IceServer("stun:23.21.150.121"));
    iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

    // Receive both audio and video; DTLS-SRTP key agreement enabled.
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
}
示例5: createPeerConnectionFactoryInternal
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Creates the process-wide peer connection factory.
 * Builds the field-trials string (enabling the VP9 trial only for video
 * calls that explicitly selected VP9), initializes the Android globals,
 * and constructs the factory. Failures are reported through the observer
 * rather than thrown.
 *
 * Fix: local {@code field_trials} renamed to camelCase per Java convention;
 * dead commented-out assignment removed.
 */
private void createPeerConnectionFactoryInternal(Context context) {
    Log.d(TAG, "Create peer connection peerConnectionFactory. Use video: " + peerConnectionParameters.videoCallEnabled);

    // Initialize field trials; append the VP9 trial only when this is a
    // video call and VP9 was explicitly chosen.
    String fieldTrials = FIELD_TRIAL_AUTOMATIC_RESIZE;
    if (peerConnectionParameters.videoCallEnabled && peerConnectionParameters.videoCodec != null &&
            peerConnectionParameters.videoCodec.equals(NBMMediaConfiguration.NBMVideoCodec.VP9.toString())) {
        fieldTrials += FIELD_TRIAL_VP9;
    }
    PeerConnectionFactory.initializeFieldTrials(fieldTrials);

    // Audio + video enabled; HW acceleration per caller preference.
    if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
        observer.onPeerConnectionError("Failed to initializeAndroidGlobals");
    }
    peerConnectionFactory = new PeerConnectionFactory();
    // ToDo: What about these options?
    // if (options != null) {
    //     Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
    //     peerConnectionFactory.setOptions(options);
    // }
    Log.d(TAG, "Peer connection peerConnectionFactory created.");
}
示例6: createPeerConnection
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Initializes WebRTC and builds a peer connection carrying an audio-only
 * local stream.
 *
 * @return {@code true} when the Android globals initialized and the
 *         connection was set up; {@code false} otherwise
 */
private boolean createPeerConnection(Context context) {
    // Bail out early if the native globals cannot be initialized.
    if (!PeerConnectionFactory.initializeAndroidGlobals(context)) {
        return false;
    }

    PeerConnectionFactory factory = new PeerConnectionFactory();

    // Public STUN server for candidate gathering.
    List<IceServer> iceServers = new ArrayList<IceServer>();
    iceServers.add(new IceServer("stun:stun.l.google.com:19302"));
    // For TURN servers the format would be:
    // new IceServer("turn:url", user, password)

    // DTLS disabled, RTP data channels enabled.
    MediaConstraints mediaConstraints = new MediaConstraints();
    mediaConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "false"));
    mediaConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
    peerConnection = factory.createPeerConnection(iceServers, mediaConstraints, this);

    // Audio-only local stream; track ids are arbitrary but must be unique.
    localStream = factory.createLocalMediaStream("WEBRTC_WORKSHOP_NS");
    localStream.addTrack(factory.createAudioTrack("WEBRTC_WORKSHOP_NSa1",
            factory.createAudioSource(new MediaConstraints())));
    peerConnection.addStream(localStream, new MediaConstraints());
    return true;
}
示例7: initializeAndroidGlobals
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Initialize the WebRTC Android globals exactly once per process.
 * Outcome is recorded in {@code mIsInitialized} (attempt made) and
 * {@code mIsSupported} (WebRTC usable on this device).
 *
 * @param context the Android context passed to the native initializer
 */
private static void initializeAndroidGlobals(Context context) {
if (!mIsInitialized) {
try {
mIsInitialized = PeerConnectionFactory.initializeAndroidGlobals(
context,
true, // enable audio initializing
true, // enable video initializing
true // enable hardware acceleration
);
PeerConnectionFactory.initializeFieldTrials(null);
mIsSupported = true;
Log.d(LOG_TAG, "## initializeAndroidGlobals(): mIsInitialized=" + mIsInitialized);
} catch (Throwable e) {
Log.e(LOG_TAG, "## initializeAndroidGlobals(): Exception Msg=" + e.getMessage());
// Deliberate: mark as initialized even on failure so we never retry the
// native init on every call; mIsSupported=false records that WebRTC is
// unusable on this device.
mIsInitialized = true;
mIsSupported = false;
}
}
}
示例8: initPeerConnectionFactory
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Initializes the PeerConnectionFactory: native globals first, then the
 * factory itself, and finally the EGL contexts for hardware-accelerated
 * video encode/decode.
 */
private void initPeerConnectionFactory() {
    PeerConnectionFactory.initializeAndroidGlobals(getApplicationContext(), true);

    // networkIgnoreMask = 0 means no network types are ignored.
    final PeerConnectionFactory.Options factoryOptions = new PeerConnectionFactory.Options();
    factoryOptions.networkIgnoreMask = 0;
    mOptions = factoryOptions;

    mPeerConnectionFactory = new PeerConnectionFactory(mOptions);
    Log.d(TAG, "Created PeerConnectionFactory.");

    // Same shared EGL context is used for both encoding and decoding.
    mPeerConnectionFactory.setVideoHwAccelerationOptions(
            rootEglBase.getEglBaseContext(),
            rootEglBase.getEglBaseContext());
}
示例9: testJniLoadsWithoutError
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
@Test
@SmallTest
public void testJniLoadsWithoutError() throws InterruptedException {
    // Initializing the Android globals forces the native library to load.
    PeerConnectionFactory.initializeAndroidGlobals(InstrumentationRegistry.getTargetContext(),
        true /* initializeAudio */, true /* initializeVideo */,
        false /* videoCodecHwAcceleration */);
    // Constructing a factory exercises the JNI bindings end to end; the test
    // passes as long as neither step throws.
    final PeerConnectionFactory.Options factoryOptions = new PeerConnectionFactory.Options();
    new PeerConnectionFactory(factoryOptions);
}
示例10: start
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Starts the local media pipeline: initializes WebRTC globals, creates the
 * factory, builds local audio/video tracks, and renders the local preview.
 * NOTE(review): steps appear order-sensitive (globals before factory,
 * factory before sources/tracks) — confirm before reordering.
 */
public void start() {
// Toggle the UI: "start" is consumed, "call" becomes available.
start.setEnabled(false);
call.setEnabled(true);
//Initialize PeerConnectionFactory globals.
//Params are context, initAudio,initVideo and videoCodecHwAcceleration
PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);
//Create a new PeerConnectionFactory instance.
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
peerConnectionFactory = new PeerConnectionFactory(options);
//Now create a VideoCapturer instance. Callback methods are there if you want to do something! Duh!
VideoCapturer videoCapturerAndroid = getVideoCapturer(new CustomCameraEventsHandler());
//Create MediaConstraints - Will be useful for specifying video and audio constraints.
audioConstraints = new MediaConstraints();
videoConstraints = new MediaConstraints();
//Create a VideoSource instance
videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
// Track ids "100"/"101" are arbitrary local identifiers.
localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
//create an AudioSource instance
audioSource = peerConnectionFactory.createAudioSource(audioConstraints);
localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);
localVideoView.setVisibility(View.VISIBLE);
//create a videoRenderer based on SurfaceViewRenderer instance
localRenderer = new VideoRenderer(localVideoView);
// And finally, with our VideoRenderer ready, we
// can add our renderer to the VideoTrack.
localVideoTrack.addRenderer(localRenderer);
}
示例11: WebRtcClient
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Creates a signaling client: initializes WebRTC globals, connects the
 * socket.io manager on the "/hello" namespace, and seeds default ICE
 * servers and media constraints.
 */
public WebRtcClient(RtcListener listener, String host, PeerConnectionParameters params) {
    mListener = listener;
    pcParams = params;

    // Globals: audio + video enabled; HW acceleration per caller params.
    // NOTE(review): the listener is passed where a Context is expected —
    // presumably it is also a Context; confirm.
    PeerConnectionFactory.initializeAndroidGlobals(listener, true, true,
            params.videoCodecHwAcceleration);
    factory = new PeerConnectionFactory();

    MessageHandler handler = new MessageHandler();
    Log.d(TAG, "WebRtcClient..host:" + host);
    try {
        // Connect through an explicit Manager on the "/hello" namespace
        // instead of the plain IO.socket(host) default.
        Manager man = new Manager(new URI(host));
        // client = IO.socket(host);
        client = man.socket("/hello");
    } catch (URISyntaxException e) {
        e.printStackTrace();
        Log.d(TAG, "WebRtcClient..exception");
    }
    client.on("id", handler.onId);
    client.on("message", handler.onMessage);
    client.connect();

    // Google's public STUN server for ICE candidate gathering.
    // iceServers.add(new PeerConnection.IceServer("stun:23.21.150.121"));
    iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

    // Receive both audio and video; DTLS-SRTP key agreement enabled.
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
}
示例12: clientConnected
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Notify the shared SDK instance that the specified client has connected. This is for internal use only, and should never be called by your client application.
 *
 * @param client The client that just connected
 */
public void clientConnected(RespokeClient client) {
    // Register for push notifications once a token is available.
    if (pushToken != null) {
        registerPushServices();
    }

    // Perform the one-time WebRTC global initialization on first connect:
    // audio + video enabled, HW acceleration on, shared EGL context.
    if (!factoryStaticInitialized) {
        PeerConnectionFactory.initializeAndroidGlobals(context, true, true, true, VideoRendererGui.getEGLContext());
        factoryStaticInitialized = true;
    }
}
示例13: onCreate
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Activity entry point: sets up WebRTC globals, creates local video/audio
 * tracks from the camera, starts capture, and renders the local video into
 * a SurfaceViewRenderer. NOTE(review): order is presumably significant
 * (globals before factory, renderer init before addRenderer) — confirm.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//Initialize PeerConnectionFactory globals.
//Params are context, initAudio,initVideo and videoCodecHwAcceleration
PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);
//Create a new PeerConnectionFactory instance.
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);
//Now create a VideoCapturer instance. Callback methods are there if you want to do something! Duh!
VideoCapturer videoCapturerAndroid = createVideoCapturer();
//Create MediaConstraints - Will be useful for specifying video and audio constraints. More on this later!
MediaConstraints constraints = new MediaConstraints();
//Create a VideoSource instance
VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
// Track ids "100"/"101" are arbitrary local identifiers.
VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
//create an AudioSource instance
AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);
//we will start capturing the video from the camera
//width,height and fps
videoCapturerAndroid.startCapture(1000, 1000, 30);
//create surface renderer, init it and add the renderer to the track
// NOTE(review): layout id "surface_rendeer" looks misspelled but must match
// the generated R.id constant — do not "fix" here without renaming the view.
SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
videoView.setMirror(true);
EglBase rootEglBase = EglBase.create();
videoView.init(rootEglBase.getEglBaseContext(), null);
localVideoTrack.addRenderer(new VideoRenderer(videoView));
}
示例14: createPeerConnectionFactoryInternal
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Creates the peer connection factory: enables internal tracing (optionally
 * capturing to external storage), sets field trials, resolves the preferred
 * video/audio codecs from the call parameters, configures OpenSL ES usage,
 * and finally constructs the factory. Failure to initialize the Android
 * globals is reported through {@code events}, not thrown.
 *
 * Fix: corrected typo in log message ("Pereferred" -> "Preferred").
 */
private void createPeerConnectionFactoryInternal(Context context, String host) {
    PeerConnectionFactory.initializeInternalTracer();
    if (peerConnectionParameters.tracing) {
        PeerConnectionFactory.startInternalTracingCapture(
                Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
                        + "webrtc-trace.txt");
    }
    Log.d(TAG, "Create peer connection factory. Use video: " +
            peerConnectionParameters.videoCallEnabled);
    isError = false;
    // Initialize field trials.
    PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_AUTOMATIC_RESIZE);
    // Check preferred video codec; VP8 is the default.
    // NOTE(review): this reads the field `videoCallEnabled` while the log
    // above reads `peerConnectionParameters.videoCallEnabled` — confirm the
    // two are kept in sync.
    preferredVideoCodec = VIDEO_CODEC_VP8;
    if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
        if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
            preferredVideoCodec = VIDEO_CODEC_VP9;
        } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
            preferredVideoCodec = VIDEO_CODEC_H264;
        }
    }
    Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
    // Check if ISAC is used by default.
    preferIsac = false;
    if (peerConnectionParameters.audioCodec != null
            && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
        preferIsac = true;
    }
    // Enable/disable OpenSL ES playback.
    if (!peerConnectionParameters.useOpenSLES) {
        Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
        WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
    } else {
        Log.d(TAG, "Allow OpenSL ES audio if device supports it");
        WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
    }
    // Create peer connection factory; report (don't throw) on failure.
    if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
            peerConnectionParameters.videoCodecHwAcceleration)) {
        events.onPeerConnectionError("Failed to initializeAndroidGlobals");
    }
    if (options != null) {
        Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
    }
    factory = new PeerConnectionFactory();
    Log.d(TAG, "Peer connection factory created.");
}
示例15: onCreate
import org.webrtc.PeerConnectionFactory; //导入方法依赖的package包/类
/**
 * Activity entry point: configures in-call audio routing, initializes WebRTC
 * globals, builds the local media stream (front camera video + audio), and
 * wires up local/remote renderers via VideoRendererGui.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Route audio for a voice/video call: communication mode + speakerphone.
AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
audioManager.setSpeakerphoneOn(true);
PeerConnectionFactory.initializeAndroidGlobals(
this, // Context
true, // Audio Enabled
true, // Video Enabled
true, // Hardware Acceleration Enabled
null); // Render EGL Context
peerConnectionFactory = new PeerConnectionFactory();
// Capture from the front-facing camera; null = no capture events handler.
VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);
localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
localVideoTrack.setEnabled(true);
AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
localAudioTrack.setEnabled(true);
// Bundle both tracks into the stream that will be sent to the remote peer.
localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
localMediaStream.addTrack(localVideoTrack);
localMediaStream.addTrack(localAudioTrack);
GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);
VideoRendererGui.setView(videoView, null);
try {
// Remote video full-screen (0,0,100,100); local preview in the
// bottom-right quadrant (50,50,50,50). Coordinates are percentages.
otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
localVideoTrack.addRenderer(renderer);
} catch (Exception e) {
e.printStackTrace();
}
}