本文整理汇总了Java中org.webrtc.voiceengine.WebRtcAudioManager类的典型用法代码示例。如果您正苦于以下问题:Java WebRtcAudioManager类的具体用法?Java WebRtcAudioManager怎么用?Java WebRtcAudioManager使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
WebRtcAudioManager类属于org.webrtc.voiceengine包,在下文中一共展示了WebRtcAudioManager类的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: initializeWebRtc
import org.webrtc.voiceengine.WebRtcAudioManager; //导入依赖的package包/类
/**
 * Configures WebRTC's Android audio layer before the factory is created.
 *
 * <p>On devices whose hardware acoustic echo canceler is known to be broken
 * (currently Pixel / Pixel XL) the software WebRTC AEC is forced; OpenSL ES
 * playback is allowed only on devices explicitly verified to work with it.
 * Finally the Android globals are initialized with audio, video and hardware
 * acceleration enabled.
 */
private void initializeWebRtc() {
  // Plain HashSet + add() instead of double-brace initialization: the
  // anonymous-subclass idiom creates an inner class holding a reference to
  // the enclosing instance, which is a well-known leak-prone anti-pattern.
  Set<String> hardwareAecBlacklist = new HashSet<String>();
  hardwareAecBlacklist.add("Pixel");
  hardwareAecBlacklist.add("Pixel XL");
  Set<String> openSlEsWhitelist = new HashSet<String>();
  openSlEsWhitelist.add("Pixel");
  openSlEsWhitelist.add("Pixel XL");
  if (Build.VERSION.SDK_INT >= 11) {
    if (hardwareAecBlacklist.contains(Build.MODEL)) {
      // Broken hardware AEC: force WebRTC's software echo canceler.
      WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
    }
    if (!openSlEsWhitelist.contains(Build.MODEL)) {
      // Not on the verified list: blacklist this device for OpenSL ES.
      WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true);
    }
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);
  }
}
示例2: createPeerConnectionFactoryInternal
import org.webrtc.voiceengine.WebRtcAudioManager; //导入依赖的package包/类
/**
 * Creates the global {@link PeerConnectionFactory} and applies the audio and
 * video settings carried in {@code peerConnectionParameters}.
 *
 * <p>Resolves the preferred video codec (default VP8), whether ISAC audio is
 * preferred, and whether OpenSL ES playback should be blacklisted, then
 * initializes the Android globals and constructs the factory. On failure,
 * {@code events.onPeerConnectionError} is notified.
 *
 * @param context Android context used to initialize the Android globals
 * @param host    signaling host; not used by this method — TODO confirm it
 *                is still needed by callers
 */
private void createPeerConnectionFactoryInternal(Context context, String host) {
  PeerConnectionFactory.initializeInternalTracer();
  if (peerConnectionParameters.tracing) {
    // Dump a native WebRTC trace to external storage for debugging.
    PeerConnectionFactory.startInternalTracingCapture(
        Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
            + "webrtc-trace.txt");
  }
  Log.d(TAG, "Create peer connection factory. Use video: "
      + peerConnectionParameters.videoCallEnabled);
  isError = false;
  // Initialize field trials.
  PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_AUTOMATIC_RESIZE);
  // Resolve the preferred video codec; VP8 is the default.
  preferredVideoCodec = VIDEO_CODEC_VP8;
  if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
    if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
      preferredVideoCodec = VIDEO_CODEC_VP9;
    } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
      preferredVideoCodec = VIDEO_CODEC_H264;
    }
  }
  // Fixed log typo: "Pereferred" -> "Preferred".
  Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
  // Check if ISAC is used by default (direct boolean assignment instead of
  // an if-chain, matching the style of the sibling factory method).
  preferIsac = peerConnectionParameters.audioCodec != null
      && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);
  // Enable/disable OpenSL ES playback.
  if (!peerConnectionParameters.useOpenSLES) {
    Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
    WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
  } else {
    Log.d(TAG, "Allow OpenSL ES audio if device supports it");
    WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
  }
  // Create peer connection factory.
  if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
      peerConnectionParameters.videoCodecHwAcceleration)) {
    events.onPeerConnectionError("Failed to initializeAndroidGlobals");
  }
  if (options != null) {
    Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
  }
  factory = new PeerConnectionFactory();
  Log.d(TAG, "Peer connection factory created.");
}
示例3: createPeerConnectionFactoryInternal
import org.webrtc.voiceengine.WebRtcAudioManager; //导入依赖的package包/类
/**
 * Creates the global {@link PeerConnectionFactory} and applies the audio and
 * video settings carried in {@code peerConnectionParameters}.
 *
 * <p>Resolves the preferred video codec (default VP8) and ISAC preference,
 * configures OpenSL ES blacklisting and the built-in AEC/AGC/NS processing
 * (disabling a built-in component routes that processing to WebRTC's
 * software implementation), then initializes the Android globals and
 * constructs the factory with {@code options}. On failure,
 * {@code events.onPeerConnectionError} is notified.
 *
 * @param context Android context used to initialize the Android globals
 */
private void createPeerConnectionFactoryInternal(Context context) {
  PeerConnectionFactory.initializeInternalTracer();
  if (peerConnectionParameters.tracing) {
    // Dump a native WebRTC trace to external storage for debugging.
    PeerConnectionFactory.startInternalTracingCapture(
        Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
            + "webrtc-trace.txt");
  }
  Log.d(TAG,
      "Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
  isError = false;
  // Initialize field trials.
  PeerConnectionFactory.initializeFieldTrials("");
  // Resolve the preferred video codec; VP8 is the default.
  preferredVideoCodec = VIDEO_CODEC_VP8;
  if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
    if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
      preferredVideoCodec = VIDEO_CODEC_VP9;
    } else if (peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
      preferredVideoCodec = VIDEO_CODEC_H264;
    }
  }
  // Fixed log typo: "Pereferred" -> "Preferred".
  Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
  // Check if ISAC is used by default.
  preferIsac = peerConnectionParameters.audioCodec != null
      && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);
  // Enable/disable OpenSL ES playback.
  if (!peerConnectionParameters.useOpenSLES) {
    Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
    WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
  } else {
    Log.d(TAG, "Allow OpenSL ES audio if device supports it");
    WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
  }
  // Built-in (hardware) acoustic echo canceler vs. WebRTC software AEC.
  if (peerConnectionParameters.disableBuiltInAEC) {
    Log.d(TAG, "Disable built-in AEC even if device supports it");
    WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
  } else {
    Log.d(TAG, "Enable built-in AEC if device supports it");
    WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
  }
  // Built-in automatic gain control vs. WebRTC software AGC.
  if (peerConnectionParameters.disableBuiltInAGC) {
    Log.d(TAG, "Disable built-in AGC even if device supports it");
    WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(true);
  } else {
    Log.d(TAG, "Enable built-in AGC if device supports it");
    WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
  }
  // Built-in noise suppressor vs. WebRTC software NS.
  if (peerConnectionParameters.disableBuiltInNS) {
    Log.d(TAG, "Disable built-in NS even if device supports it");
    WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
  } else {
    Log.d(TAG, "Enable built-in NS if device supports it");
    WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
  }
  // Create peer connection factory.
  if (!PeerConnectionFactory.initializeAndroidGlobals(
      context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
    events.onPeerConnectionError("Failed to initializeAndroidGlobals");
  }
  if (options != null) {
    Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
  }
  factory = new PeerConnectionFactory(options);
  Log.d(TAG, "Peer connection factory created.");
}