This article collects typical usage examples of the org.webrtc.AudioTrack class in Java. If you have been wondering what exactly the AudioTrack class does, or how to use it, the curated examples below may help.
The AudioTrack class belongs to the org.webrtc package. Ten code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
Example 1: gotRemoteStream
import org.webrtc.AudioTrack; // import the required package/class
private void gotRemoteStream(MediaStream stream) {
    // We received the remote stream; attach its video track to our renderer.
    final VideoTrack videoTrack = stream.videoTracks.getFirst();
    // The remote audio track plays back automatically; no renderer is required.
    AudioTrack audioTrack = stream.audioTracks.getFirst();
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            try {
                remoteRenderer = new VideoRenderer(remoteVideoView);
                remoteVideoView.setVisibility(View.VISIBLE);
                videoTrack.addRenderer(remoteRenderer);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });
}
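Note that addRenderer and VideoRenderer were removed in later org.webrtc releases. On recent versions of the library, SurfaceViewRenderer implements the VideoSink interface, so the equivalent wiring looks roughly like the sketch below (a minimal sketch, assuming remoteVideoView has already been initialized with an EGL context):

// Rough equivalent on newer org.webrtc releases, where VideoRenderer is gone.
private void gotRemoteStream(MediaStream stream) {
    final VideoTrack videoTrack = stream.videoTracks.get(0); // videoTracks is a List in newer releases
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            remoteVideoView.setVisibility(View.VISIBLE);
            videoTrack.addSink(remoteVideoView); // SurfaceViewRenderer is a VideoSink
        }
    });
}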
Example 2: onAddStream
import org.webrtc.AudioTrack; // import the required package/class
@Override
public void onAddStream(MediaStream stream) {
    Log.w(TAG, "onAddStream:" + stream);
    // Enable every remote audio track so the incoming audio is audible.
    for (AudioTrack audioTrack : stream.audioTracks) {
        audioTrack.setEnabled(true);
    }
    // Enable and render the single remote video track, if present.
    if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
        VideoTrack videoTrack = stream.videoTracks.getFirst();
        videoTrack.setEnabled(true);
        videoTrack.addRenderer(new VideoRenderer(remoteRenderer));
    }
}
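One caveat: PeerConnection.Observer callbacks such as onAddStream are invoked from WebRTC's internal signaling thread, not the Android main thread, so any view work inside them should be posted to the UI thread. A minimal sketch, assuming the observer lives in an Activity and reusing the remoteRenderer field from the example above:

@Override
public void onAddStream(final MediaStream stream) {
    // Observer callbacks run on the signaling thread; hop to the UI thread for view work.
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            if (!stream.videoTracks.isEmpty()) {
                stream.videoTracks.getFirst().addRenderer(new VideoRenderer(remoteRenderer));
            }
        }
    });
}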
Example 3: setAudioEnabled
import org.webrtc.AudioTrack; // import the required package/class
@Override
public void setAudioEnabled(boolean enabled) {
    // Only toggle tracks while connected and a local media stream (lMS) exists.
    if (mState != State.kConnected || lMS == null) {
        return;
    }
    for (AudioTrack audioTrack : lMS.audioTracks) {
        audioTrack.setEnabled(enabled);
    }
}
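A typical caller would wire this into a mute button. For instance (toggleMute and mMuted are illustrative names, not part of the original class):

// Hypothetical mute toggle built on setAudioEnabled().
private boolean mMuted = false;

public void toggleMute() {
    mMuted = !mMuted;
    setAudioEnabled(!mMuted);
}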
Example 4: createAudioTrack
import org.webrtc.AudioTrack; // import the required package/class
private AudioTrack createAudioTrack() {
    // Build an audio source from the shared constraints and wrap it in a track.
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}
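The returned track is only useful once it is attached to a media stream that the peer connection sends. A minimal sketch of that wiring, assuming factory and peerConnection fields like the ones used above:

// Hypothetical wiring: publish the local audio track to the remote peer.
MediaStream mediaStream = factory.createLocalMediaStream("ARDAMS");
mediaStream.addTrack(createAudioTrack());
peerConnection.addStream(mediaStream);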
Example 5: createAudioTrack
import org.webrtc.AudioTrack; // import the required package/class
private AudioTrack createAudioTrack(MediaConstraints audioConstraints) {
    mAudioSource = mPeerConnectionFactory.createAudioSource(audioConstraints);
    mLocalAudioTrack = mPeerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, mAudioSource);
    mLocalAudioTrack.setEnabled(true);
    return mLocalAudioTrack;
}
Example 6: onCreate
import org.webrtc.AudioTrack; // import the required package/class
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    // Initialize the PeerConnectionFactory globals.
    // Params are context, initAudio, initVideo and videoCodecHwAcceleration.
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);
    // Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);
    // Now create a VideoCapturer instance; its callback methods are available if you need them.
    VideoCapturer videoCapturerAndroid = createVideoCapturer();
    // Create MediaConstraints, used for specifying video and audio constraints.
    MediaConstraints constraints = new MediaConstraints();
    // Create a VideoSource instance and a local video track.
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    // Create an AudioSource instance and a local audio track.
    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);
    // Start capturing video from the camera (width, height, fps).
    videoCapturerAndroid.startCapture(1000, 1000, 30);
    // Create the surface renderer, initialize it, and add it to the video track.
    SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
    videoView.setMirror(true);
    EglBase rootEglBase = EglBase.create();
    videoView.init(rootEglBase.getEglBaseContext(), null);
    localVideoTrack.addRenderer(new VideoRenderer(videoView));
}
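initializeAndroidGlobals and the public PeerConnectionFactory constructor used here were removed in later org.webrtc releases. On recent versions, the equivalent setup is roughly the following sketch:

// Rough equivalent on newer org.webrtc releases.
PeerConnectionFactory.initialize(
        PeerConnectionFactory.InitializationOptions.builder(this)
                .createInitializationOptions());
PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
        .createPeerConnectionFactory();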
Example 7: onCreate
import org.webrtc.AudioTrack; // import the required package/class
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    // Route audio for a voice call and use the speakerphone.
    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);
    PeerConnectionFactory.initializeAndroidGlobals(
            this, // Context
            true, // Audio Enabled
            true, // Video Enabled
            true, // Hardware Acceleration Enabled
            null); // Render EGL Context
    peerConnectionFactory = new PeerConnectionFactory();
    // Capture from the front-facing camera and build the local tracks.
    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);
    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);
    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);
    // Bundle both tracks into the local media stream.
    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);
    // Render the remote peer full-screen and the local preview in a corner.
    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);
    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 8: AndroidAudioTrack
import org.webrtc.AudioTrack; // import the required package/class
// Thin wrapper pairing a WebRTC AudioTrack with its owning media stream.
public AndroidAudioTrack(AudioTrack audioTrack, AndroidMediaStream stream) {
    this.audioTrack = audioTrack;
    this.stream = stream;
}
Example 9: getAudioTrack
import org.webrtc.AudioTrack; // import the required package/class
public AudioTrack getAudioTrack() {
    return audioTrack;
}
Example 10: doPublish
import org.webrtc.AudioTrack; // import the required package/class
/** begin streaming to server - MUST run on VcThread */
void doPublish(VideoStreamsView view) {
    if (mVideoCapturer != null) {
        return;
    }
    MediaConstraints videoConstraints = new MediaConstraints();
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "320"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "240"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", "10"));
    MediaConstraints audioConstraints = new MediaConstraints();
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair("googEchoCancellation2", "true"));
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
    lMS = sFactory.createLocalMediaStream("ARDAMS");
    if (videoConstraints != null) {
        mVideoCapturer = getVideoCapturer();
        mVideoSource = sFactory.createVideoSource(mVideoCapturer, videoConstraints);
        VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0", mVideoSource);
        lMS.addTrack(videoTrack);
    }
    if (audioConstraints != null) {
        AudioTrack audioTrack = sFactory.createAudioTrack("ARDAMSa0", sFactory.createAudioSource(audioConstraints));
        lMS.addTrack(audioTrack);
        audioTrack.setEnabled(false);
    }
    StreamDescription stream = new StreamDescription("", false, true, true, false, null, mNick);
    MediaConstraints pcConstraints = makePcConstraints();
    MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream, true), stream);
    PeerConnection pc = sFactory.createPeerConnection(mIceServers, pcConstraints, pcObs);
    pc.addStream(lMS, new MediaConstraints());
    stream.setMedia(lMS);
    if (view != null) {
        stream.attachRenderer(new VideoCallbacks(view, VideoStreamsView.LOCAL_STREAM_ID));
    }
    stream.initLocal(pc, pcObs.getSdpObserver());
}
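makePcConstraints() is not included in this snippet. Based on common usage in Licode-style Android clients of that era, a plausible implementation (purely illustrative, not the original project's code) could look like:

// Hypothetical sketch of the helper referenced above.
private MediaConstraints makePcConstraints() {
    MediaConstraints pcConstraints = new MediaConstraints();
    // Ask to receive the remote peer's audio and video in the SDP offer.
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    // Enable DTLS-SRTP key agreement, commonly required by SFUs such as Licode.
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    return pcConstraints;
}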