本文整理汇总了Java中org.webrtc.VideoRendererGui.create方法的典型用法代码示例。如果您正苦于以下问题:Java VideoRendererGui.create方法的具体用法?Java VideoRendererGui.create怎么用?Java VideoRendererGui.create使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.webrtc.VideoRendererGui
的用法示例。
在下文中一共展示了VideoRendererGui.create方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: RoomChatPresenter
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * Creates the presenter, builds the signalling server address, wires the
 * view's GLSurfaceView into VideoRendererGui, and reserves the local
 * renderer slot (top-left quarter of the surface).
 *
 * @param view the room-chat view supplying the GLSurfaceView to render on
 */
public RoomChatPresenter(IRoomChatView view) {
    mView = view;
    // Signalling endpoint of the form "http://host:port/".
    mSocketAddress = "http://" + App.getInstance().getString(R.string.stream_host)
            + ":" + App.getInstance().getString(R.string.stream_port) + "/";
    final GLSurfaceView glSurface = mView.getSurfaceView();
    glSurface.setPreserveEGLContextOnPause(true);
    glSurface.setKeepScreenOn(true);
    VideoRendererGui.setView(glSurface, new Runnable() {
        @Override
        public void run() {
            // The EGL context is ready; safe to start the WebRTC machinery.
            L.d("eglContextReadyCallback");
            init();
        }
    });
    // Coordinates are percentages of the surface: x, y, width, height.
    localRender = VideoRendererGui.create(0, 0, 50, 50, scalingType, true);
}
示例2: onCreate
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * Full-screen call activity setup: pins networking to IPv4, attaches the
 * GL view to VideoRendererGui and allocates the local/remote render regions.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    // Force IPv4: the WebRTC stack of this vintage misbehaves over IPv6.
    java.lang.System.setProperty("java.net.preferIPv4Stack", "true");
    java.lang.System.setProperty("java.net.preferIPv6Addresses", "false");
    super.onCreate(savedInstanceState);

    requestWindowFeature(Window.FEATURE_NO_TITLE);
    setContentView(R.layout.activity_janus);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

    vsv = (GLSurfaceView) findViewById(R.id.glview);
    vsv.setPreserveEGLContextOnPause(true);
    vsv.setKeepScreenOn(true);
    VideoRendererGui.setView(vsv, new MyInit());

    // Percent-based layout: small local preview near the bottom-right
    // at (72, 72), remote view anchored at the origin.
    localRender = VideoRendererGui.create(
            72, 72, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    remoteRender = VideoRendererGui.create(
            0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
}
示例3: attachVideoRenderer
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * Attach the call's video renderers to the specified GLSurfaceView.
 *
 * @param glView The GLSurfaceView on which to render video; a null view is
 *               silently ignored.
 */
public void attachVideoRenderer(GLSurfaceView glView) {
    if (null == glView) {
        return; // Nothing to attach to.
    }
    VideoRendererGui.setView(glView, new Runnable() {
        @Override
        public void run() {
            Log.d(TAG, "VideoRendererGui GL Context ready");
        }
    });
    // Remote video fills the whole surface; the local preview is a small
    // 25%x25% window near the top-right corner (coordinates in percent).
    remoteRender = VideoRendererGui.create(0, 0, 100, 100,
            VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    localRender = VideoRendererGui.create(70, 5, 25, 25,
            VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
}
示例4: onCreate
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * Sets up the call screen so it shows above the lock screen, binds the GL
 * surface to VideoRendererGui and creates the remote/local render regions.
 * If the activity was launched via an ACTION_VIEW deep link, the first path
 * segment of the data URI is taken as the callee id.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    // Keep the screen on and allow the call UI to appear above the keyguard.
    getWindow().addFlags(
        LayoutParams.FLAG_FULLSCREEN
        | LayoutParams.FLAG_KEEP_SCREEN_ON
        | LayoutParams.FLAG_DISMISS_KEYGUARD
        | LayoutParams.FLAG_SHOW_WHEN_LOCKED
        | LayoutParams.FLAG_TURN_SCREEN_ON);
    // setContentView(R.layout.main);
    // mSocketAddress = "http://" + getResources().getString(R.string.host);
    // mSocketAddress += (":" + getResources().getString(R.string.port) + "/");
    //
    // vsv = (GLSurfaceView) findViewById(R.id.glview_call);
    // vsv.setPreserveEGLContextOnPause(true);
    // vsv.setKeepScreenOn(true);
    VideoRendererGui.setView(vsv, new Runnable() {
        @Override
        public void run() {
            // EGL context is ready; continue initialization.
            init();
        }
    });
    // Local and remote render regions; coordinates are percentages of the
    // surface (x, y, width, height).
    remoteRender = VideoRendererGui.create(
        REMOTE_X, REMOTE_Y,
        REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
    localRender = VideoRendererGui.create(
        LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
        LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);
    final Intent intent = getIntent();
    final String action = intent.getAction();
    // Guard against a deep link with no data URI or an empty path, which
    // previously crashed with NPE / IndexOutOfBoundsException.
    if (Intent.ACTION_VIEW.equals(action) && intent.getData() != null) {
        final List<String> segments = intent.getData().getPathSegments();
        if (!segments.isEmpty()) {
            callerId = segments.get(0);
        }
    }
}
示例5: addRender
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * Attaches a renderer to the given media stream's first video track.
 * Position 0 reuses the pre-created local renderer; any other position gets
 * a freshly allocated 50%x50% cell laid out in a two-column grid.
 *
 * @param stream   the media stream whose video should be rendered
 * @param position the stream's slot in the grid (0 = local)
 */
private void addRender(MediaStream stream, int position) {
    L.d("addRender position is " + position);
    // Guard: a stream can arrive without a video track; indexing it blindly
    // previously threw an IndexOutOfBoundsException.
    if (stream.videoTracks.isEmpty()) {
        L.d("addRender skipped: stream has no video track");
        return;
    }
    final VideoRenderer.Callbacks render;
    if (position == 0) {
        render = localRender;
    } else {
        // Two-column grid: even positions take the left column (x = 0),
        // odd positions the right (x = 50); each row is 50% of the height.
        render = VideoRendererGui.create(position % 2 == 0 ? 0 : 50,
            position / 2 * 50,
            50, 50,
            scalingType, false);
    }
    stream.videoTracks.get(0).addRenderer(new VideoRenderer(render));
}
示例6: onCreate
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * Master-side activity setup for a Kurento tree session: reads the tree id
 * from the launch extras, wires the GL surface and renderers, initializes
 * the WebRTC peer and asks the server to create the tree.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    treeState = TreeState.IDLE;
    setContentView(R.layout.activity_video_master);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    mHandler = new Handler();
    Bundle extras = getIntent().getExtras();
    // The tree id is mandatory; bail out early if the caller did not pass it.
    if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
        Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
            Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    this.treeId = extras.getString(Constants.USER_NAME, "");
    Log.i(TAG, "treeId: " + treeId);
    this.mCallStatus = (TextView) findViewById(R.id.call_status);
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);
    // Set up the renderers; coordinates are percentages of the surface.
    RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
    VideoRendererGui.setView(videoView, null);
    remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
        REMOTE_WIDTH, REMOTE_HEIGHT,
        scalingType, false);
    localRender = VideoRendererGui.create( LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
        LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
        scalingType, true);
    NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
    peerConnectionParameters = new NBMMediaConfiguration( NBMMediaConfiguration.NBMRendererType.OPENGLES,
        NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
        NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
        receiverVideoFormat,
        NBMMediaConfiguration.NBMCameraPosition.FRONT);
    nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
    nbmWebRTCPeer.initialize();
    Log.i(TAG, "MasterVideoActivity initialized");
    // mHandler.postDelayed(publishDelayed, 4000);
    MainActivity.getKurentoTreeAPIInstance().addObserver(this);
    createTreeRequestId = ++Constants.id;
    MainActivity.getKurentoTreeAPIInstance().sendCreateTree(treeId, createTreeRequestId);
    // Was "treeState.CREATING": access the enum constant statically, not
    // through an instance reference.
    treeState = TreeState.CREATING;
    mCallStatus.setText("Creating tree...");
}
示例7: onCreate
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * Viewer-side activity setup for a Kurento tree session: reads the tree id
 * from the launch extras, wires the GL surface and renderers, initializes
 * the WebRTC peer and schedules the local SDP offer creation.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    treeState = TreeState.IDLE;
    setContentView(R.layout.activity_video_master);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    mHandler = new Handler();
    Bundle extras = getIntent().getExtras();
    // The tree id is mandatory; bail out early if the caller did not pass it.
    if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
        Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
            Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    this.treeId = extras.getString(Constants.USER_NAME, "");
    Log.i(TAG, "treeId: " + treeId);
    // if (extras.containsKey(Constants.CALL_USER)) {
    // this.calluser = extras.getString(Constants.CALL_USER, "");
    // Log.i(TAG, "callUser: " + calluser);
    // }
    this.mCallStatus = (TextView) findViewById(R.id.call_status);
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);
    // ICE candidates arriving before the session is ready are queued here.
    queuedIceCandidates = new Vector<>();
    // Set up the renderers; coordinates are percentages of the surface.
    RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
    VideoRendererGui.setView(videoView, null);
    remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
        REMOTE_WIDTH, REMOTE_HEIGHT,
        scalingType, false);
    localRender = VideoRendererGui.create( LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
        LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
        scalingType, true);
    NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
    peerConnectionParameters = new NBMMediaConfiguration( NBMMediaConfiguration.NBMRendererType.OPENGLES,
        NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
        NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
        receiverVideoFormat,
        NBMMediaConfiguration.NBMCameraPosition.FRONT);
    nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
    nbmWebRTCPeer.initialize();
    Log.i(TAG, "MasterVideoActivity initialized");
    // Give the peer a moment to finish initializing before creating the offer.
    mHandler.postDelayed(createOfferDelayed, 1000);
    MainActivity.getKurentoTreeAPIInstance().addObserver(this);
    treeState = TreeState.CREATING_OFFER;
    mCallStatus.setText("Creating local offer...");
}
示例8: onCreate
import org.webrtc.VideoRendererGui; //导入方法依赖的package包/类
/**
 * AppRTC demo setup: builds the full-screen GL view and renderers, a stats
 * HUD toggled by tapping the screen, initializes the PeerConnection factory
 * globals once, routes audio based on headset state, and either connects to
 * the room from an ACTION_VIEW URI or shows the room-entry UI.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Thread.setDefaultUncaughtExceptionHandler(
        new UnhandledExceptionHandler(this));
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    Point displaySize = new Point();
    getWindowManager().getDefaultDisplay().getRealSize(displaySize);
    vsv = new AppRTCGLView(this, displaySize);
    VideoRendererGui.setView(vsv);
    // Remote video fills the surface; local preview sits top-right (percent).
    remoteRender = VideoRendererGui.create(0, 0, 100, 100);
    localRender = VideoRendererGui.create(70, 5, 25, 25);
    vsv.setOnClickListener(new View.OnClickListener() {
        @Override public void onClick(View v) {
            toggleHUD();
        }
    });
    setContentView(vsv);
    logAndToast("Tap the screen to toggle stats visibility");
    // Semi-transparent stats HUD, hidden until the user taps the screen.
    hudView = new TextView(this);
    hudView.setTextColor(Color.BLACK);
    hudView.setBackgroundColor(Color.WHITE);
    hudView.setAlpha(0.4f);
    hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
    hudView.setVisibility(View.INVISIBLE);
    addContentView(hudView, hudLayout);
    // PeerConnectionFactory globals may only be initialized once per process.
    if (!factoryStaticInitialized) {
        abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
            this, true, true),
            "Failed to initializeAndroidGlobals");
        factoryStaticInitialized = true;
    }
    AudioManager audioManager =
        ((AudioManager) getSystemService(AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the
    // suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(isWiredHeadsetOn ?
        AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
        "OfferToReceiveAudio", "true"));
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
        "OfferToReceiveVideo", "true"));
    final Intent intent = getIntent();
    // Use the Intent.ACTION_VIEW constant instead of the raw action string
    // (consistent with the other examples and immune to typos).
    if (Intent.ACTION_VIEW.equals(intent.getAction())) {
        connectToRoom(intent.getData().toString());
        return;
    }
    showGetRoomUI();
}