This article collects typical usage examples of the Java class org.webrtc.EglBase. If you are unsure what EglBase is for or how to use it, the curated class examples below may help.
The EglBase class belongs to the org.webrtc package. A total of 15 code examples of the EglBase class are shown below, sorted by popularity by default.
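Before the examples, here is a minimal lifecycle sketch of how EglBase is typically used: create a shared EGL context, pass its getEglBaseContext() handle to renderers (and, in older WebRTC releases, to the PeerConnectionFactory), and release it once every consumer is done. The method name attachAndRelease and the Context parameter are illustrative only and do not come from the examples below.
import android.content.Context;
import org.webrtc.EglBase; // import the required package/class
import org.webrtc.SurfaceViewRenderer;

void attachAndRelease(Context context) {
    EglBase eglBase = EglBase.create();
    SurfaceViewRenderer renderer = new SurfaceViewRenderer(context);
    renderer.init(eglBase.getEglBaseContext(), null); // share the EGL context with the renderer
    // ... render video frames here ...
    renderer.release(); // release the renderer before the EGL context it depends on
    eglBase.release();  // then release the shared EGL resources
}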
Example 1: initializeVideo
import org.webrtc.EglBase; // import the required package/class
private void initializeVideo() {
Util.runOnMainSync(new Runnable() {
@Override
public void run() {
eglBase = EglBase.create();
localRenderer = new SurfaceViewRenderer(WebRtcCallService.this);
remoteRenderer = new SurfaceViewRenderer(WebRtcCallService.this);
localRenderer.init(eglBase.getEglBaseContext(), null);
remoteRenderer.init(eglBase.getEglBaseContext(), null);
peerConnectionFactory.setVideoHwAccelerationOptions(eglBase.getEglBaseContext(),
eglBase.getEglBaseContext());
}
});
}
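Example 1 only covers setup. A matching teardown is not part of the original snippet, but it would plausibly mirror Example 8 below: release the two renderers before the shared EglBase. The method name shutdownVideo is hypothetical; the field names follow those used in initializeVideo.
private void shutdownVideo() {
    Util.runOnMainSync(new Runnable() {
        @Override
        public void run() {
            if (localRenderer != null) localRenderer.release();
            if (remoteRenderer != null) remoteRenderer.release();
            if (eglBase != null) eglBase.release(); // release the shared EGL resources last
        }
    });
}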
Example 2: VideoView
import org.webrtc.EglBase; // import the required package/class
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) {
this.strPeerId = strPeerId;
this.index = index;
this.x = x;
this.y = y;
this.w = w;
this.h = h;
this.mRTCVideoLayout = videoLayout;
mLayout = new PercentFrameLayout(ctx);
mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
View view = View.inflate(ctx, R.layout.layout_top_right, null);
mView = (SurfaceViewRenderer) view.findViewById(R.id.suface_view);
btnClose = (ImageView) view.findViewById(R.id.img_close_render);
mLocalCamera = (ImageView) view.findViewById(R.id.camera_off);
mAudioImageView = (ImageView) view.findViewById(R.id.img_audio_close);
mVideoImageView = (ImageView) view.findViewById(R.id.img_video_close);
layoutCamera = (RelativeLayout) view.findViewById(R.id.layout_camera);
mView.init(eglBase.getEglBaseContext(), null);
mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
mLayout.addView(view);
}
Example 3: VideoView
import org.webrtc.EglBase; // import the required package/class
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) {
this.strPeerId = strPeerId;
this.index = index;
this.x = x;
this.y = y;
this.w = w;
this.h = h;
this.mRTCVideoLayout = videoLayout;
mLayout = new PercentFrameLayout(ctx);
mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
View view = View.inflate(ctx, org.anyrtc.meet_kit.R.layout.layout_top_right, null);
mView = (SurfaceViewRenderer) view.findViewById(org.anyrtc.meet_kit.R.id.suface_view);
btnClose = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_close_render);
mLocalCamera = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.camera_off);
mAudioImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_audio_close);
mVideoImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_video_close);
layoutCamera = (RelativeLayout) view.findViewById(org.anyrtc.meet_kit.R.id.layout_camera);
mView.init(eglBase.getEglBaseContext(), null);
mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
mLayout.addView(view);
}
Example 4: createPeerConnectionClient
import org.webrtc.EglBase; // import the required package/class
PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
VideoCapturer videoCapturer, EglBase.Context eglContext) {
List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
SignalingParameters signalingParameters =
new SignalingParameters(iceServers, true, // iceServers, initiator.
null, null, null, // clientId, wssUrl, wssPostUrl.
null, null); // offerSdp, iceCandidates.
PeerConnectionClient client = PeerConnectionClient.getInstance();
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
options.networkIgnoreMask = 0;
options.disableNetworkMonitor = true;
client.setPeerConnectionFactoryOptions(options);
client.createPeerConnectionFactory(
InstrumentationRegistry.getTargetContext(), peerConnectionParameters, this);
client.createPeerConnection(
eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters);
client.createOffer();
return client;
}
Example 5: VideoView
import org.webrtc.EglBase; // import the required package/class
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h) {
this.strPeerId = strPeerId;
this.index = index;
this.x = x;
this.y = y;
this.w = w;
this.h = h;
mLayout = new PercentFrameLayout(ctx);
// mLayout.setBackgroundResource(R.drawable.background);
mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
View view = View.inflate(ctx, R.layout.layout_top_right, null);
mView = (SurfaceViewRenderer) view.findViewById(R.id.suface_view);
layoutCamera = (RelativeLayout) view.findViewById(R.id.layout_camera);
mView.init(eglBase.getEglBaseContext(), null);
mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
mLayout.addView(view);
}
Example 6: AVMemberView
import org.webrtc.EglBase; // import the required package/class
public AVMemberView(final Context context, EglBase rootEglBase, AVMember avMember, String trackType) {
this.context = context;
this.rootEglBase = rootEglBase;
this.avMember = avMember;
this.trackType = trackType;
convertView = LayoutInflater.from(context).inflate(R.layout.avcall_member_preview_item, null);
viewHolder = new ViewHolder();
viewHolder.sfv_video = (SurfaceViewRenderer) convertView.findViewById(R.id.svr_video_item);
viewHolder.iv_header = (ImageView) convertView.findViewById(R.id.iv_av_member_avatar);
viewHolder.rl_bg = (RelativeLayout) convertView.findViewById(R.id.rl_bg);
viewHolder.sfv_video.init(rootEglBase.getEglBaseContext(), null);
viewHolder.sfv_video.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
viewHolder.sfv_video.setMirror(false);
viewHolder.sfv_video.setZOrderMediaOverlay(true);
convertView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (onMemberViewClickListener != null)
onMemberViewClickListener.itemClick(avMemberView);
}
});
avMemberView = this;
setRenderer();
}
Example 7: YuvConverter
import org.webrtc.EglBase; // import the required package/class
public YuvConverter(EglBase.Context sharedContext) {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
shader.useProgram();
texMatrixLoc = shader.getUniformLocation("texMatrix");
xUnitLoc = shader.getUniformLocation("xUnit");
coeffsLoc = shader.getUniformLocation("coeffs");
GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
// If the width is not a multiple of 4 pixels, the texture
// will be scaled up slightly and clipped at the right border.
shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
eglBase.detachCurrent();
}
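The constructor above allocates GL resources (a dummy pbuffer surface and a GlShader) on its own EglBase. The full class is not shown here, so the release method below is a sketch inferred from the constructor, not code from the original source.
public void release() {
    eglBase.makeCurrent(); // GL objects must be released on the context that owns them
    shader.release();
    eglBase.release(); // also tears down the dummy pbuffer surface
}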
Example 8: hangup
import org.webrtc.EglBase; // import the required package/class
/**
* Hang up a call.
*/
public void hangup() {
if (BuildConfig.DEBUG) {
Log.d(TAG, "@@@ VideoChatActivity::hangup");
}
if (mConnection != null) {
mConnection.close();
mConnection = null;
}
mRemoteRender.release();
mLocalRender.release();
EglBase eglBase = mOption.getEglBase();
if (eglBase != null) {
eglBase.release();
}
}
Example 9: createPeerConnection
import org.webrtc.EglBase; // import the required package/class
public void createPeerConnection(final EglBase.Context renderEGLContext,
final VideoRenderer.Callbacks localRender,
final VideoRenderer.Callbacks remoteRender,
final VideoCapturer videoCapturer,
final SignalingParameters signalingParameters) {
createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
videoCapturer, signalingParameters);
}
Example 10: FrameProducer
import org.webrtc.EglBase; // import the required package/class
public FrameProducer(final EglBase eglBase, final File videoFile, final int fps,
final Callback callback) {
mVideoFile = videoFile;
mFps = fps;
mCallback = callback;
mBufferInfo = new MediaCodec.BufferInfo();
mSurfaceTextureHelper = SurfaceTextureHelper.create("SurfaceTextureHelper",
eglBase.getEglBaseContext());
mSurfaceTextureHelper.startListening(this);
mHubSurface = new Surface(mSurfaceTextureHelper.getSurfaceTexture());
}
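The FrameProducer constructor above only wires up the SurfaceTextureHelper and its Surface. A matching teardown is not shown in the original snippet; the sketch below is an assumption based on the fields created in the constructor.
public void release() {
    mSurfaceTextureHelper.stopListening(); // stop delivering frames to this listener
    mHubSurface.release(); // release the Surface backed by the helper's SurfaceTexture
    mSurfaceTextureHelper.dispose(); // free the helper's GL texture and handler thread
}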
Example 11: RcTest
import org.webrtc.EglBase; // import the required package/class
public RcTest(final Config config, final EglBase eglBase, final SurfaceViewRenderer renderer,
final Notifier notifier) {
mEglBase = eglBase;
mSurfaceViewRenderer = renderer;
File videoFile = new File(Environment.getExternalStorageDirectory(),
"alien-covenant.mp4");
mFrameProducer = new FrameProducer(mEglBase, videoFile, config.outputFps(), this);
mEncoderWrapper = new EncoderWrapper(config, notifier);
}
Example 12: RTCVideoView
import org.webrtc.EglBase; // import the required package/class
public RTCVideoView(RelativeLayout videoView, Context ctx, EglBase eglBase) {
mAutoLayout = false;
mContext = ctx;
mVideoView = videoView;
mRootEglBase = eglBase;
mLocalRender = null;
mRemoteRenders = new HashMap<>();
mRTCVideoLayout = AnyRTCRTCPEngine.Inst().getAnyRTCRTCPOption().getmVideoLayout();
if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_3X3_auto) {
((Activity) mContext).setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
mScreenWidth = ScreenUtils.getScreenWidth(mContext);
mScreenHeight = ScreenUtils.getScreenHeight(mContext) - ScreenUtils.getStatusHeight(mContext);
}
Example 13: RTCVideoView
import org.webrtc.EglBase; // import the required package/class
public RTCVideoView(RelativeLayout videoView, Context ctx, EglBase eglBase) {
mAutoLayout = false;
mContext = ctx;
mVideoView = videoView;
mRootEglBase = eglBase;
mLocalRender = null;
mRemoteRenders = new HashMap<>();
mRTCVideoLayout = AnyRTCMeetEngine.Inst().getAnyRTCMeetOption().getmVideoLayout();
if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_3X3_auto) {
((Activity) mContext).setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
mScreenWidth = ScreenUtils.getScreenWidth(mContext);
mScreenHeight = ScreenUtils.getScreenHeight(mContext) - ScreenUtils.getStatusHeight(mContext);
}
Example 14: setUp
import org.webrtc.EglBase; // import the required package/class
@Before
public void setUp() {
signalingExecutor = Executors.newSingleThreadExecutor();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
eglBase = EglBase.create();
}
}
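A matching @After teardown is not part of the original test, but it would plausibly shut down the executor and release the EglBase created in setUp, for example:
@After
public void tearDown() {
    signalingExecutor.shutdown();
    if (eglBase != null) {
        eglBase.release();
    }
}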
Example 15: RTCVideoView
import org.webrtc.EglBase; // import the required package/class
public RTCVideoView(RelativeLayout videoView, Context ctx, EglBase eglBase) {
this.mContext = ctx;
mAutoLayout = false;
mVideoView = videoView;
mRootEglBase = eglBase;
mLocalRender = null;
mRemoteRenders = new HashMap<>();
mScreenWidth = ScreenUtils.getScreenWidth(mContext);
mScreenHeight = ScreenUtils.getScreenHeight(mContext) - ScreenUtils.getStatusHeight(mContext);
}