

Java VideoRendererGui.setView Method Code Examples

This article collects typical code examples of the Java method org.webrtc.VideoRendererGui.setView. If you are wondering what VideoRendererGui.setView does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.webrtc.VideoRendererGui.


The following shows 10 code examples of the VideoRendererGui.setView method, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code examples.
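
Before the individual examples, here is a minimal sketch of the pattern they all share. It is written against the legacy org.webrtc Android API in which VideoRendererGui still exists (the class was later superseded by SurfaceViewRenderer); the enclosing class, field names, and the localVideoTrack parameter are assumptions for illustration, not code taken from any project below.

import android.opengl.GLSurfaceView;

import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;
import org.webrtc.VideoTrack;

// A minimal sketch (assumed names): bind the GL view, wait for the EGL context,
// then create percentage-based renderer windows and attach the local track.
public class RendererSetupSketch {
    private VideoRenderer.Callbacks localRender;
    private VideoRenderer.Callbacks remoteRender;

    public void bind(GLSurfaceView glView, final VideoTrack localVideoTrack) {
        glView.setPreserveEGLContextOnPause(true);
        glView.setKeepScreenOn(true);

        // The Runnable fires once the GL/EGL context is ready; the heavier setup
        // (PeerConnectionFactory, camera capture) is usually started from there.
        VideoRendererGui.setView(glView, new Runnable() {
            @Override
            public void run() {
                // e.g. init();
            }
        });

        // x, y, width and height are percentages of the GLSurfaceView:
        // a full-screen remote window plus a small, mirrored local preview.
        remoteRender = VideoRendererGui.create(0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
        localRender = VideoRendererGui.create(70, 5, 25, 25,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

        // The local track is wired up directly; remote tracks are attached the
        // same way from the PeerConnection observer when they arrive.
        localVideoTrack.addRenderer(new VideoRenderer(localRender));
    }
}

In this API, setView installs the shared renderer state, which is why every example below calls it before the first VideoRendererGui.create call.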

Example 1: RoomChatPresenter

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
public RoomChatPresenter(IRoomChatView view) {
    mView = view;
    mSocketAddress = "http://" + App.getInstance().getString(R.string.stream_host);
    mSocketAddress += (":" + App.getInstance().getString(R.string.stream_port) + "/");

    GLSurfaceView surfaceView = mView.getSurfaceView();
    surfaceView.setPreserveEGLContextOnPause(true);
    surfaceView.setKeepScreenOn(true);
    VideoRendererGui.setView(surfaceView, new Runnable() {
        @Override
        public void run() {
            // The SurfaceView is ready
            L.d("eglContextReadyCallback");
            init();
        }
    });

    localRender = VideoRendererGui.create(
            0, 0,
            50, 50, scalingType, true);

}
 
Developer: inexistence, Project: VideoMeeting, Lines of code: 23, Source: RoomChatPresenter.java

Example 2: onCreate

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
@Override
protected void onCreate(Bundle savedInstanceState) {
    java.lang.System.setProperty("java.net.preferIPv6Addresses", "false");
    java.lang.System.setProperty("java.net.preferIPv4Stack", "true");
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    setContentView(R.layout.activity_janus);

    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

    vsv = (GLSurfaceView) findViewById(R.id.glview);
    vsv.setPreserveEGLContextOnPause(true);
    vsv.setKeepScreenOn(true);
    VideoRendererGui.setView(vsv, new MyInit());

    localRender = VideoRendererGui.create(72, 72, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    remoteRender = VideoRendererGui.create(0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
}
 
Developer: Computician, Project: janus-gateway-android, Lines of code: 19, Source: JanusActivity.java

Example 3: attachVideoRenderer

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
/**
 *  Attach the call's video renderers to the specified GLSurfaceView
 *
 *  @param glView  The GLSurfaceView on which to render video
 */
public void attachVideoRenderer(GLSurfaceView glView) {
    if (null != glView) {
        VideoRendererGui.setView(glView, new Runnable() {
            @Override
            public void run() {
                Log.d(TAG, "VideoRendererGui GL Context ready");
            }
        });

        remoteRender = VideoRendererGui.create(0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
        localRender = VideoRendererGui.create(70, 5, 25, 25,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    }
}
 
Developer: respoke, Project: respoke-sdk-android, Lines of code: 21, Source: RespokeCall.java
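
For context, a hypothetical caller of attachVideoRenderer might look like the following; the glview_call layout ID and the call field are assumed names for illustration, not code from the Respoke sample:

// Hypothetical usage of attachVideoRenderer(); the view ID and 'call' are assumed names.
GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);
videoView.setPreserveEGLContextOnPause(true);
videoView.setKeepScreenOn(true);
call.attachVideoRenderer(videoView);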

Example 4: onCreate

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().addFlags(
                LayoutParams.FLAG_FULLSCREEN
                        | LayoutParams.FLAG_KEEP_SCREEN_ON
                        | LayoutParams.FLAG_DISMISS_KEYGUARD
                        | LayoutParams.FLAG_SHOW_WHEN_LOCKED
                        | LayoutParams.FLAG_TURN_SCREEN_ON);
//        setContentView(R.layout.main);
//        mSocketAddress = "http://" + getResources().getString(R.string.host);
//        mSocketAddress += (":" + getResources().getString(R.string.port) + "/");
//
//        vsv = (GLSurfaceView) findViewById(R.id.glview_call);
//        vsv.setPreserveEGLContextOnPause(true);
//        vsv.setKeepScreenOn(true);
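        // Note: vsv must be assigned (for example via the commented-out
        // findViewById above) before the setView call below; as shown, it is null.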
        VideoRendererGui.setView(vsv, new Runnable() {
            @Override
            public void run() {
                init();
            }
        });

        // local and remote render
        remoteRender = VideoRendererGui.create(
                REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
        localRender = VideoRendererGui.create(
                LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
                LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);

        final Intent intent = getIntent();
        final String action = intent.getAction();

        if (Intent.ACTION_VIEW.equals(action)) {
            final List<String> segments = intent.getData().getPathSegments();
            callerId = segments.get(0);
        }
    }
 
Developer: ardnezar, Project: webrtc-android, Lines of code: 41, Source: RtcActivity.java

Example 5: init

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
private void init(TypedArray typedArray) {
    VideoRendererGui.setView(this, null);
    if (typedArray != null) {
        setValuefromResources(typedArray);
        typedArray.recycle();
    }

    obtainMainVideoRenderer();
}
 
Developer: QuickBlox, Project: q-municate-android, Lines of code: 10, Source: RTCGLVideoView.java

Example 6: onCreate

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        treeState = TreeState.IDLE;

        setContentView(R.layout.activity_video_master);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        mHandler = new Handler();
        Bundle extras = getIntent().getExtras();
        if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
            Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
                    Toast.LENGTH_SHORT).show();
            finish();
            return;
        }
        this.treeId      = extras.getString(Constants.USER_NAME, "");
        Log.i(TAG, "treeId: " + treeId);

        this.mCallStatus   = (TextView) findViewById(R.id.call_status);

        this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);
        // Set up the video renderer on the GLSurfaceView
        RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
        VideoRendererGui.setView(videoView, null);

        remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT,
                scalingType, false);
        localRender = VideoRendererGui.create(	LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                scalingType, true);
        NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
        peerConnectionParameters = new NBMMediaConfiguration(   NBMMediaConfiguration.NBMRendererType.OPENGLES,
                NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
                NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
                receiverVideoFormat,
                NBMMediaConfiguration.NBMCameraPosition.FRONT);
        nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
        nbmWebRTCPeer.initialize();
        Log.i(TAG, "MasterVideoActivity initialized");
//        mHandler.postDelayed(publishDelayed, 4000);

        MainActivity.getKurentoTreeAPIInstance().addObserver(this);

        createTreeRequestId = ++Constants.id;
        MainActivity.getKurentoTreeAPIInstance().sendCreateTree(treeId, createTreeRequestId);

        treeState = TreeState.CREATING;
        mCallStatus.setText("Creating tree...");

    }
 
Developer: nubomedia-vtt, Project: nubo-test-tree, Lines of code: 53, Source: MasterVideoActivity.java

Example 7: onCreate

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        treeState = TreeState.IDLE;

        setContentView(R.layout.activity_video_master);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        mHandler = new Handler();
        Bundle extras = getIntent().getExtras();
        if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
            Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
                    Toast.LENGTH_SHORT).show();
            finish();
            return;
        }
        this.treeId      = extras.getString(Constants.USER_NAME, "");
        Log.i(TAG, "treeId: " + treeId);

//        if (extras.containsKey(Constants.CALL_USER)) {
//            this.calluser      = extras.getString(Constants.CALL_USER, "");
//            Log.i(TAG, "callUser: " + calluser);
//        }

        this.mCallStatus   = (TextView) findViewById(R.id.call_status);

        this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

        queuedIceCandidates = new Vector<>();
        // Set up the video renderer on the GLSurfaceView
        RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
        VideoRendererGui.setView(videoView, null);

        remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT,
                scalingType, false);
        localRender = VideoRendererGui.create(	LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                scalingType, true);
        NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
        peerConnectionParameters = new NBMMediaConfiguration(   NBMMediaConfiguration.NBMRendererType.OPENGLES,
                NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
                NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
                receiverVideoFormat,
                NBMMediaConfiguration.NBMCameraPosition.FRONT);
        nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
        nbmWebRTCPeer.initialize();
        Log.i(TAG, "MasterVideoActivity initialized");
        mHandler.postDelayed(createOfferDelayed, 1000);

        MainActivity.getKurentoTreeAPIInstance().addObserver(this);

        treeState = TreeState.CREATING_OFFER;
        mCallStatus.setText("Creating local offer...");

    }
 
Developer: nubomedia-vtt, Project: nubo-test-tree, Lines of code: 57, Source: ViewerVideoActivity.java

Example 8: onCreate

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Developer: Nitrillo, Project: krankygeek, Lines of code: 45, Source: MainActivity.java

Example 9: onCreate

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Log.d(TAG, "onCreate: Model: " + Build.MODEL);

    //getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    //getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    mLayout = new RelativeLayout(this);
    //LinearLayout layout = new LinearLayout(this);
    //layout.setOrientation(LinearLayout.VERTICAL);


    if (useGlSurfaceView) {
        glSv = new GLSurfaceView(this);
        if (useVideoRendererGui) {
            VideoRendererGui.setView(glSv);
            localRenderer = VideoRendererGui.createGuiRenderer(0, 0, 100, 100, ScalingType.SCALE_ASPECT_FIT);
            //localRenderer = VideoRendererGui.createGuiRenderer(0, 0, 50, 100, ScalingType.SCALE_ASPECT_FIT);
            //localRenderer = VideoRendererGui.createGuiRenderer(0, 0, 100, 30, ScalingType.SCALE_ASPECT_FILL);
            //localRenderer = VideoRendererGui.createGuiRenderer(5, 5, 90, 90);
            //localRenderer = VideoRendererGui.createGuiRenderer(5, 5, 40, 90);
            remoteRenderer = VideoRendererGui.createGuiRenderer(50, 0, 50, 100, ScalingType.SCALE_ASPECT_FIT);
        } else {
            mRenderer = new MyGLRenderer(glSv);
        }

        mLayout.addView(glSv);
    }
    else {
        sv = new SurfaceView(this);
        sv.getHolder().addCallback(this);
        mLayout.addView(sv);
    }

    RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(
            LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
    lp.addRule(RelativeLayout.ALIGN_PARENT_TOP);
    buttonStartSurface = new Button(this);
    buttonStartSurface.setText("Start encoder test with surfaces.");
    buttonStartSurface.setLayoutParams(lp);
    buttonStartSurface.setBackgroundColor(0x80E0E0E0);
    buttonStartSurface.setOnClickListener(mStartSurfaceCameraListener);
    mLayout.addView(buttonStartSurface, lp);

    lp = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
    lp.addRule(RelativeLayout.CENTER_IN_PARENT);
    buttonStartBuffer = new Button(this);
    buttonStartBuffer.setText("Start encoder test with byte buffers.");
    buttonStartBuffer.setLayoutParams(lp);
    buttonStartBuffer.setBackgroundColor(0x80E0E0E0);
    buttonStartBuffer.setOnClickListener(mStartBufferCameraListener);
    mLayout.addView(buttonStartBuffer, lp);

    lp = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
    lp.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
    buttonStartDecoder = new Button(this);
    buttonStartDecoder.setText("Start decoder test with surface.");
    buttonStartDecoder.setLayoutParams(lp);
    buttonStartDecoder.setBackgroundColor(0x80E0E0E0);
    //buttonStartDecoder.setOnClickListener(mStartDecodeSurfaceListener);
    buttonStartDecoder.setOnClickListener(mStartDecodeWebRTCListener);
    mLayout.addView(buttonStartDecoder, lp);

    setContentView(mLayout);
}
 
Developer: jingcmu, Project: MediaCodecTest, Lines of code: 67, Source: MediaCodecActivity.java

Example 10: onCreate

import org.webrtc.VideoRendererGui; // import the package/class the method depends on
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  Thread.setDefaultUncaughtExceptionHandler(
      new UnhandledExceptionHandler(this));

  getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

  Point displaySize = new Point();
  getWindowManager().getDefaultDisplay().getRealSize(displaySize);

  vsv = new AppRTCGLView(this, displaySize);
  VideoRendererGui.setView(vsv);
  remoteRender = VideoRendererGui.create(0, 0, 100, 100);
  localRender = VideoRendererGui.create(70, 5, 25, 25);

  vsv.setOnClickListener(new View.OnClickListener() {
      @Override public void onClick(View v) {
        toggleHUD();
      }
    });
  setContentView(vsv);
  logAndToast("Tap the screen to toggle stats visibility");

  hudView = new TextView(this);
  hudView.setTextColor(Color.BLACK);
  hudView.setBackgroundColor(Color.WHITE);
  hudView.setAlpha(0.4f);
  hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
  hudView.setVisibility(View.INVISIBLE);
  addContentView(hudView, hudLayout);

  if (!factoryStaticInitialized) {
    abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
        this, true, true),
      "Failed to initializeAndroidGlobals");
    factoryStaticInitialized = true;
  }

  AudioManager audioManager =
      ((AudioManager) getSystemService(AUDIO_SERVICE));
  // TODO(fischman): figure out how to do this Right(tm) and remove the
  // suppression.
  @SuppressWarnings("deprecation")
  boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
  audioManager.setMode(isWiredHeadsetOn ?
      AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
  audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

  sdpMediaConstraints = new MediaConstraints();
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveAudio", "true"));
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveVideo", "true"));

  final Intent intent = getIntent();
  if ("android.intent.action.VIEW".equals(intent.getAction())) {
    connectToRoom(intent.getData().toString());
    return;
  }
  showGetRoomUI();
}
 
Developer: gaku, Project: WebRTCDemo, Lines of code: 65, Source: AppRTCDemoActivity.java


Note: The org.webrtc.VideoRendererGui.setView method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by the community; copyright of the source code remains with the original authors, and any distribution or use should follow the corresponding project's License. Do not reproduce without permission.