當前位置: 首頁>>代碼示例>>Java>>正文


Java GLES20.glReadPixels方法代碼示例

本文整理匯總了Java中android.opengl.GLES20.glReadPixels方法的典型用法代碼示例。如果您正苦於以下問題:Java GLES20.glReadPixels方法的具體用法?Java GLES20.glReadPixels怎麽用?Java GLES20.glReadPixels使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在android.opengl.GLES20的用法示例。


在下文中一共展示了GLES20.glReadPixels方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。

示例1: draw

import android.opengl.GLES20; //導入方法依賴的package包/類
@Override
public void draw(int texture) {
    onTaskExec();
    // Remember the caller's blend state so it can be restored after the pass.
    final boolean blendWasEnabled = GLES20.glIsEnabled(GLES20.GL_BLEND);
    GLES20.glDisable(GLES20.GL_BLEND);
    // Save the current viewport and switch to the export size.
    GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, lastViewPort, 0);
    GLES20.glViewport(0, 0, mWidth, mHeight);
    // Route through the scale filter first when one is configured.
    final int sourceTexture =
            (mScaleFilter != null) ? mScaleFilter.drawToTexture(texture) : texture;
    mExportFilter.draw(sourceTexture);
    // Read back only height*3/8 rows of RGBA, i.e. mWidth*mHeight*3/2 bytes —
    // presumably the byte size of one YUV420 frame packed into the RGBA
    // readback (class is a YUV output filter) — TODO confirm against mTempBuffer's size.
    GLES20.glReadPixels(0, 0, mWidth, mHeight * 3 / 8, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mTempBuffer);
    // Restore viewport and blend state.
    GLES20.glViewport(lastViewPort[0], lastViewPort[1], lastViewPort[2], lastViewPort[3]);
    if (blendWasEnabled) {
        GLES20.glEnable(GLES20.GL_BLEND);
    }
}
 
開發者ID:aiyaapp,項目名稱:AAVT,代碼行數:19,代碼來源:YuvOutputFilter.java

示例2: getRenderBufferBitmap

import android.opengl.GLES20; //導入方法依賴的package包/類
/** Reads the current render buffer back as RGBA and wraps it in an ARGB_8888 Bitmap. */
private Bitmap getRenderBufferBitmap() {
    final int byteCount = renderBufferWidth * renderBufferHeight * 4;
    final ByteBuffer pixelData = ByteBuffer.allocateDirect(byteCount);
    GLES20.glReadPixels(0, 0, renderBufferWidth, renderBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelData);
    final Bitmap result = Bitmap.createBitmap(renderBufferWidth, renderBufferHeight, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(pixelData);
    return result;
}
 
開發者ID:MLNO,項目名稱:airgram,代碼行數:8,代碼來源:PhotoFilterView.java

示例3: capture

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Renders one frame off-screen at the requested size and captures it as a Bitmap.
 * GL's origin is bottom-left, so the rows are flipped vertically before being
 * copied into the bitmap. Releases the EGL environment before returning, so the
 * instance cannot capture again afterwards.
 *
 * @param width  capture width in pixels
 * @param height capture height in pixels
 * @return an upright ARGB_8888 bitmap of the rendered frame
 */
public Bitmap capture(int width, int height) {
    mPipeline.onSurfaceChanged(null, width, height);
    mPipeline.startRender();
    mPipeline.onDrawFrame(null);

    // FIX: the original mixed the mWidth/mHeight fields with the width/height
    // parameters when sizing and flipping the pixel array, which corrupts the
    // output (or throws) whenever they differ. Use the parameters throughout.
    IntBuffer ib = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, ib);

    // Flip rows: GL reads bottom-up, Bitmap expects top-down.
    int[] flipped = new int[width * height];
    int[] ia = ib.array();
    for (int row = 0; row < height; row++) {
        System.arraycopy(ia, row * width, flipped, (height - row - 1) * width, width);
    }
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(IntBuffer.wrap(flipped));

    mPipeline.onSurfaceDestroyed();

    // Release the EGL environment.
    mInputSurface.release();
    mEgl.release();
    return bitmap;
}
 
開發者ID:uestccokey,項目名稱:EZFilter,代碼行數:24,代碼來源:OffscreenImage.java

示例4: testRgbRendering

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Verifies that an RGB texture drawn with {@code GlRectDrawer#drawRgb} onto a
 * pixel-buffer surface and read back with {@code glReadPixels} is pixel-perfect
 * against the source data (after stripping the alpha channel from the RGBA
 * readback).
 */
@Test
@SmallTest
public void testRgbRendering() {
  // Create EGL base with a pixel buffer as display output.
  final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
  eglBase.createPbufferSurface(WIDTH, HEIGHT);
  eglBase.makeCurrent();

  // Create RGB byte buffer plane with random content. SEED makes the data
  // deterministic across runs.
  final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
  final Random random = new Random(SEED);
  random.nextBytes(rgbPlane.array());

  // Upload the RGB byte buffer data as a texture.
  final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
  GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
  GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
  GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
      GLES20.GL_UNSIGNED_BYTE, rgbPlane);
  GlUtil.checkNoGLES2Error("glTexImage2D");

  // Draw the RGB frame onto the pixel buffer.
  final GlRectDrawer drawer = new GlRectDrawer();
  drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
      0 /* viewportY */, WIDTH, HEIGHT);

  // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
  final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
  GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
  GlUtil.checkNoGLES2Error("glReadPixels");

  // Assert rendered image is pixel perfect to source RGB.
  assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);

  drawer.release();
  GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
  eglBase.release();
}
 
開發者ID:lgyjg,項目名稱:AndroidRTC,代碼行數:39,代碼來源:GlRectDrawerTest.java

示例5: getScreenShot

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Captures the current GL framebuffer as an upright ARGB_8888 Bitmap.
 * Must be called on a thread with a current GL context.
 *
 * @param width  framebuffer width in pixels
 * @param height framebuffer height in pixels
 * @return the captured bitmap, flipped to top-down row order
 */
public static Bitmap getScreenShot(int width, int height){
    IntBuffer pixelBuffer = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
            pixelBuffer);
    int[] pixelMirroredArray = new int[width * height];
    int[] pixelArray = pixelBuffer.array();
    // GL's origin is bottom-left; mirror the rows so the bitmap is upright.
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            pixelMirroredArray[(height - i - 1) * width + j] = pixelArray[i * width + j];
        }
    }
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    // FIX: the original never wrote the mirrored pixels into the bitmap, so it
    // always returned a fully transparent image.
    bmp.copyPixelsFromBuffer(IntBuffer.wrap(pixelMirroredArray));
    return bmp;
}
 
開發者ID:zhangyaqiang,項目名稱:Fatigue-Detection,代碼行數:15,代碼來源:BitmapUtils.java

示例6: sendImage

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Reads the current framebuffer as RGBA, logs how long the readback took,
 * and saves the pixels as a PNG under external storage.
 */
static void sendImage(int width, int height) {
    final ByteBuffer pixelData = ByteBuffer.allocateDirect(width * height * 4);
    pixelData.position(0);
    final long startNs = System.nanoTime();
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
            pixelData);
    final long endNs = System.nanoTime();
    Log.d("TryOpenGL", "glReadPixels: " + (endNs - startNs));
    final String outputPath = Environment.getExternalStorageDirectory().getAbsolutePath()
            + "/gl_dump_" + width + "_" + height + ".png";
    saveRgb2Bitmap(pixelData, outputPath, width, height);
}
 
開發者ID:Piasy,項目名稱:OpenGLESTutorial-Android,代碼行數:12,代碼來源:Utils.java

示例7: getBitmap

import android.opengl.GLES20; //導入方法依賴的package包/類
/** Reads the mWidth x mHeight surface as little-endian RGBA and returns it as a Bitmap. */
public Bitmap getBitmap() {
    final ByteBuffer pixelData = ByteBuffer.allocateDirect(mWidth * mHeight * 4);
    pixelData.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelData);
    final Bitmap result = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(pixelData);
    return result;
}
 
開發者ID:hoanganhtuan95ptit,項目名稱:EditPhoto,代碼行數:9,代碼來源:ImageEglSurface.java

示例8: getFrameBitmap

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Captures the current frame as an upright ARGB_8888 Bitmap.
 * Throws if the expected EGL context/surface is not current on this thread.
 */
public Bitmap getFrameBitmap() {
    if (!mEglCore.isCurrent(mEGLSurface)) {
        throw new RuntimeException("Expected EGL context/surface is not current");
    }

    // glReadPixels fills a direct ByteBuffer with big-endian RGBA bytes, which
    // is the layout Bitmap's "copy pixels" path expects. GL's origin is
    // bottom-left, though, so the raw copy comes out vertically flipped and is
    // corrected with a -1 y-scale below.
    //
    // NOTE(review): the direct buffer is reallocated on every call; reusing it
    // would cut GC churn if this runs per-frame.
    final int width = getWidth();
    final int height = getHeight();
    final ByteBuffer pixelData = ByteBuffer.allocateDirect(width * height * 4);
    pixelData.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelData);
    //GlUtil.checkGlError("glReadPixels");
    pixelData.rewind();

    final Bitmap upsideDown = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    upsideDown.copyPixelsFromBuffer(pixelData);

    // Flip vertically so the result matches what appears on screen.
    final Matrix flip = new Matrix();
    flip.preScale(1f, -1f);
    final Bitmap result = Bitmap.createBitmap(
            upsideDown, 0, 0, upsideDown.getWidth(), upsideDown.getHeight(), flip, false);

    upsideDown.recycle();
    return result;
}
 
開發者ID:vipycm,項目名稱:mao-android,代碼行數:38,代碼來源:EglSurfaceBase.java

示例9: getFrame

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Reads the current frame's RGBA pixels into the reusable pixel buffer.
 * The returned buffer is owned by this object and overwritten on the next
 * call, so callers must consume it before requesting another frame.
 */
public ByteBuffer getFrame() {
    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
    return mPixelBuf;
}
 
開發者ID:MLNO,項目名稱:airgram,代碼行數:6,代碼來源:OutputSurface.java

示例10: drawEnhancePass

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Runs the photo "enhance" pass. On the first call it converts the source image
 * to HSV on the GPU, reads the HSV pixels back, computes a CDT lookup texture
 * from them on the CPU, and caches both textures (guarded by hsvGenerated).
 * Every call then draws the enhance shader into renderTexture[1] using the
 * cached HSV image and lookup texture, with intensity 0 when showing the
 * original.
 */
private void drawEnhancePass() {
    if (!hsvGenerated) {
        // Pass 1: render the RGB source (renderTexture[1]) through the
        // RGB->HSV shader into renderTexture[0].
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[0]);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[0], 0);
        GLES20.glClear(0);

        GLES20.glUseProgram(rgbToHsvShaderProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[1]);
        GLES20.glUniform1i(rgbToHsvSourceImageHandle, 0);
        GLES20.glEnableVertexAttribArray(rgbToHsvInputTexCoordHandle);
        GLES20.glVertexAttribPointer(rgbToHsvInputTexCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
        GLES20.glEnableVertexAttribArray(rgbToHsvPositionHandle);
        GLES20.glVertexAttribPointer(rgbToHsvPositionHandle, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        // Read the HSV result back so the CDT can be computed on the CPU.
        ByteBuffer hsvBuffer = ByteBuffer.allocateDirect(renderBufferWidth * renderBufferHeight * 4);
        GLES20.glReadPixels(0, 0, renderBufferWidth, renderBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, hsvBuffer);

        // Cache the HSV image in enhanceTextures[0].
        GLES20.glBindTexture(GL10.GL_TEXTURE_2D, enhanceTextures[0]);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, renderBufferWidth, renderBufferHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, hsvBuffer);

        // Compute the CDT lookup data from the HSV pixels. On failure, buffer
        // stays null and the 256x16 upload below receives null data (texture
        // contents then undefined) — the failure is only logged.
        ByteBuffer buffer = null;
        try {
            buffer = ByteBuffer.allocateDirect(PGPhotoEnhanceSegments * PGPhotoEnhanceSegments * PGPhotoEnhanceHistogramBins * 4);
            Utilities.calcCDT(hsvBuffer, renderBufferWidth, renderBufferHeight, buffer);
        } catch (Exception e) {
            FileLog.e("tmessages", e);
        }

        // Cache the 256x16 CDT lookup in enhanceTextures[1].
        GLES20.glBindTexture(GL10.GL_TEXTURE_2D, enhanceTextures[1]);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256, 16, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);

        hsvGenerated = true;
    }

    // Pass 2 (every call): run the enhance shader into renderTexture[1] using
    // the cached HSV image (unit 0) and CDT lookup (unit 1).
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[1]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[1], 0);
    GLES20.glClear(0);

    GLES20.glUseProgram(enhanceShaderProgram);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, enhanceTextures[0]);
    GLES20.glUniform1i(enhanceSourceImageHandle, 0);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, enhanceTextures[1]);
    GLES20.glUniform1i(enhanceInputImageTexture2Handle, 1);
    // Intensity 0 shows the unmodified source.
    if (showOriginal) {
        GLES20.glUniform1f(enhanceIntensityHandle, 0);
    } else {
        GLES20.glUniform1f(enhanceIntensityHandle, getEnhanceValue());
    }

    GLES20.glEnableVertexAttribArray(enhanceInputTexCoordHandle);
    GLES20.glVertexAttribPointer(enhanceInputTexCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
    GLES20.glEnableVertexAttribArray(enhancePositionHandle);
    GLES20.glVertexAttribPointer(enhancePositionHandle, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
 
開發者ID:MLNO,項目名稱:airgram,代碼行數:68,代碼來源:PhotoFilterView.java

示例11: testDrawFrame

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Test for {@link GLTools#drawFrame(Buffer, int, int, int)}.
 *
 * Draws two different 2x2 RGB_565 frames through the GL pipeline, reads each
 * back with glReadPixels, and asserts per-pixel colors. The expected colors are
 * row-swapped relative to the input because GL's origin is bottom-left while
 * Bitmap rows are top-down.
 *
 * @throws Exception by some fails
 */
@Test
public final void testDrawFrame() throws Exception {
    // Build a full off-screen EGL stack: display, config, context, pbuffer surface.
    final EGLDisplay eglDisplay = GLTools.newDisplay();
    final EGLConfig eglConfig = GLTools.newConfig(eglDisplay, true);
    final EGLContext eglContext = GLTools.newContext(eglDisplay, eglConfig);
    final EGLSurface eglSurface =
            GLTools.newSurface(eglDisplay, eglConfig, FRAME_SIZE, FRAME_SIZE);

    GLTools.makeCurrent(eglDisplay, eglSurface, eglContext);

    final int[] attrs = new int[5];
    GLTools.newShader(attrs);
    final int texture = GLTools.newTexture(TEXTURE_LEVEL);

    // 1-st pass: upload a 2x2 RGB_565 frame (RED GREEN / BLUE YELLOW).
    Bitmap bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.RGB_565);
    bitmap.setPixel(0, 0, Color.RED); bitmap.setPixel(1, 0, Color.GREEN);
    bitmap.setPixel(0, 1, Color.BLUE); bitmap.setPixel(1, 1, Color.YELLOW);
    ByteBuffer buffer = ByteBuffer.allocate(FRAME_SIZE * FRAME_SIZE * 2);
    bitmap.copyPixelsToBuffer(buffer); bitmap.recycle();

    GLTools.makeCurrent(eglDisplay, eglSurface, eglContext);
    GLTools.drawFrame(buffer, FRAME_SIZE, FRAME_SIZE, 0); buffer.clear();
    //GLTools.swapBuffers(eglDisplay, eglSurface);

    // Read the rendered frame back as RGBA.
    buffer = ByteBuffer.allocateDirect(FRAME_SIZE * FRAME_SIZE * 4);
    GLES20.glReadPixels(0, 0, FRAME_SIZE, FRAME_SIZE,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);

    bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer); buffer.clear(); //bitmap.eraseColor(Color.BLACK);

    // Rows come back vertically flipped (GL origin bottom-left).
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 0)}, new int[]{Color.BLUE});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 0)}, new int[]{Color.YELLOW});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 1)}, new int[]{Color.RED});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 1)}, new int[]{Color.GREEN});
    bitmap.recycle();

    // 2-nd pass: same flow with the colors rearranged, to prove the texture
    // contents actually change between draws.
    bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.RGB_565);
    bitmap.setPixel(0, 0, Color.YELLOW); bitmap.setPixel(1, 0, Color.BLUE);
    bitmap.setPixel(0, 1, Color.GREEN); bitmap.setPixel(1, 1, Color.RED);
    buffer = ByteBuffer.allocate(FRAME_SIZE * FRAME_SIZE * 2);
    bitmap.copyPixelsToBuffer(buffer); bitmap.recycle();

    GLTools.makeCurrent(eglDisplay, eglSurface, eglContext);
    GLTools.drawFrame(buffer, FRAME_SIZE, FRAME_SIZE, 0); buffer.clear();
    //GLTools.swapBuffers(eglDisplay, eglSurface);

    buffer = ByteBuffer.allocateDirect(FRAME_SIZE * FRAME_SIZE * 4);
    GLES20.glReadPixels(0, 0, FRAME_SIZE, FRAME_SIZE,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);

    bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer); buffer.clear(); //bitmap.eraseColor(Color.BLACK);

    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 0)}, new int[]{Color.GREEN});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 0)}, new int[]{Color.RED});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 1)}, new int[]{Color.YELLOW});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 1)}, new int[]{Color.BLUE});
    bitmap.recycle();

    // Tear down in reverse order of creation.
    GLTools.closeTexture(texture, TEXTURE_LEVEL);
    GLTools.closeShader(attrs);

    GLTools.closeSurface(eglDisplay, eglSurface);
    GLTools.closeContext(eglDisplay, eglContext);
    GLTools.closeDisplay(eglDisplay);
}
 
開發者ID:Nik-Gleb,項目名稱:mpeg-encoder,代碼行數:74,代碼來源:GLToolsAndroidTest.java

示例12: notifyCallbacks

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Renders the frame into an off-screen framebuffer once per registered frame
 * listener, reads the pixels back, and delivers the result as a Bitmap.
 * Listeners are one-shot: each is removed from frameListeners as it is served
 * (unless skipped for FPS reduction, in which case it stays registered).
 *
 * @param frame       the frame to render for the listeners
 * @param wasRendered whether this frame was rendered to the main surface;
 *                    listeners with applyFpsReduction only fire on rendered frames
 */
private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
  if (frameListeners.isEmpty())
    return;

  // Build the draw matrix around the texture center (0.5, 0.5).
  drawMatrix.reset();
  drawMatrix.preTranslate(0.5f, 0.5f);
  if (mirror)
    drawMatrix.preScale(-1f, 1f);
  drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
  drawMatrix.preTranslate(-0.5f, -0.5f);

  // Iterator is used so served listeners can be removed in place.
  Iterator<FrameListenerAndParams> it = frameListeners.iterator();
  while (it.hasNext()) {
    FrameListenerAndParams listenerAndParams = it.next();
    if (!wasRendered && listenerAndParams.applyFpsReduction) {
      continue;
    }
    it.remove();

    final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
    final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());

    // A degenerate scale yields no image; signal the listener with null.
    if (scaledWidth == 0 || scaledHeight == 0) {
      listenerAndParams.listener.onFrame(null);
      continue;
    }

    // Lazily create the shared capture framebuffer and (re)size it per listener.
    if (bitmapTextureFramebuffer == null) {
      bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    }
    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);

    // Clear to transparent black, then draw the frame at the scaled size.
    GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
        0 /* viewportY */, scaledWidth, scaledHeight);

    // Read back RGBA and hand the listener a Bitmap copy.
    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
        0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);

    // Unbind so subsequent rendering targets the default framebuffer again.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");

    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);
    listenerAndParams.listener.onFrame(bitmap);
  }
}
 
開發者ID:Piasy,項目名稱:AppRTC-Android,代碼行數:55,代碼來源:EglRenderer.java

示例13: testLateReturnFrame

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Test disposing the SurfaceTextureHelper while holding a pending texture frame. The pending
 * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
 * buffer and reading it back with glReadPixels().
 */
@Test
@MediumTest
public void testLateReturnFrame() throws InterruptedException {
  final int width = 16;
  final int height = 16;
  // Create EGL base with a pixel buffer as display output.
  final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
  eglBase.createPbufferSurface(width, height);

  // Create SurfaceTextureHelper and listener.
  final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
      "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
  final MockTextureListener listener = new MockTextureListener();
  surfaceTextureHelper.startListening(listener);
  surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);

  // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
  // |surfaceTextureHelper| as the target EGLSurface.
  final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
  eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
  assertEquals(eglOesBase.surfaceWidth(), width);
  assertEquals(eglOesBase.surfaceHeight(), height);

  final int red = 79;
  final int green = 66;
  final int blue = 161;
  // Draw a constant color frame onto the SurfaceTexture.
  eglOesBase.makeCurrent();
  GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  // swapBuffers() will ultimately trigger onTextureFrameAvailable().
  eglOesBase.swapBuffers();
  eglOesBase.release();

  // Wait for OES texture frame.
  listener.waitForNewFrame();
  // Diconnect while holding the frame.
  surfaceTextureHelper.dispose();

  // Draw the pending texture frame onto the pixel buffer.
  eglBase.makeCurrent();
  final GlRectDrawer drawer = new GlRectDrawer();
  drawer.drawOes(
      listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
  drawer.release();

  // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
  final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
  GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
  GlUtil.checkNoGLES2Error("glReadPixels");
  eglBase.release();

  // Assert rendered image is expected constant color.
  while (rgbaData.hasRemaining()) {
    assertEquals(rgbaData.get() & 0xFF, red);
    assertEquals(rgbaData.get() & 0xFF, green);
    assertEquals(rgbaData.get() & 0xFF, blue);
    assertEquals(rgbaData.get() & 0xFF, 255);
  }
  // Late frame return after everything has been disposed and released.
  surfaceTextureHelper.returnTextureFrame();
}
 
開發者ID:lgyjg,項目名稱:AndroidRTC,代碼行數:68,代碼來源:SurfaceTextureHelperTest.java

示例14: notifyCallbacks

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Renders the I420 frame once per registered frame listener into an off-screen
 * framebuffer, reads the pixels back, and delivers the result as a Bitmap.
 * The listener list is snapshotted and cleared up front, so all listeners are
 * one-shot and re-registration during callbacks is safe.
 *
 * @param frame     the frame to render (YUV textures or an OES texture)
 * @param texMatrix texture transform matrix supplied with the frame
 */
private void notifyCallbacks(VideoRenderer.I420Frame frame, float[] texMatrix) {
  // Make temporary copy of callback list to avoid ConcurrentModificationException, in case
  // callbacks call addFramelistener or removeFrameListener.
  final ArrayList<FrameListenerAndParams> tmpList;
  if (frameListeners.isEmpty())
    return;
  tmpList = new ArrayList<>(frameListeners);
  frameListeners.clear();

  // Compose: texture transform, optional horizontal mirror, then a vertical
  // flip so the readback is upright for Bitmap.
  final float[] bitmapMatrix = RendererCommon.multiplyMatrices(
      RendererCommon.multiplyMatrices(texMatrix,
          mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix()),
      RendererCommon.verticalFlipMatrix());

  for (FrameListenerAndParams listenerAndParams : tmpList) {
    final int scaledWidth = (int) (listenerAndParams.scale * frame.rotatedWidth());
    final int scaledHeight = (int) (listenerAndParams.scale * frame.rotatedHeight());

    // A degenerate scale yields no image; signal the listener with null.
    if (scaledWidth == 0 || scaledHeight == 0) {
      listenerAndParams.listener.onFrame(null);
      continue;
    }

    // Lazily create the shared capture framebuffer and (re)size it per listener.
    if (bitmapTextureFramebuffer == null) {
      bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    }
    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);

    // NOTE(review): unlike the wasRendered variant of this method, the
    // framebuffer is not cleared before drawing here — confirm intended.
    if (frame.yuvFrame) {
      listenerAndParams.drawer.drawYuv(yuvTextures, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    } else {
      listenerAndParams.drawer.drawOes(frame.textureId, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    }

    // Read back RGBA and hand the listener a Bitmap copy.
    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
        0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);

    // Unbind so subsequent rendering targets the default framebuffer again.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");

    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);
    listenerAndParams.listener.onFrame(bitmap);
  }
}
 
開發者ID:lgyjg,項目名稱:AndroidRTC,代碼行數:54,代碼來源:EglRenderer.java

示例15: notifyCallbacks

import android.opengl.GLES20; //導入方法依賴的package包/類
/**
 * Renders the I420 frame once per registered frame listener into an off-screen
 * framebuffer, reads the pixels back, and delivers the result as a Bitmap.
 * Served listeners are removed in place; listeners with applyFpsReduction stay
 * registered when the frame was not rendered to the main surface.
 *
 * @param frame       the frame to render (YUV textures or an OES texture)
 * @param yuvTextures texture ids holding the Y/U/V planes for yuvFrame frames
 * @param texMatrix   texture transform matrix supplied with the frame
 * @param wasRendered whether this frame was rendered to the main surface
 */
private void notifyCallbacks(
    VideoRenderer.I420Frame frame, int[] yuvTextures, float[] texMatrix, boolean wasRendered) {
  if (frameListeners.isEmpty())
    return;

  // Compose: texture transform, optional horizontal mirror, then a vertical
  // flip so the readback is upright for Bitmap.
  final float[] bitmapMatrix = RendererCommon.multiplyMatrices(
      RendererCommon.multiplyMatrices(texMatrix,
          mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix()),
      RendererCommon.verticalFlipMatrix());

  // Iterator is used so served listeners can be removed in place.
  Iterator<FrameListenerAndParams> it = frameListeners.iterator();
  while (it.hasNext()) {
    FrameListenerAndParams listenerAndParams = it.next();
    if (!wasRendered && listenerAndParams.applyFpsReduction) {
      continue;
    }
    it.remove();

    final int scaledWidth = (int) (listenerAndParams.scale * frame.rotatedWidth());
    final int scaledHeight = (int) (listenerAndParams.scale * frame.rotatedHeight());

    // A degenerate scale yields no image; signal the listener with null.
    if (scaledWidth == 0 || scaledHeight == 0) {
      listenerAndParams.listener.onFrame(null);
      continue;
    }

    // Lazily create the shared capture framebuffer and (re)size it per listener.
    if (bitmapTextureFramebuffer == null) {
      bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    }
    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);

    // Clear to transparent black, then draw via the YUV or OES path.
    GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    if (frame.yuvFrame) {
      listenerAndParams.drawer.drawYuv(yuvTextures, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    } else {
      listenerAndParams.drawer.drawOes(frame.textureId, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    }

    // Read back RGBA and hand the listener a Bitmap copy.
    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
        0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);

    // Unbind so subsequent rendering targets the default framebuffer again.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");

    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);
    listenerAndParams.listener.onFrame(bitmap);
  }
}
 
開發者ID:Piasy,項目名稱:VideoCRE,代碼行數:59,代碼來源:EglRenderer.java


注:本文中的android.opengl.GLES20.glReadPixels方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。