

Java Bitmap.copyPixelsFromBuffer Method Code Examples

This article collects typical code examples of the Java method android.graphics.Bitmap.copyPixelsFromBuffer. If you are wondering what exactly Bitmap.copyPixelsFromBuffer does, how to call it, or where to find examples of it in real code, the hand-picked samples below should help. You can also explore further usage examples of the containing class, android.graphics.Bitmap.


The sections below present 15 code examples of Bitmap.copyPixelsFromBuffer, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
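
Before the collected examples, a minimal usage sketch of the call pattern may help: create the bitmap, fill a buffer of at least Bitmap.getByteCount() bytes in the bitmap's native pixel layout, rewind it, and copy it in. The helper name and the solid-red fill below are purely illustrative and not taken from any of the projects quoted later.

import android.graphics.Bitmap;
import java.nio.ByteBuffer;

// Minimal sketch (illustrative): build a solid red ARGB_8888 bitmap from a ByteBuffer.
public static Bitmap solidRedBitmap(int width, int height) {
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    // The buffer must hold at least bitmap.getByteCount() bytes, laid out exactly as the
    // config stores pixels in memory (ARGB_8888 is stored as R, G, B, A bytes per pixel).
    ByteBuffer buffer = ByteBuffer.allocate(bitmap.getByteCount());
    for (int i = 0; i < width * height; i++) {
        buffer.put((byte) 0xFF); // R
        buffer.put((byte) 0x00); // G
        buffer.put((byte) 0x00); // B
        buffer.put((byte) 0xFF); // A
    }
    buffer.rewind(); // copyPixelsFromBuffer reads from the buffer's current position
    bitmap.copyPixelsFromBuffer(buffer);
    return bitmap;
}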

Example 1: imageAvailable

import android.graphics.Bitmap; // import the package/class this method depends on
/**
 * Takes a screenshot when a new image becomes available.
 *
 * @param reader   the ImageReader that produced the image
 * @param path     the file path the screenshot is saved to
 * @param callback the callback notified about the captured shot
 */
private void imageAvailable(ImageReader reader, String path, IRecordShotCallback callback) {
    mImageReaderLock.lock();
    try{
        Image image = reader.acquireLatestImage();

        if(image == null) return;
        int width = image.getWidth();
        int height = image.getHeight();
        final Image.Plane[] planes = image.getPlanes();
        final ByteBuffer buffer = planes[0].getBuffer();
        int pixelStride = planes[0].getPixelStride();
        int rowStride = planes[0].getRowStride();
        int rowPadding = rowStride - pixelStride * width;
        Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(buffer);
        bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
        image.close();
        // Release here, otherwise many extra frames will keep being captured
        release();

        saveBitmap(path, bitmap, callback);
    }finally {
        mImageReaderLock.unlock();
    }
}
 
Developer: sunshinecoast, Project: ScreenRecordCaptureMaster, Lines: 33, Source: CaptureScreenImage.java
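
Examples 1, 5, 6 and 8 all size their intermediate bitmap as width + rowPadding / pixelStride pixels. The reason is row alignment: the ImageReader plane may pad each row, so the bitmap's row length has to match the buffer's row length for copyPixelsFromBuffer to stay aligned, and the padding columns are cropped away afterwards. A small worked sketch of that arithmetic, using hypothetical values rather than anything from the quoted projects:

// Illustrative arithmetic only (hypothetical values, not from the quoted projects).
static int paddedBitmapWidth() {
    int width = 1080;                                   // image width in pixels
    int pixelStride = 4;                                // bytes per ARGB_8888 pixel
    int rowStride = 4352;                               // plane.getRowStride(); unpadded would be 4 * 1080 = 4320
    int rowPadding = rowStride - pixelStride * width;   // 4352 - 4320 = 32 padding bytes per row
    // A bitmap this wide consumes exactly rowStride bytes per row, so copyPixelsFromBuffer()
    // lines up with the buffer; cropping back to `width` drops the 32 / 4 = 8 padding columns.
    return width + rowPadding / pixelStride;            // 1080 + 8 = 1088 pixels
}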

Example 2: grayscaleImageToBitmap

import android.graphics.Bitmap; // import the package/class this method depends on
/**
 * Converts a GrayscaleImage to a Bitmap.
 */
@NonNull
public Bitmap grayscaleImageToBitmap(@NonNull GrayscaleImage img) {
    String stopwatchSessionId = log.startStopwatch(getStopwatchSessionId("grayscaleImageToBitmap"));

    int size = img.width * img.height;
    int[] buffer = new int[size];

    for (int index = 0; index < size; index++) {
        // "AND 0xff" for the signed byte issue
        int luminance = img.data[index] & 0xff;
        // normal encoding for bitmap
        buffer[index] = (0xff000000 | luminance << 16 | luminance << 8 | luminance);
    }

    Bitmap bitmap = Bitmap.createBitmap(img.width, img.height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(IntBuffer.wrap(buffer));

    log.stopStopwatch(stopwatchSessionId);
    return bitmap;
}
 
Developer: BioID-GmbH, Project: BWS-Android, Lines: 24, Source: ImageFormatConverter.java

Example 3: readPixel

import android.graphics.Bitmap; // import the package/class this method depends on
/**
 * Tries to recycle a pooled bitmap and reads raw pixels into the bitmap. Internally always reuses
 * a (potentially) big buffer to update the texture. The buffer grows to the largest size when needed and
 * is never shrunk, so be careful what you read; e.g. a full-HD frame will take more than 8 MiB. However, to
 * lower the pressure on the Android GC we sacrifice throughput by synchronizing globally, so NEVER
 * call this method from the UI thread.
 * <p>
 * The format is:
 * <pre>
 *     bool available   1 byte
 *     int width        4 byte
 *     int height       4 byte
 *     int type         1 byte, [2 = ARGB_8888]
 * </pre>
 *
 * @param in the source
 * @return the instance or null if not available
 * @throws IOException
 */
@Nullable
public static Bitmap readPixel(DataInput in) throws IOException {
    boolean available = in.readBoolean();
    if (!available) {
        return null;
    }
    int width = in.readInt();
    int height = in.readInt();
    int type = in.readUnsignedByte();
    if (type != Config.ARGB_8888.ordinal()) {
        throw new Panic("format not implemented " + type);
    }
    int bytes = width * height * 4;
    Bitmap bmp = getDefaultPool().borrowBitmap(width, height, Config.ARGB_8888);
    synchronized (BitmapPoolFactory.class) {
        if (sTmp.capacity() < bytes) {
            sTmp = ByteBuffer.allocate(bytes);
        }
        sTmp.clear();
        in.readFully(sTmp.array(), 0, bytes);
        sTmp.limit(bytes);
        bmp.copyPixelsFromBuffer(sTmp);
    }
    return bmp;
}
 
Developer: worldiety, Project: homunculus, Lines: 45, Source: BitmapPoolFactory.java
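
The javadoc above documents a small wire format: an availability flag (1 byte), width and height (4 bytes each), a config type byte, then the raw ARGB_8888 pixels. The matching writer is not shown in the quoted project; a hedged sketch of what it could look like follows, built on copyPixelsToBuffer (the mirror of the method this page covers). The name writePixel and its exact signature are assumptions.

import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;

// Hypothetical counterpart to readPixel(); assumes the bitmap is ARGB_8888, matching the reader.
public static void writePixel(DataOutput out, Bitmap bmp) throws IOException {
    if (bmp == null) {
        out.writeBoolean(false);     // "available" flag
        return;
    }
    out.writeBoolean(true);
    out.writeInt(bmp.getWidth());
    out.writeInt(bmp.getHeight());
    out.writeByte(Config.ARGB_8888.ordinal());
    ByteBuffer buffer = ByteBuffer.allocate(bmp.getWidth() * bmp.getHeight() * 4);
    bmp.copyPixelsToBuffer(buffer);  // fills the buffer in the bitmap's native layout
    out.write(buffer.array(), 0, buffer.position());
}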

Example 4: capture

import android.graphics.Bitmap; // import the package/class this method depends on
public Bitmap capture(int width, int height) {
    mPipeline.onSurfaceChanged(null, width, height);
    mPipeline.startRender();
    mPipeline.onDrawFrame(null);

    int[] iat = new int[width * height];
    IntBuffer ib = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);

    // glReadPixels returns rows bottom-up, so flip the image vertically while copying
    int[] ia = ib.array();
    for (int i = 0; i < height; i++) {
        System.arraycopy(ia, i * width, iat, (height - i - 1) * width, width);
    }
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(IntBuffer.wrap(iat));

    mPipeline.onSurfaceDestroyed();

    // release the EGL environment
    mInputSurface.release();
    mEgl.release();
    return bitmap;
}
 
Developer: uestccokey, Project: EZFilter, Lines: 24, Source: OffscreenImage.java

Example 5: getBitmapFunction

import android.graphics.Bitmap; // import the package/class this method depends on
private Function<ImageInfo, Bitmap> getBitmapFunction() {
    return new Function<ImageInfo, Bitmap>() {
        @Override
        public Bitmap apply(ImageInfo imageInfo) throws Exception {
            Bitmap bitmap = Bitmap.createBitmap(imageInfo.width + imageInfo.rowPadding / imageInfo.pixelStride, imageInfo.height,
                    Bitmap.Config.ARGB_8888);
            bitmap.copyPixelsFromBuffer(imageInfo.byteBuffer);
            bitmap = Bitmap.createBitmap(bitmap, 0, 0, imageInfo.width, imageInfo.height);

            return bitmap;
        }
    };
}
 
Developer: OddCN, Project: screen-share-to-browser, Lines: 14, Source: RecordService.java

Example 6: doInBackground

import android.graphics.Bitmap; // import the package/class this method depends on
@Override
protected Bitmap doInBackground(Image... params) {

    if (params == null || params.length < 1 || params[0] == null) {

        L.e(" params is null ...");
        return null;
    }

    Image image = params[0];

    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    // stride between adjacent pixels, in bytes
    int pixelStride = planes[0].getPixelStride();
    // stride of a full row, in bytes (may include padding)
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
    image.close();

    compressAndWrite(bitmap);

    return null;
}
 
Developer: kaixuanluo, Project: pc-android-controller-android, Lines: 31, Source: ScreenCaptureSocket.java

Example 7: testColorBitmap

import android.graphics.Bitmap; // import the package/class this method depends on
/**
 * Tests the {@link Coloring#colorBitmap(Bitmap, int)} method.
 * <p>
 * Due to {@link org.robolectric.shadows.ShadowBitmap}'s empty implementation, this won't really work, so we can only test the transparency.
 */
@Test
public final void testColorBitmap() {
    final Bitmap.Config config = Bitmap.Config.ARGB_8888;
    final int width = 10, height = 10;
    final int[] allReds = new int[width * height];
    for (int i = 0; i < width * height; i++) {
        allReds[i] = Color.RED;
    }

    final Bitmap redSquare = Bitmap.createBitmap(allReds, width, height, config);
    assertNotNull("Created Bitmap is null", redSquare);

    // initialize red Bitmap's internal structures, otherwise it won't draw properly
    redSquare.prepareToDraw();
    final byte[] redPixels = new byte[redSquare.getWidth() * redSquare.getHeight() * 8];
    final ByteBuffer redBuffer = ByteBuffer.wrap(redPixels);
    redBuffer.order(ByteOrder.nativeOrder());
    redSquare.copyPixelsToBuffer(redBuffer);
    redSquare.copyPixelsFromBuffer(redBuffer);
    redSquare.prepareToDraw();

    final String redPixel = hex(redSquare.getPixel(width / 2, height / 2));
    final String errorRed = String.format("Error while creating red bitmap, middle pixel is %s", redPixel);
    assertEquals(errorRed, hex(Color.TRANSPARENT), redPixel);

    final Bitmap greenSquare = Coloring.colorBitmap(redSquare, Color.GREEN);
    assertNotNull("Created colored Bitmap is null", greenSquare);
    final String greenPixel = hex(greenSquare.getPixel(width / 2, height / 2));
    final String errorGreen = String.format("Error while coloring bitmap, middle pixel is %s", greenPixel);
    assertEquals(errorGreen, hex(Color.TRANSPARENT), greenPixel);
}
 
Developer: milosmns, Project: silly-android, Lines: 37, Source: ColoringTest.java

Example 8: toBitmap

import android.graphics.Bitmap; // import the package/class this method depends on
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
public static Bitmap toBitmap(Image image) {
    Image.Plane plane = image.getPlanes()[0];
    ByteBuffer buffer = plane.getBuffer();
    buffer.position(0);
    int pixelStride = plane.getPixelStride();
    int rowPadding = plane.getRowStride() - pixelStride * image.getWidth();
    Bitmap bitmap = Bitmap.createBitmap(image.getWidth() + rowPadding / pixelStride, image.getHeight(), Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, image.getWidth(), image.getHeight());
    return bitmap;
}
 
Developer: hyb1996, Project: Auto.js, Lines: 13, Source: ImageWrapper.java

Example 9: getFrame

import android.graphics.Bitmap; // import the package/class this method depends on
/**
 * This method gets a frame from the frame queue and returns the image that matches the format specified by the
 * configVideoSource method.
 *
 * @param frame specifies the frame object to hold image.
 * @return true if success, false otherwise.
 */
@Override
public boolean getFrame(Mat frame)
{
    boolean success = false;

    try
    {
        VuforiaLocalizer.CloseableFrame closeableFrame = localizer.getFrameQueue().take();

        for (int i = 0; i < closeableFrame.getNumImages(); i++)
        {
            Image image = closeableFrame.getImage(i);
            if (image.getWidth() == imageWidth && image.getHeight() == imageHeight &&
                    image.getFormat() == PIXEL_FORMAT.RGB565)
            {
                Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
                bm.copyPixelsFromBuffer(image.getPixels());
                Utils.bitmapToMat(bm, frame);
                break;
            }
        }

        closeableFrame.close();
        success = true;
    }
    catch (InterruptedException e)
    {
        e.printStackTrace();
    }

    return success;
}
 
Developer: trc492, Project: Ftc2018RelicRecovery, Lines: 40, Source: FtcVuforia.java

Example 10: getBitmap

import android.graphics.Bitmap; // import the package/class this method depends on
public Bitmap getBitmap() {
    ByteBuffer buf = ByteBuffer.allocateDirect(mWidth * mHeight * 4);
    buf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    bmp.copyPixelsFromBuffer(buf);
    return bmp;
}
 
Developer: hoanganhtuan95ptit, Project: EditPhoto, Lines: 9, Source: ImageEglSurface.java

Example 11: getRenderBufferBitmap

import android.graphics.Bitmap; // import the package/class this method depends on
private Bitmap getRenderBufferBitmap() {
    ByteBuffer buffer = ByteBuffer.allocateDirect(renderBufferWidth * renderBufferHeight * 4);
    GLES20.glReadPixels(0, 0, renderBufferWidth, renderBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
    Bitmap bitmap = Bitmap.createBitmap(renderBufferWidth, renderBufferHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    return bitmap;
}
 
Developer: pooyafaroka, Project: PlusGram, Lines: 8, Source: PhotoFilterView.java

Example 12: getFrameBitmap

import android.graphics.Bitmap; // import the package/class this method depends on
public Bitmap getFrameBitmap() {
    if (!mEglCore.isCurrent(mEGLSurface)) {
        throw new RuntimeException("Expected EGL context/surface is not current");
    }

    // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
    // data (i.e. a byte of red, followed by a byte of green...).  While the Bitmap
    // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
    // Bitmap "copy pixels" method wants the same format GL provides.
    //
    // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
    // here often.
    //
    // Making this even more interesting is the upside-down nature of GL, which means
    // our output will look upside down relative to what appears on screen if the
    // typical GL conventions are used.

    int width = getWidth();
    int height = getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
    buf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    //GlUtil.checkGlError("glReadPixels");
    buf.rewind();

    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bmp.copyPixelsFromBuffer(buf);

    Matrix matrix = new Matrix();
    matrix.preScale(1f, -1f);
    Bitmap bmp2 = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), matrix, false);

    bmp.recycle();
    bmp = null;
    return bmp2;
}
 
Developer: vipycm, Project: mao-android, Lines: 38, Source: EglSurfaceBase.java

Example 13: notifyCallbacks

import android.graphics.Bitmap; // import the package/class this method depends on
private void notifyCallbacks(VideoRenderer.I420Frame frame, float[] texMatrix) {
  // Make a temporary copy of the callback list to avoid ConcurrentModificationException, in case
  // callbacks call addFrameListener or removeFrameListener.
  final ArrayList<FrameListenerAndParams> tmpList;
  if (frameListeners.isEmpty())
    return;
  tmpList = new ArrayList<>(frameListeners);
  frameListeners.clear();

  final float[] bitmapMatrix = RendererCommon.multiplyMatrices(
      RendererCommon.multiplyMatrices(texMatrix,
          mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix()),
      RendererCommon.verticalFlipMatrix());

  for (FrameListenerAndParams listenerAndParams : tmpList) {
    final int scaledWidth = (int) (listenerAndParams.scale * frame.rotatedWidth());
    final int scaledHeight = (int) (listenerAndParams.scale * frame.rotatedHeight());

    if (scaledWidth == 0 || scaledHeight == 0) {
      listenerAndParams.listener.onFrame(null);
      continue;
    }

    if (bitmapTextureFramebuffer == null) {
      bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    }
    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);

    if (frame.yuvFrame) {
      listenerAndParams.drawer.drawYuv(yuvTextures, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    } else {
      listenerAndParams.drawer.drawOes(frame.textureId, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    }

    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
        0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");

    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);
    listenerAndParams.listener.onFrame(bitmap);
  }
}
 
Developer: lgyjg, Project: AndroidRTC, Lines: 54, Source: EglRenderer.java

Example 14: notifyCallbacks

import android.graphics.Bitmap; // import the package/class this method depends on
private void notifyCallbacks(
    VideoRenderer.I420Frame frame, int[] yuvTextures, float[] texMatrix, boolean wasRendered) {
  if (frameListeners.isEmpty())
    return;

  final float[] bitmapMatrix = RendererCommon.multiplyMatrices(
      RendererCommon.multiplyMatrices(texMatrix,
          mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix()),
      RendererCommon.verticalFlipMatrix());

  Iterator<FrameListenerAndParams> it = frameListeners.iterator();
  while (it.hasNext()) {
    FrameListenerAndParams listenerAndParams = it.next();
    if (!wasRendered && listenerAndParams.applyFpsReduction) {
      continue;
    }
    it.remove();

    final int scaledWidth = (int) (listenerAndParams.scale * frame.rotatedWidth());
    final int scaledHeight = (int) (listenerAndParams.scale * frame.rotatedHeight());

    if (scaledWidth == 0 || scaledHeight == 0) {
      listenerAndParams.listener.onFrame(null);
      continue;
    }

    if (bitmapTextureFramebuffer == null) {
      bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    }
    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);

    GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    if (frame.yuvFrame) {
      listenerAndParams.drawer.drawYuv(yuvTextures, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    } else {
      listenerAndParams.drawer.drawOes(frame.textureId, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    }

    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
        0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");

    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);
    listenerAndParams.listener.onFrame(bitmap);
  }
}
 
Developer: Piasy, Project: VideoCRE, Lines: 59, Source: EglRenderer.java

Example 15: applyExpensiveOutlineWithBlur

import android.graphics.Bitmap; // import the package/class this method depends on
/**
 * Applies a more expensive and accurate outline to whatever is currently drawn in a specified
 * bitmap.
 */
public void applyExpensiveOutlineWithBlur(Bitmap srcDst, Canvas srcDstCanvas) {

    // We start by removing most of the alpha channel so as to ignore shadows, and
    // other types of partial transparency when defining the shape of the object
    byte[] pixels = new byte[srcDst.getWidth() * srcDst.getHeight()];
    ByteBuffer buffer = ByteBuffer.wrap(pixels);
    buffer.rewind();
    srcDst.copyPixelsToBuffer(buffer);

    for (int i = 0; i < pixels.length; i++) {
        if ((pixels[i] & 0xFF) < 188) {
            pixels[i] = 0;
        }
    }

    buffer.rewind();
    srcDst.copyPixelsFromBuffer(buffer);

    // calculate the outer blur first
    mBlurPaint.setMaskFilter(mMediumOuterBlurMaskFilter);
    int[] outerBlurOffset = new int[2];
    Bitmap thickOuterBlur = srcDst.extractAlpha(mBlurPaint, outerBlurOffset);

    mBlurPaint.setMaskFilter(mThinOuterBlurMaskFilter);
    int[] brightOutlineOffset = new int[2];
    Bitmap brightOutline = srcDst.extractAlpha(mBlurPaint, brightOutlineOffset);

    // calculate the inner blur
    srcDstCanvas.setBitmap(srcDst);
    srcDstCanvas.drawColor(0xFF000000, PorterDuff.Mode.SRC_OUT);
    mBlurPaint.setMaskFilter(mMediumInnerBlurMaskFilter);
    int[] thickInnerBlurOffset = new int[2];
    Bitmap thickInnerBlur = srcDst.extractAlpha(mBlurPaint, thickInnerBlurOffset);

    // mask out the inner blur
    srcDstCanvas.setBitmap(thickInnerBlur);
    srcDstCanvas.drawBitmap(srcDst, -thickInnerBlurOffset[0],
            -thickInnerBlurOffset[1], mErasePaint);
    srcDstCanvas.drawRect(0, 0, -thickInnerBlurOffset[0], thickInnerBlur.getHeight(),
            mErasePaint);
    srcDstCanvas.drawRect(0, 0, thickInnerBlur.getWidth(), -thickInnerBlurOffset[1],
            mErasePaint);

    // draw the inner and outer blur
    srcDstCanvas.setBitmap(srcDst);
    srcDstCanvas.drawColor(0, PorterDuff.Mode.CLEAR);
    srcDstCanvas.drawBitmap(thickInnerBlur, thickInnerBlurOffset[0], thickInnerBlurOffset[1],
            mDrawPaint);
    srcDstCanvas.drawBitmap(thickOuterBlur, outerBlurOffset[0], outerBlurOffset[1],
            mDrawPaint);

    // draw the bright outline
    srcDstCanvas.drawBitmap(brightOutline, brightOutlineOffset[0], brightOutlineOffset[1],
            mDrawPaint);

    // cleanup
    srcDstCanvas.setBitmap(null);
    brightOutline.recycle();
    thickOuterBlur.recycle();
    thickInnerBlur.recycle();
}
 
Developer: enricocid, Project: LaunchEnr, Lines: 66, Source: HolographicOutlineHelper.java


Note: The android.graphics.Bitmap.copyPixelsFromBuffer examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and the source code remains the copyright of its original authors; for distribution and use, please refer to each project's License. Do not republish without permission.