本文整理汇总了Java中android.graphics.Bitmap.copyPixelsToBuffer方法的典型用法代码示例。如果您正苦于以下问题:Java Bitmap.copyPixelsToBuffer方法的具体用法?Java Bitmap.copyPixelsToBuffer怎么用?Java Bitmap.copyPixelsToBuffer使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类android.graphics.Bitmap
的用法示例。
在下文中一共展示了Bitmap.copyPixelsToBuffer方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: convertBitmapToBytes
import android.graphics.Bitmap; //导入方法依赖的package包/类
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public static byte[] convertBitmapToBytes(Bitmap bitmap)
{
    // On ICS and newer, read the raw pixel bytes straight out of the bitmap.
    // On older platforms fall back to a (lossy) JPEG compression round-trip.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
    {
        final ByteBuffer pixelBuffer = ByteBuffer.allocate(bitmap.getByteCount());
        bitmap.copyPixelsToBuffer(pixelBuffer);
        return pixelBuffer.array();
    }
    else
    {
        final ByteArrayOutputStream jpegSink = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, jpegSink);
        return jpegSink.toByteArray();
    }
}
示例2: bitmapToGrayscaleImage
import android.graphics.Bitmap; //导入方法依赖的package包/类
/**
 * Converts a Bitmap to a GrayscaleImage.
 */
@NonNull
public GrayscaleImage bitmapToGrayscaleImage(@NonNull Bitmap bitmap) {
    // Time the whole conversion with the shared stopwatch logger.
    final String sessionId = log.startStopwatch(getStopwatchSessionId("bitmapToGrayscaleImage"));
    final int w = bitmap.getWidth();
    final int h = bitmap.getHeight();
    final int pixelCount = w * h;
    // Copy the packed 32-bit pixels out of the bitmap into an int array.
    final IntBuffer pixels = IntBuffer.allocate(pixelCount);
    bitmap.copyPixelsToBuffer(pixels);
    final int[] packed = pixels.array();
    // Keep only bits 16..23 of each packed pixel as the gray value.
    // NOTE(review): which color channel this selects depends on the buffer's
    // byte order vs. the bitmap config — confirm it is the intended channel.
    final byte[] gray = new byte[pixelCount];
    for (int i = 0; i < pixelCount; i++) {
        gray[i] = (byte) (packed[i] >> 16);
    }
    log.stopStopwatch(sessionId);
    return new GrayscaleImage(gray, w, h);
}
示例3: writePixel
import android.graphics.Bitmap; //导入方法依赖的package包/类
/**
 * The reverse function of {@link #readPixel(DataInput)}: writes a presence
 * flag, then (if present) the bitmap's width, height, config ordinal, and
 * raw pixel bytes to the sink.
 *
 * @param src the bitmap; must be ARGB_8888 (or null, which writes only the
 *            absence marker)
 * @param dst the sink
 * @throws IOException if writing to the sink fails
 */
public static void writePixel(@Nullable Bitmap src, DataOutput dst) throws IOException {
    // The byte count below hard-codes 4 bytes per pixel, so reject any
    // config other than ARGB_8888 up front.
    if (src != null && src.getConfig() != Config.ARGB_8888) {
        throw new Panic("only bitmaps of the type ARGB_8888 are supported");
    }
    // Presence marker so readPixel() can distinguish "null" from a bitmap.
    dst.writeBoolean(src != null);
    if (src == null) {
        return;
    }
    dst.writeInt(src.getWidth());
    dst.writeInt(src.getHeight());
    int bytes = src.getWidth() * src.getHeight() * 4;
    dst.writeByte(src.getConfig().ordinal());
    // sTmp is a shared static scratch buffer; the class lock guards it
    // against concurrent writers. It is grown only when a larger bitmap
    // arrives, and reused otherwise.
    synchronized (BitmapPoolFactory.class) {
        if (sTmp.capacity() < bytes) {
            sTmp = ByteBuffer.allocate(bytes);
        }
        sTmp.clear();
        sTmp.limit(bytes);
        src.copyPixelsToBuffer(sTmp);
        // Write exactly the pixel bytes, ignoring any stale tail in sTmp.
        dst.write(sTmp.array(), 0, bytes);
    }
}
示例4: checkBitmapContent
import android.graphics.Bitmap; //导入方法依赖的package包/类
/** Checks that the bitmap content matches the test frame with the given index. */
private static void checkBitmapContent(Bitmap bitmap, int frame) {
    checkBitmap(bitmap, 1f);
    final byte[] expectedBytes = convertYUVFrameToRGBA(TEST_FRAMES[frame]);
    final ByteBuffer actualBytes = ByteBuffer.allocateDirect(bitmap.getByteCount());
    bitmap.copyPixelsToBuffer(actualBytes);
    for (int i = 0; i < expectedBytes.length; i++) {
        final int want = expectedBytes[i] & 0xFF;
        final int got = actualBytes.get(i) & 0xFF;
        // Due to unknown conversion differences check value matches +-1.
        if (Math.abs(got - want) > 1) {
            Logging.d(TAG, "Expected bitmap content: " + Arrays.toString(expectedBytes));
            Logging.d(TAG, "Bitmap content: " + Arrays.toString(actualBytes.array()));
            fail("Frame doesn't match original frame on byte " + i + ". Expected: " + want
                + " Result: " + got);
        }
    }
}
示例5: getPixels
import android.graphics.Bitmap; //导入方法依赖的package包/类
private static byte[] getPixels(final Bitmap b) {
    final ByteBuffer pixelBuffer = ByteBuffer.allocate(b.getAllocationByteCount());
    try {
        b.copyPixelsToBuffer(pixelBuffer);
    } catch (RuntimeException e) {
        // Android throws this if there's not enough space in the buffer.
        // This should never occur, but if it does, we don't
        // really care -- we probably don't need the entire image.
        // This is awful. I apologize.
        if (!"Buffer not large enough for pixels".equals(e.getMessage())) {
            // Anything else is unexpected; let it propagate.
            throw e;
        }
    }
    return pixelBuffer.array();
}
示例6: testColorBitmap
import android.graphics.Bitmap; //导入方法依赖的package包/类
/**
 * Tests the {@link Coloring#colorBitmap(Bitmap, int)} method.
 * <p>
 * Due to {@link org.robolectric.shadows.ShadowBitmap}'s empty implementation, this won't really work, so we can only test the transparency.
 */
@Test
public final void testColorBitmap() {
    final Bitmap.Config config = Bitmap.Config.ARGB_8888;
    final int width = 10, height = 10;
    // Build a 10x10 solid-red pixel array.
    final int[] sourcePixels = new int[width * height];
    for (int p = 0; p < width * height; p++) {
        sourcePixels[p] = Color.RED;
    }
    final Bitmap redSquare = Bitmap.createBitmap(sourcePixels, width, height, config);
    assertNotNull("Created Bitmap is null", redSquare);
    // initialize red Bitmap's internal structures, otherwise it won't draw properly
    redSquare.prepareToDraw();
    // Round-trip the pixels through a byte buffer (buffer is intentionally
    // over-sized; only a portion is actually used for ARGB_8888).
    final byte[] rawPixels = new byte[redSquare.getWidth() * redSquare.getHeight() * 8];
    final ByteBuffer pixelBuffer = ByteBuffer.wrap(rawPixels);
    pixelBuffer.order(ByteOrder.nativeOrder());
    redSquare.copyPixelsToBuffer(pixelBuffer);
    redSquare.copyPixelsFromBuffer(pixelBuffer);
    redSquare.prepareToDraw();
    final String redPixel = hex(redSquare.getPixel(width / 2, height / 2));
    final String errorRed = String.format("Error while creating red bitmap, middle pixel is %s", redPixel);
    // Robolectric's shadow leaves pixels transparent, so that's what we assert.
    assertEquals(errorRed, hex(Color.TRANSPARENT), redPixel);
    final Bitmap greenSquare = Coloring.colorBitmap(redSquare, Color.GREEN);
    assertNotNull("Created colored Bitmap is null", greenSquare);
    final String greenPixel = hex(greenSquare.getPixel(width / 2, height / 2));
    final String errorGreen = String.format("Error while coloring bitmap, middle pixel is %s", greenPixel);
    assertEquals(errorGreen, hex(Color.TRANSPARENT), greenPixel);
}
示例7: classifyImage
import android.graphics.Bitmap; //导入方法依赖的package包/类
public Classification[] classifyImage(Bitmap bitmap) {
    // Pull the raw pixel bytes out of the bitmap.
    Trace.beginSection("create Image Buffer");
    final ByteBuffer pixelBuffer = ByteBuffer.allocate(bitmap.getByteCount());
    bitmap.copyPixelsToBuffer(pixelBuffer);
    final byte[] rawPixels = pixelBuffer.array();
    Trace.endSection();
    // Convert bytes into the float layout the model expects, with optional
    // mean subtraction.
    Trace.beginSection("color adaption");
    final float[] modelInput = modelNeedsMeanAdjust
        ? subtractMean(rawPixels)
        : extractRGBData(rawPixels);
    Trace.endSection();
    // Run the forward pass and record how long it took.
    Trace.beginSection("Model execution");
    final long startTime = SystemClock.uptimeMillis();
    mPredictor.forward("data", modelInput);
    mActivity.setLasProcessingTimeMs(SystemClock.uptimeMillis() - startTime);
    final float[] scores = mPredictor.getOutput(0);
    Trace.endSection();
    // Pick the five best-scoring classes.
    Trace.beginSection("gather top results");
    final Classification[] topResults = getTopKresults(scores, 5);
    Trace.endSection();
    mActivity.requestRender();
    return topResults;
}
示例8: applyExpensiveOutlineWithBlur
import android.graphics.Bitmap; //导入方法依赖的package包/类
/**
 * Applies a more expensive and accurate outline to whatever is currently drawn in a specified
 * bitmap. The outline is composed of an inner blur, a thick outer blur, and a
 * thin bright outline, all derived from the bitmap's alpha channel.
 *
 * @param srcDst       the bitmap that is both read and overwritten in place
 * @param srcDstCanvas a scratch canvas; its target bitmap is reassigned
 *                     repeatedly and reset to null before returning
 */
public void applyExpensiveOutlineWithBlur(Bitmap srcDst, Canvas srcDstCanvas) {
    // We start by removing most of the alpha channel so as to ignore shadows, and
    // other types of partial transparency when defining the shape of the object
    // NOTE(review): one byte per pixel — assumes srcDst is an ALPHA_8 bitmap.
    byte[] pixels = new byte[srcDst.getWidth() * srcDst.getHeight()];
    ByteBuffer buffer = ByteBuffer.wrap(pixels);
    buffer.rewind();
    srcDst.copyPixelsToBuffer(buffer);
    // Threshold: alpha below 188 is treated as fully transparent.
    for (int i = 0; i < pixels.length; i++) {
        if ((pixels[i] & 0xFF) < 188) {
            pixels[i] = 0;
        }
    }
    buffer.rewind();
    srcDst.copyPixelsFromBuffer(buffer);
    // calculate the outer blur first
    mBlurPaint.setMaskFilter(mMediumOuterBlurMaskFilter);
    int[] outerBlurOffset = new int[2];
    Bitmap thickOuterBlur = srcDst.extractAlpha(mBlurPaint, outerBlurOffset);
    mBlurPaint.setMaskFilter(mThinOuterBlurMaskFilter);
    int[] brightOutlineOffset = new int[2];
    Bitmap brightOutline = srcDst.extractAlpha(mBlurPaint, brightOutlineOffset);
    // calculate the inner blur
    srcDstCanvas.setBitmap(srcDst);
    // SRC_OUT with opaque black inverts the alpha mask (keeps only the
    // area outside the shape).
    srcDstCanvas.drawColor(0xFF000000, PorterDuff.Mode.SRC_OUT);
    mBlurPaint.setMaskFilter(mMediumInnerBlurMaskFilter);
    int[] thickInnerBlurOffset = new int[2];
    Bitmap thickInnerBlur = srcDst.extractAlpha(mBlurPaint, thickInnerBlurOffset);
    // mask out the inner blur: erase the original shape and the margins
    // introduced by the blur offset.
    srcDstCanvas.setBitmap(thickInnerBlur);
    srcDstCanvas.drawBitmap(srcDst, -thickInnerBlurOffset[0],
            -thickInnerBlurOffset[1], mErasePaint);
    srcDstCanvas.drawRect(0, 0, -thickInnerBlurOffset[0], thickInnerBlur.getHeight(),
            mErasePaint);
    srcDstCanvas.drawRect(0, 0, thickInnerBlur.getWidth(), -thickInnerBlurOffset[1],
            mErasePaint);
    // draw the inner and outer blur
    srcDstCanvas.setBitmap(srcDst);
    srcDstCanvas.drawColor(0, PorterDuff.Mode.CLEAR);
    srcDstCanvas.drawBitmap(thickInnerBlur, thickInnerBlurOffset[0], thickInnerBlurOffset[1],
            mDrawPaint);
    srcDstCanvas.drawBitmap(thickOuterBlur, outerBlurOffset[0], outerBlurOffset[1],
            mDrawPaint);
    // draw the bright outline
    srcDstCanvas.drawBitmap(brightOutline, brightOutlineOffset[0], brightOutlineOffset[1],
            mDrawPaint);
    // cleanup: detach the canvas and free the intermediate bitmaps
    srcDstCanvas.setBitmap(null);
    brightOutline.recycle();
    thickOuterBlur.recycle();
    thickInnerBlur.recycle();
}
示例9: doInBackground
import android.graphics.Bitmap; //导入方法依赖的package包/类
/**
 * {@inheritDoc}
 *
 * Decodes NUM_FRAMES asset images into a reusable bitmap and feeds each
 * frame's pixels to an MPEG encoder via the shared INPUT_BUFFER.
 */
@Override
protected final Void doInBackground(Void... params) {
    // Build the list of input asset file names, one per frame.
    final String[] fileNames = new String[NUM_FRAMES];
    for (int i = 0; i < NUM_FRAMES; i++) {
        fileNames[i] = String.format(Locale.getDefault(), INPUT_IMAGE_FILE_FORMAT, i);
    }
    // Decode every frame into the same reusable RGB_565 bitmap (inBitmap)
    // to avoid a per-frame allocation.
    final BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    options.inBitmap = Bitmap.createBitmap(WIDTH, HEIGHT, options.inPreferredConfig);
    options.inTempStorage = new byte[16384];
    // The encoder reads each frame's pixels from the shared INPUT_BUFFER.
    final MpegEncoder encoder =
            MpegEncoder
                    .from(INPUT_BUFFER, WIDTH, HEIGHT)
                    .fps(FRAME_RATE).motion(Motion.LOW)
                    .to(mFilePath, WIDTH, HEIGHT);
    for (int i = 0; i < NUM_FRAMES; i++) {
        if (isCancelled()) {
            break;
        }
        try (final InputStream is = mAssetManager.open(fileNames[i])) {
            final Bitmap bitmap = BitmapFactory.decodeStream(is, null, options);
            if (bitmap != null) {
                INPUT_BUFFER.rewind();
                bitmap.copyPixelsToBuffer(INPUT_BUFFER);
            }
        } catch (IOException exception) {
            // Best-effort: a missing/corrupt asset is logged, not fatal.
            Log.w(TAG, exception);
        }
        // NOTE(review): draw() runs even when decoding failed, so the
        // previous frame's buffer contents are encoded again — confirm
        // this repeat-last-frame behavior is intended.
        encoder.draw();
    }
    encoder.close();
    options.inBitmap.recycle();
    return null;
}
示例10: testDrawFrame
import android.graphics.Bitmap; //导入方法依赖的package包/类
/**
 * Test for {@link GLTools#drawFrame(Buffer, int, int, int)}.
 *
 * Renders two 2x2 RGB_565 frames through the GL pipeline, reads the pixels
 * back with glReadPixels, and checks each corner color. glReadPixels returns
 * rows bottom-up, which is why the expected colors are vertically flipped
 * relative to the input.
 *
 * @throws Exception by some fails
 */
@Test
public final void testDrawFrame() throws Exception {
    // Set up an off-screen EGL display/context/surface of FRAME_SIZE².
    final EGLDisplay eglDisplay = GLTools.newDisplay();
    final EGLConfig eglConfig = GLTools.newConfig(eglDisplay, true);
    final EGLContext eglContext = GLTools.newContext(eglDisplay, eglConfig);
    final EGLSurface eglSurface =
            GLTools.newSurface(eglDisplay, eglConfig, FRAME_SIZE, FRAME_SIZE);
    GLTools.makeCurrent(eglDisplay, eglSurface, eglContext);
    final int[] attrs = new int[5];
    GLTools.newShader(attrs);
    final int texture = GLTools.newTexture(TEXTURE_LEVEL);
    // 1-st pass: upload a 2x2 RGB_565 frame (2 bytes per pixel)...
    Bitmap bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.RGB_565);
    bitmap.setPixel(0, 0, Color.RED); bitmap.setPixel(1, 0, Color.GREEN);
    bitmap.setPixel(0, 1, Color.BLUE); bitmap.setPixel(1, 1, Color.YELLOW);
    ByteBuffer buffer = ByteBuffer.allocate(FRAME_SIZE * FRAME_SIZE * 2);
    bitmap.copyPixelsToBuffer(buffer); bitmap.recycle();
    GLTools.makeCurrent(eglDisplay, eglSurface, eglContext);
    GLTools.drawFrame(buffer, FRAME_SIZE, FRAME_SIZE, 0); buffer.clear();
    //GLTools.swapBuffers(eglDisplay, eglSurface);
    // ...then read it back as RGBA_8888 (4 bytes per pixel).
    buffer = ByteBuffer.allocateDirect(FRAME_SIZE * FRAME_SIZE * 4);
    GLES20.glReadPixels(0, 0, FRAME_SIZE, FRAME_SIZE,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
    bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer); buffer.clear(); //bitmap.eraseColor(Color.BLACK);
    // Expected colors are row-swapped: GL's origin is the bottom-left corner.
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 0)}, new int[]{Color.BLUE});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 0)}, new int[]{Color.YELLOW});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 1)}, new int[]{Color.RED});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 1)}, new int[]{Color.GREEN});
    bitmap.recycle();
    // 2-nd pass: repeat with a different color layout to verify the texture
    // is actually updated between frames.
    bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.RGB_565);
    bitmap.setPixel(0, 0, Color.YELLOW); bitmap.setPixel(1, 0, Color.BLUE);
    bitmap.setPixel(0, 1, Color.GREEN); bitmap.setPixel(1, 1, Color.RED);
    buffer = ByteBuffer.allocate(FRAME_SIZE * FRAME_SIZE * 2);
    bitmap.copyPixelsToBuffer(buffer); bitmap.recycle();
    GLTools.makeCurrent(eglDisplay, eglSurface, eglContext);
    GLTools.drawFrame(buffer, FRAME_SIZE, FRAME_SIZE, 0); buffer.clear();
    //GLTools.swapBuffers(eglDisplay, eglSurface);
    buffer = ByteBuffer.allocateDirect(FRAME_SIZE * FRAME_SIZE * 4);
    GLES20.glReadPixels(0, 0, FRAME_SIZE, FRAME_SIZE,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
    bitmap = Bitmap.createBitmap(FRAME_SIZE, FRAME_SIZE, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer); buffer.clear(); //bitmap.eraseColor(Color.BLACK);
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 0)}, new int[]{Color.GREEN});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 0)}, new int[]{Color.RED});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(0, 1)}, new int[]{Color.YELLOW});
    Assert.assertArrayEquals(new int[]{bitmap.getPixel(1, 1)}, new int[]{Color.BLUE});
    bitmap.recycle();
    // Tear down all GL/EGL resources in reverse order of creation.
    GLTools.closeTexture(texture, TEXTURE_LEVEL);
    GLTools.closeShader(attrs);
    GLTools.closeSurface(eglDisplay, eglSurface);
    GLTools.closeContext(eglDisplay, eglContext);
    GLTools.closeDisplay(eglDisplay);
}
示例11: applyExpensiveOutlineWithBlur
import android.graphics.Bitmap; //导入方法依赖的package包/类
/**
 * Applies a more expensive and accurate outline to whatever is currently drawn
 * in a specified bitmap. The outline is built from an inner blur, a thick outer
 * blur, and a thin bright outline, all derived from the bitmap's alpha channel.
 *
 * @param srcDst       the bitmap that is both read and overwritten in place;
 *                     must be ALPHA_8 (enforced on dogfood builds)
 * @param srcDstCanvas a scratch canvas; its target bitmap is reassigned
 *                     repeatedly and reset to null before returning
 * @param clipAlpha    when true, alpha values below 188 are zeroed first so
 *                     shadows and other partial transparency don't widen the
 *                     detected shape
 */
public void applyExpensiveOutlineWithBlur(Bitmap srcDst, Canvas srcDstCanvas,
        boolean clipAlpha) {
    if (ProviderConfig.IS_DOGFOOD_BUILD && srcDst.getConfig() != Bitmap.Config.ALPHA_8) {
        // Fixed message typo: "blue" -> "blur".
        throw new RuntimeException("Outline blur is only supported on alpha bitmaps");
    }
    // We start by removing most of the alpha channel so as to ignore shadows, and
    // other types of partial transparency when defining the shape of the object
    if (clipAlpha) {
        // One byte per pixel: valid because the bitmap is ALPHA_8.
        byte[] pixels = new byte[srcDst.getWidth() * srcDst.getHeight()];
        ByteBuffer buffer = ByteBuffer.wrap(pixels);
        buffer.rewind();
        srcDst.copyPixelsToBuffer(buffer);
        // Threshold: alpha below 188 is treated as fully transparent.
        for (int i = 0; i < pixels.length; i++) {
            if ((pixels[i] & 0xFF) < 188) {
                pixels[i] = 0;
            }
        }
        buffer.rewind();
        srcDst.copyPixelsFromBuffer(buffer);
    }
    // calculate the outer blur first
    mBlurPaint.setMaskFilter(mMediumOuterBlurMaskFilter);
    int[] outerBlurOffset = new int[2];
    Bitmap thickOuterBlur = srcDst.extractAlpha(mBlurPaint, outerBlurOffset);
    mBlurPaint.setMaskFilter(mThinOuterBlurMaskFilter);
    int[] brightOutlineOffset = new int[2];
    Bitmap brightOutline = srcDst.extractAlpha(mBlurPaint, brightOutlineOffset);
    // calculate the inner blur; SRC_OUT with opaque black inverts the alpha
    // mask so the blur grows inward from the shape's edge.
    srcDstCanvas.setBitmap(srcDst);
    srcDstCanvas.drawColor(0xFF000000, PorterDuff.Mode.SRC_OUT);
    mBlurPaint.setMaskFilter(mMediumInnerBlurMaskFilter);
    int[] thickInnerBlurOffset = new int[2];
    Bitmap thickInnerBlur = srcDst.extractAlpha(mBlurPaint, thickInnerBlurOffset);
    // mask out the inner blur: erase the original shape plus the margins
    // introduced by the blur offset.
    srcDstCanvas.setBitmap(thickInnerBlur);
    srcDstCanvas.drawBitmap(srcDst, -thickInnerBlurOffset[0],
            -thickInnerBlurOffset[1], mErasePaint);
    srcDstCanvas.drawRect(0, 0, -thickInnerBlurOffset[0], thickInnerBlur.getHeight(),
            mErasePaint);
    srcDstCanvas.drawRect(0, 0, thickInnerBlur.getWidth(), -thickInnerBlurOffset[1],
            mErasePaint);
    // draw the inner and outer blur
    srcDstCanvas.setBitmap(srcDst);
    srcDstCanvas.drawColor(0, PorterDuff.Mode.CLEAR);
    srcDstCanvas.drawBitmap(thickInnerBlur, thickInnerBlurOffset[0], thickInnerBlurOffset[1],
            mDrawPaint);
    srcDstCanvas.drawBitmap(thickOuterBlur, outerBlurOffset[0], outerBlurOffset[1],
            mDrawPaint);
    // draw the bright outline
    srcDstCanvas.drawBitmap(brightOutline, brightOutlineOffset[0], brightOutlineOffset[1],
            mDrawPaint);
    // cleanup: detach the canvas and free the intermediate bitmaps
    srcDstCanvas.setBitmap(null);
    brightOutline.recycle();
    thickOuterBlur.recycle();
    thickInnerBlur.recycle();
}