This article collects typical usage examples of the Java method android.graphics.ImageFormat.getBitsPerPixel. If you are wondering what ImageFormat.getBitsPerPixel does, or how and where to call it, the curated code samples below may help. You can also read further about the containing class, android.graphics.ImageFormat.
The code examples below are drawn from open-source projects and are sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
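Before the project examples, here is a minimal standalone sketch of the method itself. The class name and printed values are illustrative only, and the code has to run against a real android.jar on a device or emulator rather than the host SDK stubs:

import android.graphics.ImageFormat;

public final class BitsPerPixelDemo {
    public static void main(String[] args) {
        // NV21 and YV12 are YUV 4:2:0 formats: 8 bits of luma per pixel plus chroma
        // shared between blocks of four pixels, i.e. 12 bits per pixel on average.
        System.out.println("NV21: " + ImageFormat.getBitsPerPixel(ImageFormat.NV21)); // 12
        System.out.println("YV12: " + ImageFormat.getBitsPerPixel(ImageFormat.YV12)); // 12
        // Compressed formats have no fixed per-pixel size, so the method returns -1.
        System.out.println("JPEG: " + ImageFormat.getBitsPerPixel(ImageFormat.JPEG)); // -1
    }
}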
Example 1: createPreviewBuffer
import android.graphics.ImageFormat; // import the package/class this method depends on
/**
* Creates one buffer for the camera preview callback. The size of the buffer is based off of
* the camera preview size and the format of the camera image.
*
* @return a new preview buffer of the appropriate size for the current camera settings
*/
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
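The snippet above only allocates the buffer and records it in mBytesToByteBuffer. As a follow-up, here is a hedged sketch of how such buffers are typically handed to the legacy android.hardware.Camera API; the method name, the mPreviewCallback field and the pool size below are assumptions for illustration, not part of the original project:

// Hypothetical wiring: pre-allocate a small pool of preview buffers so onPreviewFrame()
// receives NV21 frames without per-frame allocations.
private void setUpPreviewBuffers(Camera camera, Size previewSize) {
    camera.setPreviewCallbackWithBuffer(mPreviewCallback); // assumed callback field
    for (int i = 0; i < 4; i++) {                          // pool size chosen arbitrarily
        camera.addCallbackBuffer(createPreviewBuffer(previewSize));
    }
}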
Example 2: openCamera
import android.graphics.ImageFormat; // import the package/class this method depends on
public void openCamera(boolean facingFront) {
    synchronized (this) {
        int facing = facingFront ? Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;
        currentCameraId = getCameraIdWithFacing(facing);
        camera = Camera.open(currentCameraId);
        // Guard against Camera.open() returning null before touching the camera object.
        if (camera != null) {
            camera.setPreviewCallbackWithBuffer(this);
            initRotateDegree(currentCameraId);
            mParams = camera.getParameters();
            List<Camera.Size> supportedPictureSizesList = mParams.getSupportedPictureSizes();
            List<Camera.Size> supportedVideoSizesList = mParams.getSupportedVideoSizes();
            List<Camera.Size> supportedPreviewSizesList = mParams.getSupportedPreviewSizes();
            Logger.logCameraSizes(supportedPictureSizesList);
            Logger.logCameraSizes(supportedVideoSizesList);
            Logger.logCameraSizes(supportedPreviewSizesList);
            previewSize = choosePreferredSize(supportedPreviewSizesList, preferredRatio);
            Camera.Size photoSize = choosePreferredSize(supportedPictureSizesList, preferredRatio);
            frameHeight = previewSize.width;
            frameWidth = previewSize.height;
            Log.d(TAG, "openCamera: chose preview size " + previewSize.height + "x" + previewSize.width);
            mParams.setPreviewSize(frameHeight, frameWidth);
            mParams.setPictureSize(photoSize.width, photoSize.height);
            Log.d(TAG, "openCamera: chose photo size " + photoSize.height + "x" + photoSize.width);
            //mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            // A preview frame needs width * height * bitsPerPixel / 8 bytes.
            int size = frameWidth * frameHeight;
            size = size * ImageFormat.getBitsPerPixel(mParams.getPreviewFormat()) / 8;
            if (mBuffer == null || mBuffer.length != size) {
                mBuffer = new byte[size];
            }
            mFrameChain[0].init(size);
            mFrameChain[1].init(size);
            camera.addCallbackBuffer(mBuffer);
            camera.setParameters(mParams);
            cameraOpened = true;
        }
    }
}
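This class registers itself as the preview callback and hands mBuffer to the camera, but the callback body is not part of the snippet. A minimal sketch of what the matching onPreviewFrame() usually looks like follows; the processing step is a placeholder:

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // ... copy or process the NV21 frame here (e.g. into mFrameChain) ...
    // With setPreviewCallbackWithBuffer(), the buffer must be handed back,
    // otherwise the camera runs out of buffers and stops delivering frames.
    camera.addCallbackBuffer(data);
}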
Example 3: frameSize
import android.graphics.ImageFormat; // import the package/class this method depends on
public static int frameSize(int width, int height, int imageFormat) {
    if (imageFormat != ImageFormat.NV21) {
        throw new UnsupportedOperationException("Don't know how to calculate "
                + "the frame size of non-NV21 image formats.");
    }

    return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
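To make the arithmetic concrete (the resolution below is an illustration, not taken from the original project): NV21 reports 12 bits per pixel, so the frame size works out to width * height * 3 / 2 bytes.

int bytes = frameSize(1280, 720, ImageFormat.NV21); // 1280 * 720 * 12 / 8 = 1,382,400 bytes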
Example 4: surfaceCreated
import android.graphics.ImageFormat; // import the package/class this method depends on
@Override
public void surfaceCreated(SurfaceHolder holder) {
    if (safeCameraOpen(mCurrentFacingId)) {
        try {
            mCamera.setPreviewDisplay(holder);

            int degree = getDeviceRotationDegree(mContext);
            Camera.CameraInfo camInfo = new Camera.CameraInfo();
            Camera.getCameraInfo(mCurrentFacingId, camInfo);
            int orientation;
            if (mCurrentFacingId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                orientation = (camInfo.orientation + degree) % 360;
                mCameraPreviewDegree = orientation;
                orientation = (360 - orientation) % 360; // compensate the mirror
            } else { // back-facing
                orientation = (camInfo.orientation - degree + 360) % 360;
                mCameraPreviewDegree = orientation;
            }
            mCamera.setDisplayOrientation(orientation);

            Camera.Parameters params = mCamera.getParameters();
            params.setPreviewFormat(ImageFormat.NV21);
            final Camera.Size previewSize = params.getPreviewSize();
            List<Camera.Size> sizes = mCamera.getParameters().getSupportedPreviewSizes();
            for (Camera.Size size : sizes) {
                if (size.width == DEFAULT_VIDEO_WIDTH && size.height == DEFAULT_VIDEO_HEIGHT) {
                    previewSize.width = size.width;
                    previewSize.height = size.height;
                    break;
                }
            }
            params.setPreviewSize(previewSize.width, previewSize.height);
            mCamera.setParameters(params);

            final int bitsPerPixel = ImageFormat.getBitsPerPixel(params.getPreviewFormat());
            final int previewBufferSize = (previewSize.width * previewSize.height * bitsPerPixel) / 8;
            for (int i = 0; i < MAX_CALLBACK_BUFFER_NUM; i++) {
                mCamera.addCallbackBuffer(new byte[previewBufferSize]);
            }
            mPreviewWidth = previewSize.width;
            mPreviewHeight = previewSize.height;
            mCamera.setPreviewCallbackWithBuffer(this);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
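For a sense of the numbers (all values below are assumed for illustration): a front camera whose sensor reports orientation 270 on a device rotated 90 degrees yields (270 + 90) % 360 = 0, and the mirror compensation leaves (360 - 0) % 360 = 0 as the display orientation, while a back camera with sensor orientation 90 on an unrotated device yields (90 - 0 + 360) % 360 = 90. If the chosen NV21 preview size were 640x480, each callback buffer would be (640 * 480 * 12) / 8 = 460,800 bytes.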
Example 5: openCamera
import android.graphics.ImageFormat; // import the package/class this method depends on
public void openCamera(CameraSetup cameraSetup) throws Exception {
    // Validate the camera setup
    cameraSetup.checkSetup();

    // Get the back camera
    Pair<Camera.CameraInfo, Integer> backCamera = getBackCamera();
    final int backCameraId = backCamera.second;
    mBackCameraInfo = backCamera.first;

    // Try to open the camera
    mCamera = Camera.open(backCameraId);

    // Check the display orientation and fix the camera image.
    // Without this fix, in landscape mode for example, you would get a stretched preview.
    setDisplayOrientation();

    // Set up the preview surface
    mCamera.setPreviewDisplay(cameraSetup.holder);

    // Set up the camera callback that will receive the raw YUV NV21 bytes
    mCamera.setPreviewCallbackWithBuffer(cameraSetup.previewCallback);

    mCameraParameters = mCamera.getParameters();

    // Get the supported preview sizes and use chooseOptimalPreviewSize to pick the largest
    // preview size and ratio within the requested maximum.
    List<Camera.Size> supportedPreviewSizes = mCameraParameters.getSupportedPreviewSizes();
    mCameraSize = chooseOptimalPreviewSize(supportedPreviewSizes, cameraSetup.maxPreviewSize.x, cameraSetup.maxPreviewSize.y);

    // Calculate the expected camera frame length in bytes; NV21 stores chroma subsampled,
    // so getBitsPerPixel() reports 12 bits per pixel.
    expectedCameraBytes = mCameraSize.width * mCameraSize.height *
            ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
    int bufferSize = expectedCameraBytes;

    // Add one byte array per camera buffer.
    // Camera buffers temporarily store frames so they can be processed asynchronously.
    for (int i = 0; i < cameraSetup.cameraBuffersCount; i++) {
        byte[] cameraBuffer = new byte[bufferSize];
        mCamera.addCallbackBuffer(cameraBuffer);
    }

    // Force the camera preview size chosen earlier.
    mCameraParameters.setPreviewSize(mCameraSize.width, mCameraSize.height);

    // Apply the camera parameters configured above
    mCamera.setParameters(mCameraParameters);

    // Start the preview
    mCamera.startPreview();
}
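CameraSetup itself is not shown in this example. Based only on the fields the snippet reads (holder, previewCallback, maxPreviewSize, cameraBuffersCount), a hypothetical caller might look like the sketch below; every name and value here is an assumption for illustration, not the original project's API:

// Hypothetical usage of the openCamera() method above.
CameraSetup setup = new CameraSetup();
setup.holder = surfaceView.getHolder();      // SurfaceHolder for the preview
setup.previewCallback = nv21FrameCallback;   // Camera.PreviewCallback receiving the buffers
setup.maxPreviewSize = new Point(1280, 720); // upper bound handed to chooseOptimalPreviewSize
setup.cameraBuffersCount = 3;                // number of callback buffers to pre-allocate
cameraController.openCamera(setup);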