This article collects typical usage examples of the Java class android.graphics.ImageFormat. If you have been wondering what the Java ImageFormat class does, how to use it, or where to find examples of it, the curated class code examples below may help.
The ImageFormat class belongs to the android.graphics package. A total of 14 code examples of the ImageFormat class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
Example 1: createFromNV21
import android.graphics.ImageFormat; // import the required package/class
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
        throws IOException {
    byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
    // a 90° or 270° rotation swaps the frame's width and height
    final int rotatedWidth = rotation % 180 > 0 ? height : width;
    final int rotatedHeight = rotation % 180 > 0 ? width : height;

    // wrap the rotated NV21 frame and compress the cropped region to JPEG
    YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
            rotatedWidth, rotatedHeight, null);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    previewImage.compressToJpeg(croppingRect, 80, outputStream);
    byte[] bytes = outputStream.toByteArray();
    outputStream.close();
    return bytes;
}
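A minimal usage sketch for the method above, assuming it lives in a utility class alongside rotateNV21 (the callback wiring, tag string, and the 90° front-camera rotation here are illustrative assumptions, not part of the original listing):

camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        Camera.Size size = camera.getParameters().getPreviewSize();
        try {
            // After a 90° rotation the frame is size.height wide and size.width
            // tall, so this crop rectangle covers the full rotated frame.
            byte[] jpeg = createFromNV21(data, size.width, size.height, 90,
                    new Rect(0, 0, size.height, size.width), true);
            // ... hand the JPEG bytes off for saving or upload
        } catch (IOException e) {
            Log.w("Preview", "JPEG conversion failed", e);
        }
    }
});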
Example 2: configureCamera
import android.graphics.ImageFormat; // import the required package/class
private void configureCamera() {
    final Camera.Parameters parameters = camera.getParameters();
    try {
        parameters.setPreviewFormat(ImageFormat.NV21);
        // set focus for video if present
        List<String> focusModes = parameters.getSupportedFocusModes();
        if (null != focusModes && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }
        // check if torch is present
        List<String> flashModes = parameters.getSupportedFlashModes();
        cameraFlashIsSupported = null != flashModes && flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH);
        final Camera.Size bestPreviewSize = getBestPreviewSize();
        photoProcessor.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
        parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
        camera.setParameters(parameters);
    } catch (RuntimeException exception) {
        Toast.makeText(getContext(), R.string.camera_configuration_failed, Toast.LENGTH_SHORT).show();
    }
}
Example 3: setParams
import android.graphics.ImageFormat; // import the required package/class
private void setParams() {
    //LogUtil.e("preview set size=" + width + " : " + height);
    Camera.Parameters parameters = camera.getParameters();
    // parameters.setPreviewSize(width, height);
    // parameters.setPictureSize(width, height);
    parameters.setPreviewFormat(ImageFormat.NV21);
    camera.setDisplayOrientation(90);
    parameters.setRotation(90);
    // the two loops below only enumerate the supported preview formats and
    // sizes for debugging; their logging calls are commented out in the source
    List<Integer> supportedPreviewFormats = parameters.getSupportedPreviewFormats();
    for (Integer integer : supportedPreviewFormats) {
        //LogUtil.e("preview format=" + integer);
    }
    List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    for (Camera.Size size : supportedPreviewSizes) {
        //LogUtil.e("preview size=" + size.width + " : " + size.height);
    }
    camera.setParameters(parameters);
}
Example 4: saveFace
import android.graphics.ImageFormat; // import the required package/class
private void saveFace(final int x, final int y, final int r, final int b) {
    if (DEBUG) Log.d(TAG, "[saveFace()]");
    new Thread(new Runnable() {
        @Override
        public void run() {
            synchronized (mVideoSource) {
                mImageYuv = new YuvImage(mVideoSource, ImageFormat.NV21, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT, null);
            }
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            mImageYuv.compressToJpeg(new Rect(0, 0, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT), 100, stream);
            Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
            // clamp the face rectangle (x, y) .. (r, b) to the frame bounds
            int left = (x > 0) ? x : 0;
            int top = (y > 0) ? y : 0;
            int cropW = (r < CameraWrapper.IMAGE_WIDTH) ? (r - left) : (CameraWrapper.IMAGE_WIDTH - left - 1);
            int cropH = (b < CameraWrapper.IMAGE_HEIGHT) ? (b - top) : (CameraWrapper.IMAGE_HEIGHT - top - 1);
            mImage = Bitmap.createBitmap(bitmap, left, top, cropW, cropH, null, false);
            if (null != mImage) {
                if (DEBUG) Log.d(TAG, "[saveFace()] x:" + x + " y:" + y + "\n" +
                        "[saveFace()] h:" + mImage.getHeight() + " w:" + mImage.getWidth());
                FaceUtil.saveBitmapToFile(mImage);
            }
        }
    }).start();
}
Example 5: createPreviewBuffer
import android.graphics.ImageFormat; // import the required package/class
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    // NOTICE: This code only works when using Play services v. 8.1 or higher.

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
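How these buffers are typically handed to the camera (a hypothetical wiring sketch; the Camera instance, the previewCallback, and the four-buffer count are assumptions, not shown in the original listing):

camera.setPreviewCallbackWithBuffer(previewCallback);
// several rotating buffers keep frames flowing without per-frame allocation;
// each buffer is returned to the pool via addCallbackBuffer() after processing
camera.addCallbackBuffer(createPreviewBuffer(previewSize));
camera.addCallbackBuffer(createPreviewBuffer(previewSize));
camera.addCallbackBuffer(createPreviewBuffer(previewSize));
camera.addCallbackBuffer(createPreviewBuffer(previewSize));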
Example 6: decodeToBitMap
import android.graphics.ImageFormat; // import the required package/class
private Bitmap decodeToBitMap(byte[] data) {
    try {
        YuvImage image = new YuvImage(data, ImageFormat.NV21, PREVIEW_WIDTH,
                PREVIEW_HEIGHT, null);
        if (image != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, PREVIEW_WIDTH, PREVIEW_HEIGHT),
                    80, stream);
            Bitmap bmp = BitmapFactory.decodeByteArray(
                    stream.toByteArray(), 0, stream.size());
            stream.close();
            return bmp;
        }
    } catch (Exception ex) {
        Log.e("Sys", "Error:" + ex.getMessage());
    }
    return null;
}
Example 7: rgba
import android.graphics.ImageFormat; // import the required package/class
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
    return mRgba;
}
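This rgba() method appears to implement OpenCV's CvCameraViewFrame interface; frames are then typically consumed in an onCameraFrame callback like the sketch below (the listener class is assumed, not part of the original listing):

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba(); // delegates to the conversion shown above
    // ... process the RGBA frame here
    return rgba; // the returned Mat is what gets rendered
}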
Example 8: extract
import android.graphics.ImageFormat; // import the required package/class
/**
 * Extracts the Y-plane from a YUV_420_888 image to create an IntensityPlane.
 * The actual plane data is copied into the new IntensityPlane object.
 *
 * @throws IllegalArgumentException if the provided image is not in the YUV_420_888 format
 */
@NonNull
public static IntensityPlane extract(@NonNull Image img) {
    if (img.getFormat() != ImageFormat.YUV_420_888) {
        throw new IllegalArgumentException("image format must be YUV_420_888");
    }

    Image.Plane[] planes = img.getPlanes();
    ByteBuffer buffer = planes[0].getBuffer(); // plane 0 is always the Y (luma) plane
    byte[] yPlane = new byte[buffer.remaining()];
    buffer.get(yPlane);

    int yRowStride = planes[0].getRowStride();
    return new IntensityPlane(img.getWidth(), img.getHeight(), yPlane, yRowStride);
}
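The IntensityPlane type itself is not part of this listing; a plausible minimal shape for it, matching the constructor call above (the field names are assumptions), would be:

public final class IntensityPlane {
    public final int width;
    public final int height;
    public final byte[] plane;  // copied Y-plane bytes
    public final int rowStride; // bytes per image row; may be larger than width

    public IntensityPlane(int width, int height, byte[] plane, int rowStride) {
        this.width = width;
        this.height = height;
        this.plane = plane;
        this.rowStride = rowStride;
    }
}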
Example 9: setupPreviewSizeAndImageReader
import android.graphics.ImageFormat; // import the required package/class
/**
 * Lazily initialize the ImageReader and select the preview size.
 */
private void setupPreviewSizeAndImageReader() {
    if (previewSize == null) {
        previewSize = cameraHelper.selectPreviewSize(openCamera);
    }
    if (imageReader == null) {
        int maxImages = 2; // should be at least 2 according to the ImageReader.acquireLatestImage() documentation
        imageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, maxImages);
        imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image img = reader.acquireLatestImage();
                if (img != null) {
                    // Make an in-memory copy of the image so the reader's image can be
                    // closed as soon as possible. This helps the thread running the
                    // preview stay up to date.
                    IntensityPlane imgCopy = IntensityPlane.extract(img);
                    img.close();
                    int imageRotation = cameraHelper.getImageRotation(openCamera, getRelativeDisplayRotation());
                    presenter.onImageCaptured(imgCopy, imageRotation);
                }
            }
        }, null);
    }
}
Example 10: startStream
import android.graphics.ImageFormat; // import the required package/class
/**
 * Must be called after prepareVideo and/or prepareAudio.
 * This method overrides the resolution set by startPreview with the resolution set in
 * prepareVideo. If you never called startPreview, this method starts the preview for you
 * at the resolution set in prepareVideo.
 *
 * @param url of the stream, like:
 *            protocol://ip:port/application/streamName
 *
 *            RTSP: rtsp://192.168.1.1:1935/live/pedroSG94
 *            RTSPS: rtsps://192.168.1.1:1935/live/pedroSG94
 *            RTMP: rtmp://192.168.1.1:1935/live/pedroSG94
 *            RTMPS: rtmps://192.168.1.1:1935/live/pedroSG94
 */
public void startStream(String url) {
    if (openGlView != null && Build.VERSION.SDK_INT >= 18) {
        if (videoEncoder.getRotation() == 90 || videoEncoder.getRotation() == 270) {
            openGlView.setEncoderSize(videoEncoder.getHeight(), videoEncoder.getWidth());
        } else {
            openGlView.setEncoderSize(videoEncoder.getWidth(), videoEncoder.getHeight());
        }
        openGlView.startGLThread();
        openGlView.addMediaCodecSurface(videoEncoder.getInputSurface());
        cameraManager =
                new Camera1ApiManager(openGlView.getSurfaceTexture(), openGlView.getContext());
        cameraManager.prepareCamera(videoEncoder.getWidth(), videoEncoder.getHeight(),
                videoEncoder.getFps(), ImageFormat.NV21);
    }
    startStreamRtp(url);
    videoEncoder.start();
    audioEncoder.start();
    cameraManager.start();
    microphoneManager.start();
    streaming = true;
    onPreview = true;
}
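A typical call sequence for this API, sketched from the method's contract above (the object name rtspCamera and the no-argument prepare overloads are assumptions about the surrounding library):

// prepare the encoders first, then start streaming; per the javadoc,
// startStream() starts the preview itself if startPreview() was never called
if (rtspCamera.prepareAudio() && rtspCamera.prepareVideo()) {
    rtspCamera.startStream("rtsp://192.168.1.1:1935/live/pedroSG94");
} else {
    // the device's codecs rejected the requested audio/video settings
}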
Example 11: startPreview
import android.graphics.ImageFormat; // import the required package/class
private void startPreview() {
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Size previewSize = Util.getPreferredPreviewSize(
                configMap.getOutputSizes(ImageFormat.JPEG), textureView.getWidth(), textureView.getHeight());
        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        Surface surface = new Surface(surfaceTexture);
        captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureBuilder.addTarget(surface);
        cameraDevice.createCaptureSession(Arrays.asList(surface), captureSessionCallback, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
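Util.getPreferredPreviewSize() is a project helper not shown in this listing. A minimal sketch of what such a helper commonly does, following the pattern from Google's Camera2 samples (the exact selection policy here is an assumption):

static Size getPreferredPreviewSize(Size[] choices, int viewWidth, int viewHeight) {
    // collect the output sizes large enough to cover the view...
    List<Size> bigEnough = new ArrayList<>();
    for (Size option : choices) {
        if (option.getWidth() >= viewWidth && option.getHeight() >= viewHeight) {
            bigEnough.add(option);
        }
    }
    // ...and pick the smallest of them to save bandwidth and memory
    if (!bigEnough.isEmpty()) {
        return Collections.min(bigEnough, new Comparator<Size>() {
            @Override
            public int compare(Size a, Size b) {
                return Long.signum((long) a.getWidth() * a.getHeight()
                        - (long) b.getWidth() * b.getHeight());
            }
        });
    }
    return choices[0]; // fall back to the first reported size
}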
Example 12: setDefaultCameraParameters
import android.graphics.ImageFormat; // import the required package/class
public void setDefaultCameraParameters(Camera camera, Camera.CameraInfo cameraInfo) {
    Camera.Parameters parameters = camera.getParameters();
    parameters.setPictureFormat(ImageFormat.JPEG);

    // pick the best picture size, then a preview size with a matching aspect ratio
    List<Camera.Size> supportedSizes = parameters.getSupportedPictureSizes();
    Camera.Size pictureSize = getBestSize(supportedSizes, 0);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);
    float whRatio = (float) pictureSize.width / pictureSize.height;
    List<Camera.Size> previewSupportedSizes = parameters.getSupportedPreviewSizes();
    Camera.Size previewSize = getBestSize(previewSupportedSizes, whRatio);
    parameters.setPreviewSize(previewSize.width, previewSize.height);

    List<String> supportedFocusModes = parameters.getSupportedFocusModes();
    boolean hasAutoFocus = supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO);
    if (hasAutoFocus) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
    }
    if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
        parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
    }
    List<String> supportedSceneModes = parameters.getSupportedSceneModes();
    boolean hasAutoScene = supportedSceneModes != null && supportedSceneModes.contains(Camera.Parameters.SCENE_MODE_AUTO);
    if (hasAutoScene) {
        parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
    }
    parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);

    int orientation = cameraInfo.orientation;
    parameters.setRotation(orientation);
    camera.setParameters(parameters);
}
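getBestSize() is a helper from the surrounding project and is not shown in this listing. A minimal sketch under the assumption that it returns the largest size, optionally constrained to a target width/height ratio (0 meaning unconstrained):

private Camera.Size getBestSize(List<Camera.Size> sizes, float targetRatio) {
    Camera.Size best = null;
    for (Camera.Size size : sizes) {
        if (targetRatio > 0) {
            float ratio = (float) size.width / size.height;
            if (Math.abs(ratio - targetRatio) > 0.05f) {
                continue; // skip sizes whose aspect ratio differs too much
            }
        }
        if (best == null || (long) size.width * size.height > (long) best.width * best.height) {
            best = size; // keep the largest candidate seen so far
        }
    }
    return best != null ? best : sizes.get(0);
}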
Example 13: setPreviewFormat
import android.graphics.ImageFormat; // import the required package/class
public static void setPreviewFormat(Camera camera, Camera.Parameters parameters) throws CameraNotSupportException {
    // set the image format for the preview callback
    try {
        parameters.setPreviewFormat(ImageFormat.NV21);
        camera.setParameters(parameters);
    } catch (Exception e) {
        throw new CameraNotSupportException();
    }
}
Example 14: initCamera
import android.graphics.ImageFormat; // import the required package/class
private void initCamera() {
    if (this.mCamera != null) {
        this.mCameraParamters = this.mCamera.getParameters();
        this.mCameraParamters.setPreviewFormat(ImageFormat.NV21);
        this.mCameraParamters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        this.mCameraParamters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
        this.mCameraParamters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
        Point p = MyApplication.getBestCameraResolution(this.mCameraParamters, MyApplication.getScreenMetrics());
        IMAGE_WIDTH = p.x;
        IMAGE_HEIGHT = p.y;
        this.mCameraParamters.setPreviewSize(IMAGE_WIDTH, IMAGE_HEIGHT);

        mCameraPreviewCallback = new CameraPreviewCallback();
        // NV21 uses 12 bits per pixel, so each frame buffer needs width * height * 3 / 2 bytes
        byte[] a = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        byte[] b = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        byte[] c = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        mCamera.addCallbackBuffer(a);
        mCamera.addCallbackBuffer(b);
        mCamera.addCallbackBuffer(c);
        mCamera.setPreviewCallbackWithBuffer(mCameraPreviewCallback);

        List<String> focusModes = this.mCameraParamters.getSupportedFocusModes();
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            this.mCameraParamters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }
        this.mCamera.setParameters(this.mCameraParamters);
        this.mCamera.startPreview();
        this.mIsPreviewing = true;
    }
}
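The width * height * 3 / 2 buffer size hard-coded above is specific to NV21. A format-agnostic alternative (a small sketch using the ImageFormat API this article covers) computes the same value from the format's bits-per-pixel figure:

// ImageFormat.getBitsPerPixel(ImageFormat.NV21) returns 12,
// so this yields the same IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2
int bufferSize = IMAGE_WIDTH * IMAGE_HEIGHT
        * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
byte[] callbackBuffer = new byte[bufferSize];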
}