This page collects typical Java usage examples of the android.graphics.ImageFormat.NV21 constant. If you have been wondering what ImageFormat.NV21 is for, how to use it, or what real-world code that uses it looks like, the curated examples below should help. You can also read further about its enclosing class, android.graphics.ImageFormat.
The 11 code examples below show ImageFormat.NV21 in use, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java samples.
Example 1: createFromNV21
public static byte[] createFromNV21(@NonNull final byte[] data,
final int width,
final int height,
int rotation,
final Rect croppingRect,
final boolean flipHorizontal)
throws IOException
{
byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
// Width and height swap when the frame is rotated by 90 or 270 degrees.
final int rotatedWidth = rotation % 180 > 0 ? height : width;
final int rotatedHeight = rotation % 180 > 0 ? width : height;
// YuvImage can compress an NV21 buffer directly to JPEG.
YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
rotatedWidth, rotatedHeight, null);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
previewImage.compressToJpeg(croppingRect, 80, outputStream);
byte[] bytes = outputStream.toByteArray();
outputStream.close();
return bytes;
}
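A minimal call-site sketch, assuming this method lives in a helper class (here called BitmapUtil, an assumed name) and that the NV21 buffer comes from Camera.PreviewCallback; rotateNV21 is a companion helper from the same source file:
camera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        Camera.Size size = camera.getParameters().getPreviewSize();
        try {
            // Rotate 90 degrees and keep the full (rotated) frame.
            byte[] jpeg = BitmapUtil.createFromNV21(data, size.width, size.height,
                    90, new Rect(0, 0, size.height, size.width), false);
            // use the JPEG bytes (write to disk, decode to a Bitmap, ...)
        } catch (IOException e) {
            Log.e("NV21", "Failed to encode preview frame", e);
        }
    }
});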
Example 2: decodeToBitMap
private Bitmap decodeToBitMap(byte[] data) {
try {
YuvImage image = new YuvImage(data, ImageFormat.NV21, PREVIEW_WIDTH,
PREVIEW_HEIGHT, null);
if (image != null) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, PREVIEW_WIDTH, PREVIEW_HEIGHT),
80, stream);
Bitmap bmp = BitmapFactory.decodeByteArray(
stream.toByteArray(), 0, stream.size());
stream.close();
return bmp;
}
} catch (Exception ex) {
Log.e("Sys", "Error:" + ex.getMessage());
}
return null;
}
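PREVIEW_WIDTH and PREVIEW_HEIGHT must match the preview size actually configured on the camera, and the preview format must really be NV21 for the decode to produce a sane image. A minimal setup sketch (the constants are placeholders for values the device supports):
Camera.Parameters params = camera.getParameters();
params.setPreviewSize(PREVIEW_WIDTH, PREVIEW_HEIGHT); // must be a supported preview size
params.setPreviewFormat(ImageFormat.NV21);            // NV21 is the default preview format
camera.setParameters(params);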
Example 3: rgba
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else
throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
return mRgba;
}
Example 4: createYuvType
@RequiresApi(18)
public static Type createYuvType(RenderScript rs, int x, int y, int yuvFormat) {
boolean supported = yuvFormat == ImageFormat.NV21 || yuvFormat == ImageFormat.YV12;
if (Build.VERSION.SDK_INT >= 19) {
supported |= yuvFormat == ImageFormat.YUV_420_888;
}
if (!supported) {
throw new IllegalArgumentException("invalid yuv format: " + yuvFormat);
}
return new Type.Builder(rs, createYuvElement(rs)).setX(x).setY(y).setYuvFormat(yuvFormat)
.create();
}
Example 5: frameSize
public static int frameSize(int width, int height, int imageFormat) {
if (imageFormat != ImageFormat.NV21) {
throw new UnsupportedOperationException("Don't know how to calculate "
+ "the frame size of non-NV21 image formats.");
}
// NV21 has 12 bits per pixel, so this evaluates to width * height * 3 / 2.
return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
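A typical use is sizing the buffers handed to Camera#addCallbackBuffer when using setPreviewCallbackWithBuffer; a short sketch, assuming the camera and preview size are already configured:
Camera.Size size = camera.getParameters().getPreviewSize();
int bufferSize = frameSize(size.width, size.height, ImageFormat.NV21);
camera.addCallbackBuffer(new byte[bufferSize]);
camera.setPreviewCallbackWithBuffer(previewCallback);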
Example 6: decodeToBitMap
public static Bitmap decodeToBitMap(byte[] data, Camera _camera) {
Camera.Size size = _camera.getParameters().getPreviewSize();
try {
YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
if (image != null) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
stream.close();
return bmp;
}
} catch (Exception ex) {
// Decode failures are swallowed deliberately; the caller simply receives null.
}
return null;
}
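A one-shot capture sketch using this helper (rough illustration; in production you would reuse buffers rather than decode every frame):
// Grab a single preview frame and decode it to a Bitmap.
camera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera cam) {
        Bitmap snapshot = decodeToBitMap(data, cam);
        // use the snapshot (display it, save it, ...)
    }
});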
Example 7: SrsEncoder
public SrsEncoder(SrsFlvMuxer flvMuxer, SrsMp4Muxer mp4Muxer) {
this.flvMuxer = flvMuxer;
this.mp4Muxer = mp4Muxer;
mVideoColorFormat = chooseVideoEncoder();
if (mVideoColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
VFORMAT = ImageFormat.YV12;
} else if (mVideoColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
VFORMAT = ImageFormat.NV21;
} else {
throw new IllegalStateException("Unsupported color format!");
}
}
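chooseVideoEncoder() is not shown in this snippet. A minimal sketch of how such a selection can be done with MediaCodecList, assuming an H.264 encoder and preferring the two color formats handled above:
private int chooseVideoEncoder() {
    // Walk the available codecs and pick a color format the camera pipeline can feed.
    for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
        if (!info.isEncoder()) continue;
        for (String type : info.getSupportedTypes()) {
            if (!type.equalsIgnoreCase("video/avc")) continue;
            MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
            for (int colorFormat : caps.colorFormats) {
                if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                        || colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                    return colorFormat;
                }
            }
        }
    }
    return 0; // no usable H.264 encoder / color format found
}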
Example 8: NV21ToRGBABitmap
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) {
TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
Rect rect = new Rect(0, 0, width, height);
try {
// Probe via reflection for RenderScript YUV-to-RGB support (API 18+); if it is
// missing, the catch block below falls back to the YuvImage/JPEG path.
Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
byte[] imageData = nv21;
if (mRS == null) {
mRS = RenderScript.create(context);
mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
Type.Builder tb = new Type.Builder(mRS, Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
tb.setX(width);
tb.setY(height);
tb.setMipmaps(false);
tb.setYuvFormat(ImageFormat.NV21);
ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
timings.addSplit("Prepare for ain");
Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
tb2.setX(width);
tb2.setY(height);
tb2.setMipmaps(false);
aOut = Allocation.createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED); // usage flags must be OR-ed together
timings.addSplit("Prepare for aOut");
bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
timings.addSplit("Create Bitmap");
}
ain.copyFrom(imageData);
timings.addSplit("ain copyFrom");
mYuvToRgb.setInput(ain);
timings.addSplit("setInput ain");
mYuvToRgb.forEach(aOut);
timings.addSplit("NV21 to ARGB forEach");
aOut.copyTo(bitmap);
timings.addSplit("Allocation to Bitmap");
} catch (Exception e) {
YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
timings.addSplit("NV21 bytes to YuvImage");
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvImage.compressToJpeg(rect, 90, baos);
byte[] cur = baos.toByteArray();
timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
timings.addSplit("Jpeg Bytes to Bitmap");
}
timings.dumpToLog();
return bitmap;
}
Example 9: prepareVideo
/**
 * Call this method before @startStream. If you don't, the stream will have no video.
 * NOTE: Rotation with the encoder is silently ignored on some devices.
 *
 * @param width resolution in px.
 * @param height resolution in px.
 * @param fps frames per second of the stream.
 * @param bitrate H264 bitrate in kb.
 * @param hardwareRotation true to rotate using the encoder, false to rotate in software
 * if you are using a SurfaceView or TextureView, or with OpenGL if you are using an
 * OpenGlView.
 * @param rotation could be 90, 180, 270 or 0 (normally 0 if you are streaming in landscape,
 * 90 if you are streaming in portrait). This only affects the stream result.
 * NOTE: Rotation with the encoder is silently ignored on some devices.
 * @return true on success, false on error (normally because the selected encoder doesn't
 * support the requested configuration, or the device has no H264 encoder).
 */
public boolean prepareVideo(int width, int height, int fps, int bitrate, boolean hardwareRotation,
int rotation) {
if (onPreview) {
stopPreview();
onPreview = true; // keep the flag set so the preview can be restored after reconfiguration
}
int imageFormat = ImageFormat.NV21; //supported nv21 and yv12
if (openGlView == null) {
cameraManager.prepareCamera(width, height, fps, imageFormat);
return videoEncoder.prepareVideoEncoder(width, height, fps, bitrate, rotation,
hardwareRotation, FormatVideoEncoder.YUV420Dynamical);
} else {
return videoEncoder.prepareVideoEncoder(width, height, fps, bitrate, rotation,
hardwareRotation, FormatVideoEncoder.SURFACE);
}
}
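A hedged call-site sketch for a camera-streaming object built around this method; prepareAudio and startStream are assumed companion methods of the same library, and the URL is a placeholder:
int bitrate = 1200 * 1024; // adjust to the unit the library expects (see @param bitrate above)
if (rtmpCamera.prepareVideo(1280, 720, 30, bitrate, false, 90) && rtmpCamera.prepareAudio()) {
    rtmpCamera.startStream("rtmp://example.com/live/streamKey");
} else {
    Log.e("Stream", "Encoder configuration not supported on this device");
}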
Example 10: run
@Override
public void run() {
int[] newPicLuma = ImageCodec.N21toLuma(rawNewPic, width, height);
if (rawOldPic == null) {
newBitmap = ImageCodec.lumaToBitmapGreyscale(newPicLuma, width, height);
lastBitmap = newBitmap;
} else {
int[] oldPicLuma = ImageCodec.N21toLuma(rawOldPic, width, height);
detector = new LuminanceMotionDetector();
detector.setThreshold(motionSensitivity);
List<Integer> changedPixels =
detector.detectMotion(oldPicLuma, newPicLuma, width, height);
hasChanged = false;
int[] newPic = ImageCodec.lumaToGreyscale(newPicLuma, width, height);
if (changedPixels != null) {
hasChanged = true;
for (int changedPixel : changedPixels) {
newPic[changedPixel] = Color.YELLOW;
}
}
lastBitmap = ImageCodec.lumaToBitmapGreyscale(oldPicLuma, width, height);
newBitmap = Bitmap.createBitmap(newPic, width, height, Bitmap.Config.RGB_565);
if (hasChanged) {
YuvImage image = new YuvImage(rawNewPic, ImageFormat.NV21, width, height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
image.compressToJpeg(
new Rect(0, 0, image.getWidth(), image.getHeight()), 90,
baos);
byte[] imageBytes = baos.toByteArray();
rawBitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
// Rotate the decoded bitmap by -90 degrees
Matrix mtx = new Matrix();
mtx.postRotate(-90);
// Rotating Bitmap
rawBitmap = Bitmap.createBitmap(rawBitmap, 0, 0, width, height, mtx, true);
} else {
rawBitmap = null;
}
}
Log.i("MotionAsyncTask", "Finished processing, sending results");
handler.post(new Runnable() {
public void run() {
for (MotionListener listener : listeners) {
Log.i("MotionAsyncTask", "Updating back view");
listener.onProcess(
lastBitmap,
newBitmap,
rawBitmap,
hasChanged);
}
}
});
}
Example 11: NV21toJPEG
private static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
return out.toByteArray();
}