This article collects typical usage examples of the Java method org.opencv.android.Utils.bitmapToMat. If you are unsure what Utils.bitmapToMat does or how to use it, the curated code samples below may help; you can also read further about its containing class, org.opencv.android.Utils.
Below are 15 code examples of Utils.bitmapToMat, sorted by popularity by default.
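Before the examples, here is a minimal round-trip sketch (the resource name is hypothetical; it assumes the OpenCV native library has already been loaded, e.g. via OpenCVLoader.initDebug()):

Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.sample);
Mat mat = new Mat();
Utils.bitmapToMat(bitmap, mat);   // always yields a CV_8UC4 RGBA Mat
// ... process mat with Imgproc/Core ...
Bitmap out = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(mat, out);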
Example 1: bytesToMat
import org.opencv.android.Utils; // import the package/class this method depends on
private Mat bytesToMat(byte[] data) {
    // Scale down the image for performance
    Bitmap bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
    int targetWidth = 1200;
    if (bmp.getWidth() > targetWidth) {
        float scaleDownFactor = (float) targetWidth / bmp.getWidth();
        bmp = Bitmap.createScaledBitmap(bmp,
                (int) (bmp.getWidth() * scaleDownFactor),
                (int) (bmp.getHeight() * scaleDownFactor),
                true);
    }
    // Mat takes (rows, cols) = (height, width); bitmapToMat reallocates the
    // destination as a CV_8UC4 RGBA Mat regardless of how it was constructed.
    Mat rgbaImage = new Mat(bmp.getHeight(), bmp.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(bmp, rgbaImage);
    return rgbaImage;
}
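The original snippet named its Mat BGRImage, but bitmapToMat always writes RGBA. If a true 3-channel BGR Mat is needed (for example, for code ported from desktop OpenCV), a minimal conversion sketch:

Mat rgba = new Mat();
Utils.bitmapToMat(bmp, rgba);                        // CV_8UC4, RGBA order
Mat bgr = new Mat();
Imgproc.cvtColor(rgba, bgr, Imgproc.COLOR_RGBA2BGR); // CV_8UC3, BGR order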
Example 2: onPictureTaken
import org.opencv.android.Utils; // import the package/class this method depends on
@Override
public void onPictureTaken(byte[] data, Camera camera) {
    Log.i(TAG, "Saving a bitmap to file");
    // The camera preview was automatically stopped. Start it again.
    mCamera.startPreview();
    mCamera.setPreviewCallback(this);
    // Decode the JPEG data and convert it to a grayscale Mat
    // (the original file-writing code is left commented out).
    try {
        /*FileOutputStream fos = new FileOutputStream(mPictureFileName);
        fos.write(data);
        fos.close();*/
        Bitmap bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
        // bitmapToMat always produces a CV_8UC4 RGBA Mat
        Mat orig = new Mat(bmp.getHeight(), bmp.getWidth(), CvType.CV_8UC4);
        Bitmap myBitmap32 = bmp.copy(Bitmap.Config.ARGB_8888, true);
        Utils.bitmapToMat(myBitmap32, orig);
        mImage = new Mat();
        Imgproc.cvtColor(orig, mImage, Imgproc.COLOR_RGBA2GRAY); // orig is RGBA, not RGB
        /*Imgproc.cvtColor(orig, orig, Imgproc.COLOR_BGR2RGB, 4);
        Mat frame = new Mat(mFrameHeight + mFrameHeight / 2, mFrameWidth, CvType.CV_8UC1);
        frame.put(0, 0, data);
        //Imgcodecs.imdecode(frame, 0);
        Imgproc.cvtColor(frame, mImage, Imgproc.COLOR_YUV2RGBA_NV21);//frame.submat(0, mFrameHeight, 0, mFrameWidth);*/
    } catch (Exception e) {
        Log.e("PictureDemo", "Exception in photoCallback", e);
    }
}
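The commented-out block above hints at decoding a raw NV21 preview buffer instead of a JPEG. A sketch of that path, assuming data holds NV21 bytes and width/height are the preview dimensions (names illustrative; the commented code uses mFrameWidth/mFrameHeight):

// An NV21 buffer is height * 3/2 rows of single-channel data
Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
yuv.put(0, 0, data);
Mat rgba = new Mat();
Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGBA_NV21);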
Example 3: getInputDataLeNet
import org.opencv.android.Utils; // import the package/class this method depends on
private float[] getInputDataLeNet(Bitmap bitmap) {
    final int INPUT_LENGTH = 28;
    Mat imageMat = new Mat();
    Mat inputMat = new Mat();
    Utils.bitmapToMat(bitmap, imageMat);
    // Convert the image to 28x28 grayscale in [0, 1], inverted so that
    // smaller values mean whiter pixels.
    Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_RGBA2GRAY);
    imageMat = centerCropAndScale(imageMat, INPUT_LENGTH);
    imageMat.convertTo(imageMat, CvType.CV_32F, 1. / 255);
    Core.subtract(Mat.ones(imageMat.size(), CvType.CV_32F), imageMat, inputMat);
    float[] inputData = new float[inputMat.width() * inputMat.height()];
    inputMat.get(0, 0, inputData);
    return inputData;
}
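centerCropAndScale is project-specific code not shown on this page; a minimal sketch of what such a helper might look like (center-crop to a square, then resize to length x length):

private Mat centerCropAndScale(Mat src, int length) {
    int side = Math.min(src.rows(), src.cols());
    int x = (src.cols() - side) / 2;
    int y = (src.rows() - side) / 2;
    Mat square = new Mat(src, new Rect(x, y, side, side)); // ROI view, no copy
    Mat scaled = new Mat();
    Imgproc.resize(square, scaled, new Size(length, length));
    return scaled;
}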
Example 4: onActivityResult
import org.opencv.android.Utils; // import the package/class this method depends on
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);
    switch (requestCode) {
        case SELECT_PHOTO:
            if (resultCode == RESULT_OK) {
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    srcSelected = true;
                    bGaussianPyrUp.setEnabled(true);
                    bGaussianPyrDown.setEnabled(true);
                    bLaplacianPyr.setEnabled(true);
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
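For context, an onActivityResult like the one above is typically paired with a photo-picker intent along these lines (SELECT_PHOTO is the same request code):

Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
photoPickerIntent.setType("image/*");
startActivityForResult(photoPickerIntent, SELECT_PHOTO);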
Example 5: onCameraViewStarted
import org.opencv.android.Utils; // import the package/class this method depends on
/**
 * This method is called when the camera view is started. It will allocate and initialize
 * some global resources.
 *
 * @param width specifies the width of the camera view.
 * @param height specifies the height of the camera view.
 */
@Override
public void onCameraViewStarted(int width, int height)
{
    faceRects = new MatOfRect();
    totalProcessingTime = 0;
    framesProcessed = 0;
    overlayImage = new Mat();
    Bitmap overlayBitmap =
            BitmapFactory.decodeResource(activity.getResources(), R.drawable.mustache);
    Utils.bitmapToMat(overlayBitmap, overlayImage);
    //
    // Don't allow overlay unless the overlay image has all four RGBA channels.
    //
    if (overlayImage.channels() < 4) doOverlayImage = false;
}
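The four-channel check matters because a typical overlay step uses the alpha channel as a mask. A hedged sketch of how the overlay might later be composited onto a same-sized region of the camera frame (roi is an assumed CV_8UC4 submat, not part of the original):

List<Mat> channels = new ArrayList<>();
Core.split(overlayImage, channels);
Mat alphaMask = channels.get(3);     // the 4th channel is alpha
overlayImage.copyTo(roi, alphaMask); // copy only the opaque pixels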
Example 6: detectLight
import org.opencv.android.Utils; // import the package/class this method depends on
private void detectLight(Bitmap bitmap, double gaussianBlurValue) {
    Mat rgba = new Mat();
    Utils.bitmapToMat(bitmap, rgba);
    Mat grayScaleGaussianBlur = new Mat();
    // bitmapToMat produces RGBA, so convert with COLOR_RGBA2GRAY
    Imgproc.cvtColor(rgba, grayScaleGaussianBlur, Imgproc.COLOR_RGBA2GRAY);
    // Note: GaussianBlur kernel sizes must be odd and positive.
    Imgproc.GaussianBlur(grayScaleGaussianBlur, grayScaleGaussianBlur, new Size(gaussianBlurValue, gaussianBlurValue), 0);
    Core.MinMaxLocResult minMaxLocResultBlur = Core.minMaxLoc(grayScaleGaussianBlur);
    // Drawing directly on the source image is for visualization purposes only.
    Imgproc.circle(rgba, minMaxLocResultBlur.maxLoc, 30, new Scalar(255), 3);
    Bitmap resultBitmap = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgba, resultBitmap);
    BitmapHelper.showBitmap(this, resultBitmap, detectLightImageView);
    Bitmap blurryBitmap = Bitmap.createBitmap(grayScaleGaussianBlur.cols(), grayScaleGaussianBlur.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(grayScaleGaussianBlur, blurryBitmap);
    BitmapHelper.showBitmap(this, blurryBitmap, gaussianBlurImageView);
}
Example 7: detectDocument
import org.opencv.android.Utils; // import the package/class this method depends on
Document detectDocument(Frame frame) {
    Size imageSize = new Size(frame.getMetadata().getWidth(), frame.getMetadata().getHeight());
    Mat src = new Mat();
    Utils.bitmapToMat(frame.getBitmap(), src);
    List<MatOfPoint> contours = CVProcessor.findContours(src);
    src.release();
    if (!contours.isEmpty()) {
        CVProcessor.Quadrilateral quad = CVProcessor.getQuadrilateral(contours, imageSize);
        if (quad != null) {
            quad.points = CVProcessor.getUpscaledPoints(quad.points, CVProcessor.getScaleRatio(imageSize));
            return new Document(frame, quad);
        }
    }
    return null;
}
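CVProcessor is a third-party helper; in plain OpenCV the contour search it wraps presumably resembles the following (blur kernel and Canny thresholds are illustrative):

Mat gray = new Mat();
Imgproc.cvtColor(src, gray, Imgproc.COLOR_RGBA2GRAY);
Imgproc.GaussianBlur(gray, gray, new Size(5, 5), 0);
Mat edges = new Mat();
Imgproc.Canny(gray, edges, 75, 200);
List<MatOfPoint> contours = new ArrayList<>();
Imgproc.findContours(edges, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);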
Example 8: onActivityResult
import org.opencv.android.Utils; // import the package/class this method depends on
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQ_PICK_IMAGE && resultCode == RESULT_OK && data != null) {
        try {
            Bitmap image = BitmapFactory.decodeStream(getContentResolver().openInputStream(data.getData()));
            // Release any previously loaded Mat before replacing it
            if (mData != null) {
                mData.release();
                mData = null;
            }
            mData = new Mat();
            Utils.bitmapToMat(image, mData);
            image.recycle();
            startTests();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }
}
Example 9: object_to_left
import org.opencv.android.Utils; // import the package/class this method depends on
@Test
public void object_to_left() {
    ExtractPath mExtractPath = new ExtractPath();
    Path myPath = new Path();
    // bitmapToMat throws on a null bitmap, so use a real (blank) one;
    // the 64x64 size is arbitrary.
    Bitmap bmp = Bitmap.createBitmap(64, 64, Bitmap.Config.ARGB_8888);
    Mat currFrame = new Mat();
    Utils.bitmapToMat(bmp, currFrame);
    Mat prevFrame = new Mat();
    Utils.bitmapToMat(bmp, prevFrame); // the original filled currFrame twice
    Assert.assertEquals(currFrame, prevFrame);
    Assert.assertSame(currFrame, mExtractPath.withRigidTransformation(currFrame, prevFrame, myPath));
}
Example 10: drawTemplateOutline
import org.opencv.android.Utils; // import the package/class this method depends on
/**
 * Draws the outline of the Pattern to the preview screen.
 */
public void drawTemplateOutline() {
    // Load the template outline
    cardType = DataHolder.getInstance().getData();
    if (!cardType.equals("-DETECT-")) getTemplate();
    Bitmap icon = BitmapFactory.decodeResource(this.getResources(),
            R.drawable.card_frame);
    Mat outline = new Mat(icon.getHeight(), icon.getWidth(), CvType.CV_8U, new Scalar(0, 0, 0));
    Utils.bitmapToMat(icon, outline);
    Imgproc.cvtColor(outline, outline, Imgproc.COLOR_BGRA2RGBA);
    if (showOutline) {
        for (String key : card.getPatternMap().keySet()) {
            if (card.getPattern(key).getResource().matches("text.*")) {
                Core.rectangle(outline,
                        new Point(Math.abs(outline.cols() - (card.getPattern(key).getTl().y * outline.cols())),
                                card.getPattern(key).getTl().x * outline.rows()),
                        new Point(Math.abs(outline.cols() - (card.getPattern(key).getBr().y * outline.cols())),
                                card.getPattern(key).getBr().x * outline.rows()),
                        new Scalar(0, 255, 0, 255), 1);
            }
        }
        Core.rectangle(outline,
                new Point(Math.abs(outline.cols() - (facetl.y * outline.cols())), facetl.x * outline.rows()),
                new Point(Math.abs(outline.cols() - (facebr.y * outline.cols())), facebr.x * outline.rows()),
                new Scalar(0, 255, 0, 255), 1);
    }
    Bitmap bimage = Bitmap.createBitmap(outline.cols(), outline.rows(), Bitmap.Config.ARGB_8888);
    Imgproc.cvtColor(outline, outline, Imgproc.COLOR_RGBA2BGRA);
    Utils.matToBitmap(outline, bimage);
    ImageView imgV = (ImageView) findViewById(R.id.frame_det);
    imgV.setImageBitmap(bimage);
}
Example 11: read
import org.opencv.android.Utils; // import the package/class this method depends on
/**
 * Read frame and create a bundle
 *
 * @return Frame Bundle
 */
@Override
public Bundle read() throws IOException {
    // Obtain a camera image (pixel data are stored in an array on the JNI side).
    if (cameraExists) {
        processCamera();
        // Convert the camera image to a bitmap
        pixeltobmp(bmp);
        Mat mat = new Mat();
        Utils.bitmapToMat(bmp, mat);
        Bundle bundle = new Bundle();
        Log.i(TAG, "Usb camera got new frame " + cameraId);
        bundle.put(Constants.SOURCE_MAT_RGB, mat);
        bundle.put(Constants.SOURCE_BITMAP, bmp);
        return bundle;
    }
    throw new IOException("Usb camera is not open. Could not read frame");
}
Example 12: convertGray
import org.opencv.android.Utils; // import the package/class this method depends on
private void convertGray() {
    Mat src = new Mat();
    Mat temp = new Mat();
    Mat dst = new Mat();
    Utils.bitmapToMat(selectbp, src);
    // bitmapToMat yields RGBA, so drop alpha with RGBA2RGB (not BGRA2BGR,
    // which would leave the red and blue channels swapped).
    Imgproc.cvtColor(src, temp, Imgproc.COLOR_RGBA2RGB);
    Log.i("CV", "image type:" + (temp.type() == CvType.CV_8UC3));
    Imgproc.cvtColor(temp, dst, Imgproc.COLOR_RGB2GRAY);
    Utils.matToBitmap(dst, selectbp);
    myImageView.setImageBitmap(selectbp);
}
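Since bitmapToMat already returns RGBA, the two-step conversion above can also be collapsed into a single call:

Mat gray = new Mat();
Imgproc.cvtColor(src, gray, Imgproc.COLOR_RGBA2GRAY);
Utils.matToBitmap(gray, selectbp); // matToBitmap accepts CV_8UC1 as well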
Example 13: setInputImage
import org.opencv.android.Utils; // import the package/class this method depends on
public void setInputImage(Bitmap bitmap, boolean isPano) {
    if (bitmap == null)
        return;
    mIsFaded = !isPano;
    mIsImageLoaded = true;
    mInputImage = new Mat();
    // Check that the bitmap has a config bitmapToMat can handle;
    // getConfig() returns null if the bitmap is not in one of the public formats.
    if (bitmap.getConfig() != null) {
        if (bitmap.getConfig() != Bitmap.Config.ARGB_8888 && bitmap.getConfig() != Bitmap.Config.RGB_565)
            bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, false);
    }
    else
        bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, false);
    Utils.bitmapToMat(bitmap, mInputImage);
    mOriginalImage = mInputImage.clone();
    mFullOutputSize = Math.max(mInputImage.width(), mInputImage.height());
    if (mFullOutputSize > MainActivity.MAX_IMG_SIZE)
        mFullOutputSize = MainActivity.MAX_IMG_SIZE;
    initImages();
}
Example 14: getFrame
import org.opencv.android.Utils; // import the package/class this method depends on
/**
 * This method gets a frame from the frame queue and returns the image that matches the format specified by the
 * configVideoSource method.
 *
 * @param frame specifies the frame object to hold the image.
 * @return true if successful, false otherwise.
 */
@Override
public boolean getFrame(Mat frame)
{
    boolean success = false;
    try
    {
        VuforiaLocalizer.CloseableFrame closeableFrame = localizer.getFrameQueue().take();
        for (int i = 0; i < closeableFrame.getNumImages(); i++)
        {
            Image image = closeableFrame.getImage(i);
            if (image.getWidth() == imageWidth && image.getHeight() == imageHeight &&
                image.getFormat() == PIXEL_FORMAT.RGB565)
            {
                // bitmapToMat accepts RGB_565 as well as ARGB_8888 bitmaps
                Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
                bm.copyPixelsFromBuffer(image.getPixels());
                Utils.bitmapToMat(bm, frame);
                break;
            }
        }
        closeableFrame.close();
        success = true;
    }
    catch (InterruptedException e)
    {
        e.printStackTrace();
    }
    return success;
}
Example 15: initAnalysisVideo
import org.opencv.android.Utils; // import the package/class this method depends on
private void initAnalysisVideo(String mChosenFile) {
    try {
        if (!carregandoVideo) {
            carregandoVideo = true;
            File videoFile = new File(mChosenFile);
            Uri videoFileUri = Uri.parse(videoFile.toString());
            MediaMetadataRetriever retriever = new MediaMetadataRetriever();
            FileInputStream inputStream = new FileInputStream(videoFile.getAbsolutePath());
            retriever.setDataSource(inputStream.getFD());
            // Create a media player just to read the video duration
            ProgressDialog pd = new ProgressDialog(activity);
            pd.setMessage("aguarde!");
            pd.show();
            MediaPlayer mp = MediaPlayer.create(activity, videoFileUri);
            int millis = mp.getDuration();
            // Grab one frame every 100 ms (getFrameAtTime expects microseconds)
            for (int i = 0; i < millis; i += 100) {
                Bitmap bitmap = retriever.getFrameAtTime(i * 1000, MediaMetadataRetriever.OPTION_CLOSEST);
                if (bitmap == null) continue; // getFrameAtTime may return null
                Mat mat = new Mat();
                Utils.bitmapToMat(bitmap, mat);
                Utils.matToBitmap(faceDetectController.detectface(mat), bitmap);
                framePackage.addImage(Bitmap.createScaledBitmap(bitmap, bitmap.getWidth() / 3, bitmap.getHeight() / 3, false));
            }
            mp.release();
            retriever.release();
            new MyVideoProgressBar(activity, framePackage, Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/out.mp4", "MOBILE").execute();
            pd.dismiss();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}