

Java CvCameraViewFrame.gray Method Code Examples

This article collects typical usage examples of the org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame.gray method in Java. If you are wondering how to use CvCameraViewFrame.gray, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the enclosing class, org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame.


The following shows 15 code examples of the CvCameraViewFrame.gray method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
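
Before the collected examples, here is a minimal orienting sketch (not taken from any of the projects below) of where CvCameraViewFrame.gray() is typically called: an implementation of CameraBridgeViewBase.CvCameraViewListener2, whose onCameraFrame callback receives each preview frame. The class and field names are hypothetical; gray() returns the frame as a single-channel (CV_8UC1) Mat.

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public class GrayFrameListener implements CameraBridgeViewBase.CvCameraViewListener2 {

    private Mat mGray;

    @Override
    public void onCameraViewStarted(int width, int height) {
        mGray = new Mat();
    }

    @Override
    public void onCameraViewStopped() {
        mGray.release();
    }

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        // gray() returns the current frame as a single-channel grayscale Mat
        mGray = inputFrame.gray();
        // any single-channel processing can follow, e.g. a simple threshold
        Imgproc.threshold(mGray, mGray, 128, 255, Imgproc.THRESH_BINARY);
        return mGray; // the returned Mat is rendered to the preview surface
    }
}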

Example 1: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        final int viewMode = mViewMode;
        switch (viewMode) {
            case VIEW_MODE_OPTICAL_FLOW:
                mGray = inputFrame.gray();
                if(features.toArray().length==0){
                    int rowStep = 50, colStep = 100;
                    int nRows = mGray.rows()/rowStep, nCols = mGray.cols()/colStep;

//                    Log.d(TAG, "\nRows: "+nRows+"\nCols: "+nCols+"\n");

                    Point points[] = new Point[nRows*nCols];
                    for(int i=0; i<nRows; i++){
                        for(int j=0; j<nCols; j++){
                            points[i*nCols+j]=new Point(j*colStep, i*rowStep);
//                            Log.d(TAG, "\nRow: "+i*rowStep+"\nCol: "+j*colStep+"\n: ");
                        }
                    }

                    features.fromArray(points);

                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
                    break;
                }

                nextFeatures.fromArray(prevFeatures.toArray());
                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);

                List<Point> prevList=features.toList(), nextList=nextFeatures.toList();
                Scalar color = new Scalar(255);

                for(int i = 0; i<prevList.size(); i++){
//                    Core.circle(mGray, prevList.get(i), 5, color);
                    Imgproc.line(mGray, prevList.get(i), nextList.get(i), color);
                }

                mPrevGray = mGray.clone();
                break;
            case VIEW_MODE_KLT_TRACKER:
                mGray = inputFrame.gray();

                if(features.toArray().length==0){
                    Imgproc.goodFeaturesToTrack(mGray, features, 10, 0.01, 10);
                    Log.d(TAG, features.toList().size()+"");
                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
//                    prevFeatures.fromList(nextFeatures.toList());
                    break;
                }

//                OpticalFlow(mPrevGray.getNativeObjAddr(), mGray.getNativeObjAddr(), prevFeatures.getNativeObjAddr(), nextFeatures.getNativeObjAddr());
                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);
                List<Point> drawFeature = nextFeatures.toList();
//                Log.d(TAG, drawFeature.size()+"");
                for(int i = 0; i<drawFeature.size(); i++){
                    Point p = drawFeature.get(i);
                    Imgproc.circle(mGray, p, 5, new Scalar(255));
                }
                mPrevGray = mGray.clone();
                prevFeatures.fromList(nextFeatures.toList());
                break;
            default: mViewMode = VIEW_MODE_KLT_TRACKER;
        }

        return mGray;
    }
 
Developer ID: johnhany, Project: MOAAP, Lines of code: 68, Source file: MainActivity.java
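
A note on Example 1: the status output of Video.calcOpticalFlowPyrLK is computed but never consulted, so lines are drawn even for points that could not be tracked. A small, hedged extension using the same variables from the example (status, prevList, nextList, color, mGray) could filter those out before drawing; this is an illustrative fragment, not part of the original project:

// Keep only points that were successfully tracked (status byte == 1).
byte[] statusArr = status.toArray();
for (int i = 0; i < statusArr.length && i < nextList.size(); i++) {
    if (statusArr[i] == 1) {
        Imgproc.line(mGray, prevList.get(i), nextList.get(i), color);
    }
}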

Example 2: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        } else if (mDetectorType == NATIVE_DETECTOR) {
            if (mNativeDetector != null)
                mNativeDetector.detect(mGray, faces);
        } else {
            Log.e(TAG, "Detection method is not selected!");
        }

        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++) {
            Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        }

        // rotate by 90 degrees around the image center
        Mat rotateMat = Imgproc.getRotationMatrix2D(new Point(mRgba.rows() / 2, mRgba.cols() / 2), 90, 1);
        Imgproc.warpAffine(mRgba, mRgba, rotateMat, mRgba.size());

        // flip around the y-axis (horizontal mirror)
        Core.flip(mRgba, mRgba, 1);

        return mRgba;
    }
 
Developer ID: vipycm, Project: mao-android, Lines of code: 41, Source file: FdActivity.java

Example 3: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame)
{
    contours.clear();

    // use the gray frame because it requires fewer resources to process
    mGray = inputFrame.gray();

    // convert the gray frame into the RGB format expected by the BackgroundSubtractorMOG apply function
    Imgproc.cvtColor(mGray, mRgb, Imgproc.COLOR_GRAY2RGB);

    // apply detects moving objects and produces a foreground mask;
    // lRate is updated dynamically from seekbar changes
    sub.apply(mRgb, mFGMask, lRate);

    // erode and dilate are used to remove noise from the foreground mask
    Imgproc.erode(mFGMask, mFGMask, new Mat());
    Imgproc.dilate(mFGMask, mFGMask, new Mat());

    // draw contours around the objects by first calling findContours and then drawContours;
    // RETR_EXTERNAL retrieves only external contours,
    // CHAIN_APPROX_NONE keeps all pixels of each contour
    Imgproc.findContours(mFGMask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

    // draw all the contours in red with a thickness of 2
    Imgproc.drawContours(mRgb, contours, -1, new Scalar(255, 0, 0), 2);

    return mRgb;
}
 
Developer ID: projektlp, Project: Android_OCV_Movement_Detection, Lines of code: 28, Source file: MainActivity.java
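
Example 3 uses a background subtractor field sub and a learning rate lRate whose initialization is not shown. A hedged sketch of one possible setup for the hosting activity follows; it assumes OpenCV 3.x, where the MOG2 factory method lives on org.opencv.video.Video (the original comment mentions BackgroundSubtractorMOG, which under OpenCV 2.4 would instead be constructed directly). Field names mirror the example; the parameter values are illustrative only:

import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.video.BackgroundSubtractorMOG2;
import org.opencv.video.Video;

// Fields matching the names used in Example 3 (hypothetical initialization)
private BackgroundSubtractorMOG2 sub;
private Mat mRgb, mFGMask;
private List<MatOfPoint> contours;
private double lRate = 0.01; // learning rate, e.g. driven by a SeekBar listener

public void onCameraViewStarted(int width, int height) {
    mRgb = new Mat();
    mFGMask = new Mat();
    contours = new ArrayList<MatOfPoint>();
    // history = 500 frames, varThreshold = 16, detectShadows = false
    sub = Video.createBackgroundSubtractorMOG2(500, 16, false);
}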

Example 4: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Log.w(TAG, "Got frame");
        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        }
        else if (mDetectorType == NATIVE_DETECTOR) {
//            if (mNativeDetector != null)
//                mNativeDetector.detect(mGray, faces);
        }
        else {
            Log.e(TAG, "Detection method is not selected!");
        }

        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++)
            Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

//        if (facesArray.length > 0) {
//            Intent i = new Intent(FdActivity.this, PreviewActivity.class);
//            i.putExtra("num", facesArray.length);
//            startActivity(i);
//        }

        return mRgba;
    }
 
Developer ID: rwoodley, Project: AndroidExperiments, Lines of code: 40, Source file: MainActivity.java

Example 5: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    
    MatOfRect objs = new MatOfRect();

    // Adjust the minimum size for objects in the image
    int width = mGray.cols();
    int height = mGray.rows();
    relativeObjSize = Math.round(height * 0.12f);
    
    mNativeDetector.setMinDetectionSize(relativeObjSize);
    
    if (mDetectorType == JAVA_DETECTOR) {
    	javaClassifier.detectMultiScale(mGray, objs, 1.1, 4, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
    			new Size(relativeObjSize, relativeObjSize), new Size());
    }
    else {
    	mNativeDetector.detect(mGray, objs);
    }

    track_vehicles(objs);
    
    /** Draw the final classification **/
    Rect[] objArray = vehicles.toArray(new Rect[0]);
    //Rect[] objArray = objs.toArray();
    for (int i = 0; i < objArray.length; i++) {
        String distance = String.format("%.2fm", pixels_to_meters((double)objArray[i].width / (double)width));
        Scalar color = colors[vids.get(i) % colors.length];
        //Scalar color = colors[0];
        Core.rectangle(mRgba, objArray[i].tl(), objArray[i].br(), color, 3);
        Core.putText(mRgba, distance, objArray[i].tl(), Core.FONT_HERSHEY_SIMPLEX, 1.5, color, 4);
    }

    objs = null;
    return mRgba;
}
 
Developer ID: alfonsoros88, Project: Andrive, Lines of code: 40, Source file: Andrive.java
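
The pixels_to_meters helper in Example 5 is project-specific and not shown here. Under a simple pinhole-camera assumption, a rough equivalent could look like the fragment below; the vehicle width and normalised focal length are hypothetical placeholder values, not taken from the Andrive project:

// Hypothetical constants: average vehicle width in metres and the camera's
// focal length normalised by the image width (focal_px / image_width_px);
// both would normally come from calibration, not hard-coded values.
private static final double VEHICLE_WIDTH_M = 1.8;
private static final double NORMALISED_FOCAL = 1.2;

// widthFraction = detected rectangle width / image width, matching the call
// pixels_to_meters((double) objArray[i].width / (double) width) in Example 5.
private double pixels_to_meters(double widthFraction) {
    // Pinhole model: distance = realWidth * focal_px / width_px
    //              = realWidth * (focal_px / image_width_px) / widthFraction
    return VEHICLE_WIDTH_M * NORMALISED_FOCAL / widthFraction;
}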

Example 6: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) 
{
	mRgba = inputFrame.rgba();
	mGray = inputFrame.gray();

	switch (MainActivity.viewMode) 
	{
	case MainActivity.VIEW_MODE_RGBA:
		return this.mRgba;

	case MainActivity.VIEW_MODE_HIST:
		return this.mRgba;

	case MainActivity.VIEW_MODE_CANNY:
		Imgproc.Canny(this.mGray, mIntermediateMat, 80, 100);
		Imgproc.cvtColor(mIntermediateMat, this.mGray, Imgproc.COLOR_GRAY2BGRA, 4);
		return this.mGray;

	case MainActivity.VIEW_MODE_SOBEL:
		Imgproc.Sobel(this.mGray, this.mGray, CvType.CV_8U, 1, 1);
		//			Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
		Imgproc.cvtColor(this.mGray, this.mGray, Imgproc.COLOR_GRAY2BGRA, 4);
		return this.mGray;

	case MainActivity.VIEW_MODE_PIXELIZE:
		Imgproc.resize(this.mGray, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
		Imgproc.resize(mIntermediateMat, this.mRgba, this.mRgba.size(), 0.0, 0.0, Imgproc.INTER_NEAREST);
		return this.mRgba;

	case MainActivity.VIEW_MODE_GRAY:
		return this.mGray;

	case MainActivity.VIEW_MODE_FEATURES:
		FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
		return this.mRgba;

	default:
		return this.mRgba;
	}
}
 
Developer ID: jaredsburrows, Project: open-quartz, Lines of code: 41, Source file: MainActivity.java

Example 7: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
/**
 * Called for processing of each camera frame
 * @param inputFrame - the delivered frame
 * @return mRgba
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    // Retrieve timestamp
    // This is where the timestamp for each video frame originates
    time = System.currentTimeMillis();

    mRgba.release();
    mGray.release();

    // Get RGBA and Gray versions
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    // Write frame to video
    if (VIDEO) {
        encoder.writeFrame(mRgba.dataAddr(), time);
    }

    // Send the frame to rPPG for processing
    // To C++
    rPPG.processFrame(mRgba.getNativeObjAddr(), mGray.getNativeObjAddr(), time);

    return mRgba;
}
 
Developer ID: prouast, Project: heartbeat-android, Lines of code: 30, Source file: Main.java

Example 8: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mFaceDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        Rect[] facesArray = mFaceDetector.Detectfaces(mGray);
        for (int i = 0; i < facesArray.length; i++)
            Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

        return mRgba;
    }
 
Developer ID: fblandroidhackathon, Project: persontracker, Lines of code: 20, Source file: FdActivity.java

Example 9: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) 
{
	mRgba = inputFrame.rgba();
	mGray = inputFrame.gray();

	if (mAbsoluteFaceSize == 0) 
	{
		int height = mGray.rows();
		if (Math.round(height * mRelativeFaceSize) > 0) 
		{
			mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
		}
	}

	MatOfRect faces = new MatOfRect();

	if (mJavaDetector != null)
	{
		mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
				new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
	}

	// Draw rectangles
	Rect[] facesArray = faces.toArray();
	for (int i = 0; i < facesArray.length; i++)
	{
		Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
	}
	
	return mRgba;
}
 
Developer ID: jaredsburrows, Project: open-quartz, Lines of code: 32, Source file: FdActivity.java

Example 10: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat gray = inputFrame.gray();
    Mat imgCard = new Mat(310, 223, gray.type());
    Mat liveMat = new Mat();
    

    if (mFrameCount % MODE_PROCESS_EVERY_N_FRAME == 0) {
        int res = Card.findCard(gray, imgCard);
        if (MODE_ONLY_DISPLAY_MATCH == 0
                || (MODE_ONLY_DISPLAY_MATCH == 1 && res == Card.RECTANGLE_FOUND)) {
            updateSnapshotFrame(imgCard);
        }
    }
    mFrameCount++;

    liveMat = inputFrame.rgba();
    return liveMat;
}
 
Developer ID: chrismeyersfsu, Project: android-mtg_card_scan, Lines of code: 19, Source file: MainActivity.java

Example 11: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        }
        else if (mDetectorType == NATIVE_DETECTOR) {
            if (mNativeDetector != null)
                mNativeDetector.detect(mGray, faces);
        }
        else {
            Log.e(TAG, "Detection method is not selected!");
        }

        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++)
            Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

        return mRgba;
    }
 
Developer ID: yippeesoft, Project: NotifyTools, Lines of code: 35, Source file: FdActivity.java

Example 12: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
        }

        MatOfRect faces = new MatOfRect();


        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());



        Rect[] facesArray = faces.toArray();
        if(isFaceRectangleEnabled){
            for (int i = 0; i < facesArray.length; i++)
                Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        }

        if (facesArray.length == 1)
        {
            chooseFaceRect = facesArray[0].clone();
            isFaceRectChosen = true;
        }
        return mRgba;
    }
 
Developer ID: wblgers, Project: OpenCV_Android_Plus, Lines of code: 35, Source file: MainActivity.java

Example 13: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    final int viewMode = mViewMode;
    switch (viewMode) {
        case VIEW_MODE_GRAY:
            // input frame has gray scale format
            Imgproc.cvtColor(inputFrame.gray(), mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
            break;
        case VIEW_MODE_RGBA:
            // input frame has RGBA format
            mRgba = inputFrame.rgba();
            break;
        case VIEW_MODE_CANNY:
            // input frame has gray scale format
            mRgba = inputFrame.rgba();
            Imgproc.Canny(inputFrame.gray(), mIntermediateMat, 80, 100);
            Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
            break;
        case VIEW_MODE_FEATURES:
            // input frame has RGBA format
            mRgba = inputFrame.rgba();
            mGray = inputFrame.gray();
            FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
            break;
    }

    return mRgba;
}
 
Developer ID: xunqun, Project: OpenCV-AndroidSamples-master, Lines of code: 28, Source file: Tutorial2Activity.java

Example 14: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        
        mGray = inputFrame.gray();
        
        if(!detectionInProgress){
        	Mat image = new Mat(mGray.rows(), mGray.cols(), mGray.type());
        	mGray.copyTo(image);
        	detectFaceOnFrame(image);
        }
        
        return mRgba;
    }
 
Developer ID: yaylas, Project: AndroidFaceRecognizer, Lines of code: 15, Source file: FaceDetectionActivity.java

Example 15: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; // import the package/class this method depends on
/**
 * Executed when a frame is received
 * 
 * @param inputFrame
 *            Frame from camera
 * @return mat Frame mat object
 */
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	Log.i(TAG, "got frame");

	rgba = inputFrame.rgba();
	gray = inputFrame.gray();

	try {
		mBitmap = Bitmap.createBitmap(rgba.cols(), rgba.rows(),
				Bitmap.Config.ARGB_8888);

		Bitmap temp = Bitmap.createScaledBitmap(mBitmap, 640, 480, false);
		Log.i(RGB2GRAYFilter.TAG,
				"W :" + temp.getWidth() + " H : " + temp.getHeight());

		mBitmap = temp;
		Utils.matToBitmap(rgba, mBitmap);
	} catch (Exception ex) {
		System.out.println(ex.getMessage());
	}

	startGate.countDown();
	try {
		endGate.await();
	} catch (InterruptedException e) {
		e.printStackTrace();
	}
	endGate = new CountDownLatch(1);

	return null;
}
 
Developer ID: centosGit, Project: EyeDroid, Lines of code: 39, Source file: InputStreamCamera.java


Note: the org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame.gray method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; the source code copyright belongs to the original authors. Please refer to the corresponding project's License when distributing or using the code; do not reproduce without permission.