當前位置: 首頁>>代碼示例>>Java>>正文


Java CvCameraViewFrame類代碼示例

本文整理匯總了Java中org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame的典型用法代碼示例。如果您正苦於以下問題:Java CvCameraViewFrame類的具體用法?Java CvCameraViewFrame怎麽用?Java CvCameraViewFrame使用的例子?那麽, 這裏精選的類代碼示例或許可以為您提供幫助。


CvCameraViewFrame類屬於org.opencv.android.CameraBridgeViewBase包,在下文中一共展示了CvCameraViewFrame類的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。

示例1: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: depending on mViewMode, runs either grid-seeded
 * optical flow or KLT corner tracking on the grayscale frame and draws the
 * result directly into mGray.
 *
 * @param inputFrame the delivered camera frame
 * @return mGray with flow lines / tracked points drawn on it
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        // Snapshot the mode once so a concurrent UI change cannot switch
        // branches mid-frame.
        final int viewMode = mViewMode;
        switch (viewMode) {
            case VIEW_MODE_OPTICAL_FLOW:
                mGray = inputFrame.gray();
                // First frame in this mode: seed a regular grid of points to
                // track, remember this frame, and wait for the next one.
                if(features.toArray().length==0){
                    int rowStep = 50, colStep = 100;
                    int nRows = mGray.rows()/rowStep, nCols = mGray.cols()/colStep;

//                    Log.d(TAG, "\nRows: "+nRows+"\nCols: "+nCols+"\n");

                    Point points[] = new Point[nRows*nCols];
                    for(int i=0; i<nRows; i++){
                        for(int j=0; j<nCols; j++){
                            points[i*nCols+j]=new Point(j*colStep, i*rowStep);
//                            Log.d(TAG, "\nRow: "+i*rowStep+"\nCol: "+j*colStep+"\n: ");
                        }
                    }

                    features.fromArray(points);

                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
                    break;
                }

                // Track the points from the previous frame into this one.
                nextFeatures.fromArray(prevFeatures.toArray());
                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);

                // NOTE(review): prevFeatures is never advanced to nextFeatures
                // in this branch, so lines are always drawn from the original
                // grid positions — presumably intentional, but worth confirming.
                List<Point> prevList=features.toList(), nextList=nextFeatures.toList();
                Scalar color = new Scalar(255);

                // Draw a line from each seed point to its tracked position.
                for(int i = 0; i<prevList.size(); i++){
//                    Core.circle(mGray, prevList.get(i), 5, color);
                    Imgproc.line(mGray, prevList.get(i), nextList.get(i), color);
                }

                mPrevGray = mGray.clone();
                break;
            case VIEW_MODE_KLT_TRACKER:
                mGray = inputFrame.gray();

                // First frame in this mode: pick up to 10 strong corners
                // via goodFeaturesToTrack as the features to follow.
                if(features.toArray().length==0){
                    Imgproc.goodFeaturesToTrack(mGray, features, 10, 0.01, 10);
                    Log.d(TAG, features.toList().size()+"");
                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
//                    prevFeatures.fromList(nextFeatures.toList());
                    break;
                }

//                OpticalFlow(mPrevGray.getNativeObjAddr(), mGray.getNativeObjAddr(), prevFeatures.getNativeObjAddr(), nextFeatures.getNativeObjAddr());
                // Pyramidal Lucas-Kanade: track the corners into this frame.
                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);
                List<Point> drawFeature = nextFeatures.toList();
//                Log.d(TAG, drawFeature.size()+"");
                // Mark each tracked corner with a circle.
                for(int i = 0; i<drawFeature.size(); i++){
                    Point p = drawFeature.get(i);
                    Imgproc.circle(mGray, p, 5, new Scalar(255));
                }
                // Carry state forward so the next frame tracks from here.
                mPrevGray = mGray.clone();
                prevFeatures.fromList(nextFeatures.toList());
                break;
            default: mViewMode = VIEW_MODE_KLT_TRACKER;
        }

        return mGray;
    }
 
開發者ID:johnhany,項目名稱:MOAAP,代碼行數:68,代碼來源:MainActivity.java

示例2: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: once a color has been picked, outlines all
 * matching blobs and paints the picked color plus its spectrum strip into
 * the top-left corner of the frame.
 *
 * @param inputFrame the delivered camera frame
 * @return mRgba with the detection overlay (or unmodified if no color picked)
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();

    // Nothing to do until the user has tapped a color to track.
    if (!mIsColorSelected) {
        return mRgba;
    }

    // Find blobs matching the selected color and outline them.
    mDetector.process(mRgba);
    List<MatOfPoint> contours = mDetector.getContours();
    Log.e(TAG, "Contours count: " + contours.size());
    Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);

    // Paint a small swatch of the picked color into the corner...
    Mat colorLabel = mRgba.submat(4, 68, 4, 68);
    colorLabel.setTo(mBlobColorRgba);

    // ...and the spectrum strip right next to it.
    Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
    mSpectrum.copyTo(spectrumLabel);

    return mRgba;
}
 
開發者ID:joaopedronardari,項目名稱:OpenCV-AndroidSamples,代碼行數:19,代碼來源:ColorBlobDetectionActivity.java

示例3: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: detects faces with either the Java or the
 * native cascade detector, outlines them, then rotates the preview by 90
 * degrees and mirrors it for display.
 *
 * @param inputFrame the delivered camera frame
 * @return mRgba with face rectangles, rotated and mirrored
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        // Lazily derive the minimum face size from the frame height.
        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null) {
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
            }
        } else if (mDetectorType == NATIVE_DETECTOR) {
            if (mNativeDetector != null) {
                mNativeDetector.detect(mGray, faces);
            }
        } else {
            Log.e(TAG, "Detection method is not selected!");
        }

        // Outline every detected face.
        for (Rect face : faces.toArray()) {
            Imgproc.rectangle(mRgba, face.tl(), face.br(), FACE_RECT_COLOR, 3);
        }

        // Rotate the preview by 90 degrees around the frame centre.
        Mat rotateMat = Imgproc.getRotationMatrix2D(new Point(mRgba.rows() / 2, mRgba.cols() / 2), 90, 1);
        Imgproc.warpAffine(mRgba, mRgba, rotateMat, mRgba.size());

        // Mirror horizontally (flip around the y axis).
        Core.flip(mRgba, mRgba, 1);

        return mRgba;
    }
 
開發者ID:vipycm,項目名稱:mao-android,代碼行數:41,代碼來源:FdActivity.java

示例4: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: background-subtraction based movement
 * detection. Produces a foreground mask, cleans it up, and draws contours
 * around the moving objects.
 *
 * @param inputFrame the delivered camera frame
 * @return mRgb with red contours around detected movement
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame)
{
	contours.clear();

	// Work on the grayscale frame — cheaper to process.
	mGray = inputFrame.gray();

	// BackgroundSubtractorMOG.apply expects an RGB image, so expand the gray frame.
	Imgproc.cvtColor(mGray, mRgb, Imgproc.COLOR_GRAY2RGB);

	// Produce the foreground mask; the learning rate lRate is updated
	// live from the seek bar.
	sub.apply(mRgb, mFGMask, lRate);

	// Morphological open (erode then dilate) to suppress mask noise.
	Mat kernel = new Mat();
	Imgproc.erode(mFGMask, mFGMask, kernel);
	Imgproc.dilate(mFGMask, mFGMask, kernel);

	// Outer contours only, keeping every boundary pixel.
	Imgproc.findContours(mFGMask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

	// Draw all moving-object outlines in red, thickness 2.
	Scalar red = new Scalar(255, 0, 0);
	Imgproc.drawContours(mRgb, contours, -1, red, 2);

	return mRgb;
}
 
開發者ID:projektlp,項目名稱:Android_OCV_Movement_Detection,代碼行數:28,代碼來源:MainActivity.java

示例5: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: runs the Java cascade face detector (the
 * native path is disabled in this sample) and outlines every detection.
 *
 * @param inputFrame the delivered camera frame
 * @return mRgba with face rectangles drawn on it
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Log.w(TAG, "Got frame");
        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        // Compute the minimum face size once, from the frame height.
        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null) {
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
            }
        } else if (mDetectorType == NATIVE_DETECTOR) {
            // Native detector path is intentionally disabled in this sample.
        } else {
            Log.e(TAG, "Detection method is not selected!");
        }

        // Outline every detected face.
        for (Rect face : faces.toArray()) {
            Core.rectangle(mRgba, face.tl(), face.br(), FACE_RECT_COLOR, 3);
        }

        return mRgba;
    }
 
開發者ID:rwoodley,項目名稱:AndroidExperiments,代碼行數:40,代碼來源:MainActivity.java

示例6: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: lets the frame mocker substitute the live
 * frame's content, then hands the frame to the detector.
 *
 * @param inputFrame the delivered camera frame
 * @return the (possibly replaced) frame
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	Mat rgba = inputFrame.rgba();

	// The mocker may replace the frame's content; we also need the
	// mocked frame's file name for processing.
	String frameName = mocker.getNextFrame(rgba).getName();

	detector.proccessFrame(rgba, frameName);
	return rgba;
}
 
開發者ID:mikrasov,項目名稱:BuzzSense,代碼行數:10,代碼來源:BuzzSenseActivity.java

示例7: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: detects vehicles (Java or native cascade),
 * feeds the detections into the tracker, and draws each tracked vehicle
 * with a per-id color plus its estimated distance.
 *
 * @param inputFrame the delivered camera frame
 * @return mRgba with tracked-vehicle boxes and distance labels
 */
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    MatOfRect objs = new MatOfRect();

    // Minimum detection size scales with the frame height (12%).
    int width = mGray.cols();
    int height = mGray.rows();
    relativeObjSize = Math.round(height * 0.12f);

    mNativeDetector.setMinDetectionSize(relativeObjSize);

    if (mDetectorType == JAVA_DETECTOR) {
        javaClassifier.detectMultiScale(mGray, objs, 1.1, 4, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size(relativeObjSize, relativeObjSize), new Size());
    } else {
        mNativeDetector.detect(mGray, objs);
    }

    // Associate this frame's detections with the persistent vehicle list.
    track_vehicles(objs);

    /** Draw the final classification **/
    Rect[] tracked = vehicles.toArray(new Rect[0]);
    for (int i = 0; i < tracked.length; i++) {
        Rect box = tracked[i];
        // Distance estimate from the box width relative to the frame width.
        String distance = String.format("%.2fm", pixels_to_meters((double) box.width / (double) width));
        Scalar color = colors[vids.get(i) % colors.length];
        Core.rectangle(mRgba, box.tl(), box.br(), color, 3);
        Core.putText(mRgba, distance, box.tl(), Core.FONT_HERSHEY_SIMPLEX, 1.5, color, 4);
    }

    objs = null;
    return mRgba;
}
 
開發者ID:alfonsoros88,項目名稱:Andrive,代碼行數:40,代碼來源:Andrive.java

示例8: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: all processing happens in native code; the
 * RGBA input and a fresh output Mat are passed by native address.
 *
 * @param inputFrame the delivered camera frame
 * @return the Mat filled in by the native processFrame call
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	Mat output = new Mat();
	// Delegate the actual per-frame work to native code via JNI.
	processFrame(inputFrame.rgba().getNativeObjAddr(), output.getNativeObjAddr());
	return output;
}
 
開發者ID:prclibo,項目名稱:OpenCVAndroidBoilerplate,代碼行數:9,代碼來源:MainActivity.java

示例9: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: dispatches on the activity's view mode and
 * returns the frame processed accordingly (raw, Canny, Sobel, pixelized,
 * grayscale, or native feature overlay).
 *
 * @param inputFrame the delivered camera frame
 * @return the processed frame for the current view mode
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame)
{
	mRgba = inputFrame.rgba();
	mGray = inputFrame.gray();

	switch (MainActivity.viewMode)
	{
	case MainActivity.VIEW_MODE_RGBA:
	case MainActivity.VIEW_MODE_HIST:
		// Both modes currently show the raw color frame.
		return mRgba;

	case MainActivity.VIEW_MODE_CANNY:
		// Edge map, converted back to BGRA for display.
		Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
		Imgproc.cvtColor(mIntermediateMat, mGray, Imgproc.COLOR_GRAY2BGRA, 4);
		return mGray;

	case MainActivity.VIEW_MODE_SOBEL:
		// First-order derivative in both x and y, then back to BGRA.
		Imgproc.Sobel(mGray, mGray, CvType.CV_8U, 1, 1);
		Imgproc.cvtColor(mGray, mGray, Imgproc.COLOR_GRAY2BGRA, 4);
		return mGray;

	case MainActivity.VIEW_MODE_PIXELIZE:
		// Shrink to 10% and blow back up with nearest-neighbour for a mosaic look.
		Imgproc.resize(mGray, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
		Imgproc.resize(mIntermediateMat, mRgba, mRgba.size(), 0.0, 0.0, Imgproc.INTER_NEAREST);
		return mRgba;

	case MainActivity.VIEW_MODE_GRAY:
		return mGray;

	case MainActivity.VIEW_MODE_FEATURES:
		// Native feature detector draws directly into the color frame.
		FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
		return mRgba;

	default:
		return mRgba;
	}
}
 
開發者ID:jaredsburrows,項目名稱:open-quartz,代碼行數:41,代碼來源:MainActivity.java

示例10: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: while active, thresholds the frame, extracts
 * the sudoku grid boxes and overlays the recognized numbers; on any
 * failure (or while paused) the raw frame is shown instead.
 *
 * @param inputFrame the delivered camera frame
 * @return the frame, with the number overlay when extraction succeeds
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat img = inputFrame.rgba();
    this.currentFrame = img;

    // While the activity is in the paused state, pass the frame through.
    if (CameraProcessActivity.this.state) {
        return img;
    }

    Mat binary = Thresholding.normalThresholding(inputFrame.gray());
    try {
        Mat stat = Processing.ExtractBoxes(binary, 8);
        Processing.drawNumbers(img, CameraProcessActivity.this.emptySud,
                CameraProcessActivity.this.filledSud, stat, Processing.getNumberColor());
        this.currentFrame = img;
        return img;
    } catch (Exception e) {
        // Best-effort: if box extraction fails for this frame, show it unmodified.
        return img;
    }
}
 
開發者ID:Sanahm,項目名稱:SudoCAM-Ku,代碼行數:21,代碼來源:CameraProcessActivity.java

示例11: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: timestamps the frame, optionally records it,
 * and forwards both color and grayscale versions to the native rPPG
 * pipeline.
 *
 * @param inputFrame the delivered camera frame
 * @return mRgba, the RGBA view of the frame
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    // The timestamp for every video frame originates here.
    time = System.currentTimeMillis();

    // Drop the previous frame's buffers before grabbing new ones.
    mRgba.release();
    mGray.release();

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    // Optionally persist the frame to the video encoder.
    if (VIDEO) {
        encoder.writeFrame(mRgba.dataAddr(), time);
    }

    // Hand the frame to the native rPPG processor.
    rPPG.processFrame(mRgba.getNativeObjAddr(), mGray.getNativeObjAddr(), time);

    return mRgba;
}
 
開發者ID:prouast,項目名稱:heartbeat-android,代碼行數:30,代碼來源:Main.java

示例12: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: runs hand-pose recognition on the frame,
 * triggers the reaction action on a non-empty detection, and — when a
 * pose check was requested — evaluates the pose on the UI thread.
 *
 * @param inputFrame the delivered camera frame
 * @return the recognizer's annotated frame
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	final PoseRecognizer.Result result = mHandDetector.detect(inputFrame.rgba(), true);

	if (result.nonZero) {
		mActionManager.reaction();
	}

	if (mCheckPoseNow) {
		// This callback runs off the UI thread; hop over for the UI work.
		MainActivity.this.runOnUiThread(new Runnable() {
			public void run() {
				mCheckPoseNow = false;
				mConfirmButton.setEnabled(true);
				mActionManager.check(result.pose);
				if (mShowRecognition) {
					Util.toast(MainActivity.this, "ID:" + result.pose + ", Pose: " + PoseRecognizer.getDescription(result.pose));
				}
			}
		});
	}

	return result.frame;
}
 
開發者ID:Aletheios,項目名稱:MIME,代碼行數:24,代碼來源:MainActivity.java

示例13: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: either displays the Fourier magnitude of the
 * grayscale frame (false-colored with the JET map) or applies the
 * Fourier-domain filter in color or grayscale.
 *
 * @param inputFrame the delivered camera frame
 * @return returnMat, the processed frame
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame)
{
    if (dftFlag) {
        // Visualize the FFT magnitude with the JET colormap.
        Imgproc.applyColorMap(fftMagnitude(inputFrame.gray()), returnMat, Imgproc.COLORMAP_JET);
        return returnMat;
    }

    // Filtering mode: pick the color or grayscale filter path.
    returnMat = colorFilterFlag
            ? applyFourierFilter(inputFrame.rgba())
            : applyFourierFilterGray(inputFrame.gray());

    return returnMat;
}
 
開發者ID:zfphil,項目名稱:FourierFilterCam,代碼行數:17,代碼來源:FourierCamActivity.java

示例14: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Per-frame camera callback: keeps a grayscale copy of the frame for the
 * detector, and overlays the last detected corner quadrilateral (circles
 * on the corners, lines along the edges) on the color frame.
 *
 * @param inputFrame the delivered camera frame
 * @return the RGBA frame with the corner overlay (if corners are known)
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame)
{
	last_frame = inputFrame.gray().clone();
	Mat rgba_img = inputFrame.rgba();

	Point[] corners = last_corners;
	if (corners != null) {
		// Mark each detected corner...
		for (Point corner : corners) {
			Core.circle(rgba_img, corner, 10, CIRCLE_COLOR);
		}

		// ...and connect the four corners into a closed quadrilateral.
		for (int i = 0; i < 4; i++) {
			Core.line(rgba_img, corners[i], corners[(i + 1) % 4], LINE_COLOR);
		}
	}

	return rgba_img;
}
 
開發者ID:diedricm,項目名稱:MapEver,代碼行數:20,代碼來源:CornerDetectionCamera.java

示例15: onCameraFrame

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; //導入依賴的package包/類
/**
 * Motion-triggered sound effect. The first frames warm up the camera, then
 * one grayscale frame is captured as the reference; afterwards each frame
 * is diffed against it and a bongo sample is played when the mean
 * difference rises sharply and then falls again (a completed "hit").
 *
 * @param inputFrame the delivered camera frame
 * @return the frame to display: warm-up RGBA, the reference, or the diff
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	// After warm-up, capture the reference frame exactly once.
	if (firstFrame == null && cnt > 5) {
		firstFrame = inputFrame.gray().clone();
		return firstFrame;
	} else if (cnt <= 5) {
		// Warm-up: skip the first frames while the camera settles.
		cnt++;
		return inputFrame.rgba();
	} else {
		// Per-pixel (saturating) difference against the reference frame.
		Core.subtract(inputFrame.gray(), firstFrame, result);
		Scalar newMean = Core.mean(result);
		double newMeanVal = newMean.val[0];
		if (5 < newMeanVal - lastMean) {
			// Mean difference jumped by more than 5: motion entering the scene.
			down=true;
		} else if(newMeanVal < lastMean) {
			// Mean is dropping again after a jump: treat as a completed hit.
			if (down) {
				Utils.PlaySound(R.raw.bongo_1, context);
				down = false;
			}
		}
		Log.d("tamMean","mean " + newMeanVal);
		lastMean = newMeanVal;
		return result;
	}
}
 
開發者ID:devgi,項目名稱:TamTam,代碼行數:25,代碼來源:ImgHandlerSimple.java


注:本文中的org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame類示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。