This article collects typical usage examples of the Java class org.opencv.android.CameraBridgeViewBase. If you are unsure what CameraBridgeViewBase does or how to use it, the curated examples below may help.
The CameraBridgeViewBase class belongs to the org.opencv.android package. Fifteen code examples of the class are shown below, sorted by popularity by default.
Example 1: onCreate
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_face_detection_opencv);
    openCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_surface_view);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
            && ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
        requestPermissions(new String[]{Manifest.permission.CAMERA}, PERMISSIONS_REQUEST_CAMERA);
    } else {
        initPresenter();
        presenter.setCamera(openCvCameraView);
    }
}
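Example 1 requests the CAMERA permission but does not include the result callback. A minimal sketch of how that callback might look, reusing the PERMISSIONS_REQUEST_CAMERA constant and the presenter fields from the example above (the callback body itself is an assumption, not part of the original source):

@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    // Hypothetical handler: mirrors the else-branch of Example 1 once the user grants access.
    if (requestCode == PERMISSIONS_REQUEST_CAMERA) {
        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            initPresenter();
            presenter.setCamera(openCvCameraView);
        } else {
            finish(); // the preview cannot work without camera access
        }
    }
}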
Example 2: onClick
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
public void onClick(View v) {
    switch (v.getId()) {
        case R.id.switch_camera:
            cameraView.disableView();
            if (isFrontCamera) {
                cameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
                isFrontCamera = false;
            } else {
                cameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
                isFrontCamera = true;
            }
            cameraView.enableView();
            break;
        default:
    }
}
Example 3: onCreate
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_main);
    if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        Log.i("permission", "request CAMERA");
        ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
    } else {
        Log.i("permission", "CAMERA already granted");
        camera_granted = true;
    }
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.main_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example 4: onCreate
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
protected void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_main);
    if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        Log.i("permission", "request CAMERA");
        ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
    } else {
        Log.i("permission", "CAMERA already granted");
        camera_granted = true;
    }
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.java_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
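Examples 3 and 4 only wire up the CameraBridgeViewBase in onCreate; in the standard OpenCV Android samples the preview is started once the native library has finished loading. A minimal sketch of that companion lifecycle code, assuming the same mOpenCvCameraView field (this onResume/BaseLoaderCallback pair is not shown in the original examples):

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        if (status == LoaderCallbackInterface.SUCCESS) {
            // Native OpenCV is ready; start delivering camera frames.
            mOpenCvCameraView.enableView();
        } else {
            super.onManagerConnected(status);
        }
    }
};

@Override
public void onResume() {
    super.onResume();
    if (OpenCVLoader.initDebug()) {
        // OpenCV is bundled with the APK.
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    } else {
        // Fall back to the OpenCV Manager service.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
    }
}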
Example 5: onCameraFrame
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Mirror the input frame when using the front camera
    Mat mGray = inputFrame.gray();
    mRgba = inputFrame.rgba();
    if (mIsFrontCamera) {
        Core.flip(mRgba, mRgba, 1);
        Core.flip(mGray, mGray, 1);
    }
    // Detect faces in the frame
    MatOfRect faces = new MatOfRect();
    if (haarCascade != null) {
        haarCascade.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(200, 200), new Size());
    }
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), new Scalar(100), 3);
    }
    return mRgba;
}
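The haarCascade used above is an org.opencv.objdetect.CascadeClassifier that must be loaded before frames start arriving. One common way to do this is sketched below, under the assumption that the cascade XML is bundled in res/raw (the resource name and the loadCascade() helper are hypothetical, not taken from the example):

import org.opencv.objdetect.CascadeClassifier;

private CascadeClassifier loadCascade() {
    try {
        // Copy the bundled cascade file to app storage so the native code can read it from a path.
        InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface); // hypothetical resource
        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
        File cascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
        FileOutputStream os = new FileOutputStream(cascadeFile);
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();

        CascadeClassifier cascade = new CascadeClassifier(cascadeFile.getAbsolutePath());
        return cascade.empty() ? null : cascade;
    } catch (IOException e) {
        Log.e(TAG, "Failed to load cascade", e);
        return null;
    }
}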
Example 6: onCreate
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_my);
    // get the OverlayView responsible for displaying images on top of the camera
    overlayView = (OverlayView) findViewById(R.id.overlay_view);
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.java_camera_view);
    // Michael Troger
    if (FIXED_FRAME_SIZE) {
        mOpenCvCameraView.setMaxFrameSize(FRAME_SIZE_WIDTH, FRAME_SIZE_HEIGHT);
    }
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
    mi = new ActivityManager.MemoryInfo();
    activityManager = (ActivityManager) getSystemService(ACTIVITY_SERVICE);
}
Example 7: onCameraFrame
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    if (absoluteFaceSize == 0) {
        int height = mGray.rows();
        float relativeFaceSize = 0.2f;
        if (Math.round(height * relativeFaceSize) > 0) {
            absoluteFaceSize = Math.round(height * relativeFaceSize);
        }
        nativeDetector.setMinFaceSize(absoluteFaceSize);
    }
    MatOfRect faces = new MatOfRect();
    MatOfRect facesFliped = new MatOfRect();
    return getMat(mRgba, mGray, faces, facesFliped);
}
Example 8: onCreate
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_main);
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
    // Michael Troger
    if (FIXED_FRAME_SIZE) {
        mOpenCvCameraView.setMaxFrameSize(FRAME_SIZE_WIDTH, FRAME_SIZE_HEIGHT);
    }
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example 9: onCreate
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.image_manipulations_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
    init();
    if (null == savedInstanceState) {
        mNavItemId = R.id.rgb;
    } else {
        mNavItemId = savedInstanceState.getInt(NAV_ITEM_ID);
    }
    drawerLayoutSetup();
}
Example 10: render
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
public Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat undistortedFrame = new Mat(inputFrame.rgba().size(), inputFrame.rgba().type());
    Imgproc.undistort(inputFrame.rgba(), undistortedFrame,
            mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());

    Mat comparisonFrame = inputFrame.rgba();
    undistortedFrame.colRange(new Range(0, mWidth / 2)).copyTo(comparisonFrame.colRange(new Range(mWidth / 2, mWidth)));
    List<MatOfPoint> border = new ArrayList<MatOfPoint>();
    final int shift = (int) (mWidth * 0.005);
    border.add(new MatOfPoint(new Point(mWidth / 2 - shift, 0), new Point(mWidth / 2 + shift, 0),
            new Point(mWidth / 2 + shift, mHeight), new Point(mWidth / 2 - shift, mHeight)));

    Imgproc.fillPoly(comparisonFrame, border, new Scalar(255, 255, 255));
    Imgproc.putText(comparisonFrame, mResources.getString(R.string.original), new Point(mWidth * 0.1, mHeight * 0.1),
            Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
    Imgproc.putText(comparisonFrame, mResources.getString(R.string.undistorted), new Point(mWidth * 0.6, mHeight * 0.1),
            Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
    return comparisonFrame;
}
Example 11: changeCamera
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

private void changeCamera() {
    try {
        mOpenCvCameraView.disableView();
        if (usingFront) {
            mOpenCvCameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
            mItemCameraId.setTitle("Back");
        } else {
            mOpenCvCameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
            mItemCameraId.setTitle("Front");
        }
        usingFront = !usingFront;
        mOpenCvCameraView.enableView();
        //onResume();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 12: onCameraFrame
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mInputMat = inputFrame.rgba();
    if (mCapturing || mStarting) {
        mOutputMat.setTo(new Scalar(0));
        mCapturing = false;
        mStarting = false;
        return mOutputMat;
    }
    Filter currentFilter = mFilterManager.getCurrentFilter();
    if (currentFilter != null) {
        if (mFilterManager.getFilterScaleFactor() != currentFilter.getDefaultScaleFactor()) {
            mFilterManager.setFilterScaleFactor(currentFilter.getDefaultScaleFactor());
        }
        currentFilter.process(mInputMat, mOutputMat);
        return mOutputMat;
    }
    return mInputMat;
}
Example 13: calculateCameraFrameSize
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

private Size calculateCameraFrameSize(List<?> supportedSizes, CameraBridgeViewBase.ListItemAccessor accessor, int surfaceHeight, int surfaceWidth) {
    int calcWidth = 0;
    int calcHeight = 0;
    for (Object size : supportedSizes) {
        int width = accessor.getWidth(size);
        int height = accessor.getHeight(size);
        if (width <= surfaceWidth && height <= surfaceHeight) {
            if (width >= calcWidth && height >= calcHeight) {
                calcWidth = width;
                calcHeight = height;
            }
        }
    }
    return new Size(calcWidth, calcHeight);
}
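The ListItemAccessor parameter lets this method work with size lists from different camera APIs. A minimal sketch of an accessor for the legacy android.hardware.Camera.Size type is shown below (this particular class is an illustration; OpenCV's JavaCameraView ships an equivalent JavaCameraSizeAccessor):

import android.hardware.Camera;
import org.opencv.android.CameraBridgeViewBase.ListItemAccessor;

// Illustrative accessor for android.hardware.Camera.Size entries.
public static class CameraSizeAccessor implements ListItemAccessor {
    @Override
    public int getWidth(Object obj) {
        return ((Camera.Size) obj).width;
    }

    @Override
    public int getHeight(Object obj) {
        return ((Camera.Size) obj).height;
    }
}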
Example 14: onCameraFrame
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getProcessedImage(img, PreProcessorFactory.PreprocessingMode.RECOGNITION);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }

    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || images.size() != faces.length) {
        // skip this frame
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], rec.recognize(images.get(i), ""), front_camera);
        }
        return imgRgba;
    }
}
Developer: Qualeams, Project: Android-Face-Recognition-with-Deep-Learning-Test-Framework, Lines: 23, Source: RecognitionActivity.java
Example 15: onCreate
import org.opencv.android.CameraBridgeViewBase; // import the required package/class

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_detection);
    mDetectionView = (CustomCameraView) findViewById(R.id.DetectionView);

    // Use the camera selected in the settings
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    front_camera = sharedPref.getBoolean("key_front_camera", true);
    night_portrait = sharedPref.getBoolean("key_night_portrait", false);
    exposure_compensation = Integer.valueOf(sharedPref.getString("key_exposure_compensation", "20"));

    if (front_camera) {
        mDetectionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
    } else {
        mDetectionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
    }
    mDetectionView.setVisibility(SurfaceView.VISIBLE);
    mDetectionView.setCvCameraViewListener(this);

    int maxCameraViewWidth = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_width", "640"));
    int maxCameraViewHeight = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_height", "480"));
    mDetectionView.setMaxFrameSize(maxCameraViewWidth, maxCameraViewHeight);
}
Developer: Qualeams, Project: Android-Face-Recognition-with-Deep-Learning-Test-Framework, Lines: 26, Source: DetectionActivity.java