This page collects typical usage examples of the Java class com.google.android.gms.vision.CameraSource. If you are unsure what the CameraSource class is for, how to use it, or simply want to see it in real code, the curated class examples below may help.
The CameraSource class belongs to the com.google.android.gms.vision package. A total of 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
Example 1: creteCameraTracker
import com.google.android.gms.vision.CameraSource; // import the required package/class
/**
 * Create the face detector and camera source.
 */
private void creteCameraTracker() {
    mDetector = new FaceDetector.Builder(mActivity)
            .setTrackingEnabled(false)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();

    mDetector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());

    if (!mDetector.isOperational()) {
        mUserAwareVideoView.onErrorOccurred();
        Log.e("Start Tracking", "Face tracker is not operational.");
    }

    mCameraSource = new CameraSource.Builder(mActivity, mDetector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_FRONT)
            .setRequestedFps(30.0f)
            .build();
}
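Several of these examples hand a GraphicFaceTrackerFactory to MultiProcessor.Builder without ever showing it. A minimal sketch of such a factory (the class body below is an assumption for illustration, not code from the example above) could look like this:

import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.Tracker;
import com.google.android.gms.vision.face.Face;

// Hypothetical factory: creates one Tracker per detected face.
class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
    @Override
    public Tracker<Face> create(Face face) {
        return new Tracker<Face>() {
            @Override
            public void onNewItem(int id, Face face) {
                // A new face has entered the frame.
            }

            @Override
            public void onUpdate(Detector.Detections<Face> detections, Face face) {
                // The face position/landmarks were updated for this frame.
            }

            @Override
            public void onMissing(Detector.Detections<Face> detections) {
                // The face was not detected in this frame.
            }

            @Override
            public void onDone() {
                // The face is gone; release any per-face resources here.
            }
        };
    }
}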
Example 2: onTouchEvent
import com.google.android.gms.vision.CameraSource; // import the required package/class
@Override
public boolean onTouchEvent(MotionEvent event) {
    if (event.getAction() == MotionEvent.ACTION_DOWN) {
        Log.d("onTouch", "OnTouch");
        cameraSource.takePicture(new CameraSource.ShutterCallback() {
            @Override
            public void onShutter() {
            }
        }, new CameraSource.PictureCallback() {
            @Override
            public void onPictureTaken(byte[] bytes) {
            }
        });
    }
    return false;
}
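Both callbacks in this example are left empty. If you want to do something with the captured frame, a minimal sketch (the bitmap handling is an assumption, not part of the original example) is to decode the JPEG bytes inside onPictureTaken using android.graphics.BitmapFactory:

// Hypothetical sketch: decode the captured JPEG bytes into a Bitmap.
cameraSource.takePicture(null, new CameraSource.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] bytes) {
        // bytes hold compressed JPEG data; decode off the UI thread in production code.
        Bitmap picture = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        if (picture != null) {
            // use the bitmap (display it, save it, hand it to a detector, ...)
        }
    }
});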
Example 3: createCameraSource
import com.google.android.gms.vision.CameraSource; // import the required package/class
private void createCameraSource() {
    Context context = getApplicationContext();
    FaceDetector detector = createFaceDetector(context);
    detector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build());

    if (!detector.isOperational()) {
        Timber.d("Face detector dependencies are not yet available.");
    }

    cameraSource = new CameraSource.Builder(context, detector) //
            // Camera will decide actual size, and we'll crop accordingly in layout.
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_FRONT)
            .setRequestedFps(MAX_FRAME_RATE)
            .setAutoFocusEnabled(true)
            .build();
}
Example 4: start
import com.google.android.gms.vision.CameraSource; // import the required package/class
@RequiresPermission(Manifest.permission.CAMERA)
public void start(CameraSource cameraSource) throws IOException, SecurityException {
    if (cameraSource == null) {
        stop();
    }

    mCameraSource = cameraSource;

    if (mCameraSource != null) {
        mStartRequested = true;
        startIfReady();
        requestLayout();
        invalidate();
    }
}
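startIfReady() is not shown in this example. In the stock CameraSourcePreview from Google's vision samples it starts the source only once a start has been requested and the SurfaceView's surface exists; a sketch along those lines (the field names are assumptions) is:

// Hypothetical helper: start the camera source only when the surface is ready.
@RequiresPermission(Manifest.permission.CAMERA)
private void startIfReady() throws IOException, SecurityException {
    if (mStartRequested && mSurfaceAvailable) {
        mCameraSource.start(mSurfaceView.getHolder());
        mStartRequested = false;
    }
}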
Example 5: startCameraView
import com.google.android.gms.vision.CameraSource; // import the required package/class
private void startCameraView(Context context, CameraSource cameraSource,
                             SurfaceView surfaceView) {
    if (cameraRunning) {
        throw new IllegalStateException("Camera already started!");
    }
    try {
        if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            Log.e(LOGTAG, "Permission not granted!");
        } else if (!cameraRunning && cameraSource != null && surfaceView != null) {
            cameraSource.start(surfaceView.getHolder());
            cameraRunning = true;
        }
    } catch (IOException ie) {
        Log.e(LOGTAG, ie.getMessage());
        ie.printStackTrace();
    }
}
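A matching teardown method is not part of this example. A minimal sketch (the method and field names are assumptions) that mirrors the flag handling above could be:

// Hypothetical companion: stop the camera and reset the running flag.
private void stopCameraView() {
    if (cameraRunning && cameraSource != null) {
        cameraSource.stop();   // stops the preview; the source can be started again later
        cameraRunning = false;
    }
}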
Example 6: takeShot
import com.google.android.gms.vision.CameraSource; // import the required package/class
private void takeShot() {
    mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] bytes) {
            mPreview.stop();
            File shot = null;
            try {
                shot = FileUtils.saveImage(bytes, "jpg");
            } catch (IOException e) {
                e.printStackTrace();
                return;
            }
            Intent shareIntent = new Intent(Intent.ACTION_SEND);
            shareIntent.setType("image/jpeg");
            shareIntent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(shot));
            startActivity(Intent.createChooser(shareIntent, ""));
        }
    });
}
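Note that sharing a Uri.fromFile() URI to another app throws a FileUriExposedException on API 24 and above. A hedged alternative (the activity reference and the provider authority string are assumptions, and a matching provider entry is required in the manifest) is to hand out a content URI via FileProvider:

// Hypothetical sketch for API 24+: share a content:// URI instead of a file:// URI.
Uri contentUri = FileProvider.getUriForFile(
        activity, activity.getPackageName() + ".fileprovider", shot);
Intent shareIntent = new Intent(Intent.ACTION_SEND)
        .setType("image/jpeg")
        .putExtra(Intent.EXTRA_STREAM, contentUri)
        .addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
activity.startActivity(Intent.createChooser(shareIntent, "Share photo"));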
Example 7: createCameraSource
import com.google.android.gms.vision.CameraSource; // import the required package/class
/**
 * Creates the face detector and the camera.
 */
private void createCameraSource() {
    Context context = getApplicationContext();
    FaceDetector detector = createFaceDetector(context);

    int facing = CameraSource.CAMERA_FACING_FRONT;
    if (!mIsFrontFacing) {
        facing = CameraSource.CAMERA_FACING_BACK;
    }

    // The camera source is initialized to use either the front or rear facing camera. We use a
    // relatively low resolution for the camera preview, since this is sufficient for this app
    // and the face detector will run faster at lower camera resolutions.
    //
    // However, note that there is a speed/accuracy trade-off with respect to choosing the
    // camera resolution. The face detector will run faster with lower camera resolutions,
    // but may miss smaller faces, landmarks, or may not correctly detect eyes open/closed in
    // comparison to using higher camera resolutions. If you have any of these issues, you may
    // want to increase the resolution.
    mCameraSource = new CameraSource.Builder(context, detector)
            .setFacing(facing)
            .setRequestedPreviewSize(320, 240)
            .setRequestedFps(60.0f)
            .setAutoFocusEnabled(true)
            .build();
}
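createFaceDetector(context) is referenced here (and in Example 3) but not shown. A minimal sketch of such a helper, with settings a landmark-tracking app typically uses (the specific options below are assumptions, not taken from this example), might be:

// Hypothetical helper: build a FaceDetector tuned for fast landmark tracking.
private FaceDetector createFaceDetector(Context context) {
    return new FaceDetector.Builder(context)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setTrackingEnabled(true)
            .setMode(FaceDetector.FAST_MODE)
            .setProminentFaceOnly(mIsFrontFacing)  // assumption: only track the largest face on the selfie camera
            .build();
}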
Example 8: takePhotoAndGoToNextActivity
import com.google.android.gms.vision.CameraSource; // import the required package/class
private void takePhotoAndGoToNextActivity() {
    mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] bytes) {
            faceAndPoemService.setFace(mFace);
            if (faceAndPoemService.doesFaceHaveTwoEyes()) {
                Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                faceAndPoemService.setFacePicture(bitmap);
                Intent intent = new Intent(FaceCaptureActivity.this, FindGesichtGedichtActivity.class);
                startActivity(intent);
            } else {
                Toast.makeText(FaceCaptureActivity.this, "both eyes need to be visible", Toast.LENGTH_SHORT).show();
            }
        }
    });
}
Example 9: translateX
import com.google.android.gms.vision.CameraSource; // import the required package/class
/**
 * Adjusts the x coordinate from the preview's coordinate system to the view coordinate
 * system.
 */
public float translateX(float x) {
    if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
        return mOverlay.getWidth() - scaleX(x);
    } else {
        return scaleX(x);
    }
}
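scaleX() is assumed by this and the later translateX variants (Examples 12 and 14). In the stock GraphicOverlay from Google's vision samples it multiplies a preview coordinate by the ratio of overlay size to preview frame size; a sketch along those lines (the field name is an assumption) is:

// Hypothetical helper: scale a horizontal preview coordinate to the overlay's pixel space.
public float scaleX(float horizontal) {
    // mWidthScaleFactor would be overlayWidth / previewFrameWidth, recomputed whenever either changes.
    return horizontal * mWidthScaleFactor;
}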
Example 10: start
import com.google.android.gms.vision.CameraSource; // import the required package/class
public void start(CameraSource cameraSource) throws IOException {
    if (cameraSource == null) {
        stop();
    }

    mCameraSource = cameraSource;

    if (mCameraSource != null) {
        mStartRequested = true;
        startIfReady();
    }
}
Example 11: createCameraSource
import com.google.android.gms.vision.CameraSource; // import the required package/class
/**
 * Creates the face detector (accurate mode, all landmarks and classifications) and the
 * camera source that feeds it.
 */
private void createCameraSource() {
    Context context = getApplicationContext();
    FaceDetector detector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.ACCURATE_MODE)
            .build();
    detector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());

    if (!detector.isOperational()) {
        // Note: The first time that an app using the face API is installed on a device, GMS will
        // download a native library to the device in order to do detection. Usually this
        // completes before the app is run for the first time. But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available. The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");
    }

    mCameraSource = new CameraSource.Builder(context, detector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_FRONT)
            .setRequestedFps(30.0f)
            .build();
}
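The isOperational() note above mentions that the native library may still be downloading. Google's official face samples additionally check whether that download could be blocked by low device storage; a sketch of that check (it would sit inside the !detector.isOperational() branch of an Activity, and the variable names are assumptions) is:

// Hypothetical extra check: the native library download can be blocked by low storage.
IntentFilter lowStorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
boolean hasLowStorage = registerReceiver(null, lowStorageFilter) != null;
if (hasLowStorage) {
    Toast.makeText(this, "Face detector dependencies cannot be downloaded due to low device storage",
            Toast.LENGTH_LONG).show();
}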
Example 12: translateX
import com.google.android.gms.vision.CameraSource; // import the required package/class
/**
 * Adjusts the x coordinate from the preview's coordinate system to the view coordinate
 * system.
 */
public float translateX(float x) {
    if (GraphicOverlay.sFacing == CameraSource.CAMERA_FACING_FRONT) {
        return getWidth() - scaleX(x);
    } else {
        return scaleX(x);
    }
}
Example 13: start
import com.google.android.gms.vision.CameraSource; // import the required package/class
public void start(CameraSource cameraSource, GraphicOverlay overlay, float requiredRatio) {
    this.requiredRatio = requiredRatio;
    this.overlay = overlay;
    this.cameraSource = cameraSource;
    startRequested = true;
    startIfReady();
}
Example 14: translateX
import com.google.android.gms.vision.CameraSource; // import the required package/class
@Override
public float translateX(float x) {
    if (facing == CameraSource.CAMERA_FACING_FRONT) {
        return getWidth() - scaleHorizontal(x);
    } else {
        return scaleHorizontal(x);
    }
}
Example 15: createCameraSource
import com.google.android.gms.vision.CameraSource; // import the required package/class
/**
 * Creates the face detector (classification only) and the camera source using the
 * rear-facing camera.
 */
private void createCameraSource() {
    Context context = getApplicationContext();
    FaceDetector detector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();
    detector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());

    if (!detector.isOperational()) {
        // Note: The first time that an app using the face API is installed on a device, GMS will
        // download a native library to the device in order to do detection. Usually this
        // completes before the app is run for the first time. But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available. The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");
    }

    mCameraSource = new CameraSource.Builder(context, detector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_BACK)
            .setRequestedFps(30.0f)
            .build();
}
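None of the examples above shows releasing the camera source. A minimal lifecycle sketch (the activity callbacks are an assumption; the mCameraSource field is taken to exist as in the example above) that the official samples pair with these builders is:

// Hypothetical lifecycle handling: stop the preview when backgrounded, free the camera on destroy.
@Override
protected void onPause() {
    super.onPause();
    if (mCameraSource != null) {
        mCameraSource.stop();      // releases the camera but keeps the source reusable
    }
}

@Override
protected void onDestroy() {
    super.onDestroy();
    if (mCameraSource != null) {
        mCameraSource.release();   // also releases the underlying detector resources
    }
}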