本文整理汇总了Java中com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay类的典型用法代码示例。如果您正苦于以下问题:Java GraphicOverlay类的具体用法?Java GraphicOverlay怎么用?Java GraphicOverlay使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
GraphicOverlay类属于com.google.android.gms.samples.vision.ocrreader.ui.camera包,在下文中一共展示了GraphicOverlay类的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: OcrGraphic
import com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay; //导入依赖的package包/类
/**
 * Creates a graphic for one detected {@link TextBlock}, attached to the given overlay.
 * The bounding-box and text paints are shared (static) across all instances and are
 * created lazily the first time any OcrGraphic is constructed.
 *
 * @param overlay the overlay this graphic will be drawn on
 * @param text    the detected text block to render
 */
OcrGraphic(GraphicOverlay overlay, TextBlock text) {
    super(overlay);
    mText = text;

    // Build the shared paints in locals first, then publish them to the static fields.
    if (sRectPaint == null) {
        Paint rectPaint = new Paint();
        rectPaint.setColor(TEXT_COLOR);
        rectPaint.setStyle(Paint.Style.STROKE);
        rectPaint.setStrokeWidth(1.0f);
        sRectPaint = rectPaint;
    }
    if (sTextPaint == null) {
        Paint textPaint = new Paint();
        textPaint.setColor(TEXT_COLOR);
        textPaint.setTextSize(13.6f);
        sTextPaint = textPaint;
    }

    // Ask the overlay to redraw now that this graphic has been added.
    postInvalidate();
}
示例2: OcrGraphic
import com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay; //导入依赖的package包/类
/**
 * Creates a graphic for one detected {@link TextBlock}, attached to the given overlay.
 * The bounding-box and text paints are shared (static) across all instances and are
 * created lazily the first time any OcrGraphic is constructed.
 *
 * @param overlay the overlay this graphic will be drawn on
 * @param text    the detected text block to render
 */
OcrGraphic(GraphicOverlay overlay, TextBlock text) {
    super(overlay);
    mText = text;

    // Build the shared paints in locals first, then publish them to the static fields.
    if (sRectPaint == null) {
        Paint rectPaint = new Paint();
        rectPaint.setColor(TEXT_COLOR);
        rectPaint.setStyle(Paint.Style.STROKE);
        rectPaint.setStrokeWidth(4.0f);
        sRectPaint = rectPaint;
    }
    if (sTextPaint == null) {
        Paint textPaint = new Paint();
        textPaint.setColor(TEXT_COLOR);
        textPaint.setTextSize(54.0f);
        sTextPaint = textPaint;
    }

    // Ask the overlay to redraw now that this graphic has been added.
    postInvalidate();
}
示例3: onCreate
import com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay; //导入依赖的package包/类
/**
 * Initializes the UI (toolbar, camera preview, graphic overlay), wires up the gesture
 * detectors, and creates the detector pipeline if the camera permission is held.
 *
 * @param icicle previously saved instance state, or null on first launch
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mToolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(mToolbar);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Launch parameters supplied by the starting intent; both default to false.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Start the camera only when the CAMERA permission is already granted;
    // otherwise prompt the user for it first.
    int permissionState = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    boolean cameraAllowed = permissionState == PackageManager.PERMISSION_GRANTED;
    if (cameraAllowed) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    // Brief usage hint anchored to the overlay view.
    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
            Snackbar.LENGTH_LONG)
            .show();
}
示例4: onCreate
import com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay; //导入依赖的package包/类
/**
 * Initializes the UI (camera preview and graphic overlay), wires up the gesture
 * detectors, and creates the detector pipeline if the camera permission is held.
 *
 * @param icicle previously saved instance state, or null on first launch
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Launch parameters supplied by the starting intent; both default to false.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // Start the camera only when the CAMERA permission is already granted;
    // otherwise prompt the user for it first.
    int permissionState = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    boolean cameraAllowed = permissionState == PackageManager.PERMISSION_GRANTED;
    if (cameraAllowed) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    // Brief usage hint anchored to the overlay view.
    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
            Snackbar.LENGTH_LONG)
            .show();
}
示例5: onCreate
import com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay; //导入依赖的package包/类
/**
 * Initializes the UI (camera preview and graphic overlay), wires up the gesture
 * detectors, and creates the detector pipeline if the camera permission is held.
 * Capture settings use fixed defaults suited to reading text.
 *
 * @param bundle previously saved instance state, or null on first launch
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Sensible fixed defaults for text capture: focus automatically, no flash.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Start the camera only when the CAMERA permission is already granted;
    // otherwise prompt the user for it first.
    int permissionState = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    boolean cameraAllowed = permissionState == PackageManager.PERMISSION_GRANTED;
    if (cameraAllowed) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    // Brief usage hint anchored to the overlay view.
    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom",
            Snackbar.LENGTH_LONG)
            .show();

    // TODO: Set up the Text To Speech engine.
}
示例6: OcrDetectorProcessor
import com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay; //导入依赖的package包/类
/**
 * Creates a processor that renders detection results onto the supplied overlay.
 *
 * @param overlay the overlay drawn on top of the camera preview
 */
OcrDetectorProcessor(GraphicOverlay<OcrGraphic> overlay) {
    mGraphicOverlay = overlay;
}
示例7: onCreate
import com.google.android.gms.samples.vision.ocrreader.ui.camera.GraphicOverlay; //导入依赖的package包/类
/**
 * Initializes the UI (camera preview and graphic overlay), wires up the gesture
 * detectors, creates the detector pipeline if the camera permission is held, and
 * starts the Text-To-Speech engine used to read detected text aloud.
 *
 * @param bundle previously saved instance state, or null on first launch
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Sensible fixed defaults for text capture: focus automatically, no flash.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Start the camera only when the CAMERA permission is already granted;
    // otherwise prompt the user for it first.
    int permissionState = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    boolean cameraAllowed = permissionState == PackageManager.PERMISSION_GRANTED;
    if (cameraAllowed) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    // Brief usage hint anchored to the overlay view.
    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom",
            Snackbar.LENGTH_LONG)
            .show();

    // Start the Text-To-Speech engine; the listener fires asynchronously once
    // initialization finishes, at which point the language is configured.
    tts = new TextToSpeech(this.getApplicationContext(), new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status != TextToSpeech.SUCCESS) {
                Log.d("OnInitListener", "Error starting the text to speech engine.");
                return;
            }
            Log.d("OnInitListener", "Text to speech engine started successfully.");
            tts.setLanguage(Locale.US);
        }
    });
}