本文整理汇总了Java中com.google.android.gms.vision.text.TextBlock类的典型用法代码示例。如果您正苦于以下问题:Java TextBlock类的具体用法?Java TextBlock怎么用?Java TextBlock使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
TextBlock类属于com.google.android.gms.vision.text包,在下文中一共展示了TextBlock类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: detectText
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Runs on-device OCR over the bundled sample image (R.drawable.cat) and shows
 * every detected text block in {@code detectedTextView}.
 *
 * @param view the view that triggered this handler (unused; required by the
 *             android:onClick signature)
 */
public void detectText(View view) {
    Bitmap textBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.cat);
    TextRecognizer textRecognizer = new TextRecognizer.Builder(this).build();
    if (!textRecognizer.isOperational()) {
        // Native OCR dependencies not yet downloaded / unavailable on this device.
        new AlertDialog.Builder(this)
                .setMessage("Text recognizer could not be set up on your device :(")
                .show();
        return;
    }
    try {
        Frame frame = new Frame.Builder().setBitmap(textBitmap).build();
        SparseArray<TextBlock> text = textRecognizer.detect(frame);
        // Bug fix: the original called setText() inside the loop, overwriting the
        // view on every iteration so only the LAST block was ever visible.
        // Accumulate all blocks and set the text once.
        StringBuilder detected = new StringBuilder();
        for (int i = 0; i < text.size(); ++i) {
            TextBlock item = text.valueAt(i);
            if (item != null && item.getValue() != null) {
                if (detected.length() > 0) {
                    detected.append('\n');
                }
                detected.append(item.getValue());
            }
        }
        if (detected.length() > 0) {
            detectedTextView.setText(detected.toString());
        }
    } finally {
        // Bug fix: the recognizer holds native resources; the original never
        // released it, leaking them on every invocation.
        textRecognizer.release();
    }
}
示例2: detectTextBlocks
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Detects text blocks in the image associated with this item.
 * Returns an empty list when the bitmap is unavailable or the recognizer's
 * native dependencies have not been downloaded yet. The recognizer is
 * released on every exit path.
 */
List<TextBlock> detectTextBlocks(UQI uqi) {
    List<TextBlock> detected = new ArrayList<>();
    Bitmap source = this.getBitmap(uqi);
    if (source == null) {
        return detected;
    }
    TextRecognizer recognizer = new TextRecognizer.Builder(uqi.getContext()).build();
    if (!recognizer.isOperational()) {
        Logging.warn("TextRecognizer is not operational");
        recognizer.release();
        return detected;
    }
    Frame frame = new Frame.Builder().setBitmap(source).build();
    SparseArray<TextBlock> blocks = recognizer.detect(frame);
    for (int index = 0; index < blocks.size(); index++) {
        detected.add(blocks.get(blocks.keyAt(index)));
    }
    recognizer.release();
    return detected;
}
示例3: receiveDetections
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Called by the detector to deliver detection results.
 * Clears the overlay, then inspects each detected block: a value that looks
 * like a capitalized word of letters/hyphens (not ending in '-') is treated
 * as a name and handed to DownloadIntentService on a background thread;
 * everything else is just logged.
 */
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
    mGraphicOverlay.clear();
    SparseArray<TextBlock> detected = detections.getDetectedItems();
    for (int index = 0; index < detected.size(); ++index) {
        TextBlock block = detected.valueAt(index);
        if (block == null || block.getValue() == null) {
            continue;
        }
        final String wort = block.getValue();
        // Heuristic for a proper name: length > 1, leading uppercase letter,
        // only ASCII letters/hyphens, and not hyphen-terminated.
        boolean looksLikeName = wort.length() > 1
                && Character.isUpperCase(wort.charAt(0))
                && wort.matches("[A-Za-z-]+")
                && wort.charAt(wort.length() - 1) != '-';
        if (looksLikeName) {
            Log.d("OCR", "Name detected: " + wort);
            // Show AccountDetailActivity; run off the detector thread.
            new Thread(new Runnable() {
                @Override
                public void run() {
                    DownloadIntentService.startService(context, OcrCaptureActivity.DOWNLOAD_REQUEST_CODE, wort);
                }
            }).start();
        } else {
            Log.d("OcrDetectorProcessor", "Text detected! " + block.getValue());
        }
    }
}
示例4: onTap
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Captures the first TextBlock under the tap location and returns its text
 * to the initiating activity.
 *
 * @param rawX the raw x position of the tap
 * @param rawY the raw y position of the tap
 * @return true if the activity is ending
 */
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    if (graphic == null) {
        Log.d(TAG,"no text detected");
        return false;
    }
    TextBlock text = graphic.getTextBlock();
    if (text == null || text.getValue() == null) {
        Log.d(TAG, "text data is null");
        // Preserve original semantics: a block with a null value still counts.
        return text != null;
    }
    Intent data = new Intent();
    data.putExtra(TextBlockObject, text.getValue());
    setResult(CommonStatusCodes.SUCCESS, data);
    finish();
    return true;
}
示例5: filterInvalidDetections
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Filters the raw detections down to line-level components whose text is
 * non-empty and food related; all other sub-components are logged and dropped.
 */
private List<Text> filterInvalidDetections(Detector.Detections<TextBlock> items) {
    List<Text> lines = new ArrayList<>();
    SparseArray<TextBlock> blocks = items.getDetectedItems();
    for (int index = 0; index < blocks.size(); ++index) {
        // Only the immediate sub-components are inspected; deeper nesting is ignored.
        for (Text component : blocks.valueAt(index).getComponents()) {
            String value = component.getValue();
            boolean keep = component instanceof Line
                    && value != null
                    && !value.isEmpty()
                    && isFoodRelated(value);
            if (keep) {
                lines.add(component);
            } else {
                Log.d(TAG, "filterInvalidDetections: sub-component is not a Line, should we go deeper?");
            }
        }
    }
    return lines;
}
示例6: onTap
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * onTap is called to display the tapped TextBlock, if any, in the text view
 * with its first letter capitalized and the rest lowercased.
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the tap was on a TextBlock
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    TextBlock text = null;
    if (graphic != null) {
        text = graphic.getTextBlock();
        if (text != null && text.getValue() != null) {
            Log.d(TAG, "text data is being displayed! " + text.getValue());
            //Show the string.
            String capturedText = text.getValue();
            // Bug fix: substring(0, 1) threw StringIndexOutOfBoundsException
            // when the recognizer returned an empty string; guard first.
            if (!capturedText.isEmpty()) {
                capturedText = capturedText.substring(0, 1).toUpperCase()
                        + capturedText.substring(1).toLowerCase();
            }
            textView.setText(capturedText);
        }
        else {
            Log.d(TAG, "text data is null");
        }
    }
    else {
        Log.d(TAG,"no text detected");
    }
    return text != null;
}
示例7: receiveDetections
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Called by the detector to deliver detection results.
 * Scans each detected block's text for electoral-institute markers and tags
 * the document identifier with the matching type (IFEB for the federal
 * institute, IFEE for the national one).
 */
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
    mGraphicOverlay.clear();
    SparseArray<TextBlock> detected = detections.getDetectedItems();
    for (int index = 0; index < detected.size(); ++index) {
        TextBlock block = detected.valueAt(index);
        if (block == null || block.getValue() == null) {
            continue;
        }
        String value = block.getValue();
        if (value.contains("INSTITUTO FEDERAL ELECTORAL")) {
            documentIdentifier.setType(Constants.IFEB);
        } else if (value.contains("INSTITUTO NACIONAL ELECTORAL")) {
            Log.d("OcrDetectorProcessor", "INE E " + value);
            documentIdentifier.setType(Constants.IFEE);
        }
    }
}
示例8: receiveDetections
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Called by the detector to deliver detection results.
 * Wipes the overlay and draws one OcrGraphic for every detected text block.
 */
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
    mGraphicOverlay.clear();
    SparseArray<TextBlock> detected = detections.getDetectedItems();
    for (int index = 0; index < detected.size(); ++index) {
        mGraphicOverlay.add(new OcrGraphic(mGraphicOverlay, detected.valueAt(index)));
    }
}
示例9: OcrGraphic
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Wraps a detected TextBlock for drawing on the overlay.
 * The two paints are shared across all instances and initialized lazily
 * on first construction.
 */
OcrGraphic(GraphicOverlay overlay, TextBlock text) {
    super(overlay);
    mText = text;
    if (sRectPaint == null) {
        Paint rectPaint = new Paint();
        rectPaint.setColor(TEXT_COLOR);
        rectPaint.setStyle(Paint.Style.STROKE);
        rectPaint.setStrokeWidth(1.0f);
        sRectPaint = rectPaint;
    }
    if (sTextPaint == null) {
        Paint textPaint = new Paint();
        textPaint.setColor(TEXT_COLOR);
        textPaint.setTextSize(13.6f);
        sTextPaint = textPaint;
    }
    // Redraw the overlay so the newly added graphic becomes visible.
    postInvalidate();
}
示例10: OcrGraphic
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Wraps a detected TextBlock for drawing on the overlay.
 * The two paints are shared across all instances and initialized lazily
 * on first construction.
 */
OcrGraphic(GraphicOverlay overlay, TextBlock text) {
    super(overlay);
    mText = text;
    if (sRectPaint == null) {
        Paint rectPaint = new Paint();
        rectPaint.setColor(TEXT_COLOR);
        rectPaint.setStyle(Paint.Style.STROKE);
        rectPaint.setStrokeWidth(4.0f);
        sRectPaint = rectPaint;
    }
    if (sTextPaint == null) {
        Paint textPaint = new Paint();
        textPaint.setColor(TEXT_COLOR);
        textPaint.setTextSize(54.0f);
        sTextPaint = textPaint;
    }
    // Redraw the overlay so the newly added graphic becomes visible.
    postInvalidate();
}
示例11: onTap
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Speaks the tapped TextBlock, if any, out loud.
 *
 * @param rawX the raw x position of the tap
 * @param rawY the raw y position of the tap
 * @return true if the tap was on a TextBlock
 */
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    if (graphic == null) {
        Log.d(TAG,"no text detected");
        return false;
    }
    TextBlock text = graphic.getTextBlock();
    if (text == null || text.getValue() == null) {
        Log.d(TAG, "text data is null");
        // Preserve original semantics: a block with a null value still counts.
        return text != null;
    }
    Log.d(TAG, "text data is being spoken! " + text.getValue());
    // Queue the utterance rather than interrupting any ongoing speech.
    tts.speak(text.getValue(), TextToSpeech.QUEUE_ADD, null, "DEFAULT");
    return true;
}
示例12: receiveDetections
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Called by the detector to deliver detection results; logs the value of
 * every detected text block.
 */
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
    SparseArray<TextBlock> detected = detections.getDetectedItems();
    for (int index = 0; index < detected.size(); ++index) {
        TextBlock block = detected.valueAt(index);
        if (block != null && block.getValue() != null) {
            Log.d("Processor", "Text detected! " + block.getValue());
        }
    }
}
示例13: detectText
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Concatenates the values of all detected text blocks, one block per line
 * (each followed by a trailing newline).
 */
String detectText(UQI uqi) {
    StringBuilder joined = new StringBuilder();
    for (TextBlock block : this.detectTextBlocks(uqi)) {
        joined.append(block.getValue());
        joined.append("\n");
    }
    return joined.toString();
}
示例14: receiveDetections
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Called by the detector to deliver detection results.
 * Clears the overlay and adds a fresh OcrGraphic for every detected block.
 */
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
    mGraphicOverlay.clear();
    SparseArray<TextBlock> detected = detections.getDetectedItems();
    for (int index = 0; index < detected.size(); ++index) {
        mGraphicOverlay.add(new OcrGraphic(mGraphicOverlay, detected.valueAt(index)));
    }
}
示例15: contains
import com.google.android.gms.vision.text.TextBlock; //导入依赖的package包/类
/**
 * Checks whether a point lies strictly inside this graphic's bounding box.
 * The provided point should be relative to this graphic's containing overlay.
 *
 * @param x an x coordinate in the overlay's coordinate space
 * @param y a y coordinate in the overlay's coordinate space
 * @return true if the point is strictly within the bounding box
 */
public boolean contains(float x, float y) {
    TextBlock text = mText;
    if (text == null) {
        return false;
    }
    // Map the detection's bounding box into overlay coordinates before testing.
    RectF rect = new RectF(text.getBoundingBox());
    rect.left = translateX(rect.left);
    rect.top = translateY(rect.top);
    rect.right = translateX(rect.right);
    rect.bottom = translateY(rect.bottom);
    // Strict inequalities on purpose: a point exactly on the edge does not count
    // (RectF.contains would include the left/top edges, so it is not equivalent).
    boolean insideX = rect.left < x && x < rect.right;
    boolean insideY = rect.top < y && y < rect.bottom;
    return insideX && insideY;
}