This page collects typical usage examples of the Java class ch.zhaw.facerecognitionlibrary.Helpers.MatOperation. If you are unsure how to use the MatOperation class in Java, or are looking for concrete examples of it in use, the selected code examples below may help.
The MatOperation class belongs to the ch.zhaw.facerecognitionlibrary.Helpers package. Five code examples of the class are shown below.
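Before the examples, here is a minimal, self-contained sketch of how two of the MatOperation helpers used below are typically combined. This is not taken from the library's documentation: the class name MatOperationSketch and the method annotatePreview are made up for illustration, and the MatOperation signatures are inferred from the calls in the examples on this page.
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import ch.zhaw.facerecognitionlibrary.Helpers.MatOperation;

public class MatOperationSketch {
    // Rotates the detected face rectangles to match the preview orientation,
    // then draws each rectangle (with an empty label) onto the preview image.
    public static void annotatePreview(Mat preview, Rect[] faces, int angle, boolean frontCamera) {
        Rect[] rotated = MatOperation.rotateFaces(preview, faces, angle);
        for (Rect face : rotated) {
            MatOperation.drawRectangleAndLabelOnPreview(preview, face, "", frontCamera);
        }
    }
}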
Example 1: onCameraFrame
import ch.zhaw.facerecognitionlibrary.Helpers.MatOperation; // import the required package/class
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getProcessedImage(img, PreProcessorFactory.PreprocessingMode.RECOGNITION);
    Rect[] faces = ppF.getFacesForRecognition();
    // Selfie / Mirror mode
    if (front_camera){
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || !(images.size() == faces.length)){
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++){
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], rec.recognize(images.get(i), ""), front_camera);
        }
        return imgRgba;
    }
}
Developer ID: Qualeams, Project: Android-Face-Recognition-with-Deep-Learning-Test-Framework, Lines of code: 23, Source: RecognitionActivity.java
Example 2: onCameraFrame
import ch.zhaw.facerecognitionlibrary.Helpers.MatOperation; // import the required package/class
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getCroppedImage(img);
    Rect[] faces = ppF.getFacesForRecognition();
    // Selfie / Mirror mode
    if (front_camera){
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || !(images.size() == faces.length)){
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++){
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], "", front_camera);
        }
        return imgRgba;
    }
}
Developer ID: Qualeams, Project: Android-Face-Recognition-with-Deep-Learning-Test-Framework, Lines of code: 24, Source: DetectionActivity.java
Example 3: setAngle
import ch.zhaw.facerecognitionlibrary.Helpers.MatOperation; // import the required package/class
public void setAngle(int angle) {
    this.angle = angle;
    for (Mat img : images){
        MatOperation.rotate_90n(img, angle);
    }
}
Developer ID: Qualeams, Project: Android-Face-Recognition-with-Deep-Learning-Library, Lines of code: 7, Source: PreProcessor.java
Example 4: drawArrowFromFaceToFrame
import ch.zhaw.facerecognitionlibrary.Helpers.MatOperation; // import the required package/class
public static void drawArrowFromFaceToFrame(AnimalOverlay animalOverlay, Mat img, Rect face){
    Rect mirroredFace = MatOperation.getMirroredFaceForFrontCamera(img, face);
    Point pointFace = new Point(mirroredFace.tl().x + mirroredFace.width / 2, mirroredFace.tl().y + mirroredFace.height / 2);
    Point pointFrame = new Point(animalOverlay.getFrameStartX() + (animalOverlay.getFrameEndX() - animalOverlay.getFrameStartX()) / 2, animalOverlay.getFrameStartY() + (animalOverlay.getFrameEndY() - animalOverlay.getFrameStartY()) / 2);
    Imgproc.arrowedLine(img, pointFace, pointFrame, RED_COLOR, 20, Imgproc.LINE_8, 0, 0.2);
}
Example 5: onCameraFrame
import ch.zhaw.facerecognitionlibrary.Helpers.MatOperation; // import the required package/class
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    final Mat imgRgba = inputFrame.rgba();
    // Do not change screen brightness manually during the test phase, due to the unknown location of the different test users.
    // M.Schälchli 20170129
    // if (isDeviceRooted){
    //     DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
    // }
    long currentTime = new Date().getTime();
    if (authenticationAnimationAlreadyPlayed || ((startTimeAuthenticationAnimation + AuthenticationActivity.AUTHENTICATION_ANIMATION_TIME) < currentTime)){
        prepareForAuthentication();
        Mat imgCopy = new Mat();
        // Store original image for face recognition
        imgRgba.copyTo(imgCopy);
        // Mirror front camera image
        Core.flip(imgRgba, imgRgba, 1);
        Rect face = new Rect();
        boolean isFaceInsideFrame = false;
        boolean faceDetected = false;
        if ((lastTime + TIMER_DIFF) < currentTime){
            lastTime = currentTime;
            List<Mat> images = ppF.getCroppedImage(imgCopy);
            if ((images != null) && (images.size() == 1)){
                Mat img = images.get(0);
                if (img != null) {
                    Rect[] faces = ppF.getFacesForRecognition();
                    if ((faces != null) && (faces.length == 1)) {
                        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                        face = faces[0];
                        faceDetected = true;
                        // Reset startTimeFallback for the fallback timeout, because at least one face has been detected
                        startTimeFallback = currentTime;
                        isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);
                        if (isFaceInsideFrame){
                            if (!activityStopped){
                                mediaPlayerAnimalSound.start();
                                studentImages.add(img);
                                // Stop after NUMBER_OF_IMAGES (settings option)
                                if (imagesProcessed == NUMBER_OF_IMAGES){
                                    storeStudentImages();
                                    finish();
                                }
                                imagesProcessed++;
                            }
                        }
                    }
                }
            }
        }
        if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)){
            // Prevent a second execution of the fallback activity caused by threading
            startTimeFallback = currentTime;
            DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
            finish();
        }
        if (faceDetected && !isFaceInsideFrame && !activityStopped){
            DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
            AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
        }
        EnvironmentSettings.freeMemory();
    }
    return imgRgba;
}