This article collects typical usage examples of the Core.perspectiveTransform method from the Java class org.opencv.core.Core. If you have been wondering what Core.perspectiveTransform does, how to call it, or what real calling code looks like, the curated examples below should help; you can also explore the enclosing class org.opencv.core.Core further.
Three code examples of Core.perspectiveTransform are shown below, ordered by popularity.
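Before the examples, here is a minimal, self-contained sketch of what Core.perspectiveTransform does: it applies a 3x3 perspective (homography) matrix to a set of 2D points. This is only an illustration; the corner coordinates and probe points below are made up, and loading the native library via Core.NATIVE_LIBRARY_NAME assumes a standard desktop OpenCV Java setup.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;

public class PerspectiveTransformSketch {
    public static void main(String[] args) {
        // Assumes the OpenCV native library is available on the library path.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Four corners of a source quadrilateral and of the target rectangle
        // (all coordinate values here are arbitrary, for illustration only).
        MatOfPoint2f srcCorners = new MatOfPoint2f(
                new Point(50, 60), new Point(600, 40),
                new Point(620, 460), new Point(30, 470));
        MatOfPoint2f dstCorners = new MatOfPoint2f(
                new Point(0, 0), new Point(640, 0),
                new Point(640, 480), new Point(0, 480));
        // 3x3 perspective matrix mapping srcCorners onto dstCorners.
        Mat m = Imgproc.getPerspectiveTransform(srcCorners, dstCorners);
        // Apply the same mapping to an arbitrary set of points.
        MatOfPoint2f pts = new MatOfPoint2f(new Point(100, 100), new Point(320, 240));
        MatOfPoint2f mapped = new MatOfPoint2f();
        Core.perspectiveTransform(pts, mapped, m);
        System.out.println(mapped.dump());
    }
}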
Example 1: drawObjectLocation
import org.opencv.core.Core; // import the package/class the method depends on
/**
* Draw the object's location
*
* @param output Image to draw on
* @param objectAnalysis Object analysis information
* @param sceneAnalysis Scene analysis information
*/
public static void drawObjectLocation(Mat output, ObjectAnalysis objectAnalysis, SceneAnalysis sceneAnalysis) {
    List<Point> ptsObject = new ArrayList<>();
    List<Point> ptsScene = new ArrayList<>();
    KeyPoint[] keypointsObject = objectAnalysis.keypoints.toArray();
    KeyPoint[] keypointsScene = sceneAnalysis.keypoints.toArray();
    DMatch[] matches = sceneAnalysis.matches.toArray();
    for (DMatch match : matches) {
        //Get the keypoints from these matches
        ptsObject.add(keypointsObject[match.queryIdx].pt);
        ptsScene.add(keypointsScene[match.trainIdx].pt);
    }
    MatOfPoint2f matObject = new MatOfPoint2f();
    matObject.fromList(ptsObject);
    MatOfPoint2f matScene = new MatOfPoint2f();
    matScene.fromList(ptsScene);
    //Calculate homography of object in scene
    Mat homography = Calib3d.findHomography(matObject, matScene, Calib3d.RANSAC, 5.0f);
    //Create the unscaled array of corners, representing the object size
    Point[] cornersObject = new Point[4];
    cornersObject[0] = new Point(0, 0);
    cornersObject[1] = new Point(objectAnalysis.object.cols(), 0);
    cornersObject[2] = new Point(objectAnalysis.object.cols(), objectAnalysis.object.rows());
    cornersObject[3] = new Point(0, objectAnalysis.object.rows());
    Point[] cornersSceneTemp = new Point[0];
    MatOfPoint2f cornersSceneMatrix = new MatOfPoint2f(cornersSceneTemp);
    MatOfPoint2f cornersObjectMatrix = new MatOfPoint2f(cornersObject);
    //Transform the object coordinates to the scene coordinates by the homography matrix
    Core.perspectiveTransform(cornersObjectMatrix, cornersSceneMatrix, homography);
    //Mat transform = Imgproc.getAffineTransform(cornersObjectMatrix, cornersSceneMatrix);
    //Draw the lines of the object on the scene
    Point[] cornersScene = cornersSceneMatrix.toArray();
    final ColorRGBA lineColor = new ColorRGBA("#00ff00");
    Drawing.drawLine(output, new Point(cornersScene[0].x + objectAnalysis.object.cols(), cornersScene[0].y),
            new Point(cornersScene[1].x + objectAnalysis.object.cols(), cornersScene[1].y), lineColor, 5);
    Drawing.drawLine(output, new Point(cornersScene[1].x + objectAnalysis.object.cols(), cornersScene[1].y),
            new Point(cornersScene[2].x + objectAnalysis.object.cols(), cornersScene[2].y), lineColor, 5);
    Drawing.drawLine(output, new Point(cornersScene[2].x + objectAnalysis.object.cols(), cornersScene[2].y),
            new Point(cornersScene[3].x + objectAnalysis.object.cols(), cornersScene[3].y), lineColor, 5);
    Drawing.drawLine(output, new Point(cornersScene[3].x + objectAnalysis.object.cols(), cornersScene[3].y),
            new Point(cornersScene[0].x + objectAnalysis.object.cols(), cornersScene[0].y), lineColor, 5);
}
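Example 1 assumes that keypoints and matches already sit inside project-specific wrapper types (ObjectAnalysis, SceneAnalysis) and that Drawing/ColorRGBA helpers exist. For readers without those wrappers, here is a hedged sketch of how the same pipeline looks with only the stock OpenCV 3.x Java API: detect ORB features, match them with brute-force Hamming matching, estimate the homography, and project the object corners into the scene. ORB and the matcher choice are my own assumptions here, not necessarily what the wrapper types use.

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.ORB;
import java.util.ArrayList;
import java.util.List;

// Hedged sketch (plain OpenCV 3.x, no project wrappers): locate an object in a
// scene by feature matching, homography estimation and perspectiveTransform.
public class HomographySketch {
    public static Point[] locate(Mat objectImg, Mat sceneImg) {
        ORB orb = ORB.create();
        MatOfKeyPoint kpObject = new MatOfKeyPoint(), kpScene = new MatOfKeyPoint();
        Mat descObject = new Mat(), descScene = new Mat();
        orb.detectAndCompute(objectImg, new Mat(), kpObject, descObject);
        orb.detectAndCompute(sceneImg, new Mat(), kpScene, descScene);

        MatOfDMatch matches = new MatOfDMatch();
        DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING)
                .match(descObject, descScene, matches);

        // Collect the matched coordinates on both sides.
        List<Point> ptsObject = new ArrayList<>(), ptsScene = new ArrayList<>();
        KeyPoint[] ko = kpObject.toArray(), ks = kpScene.toArray();
        for (DMatch m : matches.toArray()) {
            ptsObject.add(ko[m.queryIdx].pt);
            ptsScene.add(ks[m.trainIdx].pt);
        }
        MatOfPoint2f matObject = new MatOfPoint2f(), matScene = new MatOfPoint2f();
        matObject.fromList(ptsObject);
        matScene.fromList(ptsScene);

        // Robustly estimate the homography and project the object's corners into the scene.
        Mat homography = Calib3d.findHomography(matObject, matScene, Calib3d.RANSAC, 5.0);
        MatOfPoint2f corners = new MatOfPoint2f(
                new Point(0, 0), new Point(objectImg.cols(), 0),
                new Point(objectImg.cols(), objectImg.rows()), new Point(0, objectImg.rows()));
        MatOfPoint2f projected = new MatOfPoint2f();
        Core.perspectiveTransform(corners, projected, homography);
        return projected.toArray();
    }
}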
Example 2: transform
import org.opencv.core.Core; // import the package/class the method depends on
public static TransformResult transform(CalibrationResult calibrationResult) {
    MatOfPoint2f src = new MatOfPoint2f(
            new org.opencv.core.Point(calibrationResult.getLeftLowX(), calibrationResult.getLeftLowY()), // tl
            new org.opencv.core.Point(calibrationResult.getLeftUpX(), calibrationResult.getLeftUpY()), // tr
            new org.opencv.core.Point(calibrationResult.getRightLowX(), calibrationResult.getRightLowY()), // br
            new org.opencv.core.Point(calibrationResult.getRightUpX(), calibrationResult.getRightUpY()) // bl
    );
    MatOfPoint2f dst = new MatOfPoint2f(
            new org.opencv.core.Point(0, 0), // tl
            new org.opencv.core.Point(0, 100), // tr
            new org.opencv.core.Point(500, 0), // br
            new org.opencv.core.Point(500, 100) // bl
    );
    TransformResult transformResult = new TransformResult();
    MatOfPoint2f Src = new MatOfPoint2f(
            new org.opencv.core.Point(calibrationResult.getLeftLowX(), calibrationResult.getLeftLowY()), // tl
            new org.opencv.core.Point(calibrationResult.getLeftUpRightX(), calibrationResult.getLeftUpRightY()), // tr
            new org.opencv.core.Point(calibrationResult.getRightLowX(), calibrationResult.getRightLowY()), // br
            new org.opencv.core.Point(calibrationResult.getRightUpLeftX(), calibrationResult.getRightUpLeftY()) // bl
    );
    MatOfPoint2f Dst = new MatOfPoint2f(
            new org.opencv.core.Point(calibrationResult.getLeftLowX(), calibrationResult.getLeftLowY()), // tl
            new org.opencv.core.Point(calibrationResult.getLeftUpRightX(), calibrationResult.getLeftUpRightY()), // tr
            new org.opencv.core.Point(calibrationResult.getRightLowX(), calibrationResult.getRightLowY()), // br
            new org.opencv.core.Point(calibrationResult.getRightUpLeftX(), calibrationResult.getRightUpLeftY()) // bl
    );
    transformResult.m = Imgproc.getPerspectiveTransform(src, dst);
    Core.perspectiveTransform(Src, Dst, transformResult.m);
    org.opencv.core.Point[] points = Dst.toArray();
    double widthLeft = Math.abs(points[0].x - points[1].x);
    double widthRight = Math.abs(points[2].x - points[3].x);
    transformResult.blackWidth = (widthLeft + widthRight) / 2;
    return transformResult;
}
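Example 2 only remaps calibration points; the same 3x3 matrix can also rectify an entire image. Below is a hedged sketch contrasting the two operations, with Core.perspectiveTransform for individual points and Imgproc.warpPerspective for the whole frame. The corner coordinates and output size are invented for illustration and are not taken from the CalibrationResult type above.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

// Hedged sketch: one perspective matrix, two uses.
public class WarpSketch {
    public static Mat rectify(Mat frame) {
        // Measured quadrilateral corners (arbitrary values) and the target rectangle.
        MatOfPoint2f measured = new MatOfPoint2f(
                new Point(32, 48), new Point(610, 40),
                new Point(628, 455), new Point(25, 470));
        MatOfPoint2f target = new MatOfPoint2f(
                new Point(0, 0), new Point(500, 0),
                new Point(500, 100), new Point(0, 100));
        Mat m = Imgproc.getPerspectiveTransform(measured, target);

        // Map a single measured point into the rectified coordinate system.
        MatOfPoint2f probe = new MatOfPoint2f(new Point(300, 250));
        MatOfPoint2f probeMapped = new MatOfPoint2f();
        Core.perspectiveTransform(probe, probeMapped, m);

        // Warp the whole frame with the same matrix.
        Mat rectified = new Mat();
        Imgproc.warpPerspective(frame, rectified, m, new Size(500, 100));
        return rectified;
    }
}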
Example 3: Key
import org.opencv.core.Core; // import the package/class the method depends on
public static int Key(TransformResult transformResult, double x, double y) {
    MatOfPoint2f Src = new MatOfPoint2f(
            new org.opencv.core.Point(x, y)
    );
    MatOfPoint2f Dst = new MatOfPoint2f(
            new org.opencv.core.Point(x, y)
    );
    Core.perspectiveTransform(Src, Dst, transformResult.m);
    return key(Dst.get(0, 0)[0], Dst.get(0, 0)[1], transformResult.blackWidth);
}
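A hedged usage sketch tying Examples 2 and 3 together (the coordinate values are hypothetical, and calibrationResult is assumed to come from the project's own calibration step):

// Calibrate once, then resolve raw (x, y) coordinates to a key index.
TransformResult transformResult = transform(calibrationResult);
int keyIndex = Key(transformResult, 312.0, 244.0);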