本文整理汇总了Java中org.opencv.core.TermCriteria.EPS属性的典型用法代码示例。如果您正苦于以下问题:Java TermCriteria.EPS属性的具体用法?Java TermCriteria.EPS怎么用?Java TermCriteria.EPS使用的例子?那么, 这里精选的属性代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在类org.opencv.core.TermCriteria
的用法示例。
在下文中一共展示了TermCriteria.EPS属性的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: findCheckereboardPatterns
/**
 * Runs checkerboard detection over every captured image and refines the corner
 * locations to sub-pixel accuracy for each image where the pattern is found.
 *
 * Side effects: resets {@code cornersFromAllImages} and appends one
 * {@code MatOfPoint2f} of refined corners per accepted image; increments
 * {@code rejectedImage} for each image where no pattern was detected.
 */
private void findCheckereboardPatterns() {
    cornersFromAllImages = new ArrayList<>();
    for (int i = 0; i < images.size(); i++) {
        corners = new MatOfPoint2f();
        // Bug fix: the OpenCV Java API method is findChessboardCorners;
        // Calib3d has no findCheckerboardCorners, so the original call could not compile.
        boolean patternFound = Calib3d.findChessboardCorners(images.get(i),
                new Size(width, height), corners, Calib3d.CALIB_CB_FAST_CHECK);
        if (patternFound) {
            // Refine until 30 iterations or corner movement drops below 0.1 px.
            TermCriteria termCriteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS,
                    30, 0.1);
            // NOTE(review): using the pattern size (width, height) as the cornerSubPix
            // search window is unusual — a small window such as (11, 11) is typical; confirm.
            Imgproc.cornerSubPix(images.get(i), corners, new Size(width, height),
                    new Size(-1, -1), termCriteria);
            cornersFromAllImages.add(corners);
            Log.d(TAG, "IMG_" + String.valueOf(i + 1) + " -> PATTERN FOUND");
        } else {
            rejectedImage++;
            Log.d(TAG, "IMG_" + String.valueOf(i + 1) + " -> PATTERN NOT FOUND");
        }
    }
}
开发者ID:PawelTypiak,项目名称:Checkerboard-IMU-Comparator,代码行数:19,代码来源:CheckerboardPatternComputingInitializer.java
示例2: findPattern
/**
 * Detects a checkerboard pattern in the single image {@code IMAGE} and, when
 * found, refines the detected corners in {@code cornerOfOneImage} to sub-pixel
 * accuracy.
 *
 * @return {@code true} if the checkerboard pattern was found, {@code false} otherwise
 */
private boolean findPattern() {
    // Bug fix: the OpenCV Java API method is findChessboardCorners;
    // Calib3d has no findCheckerboardCorners, so the original call could not compile.
    boolean isPatternFound = Calib3d.findChessboardCorners(IMAGE, new Size(width, height),
            cornerOfOneImage, Calib3d.CALIB_CB_FAST_CHECK);
    if (isPatternFound) {
        // Refine until 30 iterations or corner movement drops below 0.1 px.
        TermCriteria termCriteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS,
                30, 0.1);
        // NOTE(review): using the pattern size (width, height) as the cornerSubPix
        // search window is unusual — a small window such as (11, 11) is typical; confirm.
        Imgproc.cornerSubPix(IMAGE, cornerOfOneImage, new Size(width, height),
                new Size(-1, -1), termCriteria);
        Log.d(TAG, "findPattern(picture, externalCorners) -> done - found checkerboard");
        return true;
    } else {
        Log.d(TAG, "findPattern(picture, externalCorners) -> done - no checkerboard detected");
        return false;
    }
}
开发者ID:PawelTypiak,项目名称:Checkerboard-IMU-Comparator,代码行数:15,代码来源:CheckerboardPatternComputingInitializer.java
示例3: computeModel
/**
 * Builds a bag-of-visual-words model from the given photos: extracts keypoint
 * descriptors from each (preprocessed) image, clusters all descriptors with
 * k-means, and stores centroids, labels, and the cluster-to-image mapping in
 * {@code model}.
 *
 * @param photos metadata entries whose {@code getZooName()} paths are read as
 *               grayscale images
 */
public void computeModel(ArrayList<MetaData> photos)
{
    numPhotos = photos.size();
    model.setNumPhotos(numPhotos);
    MatOfKeyPoint[] keypoints = new MatOfKeyPoint[numPhotos];
    Mat[] descriptors = new Mat[numPhotos];
    Mat allDescriptors = new Mat();
    ArrayList<Integer> descriptorLabels = new ArrayList<Integer>();
    // Compute keypoints and descriptors for every photo.
    Mat currentImg = null;
    for (int a = 0; a < numPhotos; a++)
    {
        // Load as grayscale (flag 0), then normalize size/contrast and binarize.
        currentImg = Highgui.imread(photos.get(a).getZooName().toString(), 0);
        Imgproc.resize(currentImg, currentImg, new Size(150, 250));
        Imgproc.equalizeHist(currentImg, currentImg);
        Imgproc.threshold(currentImg, currentImg, 127, 255, Imgproc.THRESH_BINARY);
        // Bug fix: the array slots start out null; detect()/compute() require real
        // output objects, so the original code threw a NullPointerException here.
        keypoints[a] = new MatOfKeyPoint();
        descriptors[a] = new Mat();
        featureDetector.detect(currentImg, keypoints[a]);
        descriptorExtractor.compute(currentImg, keypoints[a], descriptors[a]);
        allDescriptors.push_back(descriptors[a]);
        // Remember which photo each descriptor row came from.
        for (int i = 0; i < descriptors[a].rows(); i++)
            descriptorLabels.add(a);
    }
    System.out.println("label size:" + descriptorLabels.size());
    Mat clusterLabels = new Mat();
    Mat centers = new Mat();
    // kmeans requires floating-point input; stop after 100 iterations or eps 0.1.
    allDescriptors.convertTo(allDescriptors, CvType.CV_32F);
    TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 100, 0.1);
    long before = System.currentTimeMillis();
    // Compute clusters (10 attempts, k-means++ seeding).
    System.out.print("creating kmeans clusters...");
    Core.kmeans(allDescriptors, k, clusterLabels, criteria, 10, Core.KMEANS_PP_CENTERS, centers);
    System.out.println("done.");
    // Map each k-means cluster label to the photo indices of its descriptors.
    // NOTE(review): labels are in [0, k), so k lists would suffice; the extra
    // (k+1)-th list is kept to preserve the original model contents.
    ArrayList<ArrayList<Integer>> clusterImageMap = new ArrayList<ArrayList<Integer>>();
    for (int nk = 0; nk < k + 1; nk++)
        clusterImageMap.add(new ArrayList<Integer>());
    for (int r = 0; r < clusterLabels.rows(); r++)
        clusterImageMap.get((int) clusterLabels.get(r, 0)[0]).add(descriptorLabels.get(r));
    model.setCentroids(centers);
    model.setLabels(clusterLabels);
    model.setClusterImageMap(clusterImageMap);
    model.setKeypoints(keypoints);
    model.setDescriptors(descriptors);
}