This article collects typical usage examples of the Java class boofcv.abst.feature.detect.interest.ConfigFastHessian. If you are unsure what ConfigFastHessian does, how it is used, or want to see it in context, the curated examples below should help.
ConfigFastHessian belongs to the boofcv.abst.feature.detect.interest package. The following 15 code examples show the class in use, sorted by popularity by default.
Example 1: stitch
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* Given two input images create and display an image where the two have been overlaid on top of each other.
*/
public static <T extends ImageSingleBand>
void stitch( BufferedImage imageA , BufferedImage imageB , Class<T> imageType )
{
T inputA = ConvertBufferedImage.convertFromSingle(imageA, null, imageType);
T inputB = ConvertBufferedImage.convertFromSingle(imageB, null, imageType);
// Detect using the standard SURF feature descriptor and describer
DetectDescribePoint detDesc = FactoryDetectDescribe.surfStable(
new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4), null,null, ImageDataType.single(imageType));
ScoreAssociation<SurfFeature> scorer = FactoryAssociation.scoreEuclidean(SurfFeature.class,true);
AssociateDescription<SurfFeature> associate = FactoryAssociation.greedy(scorer,2,true);
// fit the images using a homography. This works well for rotations and distant objects.
GenerateHomographyLinear modelFitter = new GenerateHomographyLinear(true);
DistanceHomographySq distance = new DistanceHomographySq();
ModelMatcher<Homography2D_F64,AssociatedPair> modelMatcher =
new Ransac<Homography2D_F64,AssociatedPair>(123,modelFitter,distance,60,9);
Homography2D_F64 H = computeTransform(inputA, inputB, detDesc, associate, modelMatcher);
renderStitching(imageA,imageB,H);
}
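A minimal calling sketch for the method above. The file names are placeholders, and it assumes stitch, computeTransform and renderStitching all live in the same example class:
import boofcv.io.image.UtilImageIO;
import boofcv.struct.image.ImageFloat32;
import java.awt.image.BufferedImage;
public static void main(String[] args) {
	// hypothetical input images; any pair with overlapping content will do
	BufferedImage imageA = UtilImageIO.loadImage("imageA.jpg");
	BufferedImage imageB = UtilImageIO.loadImage("imageB.jpg");
	// ImageFloat32 is the usual single-band type for this generation of BoofCV
	stitch(imageA, imageB, ImageFloat32.class);
}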
Example 2: dda_FH_SURF_Fast
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* Creates a tracker which detects Fast-Hessian features and describes them with SURF using the faster variant
* of SURF.
*
* @see DescribePointSurf
* @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
*
* @param configDetector Configuration for SURF detector
* @param configDescribe Configuration for SURF descriptor
* @param configOrientation Configuration for orientation
* @param imageType Type of image the input is.
* @return SURF based tracker.
*/
// TODO remove maxTracks? Use number of detected instead
public static <I extends ImageSingleBand>
PointTracker<I> dda_FH_SURF_Fast(
ConfigFastHessian configDetector ,
ConfigSurfDescribe.Speed configDescribe ,
ConfigAverageIntegral configOrientation ,
Class<I> imageType)
{
ScoreAssociation<TupleDesc_F64> score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class, true);
AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 5, true));
AssociateDescription2D<SurfFeature> generalAssoc =
new AssociateDescTo2D<SurfFeature>(new WrapAssociateSurfBasic(assoc));
DetectDescribePoint<I,SurfFeature> fused =
FactoryDetectDescribe.surfFast(configDetector, configDescribe, configOrientation,
ImageDataType.single(imageType));
DdaManagerDetectDescribePoint<I,SurfFeature> manager = new DdaManagerDetectDescribePoint<I,SurfFeature>(fused);
return new DetectDescribeAssociate<I,SurfFeature>(manager, generalAssoc,false);
}
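A hedged usage sketch for the tracker factory above. The enclosing class is assumed to be FactoryPointTracker (its name and package have shifted between BoofCV releases), the detector settings are copied from Example 1, and passing null for the describe/orientation configurations falls back to the defaults:
import boofcv.abst.feature.detect.interest.ConfigFastHessian;
import boofcv.abst.feature.tracker.PointTrack;
import boofcv.abst.feature.tracker.PointTracker;
import boofcv.factory.feature.tracker.FactoryPointTracker; // package location varies across BoofCV versions
import boofcv.struct.image.ImageFloat32;
import java.util.List;
public static void trackSequence( List<ImageFloat32> frames ) {
	PointTracker<ImageFloat32> tracker = FactoryPointTracker.dda_FH_SURF_Fast(
			new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4), null, null, ImageFloat32.class);
	for( ImageFloat32 frame : frames ) {
		tracker.process(frame);   // update existing tracks against the new frame
		tracker.spawnTracks();    // spawn new tracks from freshly detected features
		List<PointTrack> active = tracker.getActiveTracks(null); // null allocates a new list
		System.out.println("active tracks: " + active.size());
	}
}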
Example 3: dda_FH_SURF_Stable
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* Creates a tracker which detects Fast-Hessian features and describes them with SURF using the stable variant
* of SURF.
*
* @see DescribePointSurf
* @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
*
* @param configDetector Configuration for SURF detector
* @param configDescribe Configuration for SURF descriptor
* @param configOrientation Configuration for orientation
* @param imageType Type of image the input is.
* @return SURF based tracker.
*/
// TODO remove maxTracks? Use number of detected instead
public static <I extends ImageSingleBand>
PointTracker<I> dda_FH_SURF_Stable(
ConfigFastHessian configDetector ,
ConfigSurfDescribe.Stablility configDescribe ,
ConfigSlidingIntegral configOrientation ,
Class<I> imageType)
{
ScoreAssociation<TupleDesc_F64> score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class, true);
AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 5, true));
AssociateDescription2D<SurfFeature> generalAssoc =
new AssociateDescTo2D<SurfFeature>(new WrapAssociateSurfBasic(assoc));
DetectDescribePoint<I,SurfFeature> fused =
FactoryDetectDescribe.surfStable(configDetector,configDescribe,configOrientation,
ImageDataType.single(imageType));
DdaManagerDetectDescribePoint<I,SurfFeature> manager = new DdaManagerDetectDescribePoint<I,SurfFeature>(fused);
return new DetectDescribeAssociate<I,SurfFeature>(manager, generalAssoc,false);
}
Example 4: combined_FH_SURF_KLT
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* Creates a tracker which detects Fast-Hessian features, describes them with SURF, nominally tracks them using KLT.
*
* @see DescribePointSurf
* @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
*
* @param trackRadius Size of feature being tracked by KLT
* @param pyramidScalingKlt Image pyramid used for KLT
* @param reactivateThreshold Tracks are reactivated after this many have been dropped. Try 10% of maxMatches
* @param configDetector Configuration for SURF detector
* @param configDescribe Configuration for SURF descriptor
* @param configOrientation Configuration for region orientation
* @param imageType Type of image the input is.
* @param <I> Input image type.
* @return SURF based tracker.
*/
public static <I extends ImageSingleBand>
PointTracker<I> combined_FH_SURF_KLT(int trackRadius,
int[] pyramidScalingKlt ,
int reactivateThreshold ,
ConfigFastHessian configDetector ,
ConfigSurfDescribe.Stablility configDescribe ,
ConfigSlidingIntegral configOrientation ,
Class<I> imageType) {
ScoreAssociation<TupleDesc_F64> score = FactoryAssociation.defaultScore(TupleDesc_F64.class);
AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 100000, true));
AssociateDescription<SurfFeature> generalAssoc = new WrapAssociateSurfBasic(assoc);
DetectDescribePoint<I,SurfFeature> fused =
FactoryDetectDescribe.surfStable(configDetector, configDescribe, configOrientation,
ImageDataType.single(imageType));
return combined(fused,generalAssoc,trackRadius,pyramidScalingKlt,reactivateThreshold,
imageType);
}
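A sketch of plausible arguments for the hybrid tracker above, reusing the imports and class-name assumption from the previous sketch. The KLT radius, pyramid scaling and reactivation threshold are illustrative values, not tuned recommendations:
// 5-pixel KLT features, a 4-level pyramid, re-detection after 50 dropped tracks
PointTracker<ImageFloat32> tracker = FactoryPointTracker.combined_FH_SURF_KLT(
		5,                      // trackRadius
		new int[]{1, 2, 4, 8},  // pyramidScalingKlt
		50,                     // reactivateThreshold
		new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4),
		null, null,             // default SURF describe/orientation configurations
		ImageFloat32.class);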
Example 5: perform
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
public static <T extends ImageSingleBand, D extends ImageSingleBand>
void perform( String fileName , Class<T> imageType , Class<D> derivType )
{
SimpleImageSequence<T> sequence = BoofVideoManager.loadManagerDefault().load(fileName, ImageDataType.single(imageType));
int maxCorners = 200;
int radius = 2;
// if null then no orientation will be computed
OrientationImageAverage<T> orientation = null;
orientation = FactoryOrientationAlgs.nogradient(radius,imageType);
InterestPointDetector<T> detector;
detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 2, 100, 2, 9, 4, 4));
// FeatureScaleSpace<T,D> feature = FactoryInterestPointAlgs.hessianScaleSpace(radius,1,maxCorners,imageType,derivType);
// detector = FactoryInterestPoint.wrapDetector(feature,new double[]{1,2,4,6,8,12},imageType);
VideoDetectInterestPoints<T> display = new VideoDetectInterestPoints<T>(sequence, detector,orientation);
display.process();
}
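A minimal call to the method above, assuming boofcv.struct.image.ImageFloat32 is imported. The video file name is a placeholder, and ImageFloat32 serves as both the image and derivative type (the derivative type is only needed by the commented-out scale-space detector):
public static void main(String[] args) {
	perform("video.mjpeg", ImageFloat32.class, ImageFloat32.class);
}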
Example 6: DetectFeaturePointSOApp
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
public DetectFeaturePointSOApp(Class<T> imageType, Class<D> derivType) {
super(1);
this.imageType = imageType;
FeatureLaplacePyramid<T, D> flss = FactoryInterestPointAlgs.hessianLaplace(radius, thresh, maxScaleFeatures, imageType, derivType);
addAlgorithm(0, "Hess Lap SS", FactoryInterestPoint.wrapDetector(flss, scales, false, imageType));
FeatureLaplacePyramid<T, D> flp = FactoryInterestPointAlgs.hessianLaplace(radius, thresh, maxScaleFeatures, imageType, derivType);
addAlgorithm(0, "Hess Lap P", FactoryInterestPoint.wrapDetector(flp, scales, true,imageType));
addAlgorithm(0, "FastHessian", FactoryInterestPoint.<T>fastHessian(
new ConfigFastHessian(thresh, 2, maxScaleFeatures, 2, 9, 4, 4)));
if( imageType == ImageFloat32.class )
addAlgorithm(0, "SIFT", FactoryInterestPoint.siftDetector(null,new ConfigSiftDetector(2,10,maxScaleFeatures,5)));
JPanel viewArea = new JPanel(new BorderLayout());
corruptPanel = new ImageCorruptPanel();
corruptPanel.setListener(this);
panel = new ImagePanel();
viewArea.add(corruptPanel, BorderLayout.WEST);
viewArea.add(panel, BorderLayout.CENTER);
setMainGUI(viewArea);
}
Example 7: VisualizeAssociationAlgorithmsApp
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
public VisualizeAssociationAlgorithmsApp( Class<T> imageType ) {
super(1);
this.imageType = imageType;
detector = (DetectDescribePoint) FactoryDetectDescribe.surfStable(
new ConfigFastHessian(5, 4, 200, 1, 9, 4, 4), null, null, ImageDataType.single(ImageFloat32.class));
// detector = (DetectDescribePoint) FactoryDetectDescribe.sift(4,1,false,200);
int DOF = detector.createDescription().size();
ScoreAssociation<TupleDesc_F64> score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class,true);
addAlgorithm(0, "Greedy", FactoryAssociation.greedy(score, Double.MAX_VALUE, false));
addAlgorithm(0, "Greedy Backwards", FactoryAssociation.greedy(score, Double.MAX_VALUE, true));
addAlgorithm(0, "K-D Tree BBF", FactoryAssociation.kdtree(DOF, 75));
addAlgorithm(0, "Random Forest", FactoryAssociation.kdRandomForest(DOF, 75, 10, 5, 1233445565));
image0 = GeneralizedImageOps.createSingleBand(imageType, 1, 1);
image1 = GeneralizedImageOps.createSingleBand(imageType, 1, 1);
setMainGUI(panel);
}
Example 8: extractFeaturesInternal
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* Detects key points inside the image and computes descriptions at those points.
*/
protected double[][] extractFeaturesInternal(BufferedImage image) {
ImageFloat32 boofcvImage = ConvertBufferedImage.convertFromSingle(image, null, ImageFloat32.class);
// create the SURF detector and descriptor in BoofCV v0.15
ConfigFastHessian conf = new ConfigFastHessian(detectThreshold, 2, maxFeaturesPerScale, 2, 9, 4, 4);
DetectDescribePoint<ImageFloat32, SurfFeature> surf = FactoryDetectDescribe.surfStable(conf, null,
null, ImageFloat32.class);
// specify the image to process
surf.detect(boofcvImage);
int numPoints = surf.getNumberOfFeatures();
double[][] descriptions = new double[numPoints][SURFLength];
for (int i = 0; i < numPoints; i++) {
descriptions[i] = surf.getDescription(i).getValue();
}
return descriptions;
}
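The snippet above references three members that are not shown. A hypothetical set of definitions consistent with how they are used, assuming the standard 64-element SURF descriptor:
// hypothetical fields; the real values live elsewhere in the original class
private final float detectThreshold = 10;     // Fast-Hessian intensity threshold
private final int maxFeaturesPerScale = -1;   // <= 0 keeps every feature found per scale
private static final int SURFLength = 64;     // a standard SURF descriptor has 64 elements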
Example 9: detect
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
public <T extends ImageFloat32>
Bitmap detect( Bitmap image, Class<T> imageType , int nPuntos) {
if(nPuntos != 0){ // BoofCV has an issue if we request 0 points
T input = ConvertBitmap.bitmapToGray(image, null, imageType, null);
// Create a Fast Hessian detector from the SURF paper.
// Other detectors can be used in this example too.
InterestPointDetector<T> detector = FactoryInterestPoint.fastHessian(
new ConfigFastHessian(30, 2, nPuntos, 2, 9, 3, 4));
// find interest points in the image
detector.detect(input);
Paint paintMax;
paintMax = new Paint();
paintMax.setColor(Color.RED);
paintMax.setStyle(Paint.Style.FILL);
Canvas canvas = new Canvas(image);
for(int i = 0; i<detector.getNumberOfFeatures();i++){
canvas.drawCircle((float) detector.getLocation(i).getX(), (float) detector.getLocation(i).getY(), 3, paintMax);
points.add(new Point(detector.getLocation(i).getX(), detector.getLocation(i).getY()));
}
}
return image;
}
Example 10: getStableSurf
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* Returns SURF descriptors for an image using the settings above. Uses the BoofCV stable SURF algorithm.
*
* @param image Image for which to obtain the SURF descriptors.
* @return DetectDescribePoint holding the detected interest points and their SURF descriptors.
*/
public static DetectDescribePoint<GrayF32, BrightFeature> getStableSurf(BufferedImage image) {
/* Obtain raw SURF descriptors using the configuration above (FH-9 according to [1]). */
GrayF32 gray = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);
ConfigFastHessian config = new ConfigFastHessian(0, 2, FH_MAX_FEATURES_PER_SCALE, FH_INITIAL_SAMPLE_SIZE, FH_INITIAL_SIZE, FH_NUMBER_SCALES_PER_OCTAVE, FH_NUMBER_OF_OCTAVES);
DetectDescribePoint<GrayF32, BrightFeature> surf = FactoryDetectDescribe.surfStable(config, null, null, GrayF32.class);
surf.detect(gray);
return surf;
}
Example 11: getFastSurf
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* Returns SURF descriptors for an image using the settings above. Uses the BoofCV fast SURF algorithm,
* which yields somewhat less robust descriptors but operates a bit faster.
*
* @param image Image for which to obtain the SURF descriptors.
* @return DetectDescribePoint holding the detected interest points and their SURF descriptors.
*/
public static DetectDescribePoint<GrayF32, BrightFeature> getFastSurf(BufferedImage image) {
/* Obtain raw SURF descriptors using the configuration above (FH-9 according to [1]). */
GrayF32 gray = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);
ConfigFastHessian config = new ConfigFastHessian(0, 2, FH_MAX_FEATURES_PER_SCALE, FH_INITIAL_SAMPLE_SIZE, FH_INITIAL_SIZE, FH_NUMBER_SCALES_PER_OCTAVE, FH_NUMBER_OF_OCTAVES);
DetectDescribePoint<GrayF32, BrightFeature> surf = FactoryDetectDescribe.surfFast(config, null, null, GrayF32.class);
surf.detect(gray);
return surf;
}
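A sketch of how the object returned by either helper can be consumed, using the same accessors already shown in Example 8; image here stands for any BufferedImage, and the FH_* constants are assumed to be defined elsewhere in the class:
DetectDescribePoint<GrayF32, BrightFeature> surf = getStableSurf(image);
for (int i = 0; i < surf.getNumberOfFeatures(); i++) {
	Point2D_F64 where = surf.getLocation(i);                  // pixel coordinate of the interest point
	double[] descriptor = surf.getDescription(i).getValue();  // 64-element SURF vector
	// ... hand location/descriptor pairs to matching, indexing, etc.
}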
Example 12: init
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
@Override
public void init(RunConfig config) throws InvalidTestFormatException {
super.init(config);
File file = new File(GR.getGoldenDir(), goldenFileName);
try {
InputStream inImageStream = MTTestResourceManager.openFileAsInputStream(file.getPath());
image = UtilImageIO.loadPGM_U8(inImageStream, (ImageUInt8) null);
detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(
10, 2, 100, 2, 9, 3, 4));
} catch (IOException e) {
throw new GoldenFileNotFoundException(file, this.getClass());
}
}
Example 13: getDesc
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
public DetectDescribePoint<ImageFloat32,SurfFeature> getDesc( BufferedImage src ) {
ImageFloat32 image = convertImage(src);
// create the detector and descriptors
DetectDescribePoint<ImageFloat32,SurfFeature> surf = FactoryDetectDescribe.
surfFast(new ConfigFastHessian(0, 2, 200, 2, 9, 4, 4), null, null, ImageFloat32.class);
// specify the image to process
surf.detect(image);
return surf;
}
Example 14: detect
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
public static <T extends ImageSingleBand>
void detect( BufferedImage image , Class<T> imageType ) {
T input = ConvertBufferedImage.convertFromSingle(image, null, imageType);
// Create a Fast Hessian detector from the SURF paper.
// Other detectors can be used in this example too.
InterestPointDetector<T> detector = FactoryInterestPoint.fastHessian(
new ConfigFastHessian(10, 2, 100, 2, 9, 3, 4));
// find interest points in the image
detector.detect(input);
// Show the features
displayResults(image, detector);
}
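For reference, the seven constructor arguments used throughout these examples map onto the following ConfigFastHessian fields (names as documented for this generation of BoofCV; check the Javadoc of your exact version):
InterestPointDetector<T> detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(
		10,   // detectThreshold       - minimum feature intensity accepted by the detector
		2,    // extractRadius         - radius used for non-maximum suppression
		100,  // maxFeaturesPerScale   - cap per scale; <= 0 returns all features
		2,    // initialSampleSize     - how densely pixels are sampled in the first octave
		9,    // initialSize           - width of the smallest Fast-Hessian kernel
		3,    // numberScalesPerOctave - feature sizes considered per octave
		4));  // numberOfOctaves       - octaves in the scale space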
Example 15: createFromPremade
import boofcv.abst.feature.detect.interest.ConfigFastHessian; // import the required package/class
/**
* For some features, there are pre-made implementations of DetectDescribePoint. This has only been done
* in situations where there was a performance advantage or where it was a very common combination.
*/
public static <T extends ImageSingleBand, TD extends TupleDesc>
DetectDescribePoint<T, TD> createFromPremade( Class<T> imageType ) {
return (DetectDescribePoint)FactoryDetectDescribe.surfStable(
new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4), null,null, ImageDataType.single(ImageFloat32.class));
// note that SIFT only supports ImageFloat32
// if( imageType == ImageFloat32.class )
// return (DetectDescribePoint)FactoryDetectDescribe.sift(null,new ConfigSiftDetector(2,0,200,5),null,null);
// else
// throw new RuntimeException("Unsupported image type");
}