本文整理汇总了Java中org.openimaj.image.ImageUtilities.readMBF方法的典型用法代码示例。如果您正苦于以下问题:Java ImageUtilities.readMBF方法的具体用法?Java ImageUtilities.readMBF怎么用?Java ImageUtilities.readMBF使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.openimaj.image.ImageUtilities
的用法示例。
在下文中一共展示了ImageUtilities.readMBF方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: PuppeteerDemo
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* Default constructor.
*
* @throws MalformedURLException
* @throws IOException
*/
public PuppeteerDemo() throws MalformedURLException, IOException {
    super("iSight");
    tracker.fcheck = true;

    // Pre-process each puppet image once with a throw-away tracker so the
    // triangle meshes are ready before live tracking starts.
    final CLMFaceTracker meshTracker = new CLMFaceTracker();
    final URL[] sources = {
            PuppeteerDemo.class.getResource("images/mark.jpg")
    };
    for (final URL source : sources) {
        final MBFImage puppetImage = ImageUtilities.readMBF(source);
        meshTracker.track(puppetImage);
        // Assumes the puppet image contains exactly one trackable face
        final TrackedFace detected = meshTracker.getTrackedFaces().get(0);
        puppets.add(IndependentPair.pair(puppetImage, meshTracker.getTriangles(detected)));
        meshTracker.reset();
    }
}
示例2: loadFile
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
private void loadFile(File f) throws IOException
{
    // Read the image and report its dimensions
    System.out.println("Analysing " + f);
    TestGUI.this.img = ImageUtilities.readMBF(f);
    System.out.println("Image Dimensions: " + TestGUI.this.img.getWidth() + "x" + TestGUI.this.img.getHeight());

    // Time the watershed / MSER computation over the NTSC intensity band
    final long startTime = System.currentTimeMillis();
    final MSERFeatureGenerator detector = new MSERFeatureGenerator(1, 1, 1, 0f, 0.7f, PixelsFeature.class);
    TestGUI.this.mergeTrees = detector.performWatershed(Transforms.calculateIntensityNTSC(img));
    final long elapsed = System.currentTimeMillis() - startTime;

    // Report throughput statistics
    final int pixelCount = img.getWidth() * img.getHeight();
    System.out.println("--------------------------------------------");
    System.out.println("Time taken: " + elapsed + " milliseconds");
    System.out.println("Number of pixels: " + pixelCount);
    System.out.println("Pixels per second: " + pixelCount / (elapsed / (double) 1000));
    System.out.println("--------------------------------------------");

    updateMSER();
}
示例3: render
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
@Override
public void render(final MBFImageRenderer renderer, final Matrix transform, final Rectangle rectangle) {
    // Lazily load the overlay image and compute the transform that maps its
    // bounds onto the target rectangle.
    if (this.toRender == null) {
        try {
            this.toRender = ImageUtilities.readMBF(VideoSIFT.class
                    .getResource("/org/openimaj/demos/OpenIMAJ.png"));
        } catch (final IOException e) {
            System.err.println("Can't load image to render");
            // FIX: previously fell through with toRender still null and threw
            // a NullPointerException on toRender.getBounds() below. Skip
            // rendering this frame; the load will be retried on the next call.
            return;
        }
        this.renderToBounds = TransformUtilities.makeTransform(this.toRender.getBounds(), rectangle);
    }

    // Project the cached image through the composed transform onto the
    // renderer's backing image.
    final MBFProjectionProcessor mbfPP = new MBFProjectionProcessor();
    mbfPP.setMatrix(transform.times(this.renderToBounds));
    mbfPP.accumulate(this.toRender);
    mbfPP.performProjection(0, 0, renderer.getImage());
}
示例4: main
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* @param args
* @throws IOException
*/
public static void main(String[] args) throws IOException {
    final MBFImage image = ImageUtilities.readMBF(new File("/Users/jsh2/Pictures/08-earth_shuttle2.jpg"));

    // Sort the packed-ARGB pixel values and write the result
    int[] pixels = image.toPackedARGBPixels();
    Arrays.sort(pixels);
    ImageUtilities.write(new MBFImage(pixels, image.getWidth(), image.getHeight()),
            new File("/Users/jsh2/Pictures/sorted.jpg"));

    // Shuffle the same (sorted) pixels and write that image too
    final List<Integer> boxed = Arrays.asList(ArrayUtils.toObject(pixels));
    Collections.shuffle(boxed);
    pixels = ArrayUtils.toPrimitive(boxed.toArray(new Integer[pixels.length]));
    ImageUtilities.write(new MBFImage(pixels, image.getWidth(), image.getHeight()),
            new File("/Users/jsh2/Pictures/shuffled.jpg"));
}
示例5: next
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
@Override
public ImageCollectionEntry<MBFImage> next() {
    // Pull the next URL + metadata pair and try to download/decode the image.
    final IndependentPair<URL, Map<String, String>> urlMeta = imageList.next();
    final URL u = urlMeta.firstObject();
    // FIX: try-with-resources closes the connection's stream (previously
    // leaked on every call).
    try (final java.io.InputStream stream = u.openConnection().getInputStream()) {
        final MBFImage image = ImageUtilities.readMBF(stream);
        final ImageCollectionEntry<MBFImage> entry = new ImageCollectionEntry<MBFImage>();
        entry.image = image;
        entry.meta = urlMeta.secondObject();
        // Accepted by default; defer to the selection filter when one is set
        entry.accepted = true;
        if (this.selection != null)
            entry.accepted = selection.acceptEntry(image);
        return entry;
    } catch (final IOException e) {
        // FIX: the failure was silently swallowed; log it so broken URLs are
        // visible. Still returns null to preserve the caller-facing contract.
        System.err.println("Failed to read image from " + u + ": " + e);
    }
    return null;
}
示例6: MultiPuppeteer
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* Default constructor.
*
* @throws MalformedURLException
* @throws IOException
*/
public MultiPuppeteer() throws MalformedURLException, IOException {
    // Configure the live tracker
    tracker.scale = 0.5f;
    tracker.fpd = 120;
    tracker.fcheck = false;

    // Use a separate, throw-away tracker to extract a triangle mesh from
    // each puppet image ahead of time.
    final CLMFaceTracker meshTracker = new CLMFaceTracker();
    final URL[] sources = {
            MultiPuppeteer.class.getResource("nigel.jpg"),
            MultiPuppeteer.class.getResource("wendy.png")
    };
    for (final URL source : sources) {
        MBFImage puppetImage = ImageUtilities.readMBF(source);

        // Pad small images up to at least 640x480 before tracking
        puppetImage = puppetImage.padding(
                Math.max(puppetImage.getWidth(), 640),
                Math.max(puppetImage.getHeight(), 480));

        meshTracker.track(puppetImage);
        // Assumes each puppet image contains exactly one trackable face
        final TrackedFace detected = meshTracker.getTrackedFaces().get(0);
        puppets.add(IndependentPair.pair(puppetImage, meshTracker.getTriangles(detected)));
        meshTracker.reset();
    }
}
示例7: main
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* Main method.
*
* @param args
* @throws IOException
*/
public static void main(String[] args) throws IOException {
    // FIX: removed the debug leftover `args = new String[] { "-v" };` which
    // forced the video branch and made the still-image branch unreachable
    // regardless of the actual command line.
    if (args.length > 0 && args[0].equals("-v")) {
        // Video mode: run the live mustache overlay
        new Mustache.VideoMustache();
    } else {
        // Image mode: add mustaches to the bundled sample image and show it
        MBFImage cimg = ImageUtilities.readMBF(Mustache.class
                .getResourceAsStream("/org/openimaj/demos/image/sinaface.jpg"));
        cimg = new Mustache().addMustaches(cimg);
        DisplayUtilities.display(cimg);
    }
}
示例8: PiecewiseMeshWarpDemo
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* Construct the demo
*
* @throws IOException
*/
public PiecewiseMeshWarpDemo() throws IOException {
    // Read the demo image, show it in a simple frame, and listen for mouse
    // movement over that frame.
    this.img = ImageUtilities.readMBF(this.getClass().getResource("/org/openimaj/demos/image/bird.png"));
    this.frame = DisplayUtilities.displaySimple(this.img);
    this.frame.addMouseMotionListener(this);
}
示例9: getMBFImage
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
@Override
public MBFImage getMBFImage() {
    // Resolve the image file under imageBase and load it, converting any
    // I/O failure into an unchecked exception.
    final File imageFile = new File(imageBase, imageName + imageExtension);
    try {
        return ImageUtilities.readMBF(imageFile);
    } catch (final IOException e) {
        throw new RuntimeException(e);
    }
}
示例10: setup
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* Setup tests
*
* @throws IOException
*/
@Before
public void setup() throws IOException {
    tmpImageFile = folder.newFile("cat.jpg");
    tmpNormImageFile = folder.newFile("catIntensityNormalised.jpg");

    // Copy the classpath resource into the temporary file.
    // FIX: the old loop caught write exceptions without advancing `read`
    // (a potential infinite loop), never closed the input stream, and the
    // finally block would NPE on fos.close() if the FileOutputStream
    // constructor itself threw. try-with-resources handles all of that.
    try (final InputStream is = this.getClass().getResourceAsStream("/org/openimaj/image/data/cat.jpg");
            final FileOutputStream fos = new FileOutputStream(tmpImageFile))
    {
        final byte[] arr = new byte[1024];
        int read;
        while ((read = is.read(arr)) != -1) {
            fos.write(arr, 0, read);
        }
    }

    // Load the copied image, build a normalised version, and write both out
    loaded = ImageUtilities.readMBF(tmpImageFile);
    normalised = Transforms.RGB_TO_RGB_NORMALISED(loaded);
    ImageUtilities.write(loaded, "jpg", tmpImageFile);
    ImageUtilities.write(normalised.getBand(1), "jpg", tmpNormImageFile);
    System.out.println("Image out: " + tmpImageFile);
    System.out.println("Normalised Image out: " + tmpNormImageFile);
}
示例11: process
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
@Override
public MBFImage process(byte[] img) throws IOException {
    // Decode the raw bytes into an image; apply the optional colour
    // transform when one is configured.
    final MBFImage decoded = ImageUtilities.readMBF(new ByteArrayInputStream(img));
    return ct == null ? decoded : ct.convert(decoded);
}
示例12: main
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
public static void main(String[] args) throws IOException {
    // Load both images; SIFT runs on the flattened (greyscale) versions
    final MBFImage srcColour = ImageUtilities.readMBF(monaLisaSource);
    final MBFImage tgtColour = ImageUtilities.readMBF(monaLisaTarget);
    final FImage srcGrey = srcColour.flatten();
    final FImage tgtGrey = tgtColour.flatten();

    // Extract difference-of-Gaussian SIFT features from each image
    final DoGSIFTEngine engine = new DoGSIFTEngine();
    final LocalFeatureList<Keypoint> srcFeats = engine.findFeatures(srcGrey);
    final LocalFeatureList<Keypoint> tgtFeats = engine.findFeatures(tgtGrey);

    // Match keypoints and fit a homography between the images with RANSAC
    final HomographyModel model = new HomographyModel();
    final SingleImageTransferResidual2d<HomographyModel> residual = new SingleImageTransferResidual2d<HomographyModel>();
    final RANSAC<Point2d, Point2d, HomographyModel> fitter = new RANSAC<Point2d, Point2d, HomographyModel>(model,
            residual, 5f, 1500, new RANSAC.BestFitStoppingCondition(), true);
    final ConsistentLocalFeatureMatcher2d<Keypoint> matcher = new ConsistentLocalFeatureMatcher2d<Keypoint>(
            new FastBasicKeypointMatcher<Keypoint>(8));
    matcher.setFittingModel(fitter);
    matcher.setModelFeatures(srcFeats);
    matcher.findMatches(tgtFeats);

    // Project the source bounds into the target image and draw the outline
    final Shape projected = srcGrey.getBounds().transform(model.getTransform().inverse());
    tgtColour.drawShape(projected, 10, RGBColour.BLUE);

    // Visualise the matches side-by-side, scaled down for display
    final MBFImage matchVis = MatchingUtilities.drawMatches(srcColour, tgtColour, matcher.getMatches(), RGBColour.RED);
    matchVis.processInplace(new ResizeProcessor(640, 480));
    DisplayUtilities.display(matchVis);
    ImageUtilities.write(matchVis, new File("/Users/ss/Desktop/keypoint-match-example.png"));
}
示例13: main
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
public static void main(String[] args) throws IOException {
// Load the reference test image (graf/img1) and reduce it to intensity,
// since the interest-point detectors operate on single-band images.
MBFImage image = ImageUtilities.readMBF(ImageIPDSIFTEngine.class.getResourceAsStream("/org/openimaj/image/feature/validator/graf/img1.ppm"));
FImage fimage = Transforms.calculateIntensity(image);
// Features are cached in this file; `force` below bypasses the cache.
File featureOut = new File("/tmp/img1.oi-sift-features");
LocalFeatureList<? extends InterestPointKeypoint<? extends InterestPointData>> kps = null;
boolean force = true;
// Harris detector (blurred image) wrapped in affine adaption so elliptical
// affine-covariant interest regions are produced.
HarrisIPD harrisIPD = new HarrisIPD(1.4f);
harrisIPD.setImageBlurred(true);
AffineAdaption affineIPD = new AffineAdaption(harrisIPD,new IPDSelectionMode.Threshold(250f));
affineIPD.setFastDifferentiationScale(true);
AbstractIPDSIFTEngine<EllipticInterestPointData> engine = new EllipticIPDSIFTEngine(affineIPD);
// Keep all detected points across scales; the commented lines below are
// alternative finder/selection configurations kept for experimentation.
engine.setFinderMode(new FinderMode.Basic<EllipticInterestPointData>());
// engine.setFinderMode(new FinderMode.Basic<InterestPointData>());
// engine.setSelectionMode(new IPDSelectionMode.Threshold(10000f));
engine.setSelectionMode(new IPDSelectionMode.All());
engine.setAcrossScales(true);
// Recompute and cache the features unless a cached copy exists (and force is off)
if (!featureOut.exists() || force) {
kps = engine.findFeatures(fimage);
IOUtils.writeBinary(featureOut, kps);
} else {
kps = MemoryLocalFeatureList.read(featureOut,
CircularInterestPointKeypoint.class);
}
// Draw the detected elliptical patches and attach a click listener so
// individual features can be inspected interactively.
InterestPointVisualiser<Float[], MBFImage> visualiser = InterestPointVisualiser
.visualiseKeypoints(image, kps);
MBFImage out = visualiser.drawPatches(RGBColour.RED, RGBColour.GREEN);
JFrame f = DisplayUtilities.display(out);
FeatureClickListener l = new FeatureClickListener();
l.setImage(kps, image);
l.setDisplayFrame(f);
f.getContentPane().addMouseListener(l);
}
示例14: main
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* Main method
*
* @param args
* @throws IOException
*/
public static void main(String[] args) throws IOException {
    // Load the image and convert to CIE Lab, where Euclidean distance better
    // approximates perceptual colour difference for clustering
    MBFImage input = ImageUtilities.readMBF(new URL("http://static.openimaj.org/media/tutorial/sinaface.jpg"));
    input = ColourSpace.convert(input, ColourSpace.CIE_Lab);

    // Cluster the pixel vectors with exact k-means and print the centroids
    final FloatKMeans cluster = FloatKMeans.createExact(3, 2);
    final float[][] imageData = input.getPixelVectorNative(new float[input.getWidth() * input.getHeight()][3]);
    final FloatCentroidsResult result = cluster.cluster(imageData);
    final float[][] centroids = result.centroids;
    for (final float[] centroid : centroids)
        System.out.println(Arrays.toString(centroid));

    // Quantise: replace every pixel with its nearest centroid
    final HardAssigner<float[], ?, ?> assigner = result.defaultHardAssigner();
    for (int y = 0; y < input.getHeight(); y++) {
        for (int x = 0; x < input.getWidth(); x++) {
            input.setPixelNative(x, y, centroids[assigner.assign(input.getPixelNative(x, y))]);
        }
    }

    // Back to RGB for display
    input = ColourSpace.convert(input, ColourSpace.RGB);
    DisplayUtilities.display(input);

    // Find connected components on the flattened image and label the ones
    // bigger than 50 pixels with a running index
    final GreyscaleConnectedComponentLabeler labeler = new GreyscaleConnectedComponentLabeler();
    final List<ConnectedComponent> components = labeler.findComponents(input.flatten());
    int index = 0;
    for (final PixelSet component : components) {
        if (component.calculateArea() < 50)
            continue;
        input.drawText("Point:" + (index++), component.calculateCentroidPixel(), HersheyFont.TIMES_MEDIUM, 20);
    }
    DisplayUtilities.display(input);
}
示例15: testOpponent
import org.openimaj.image.ImageUtilities; //导入方法依赖的package包/类
/**
* Test with the opponent colour space
*
* @throws IOException
*/
@Test
public void testOpponent() throws IOException {
// Analyse the logo with ColourDenseSIFT in the modified-opponent colour
// space, and independently with plain DenseSIFT on each converted band;
// the colour descriptors must equal the concatenation of the band results.
final MBFImage img = ImageUtilities.readMBF(OpenIMAJ.getLogoAsStream());
final ColourDenseSIFT cdsift = new ColourDenseSIFT(new DenseSIFT(), ColourSpace.MODIFIED_OPPONENT);
final DenseSIFT luminance_dsift = new DenseSIFT();
final DenseSIFT o1_dsift = new DenseSIFT();
final DenseSIFT o2_dsift = new DenseSIFT();
final MBFImage oppImg = ColourSpace.MODIFIED_OPPONENT.convertFromRGB(img);
cdsift.analyseImage(img);
luminance_dsift.analyseImage(oppImg.getBand(0));
o1_dsift.analyseImage(oppImg.getBand(1));
o2_dsift.analyseImage(oppImg.getBand(2));
// Same number of sampling points in every band...
assertEquals(cdsift.descriptors.length, luminance_dsift.descriptors.length);
assertEquals(cdsift.descriptors.length, o1_dsift.descriptors.length);
assertEquals(cdsift.descriptors.length, o2_dsift.descriptors.length);
// ...and each colour descriptor is three band descriptors long
assertEquals(cdsift.descriptors[0].length, 3 * luminance_dsift.descriptors[0].length);
final LocalFeatureList<ByteDSIFTKeypoint> cdbyte = cdsift.getByteKeypoints();
final LocalFeatureList<ByteDSIFTKeypoint> ldbyte = luminance_dsift.getByteKeypoints();
final LocalFeatureList<ByteDSIFTKeypoint> o1dbyte = o1_dsift.getByteKeypoints();
final LocalFeatureList<ByteDSIFTKeypoint> o2dbyte = o2_dsift.getByteKeypoints();
final LocalFeatureList<FloatDSIFTKeypoint> cdfloat = cdsift.getFloatKeypoints();
final LocalFeatureList<FloatDSIFTKeypoint> ldfloat = luminance_dsift.getFloatKeypoints();
final LocalFeatureList<FloatDSIFTKeypoint> o1dfloat = o1_dsift.getFloatKeypoints();
final LocalFeatureList<FloatDSIFTKeypoint> o2dfloat = o2_dsift.getFloatKeypoints();
for (int i = 0; i < cdsift.descriptors.length; i++) {
// Raw descriptor slices [0,128), [128,256), [256,384) of the colour
// descriptor must match the per-band descriptors exactly.
assertArrayEquals(luminance_dsift.descriptors[i], ArrayUtils.subarray(cdsift.descriptors[i], 0, 128), 0f);
assertArrayEquals(o1_dsift.descriptors[i], ArrayUtils.subarray(cdsift.descriptors[i], 128, 256), 0f);
assertArrayEquals(o2_dsift.descriptors[i], ArrayUtils.subarray(cdsift.descriptors[i], 256, 384), 0f);
// Byte keypoints: position/energy follow the first (luminance) band and
// the descriptor slices match each band's byte descriptor.
assertEquals(cdbyte.get(i).x, ldbyte.get(i).x, 0);
assertEquals(cdbyte.get(i).y, ldbyte.get(i).y, 0);
assertEquals(cdbyte.get(i).energy, ldbyte.get(i).energy, 0);
assertArrayEquals(ArrayUtils.subarray(cdbyte.get(i).descriptor, 0, 128), ldbyte.get(i).descriptor);
assertArrayEquals(ArrayUtils.subarray(cdbyte.get(i).descriptor, 128, 256), o1dbyte.get(i).descriptor);
assertArrayEquals(ArrayUtils.subarray(cdbyte.get(i).descriptor, 256, 384), o2dbyte.get(i).descriptor);
// Float keypoints: same structural checks as the byte variant above.
assertEquals(cdfloat.get(i).x, ldfloat.get(i).x, 0);
assertEquals(cdfloat.get(i).y, ldfloat.get(i).y, 0);
assertEquals(cdfloat.get(i).energy, ldfloat.get(i).energy, 0);
assertArrayEquals(ArrayUtils.subarray(cdfloat.get(i).descriptor, 0, 128), ldfloat.get(i).descriptor, 0f);
assertArrayEquals(ArrayUtils.subarray(cdfloat.get(i).descriptor, 128, 256), o1dfloat.get(i).descriptor, 0f);
assertArrayEquals(ArrayUtils.subarray(cdfloat.get(i).descriptor, 256, 384), o2dfloat.get(i).descriptor, 0f);
}
}