本文整理汇总了Java中org.openimaj.image.DisplayUtilities类的典型用法代码示例。如果您正苦于以下问题:Java DisplayUtilities类的具体用法?Java DisplayUtilities怎么用?Java DisplayUtilities使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
DisplayUtilities类属于org.openimaj.image包,在下文中一共展示了DisplayUtilities类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: MSEREllipseFinder
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Construct the demo: paint a few filled ellipses on a white canvas,
 * run MSER detection on the intensity image, then overlay each detected
 * region's fitted ellipse and its oriented bounding box.
 */
public MSEREllipseFinder() {
	final MBFImage canvas = new MBFImage(400, 400, ColourSpace.RGB);
	final MBFImageRenderer painter = canvas.createRenderer();
	canvas.fill(RGBColour.WHITE);

	// Synthetic input: three black ellipses at varying positions/orientations
	final List<Ellipse> shapes = new ArrayList<Ellipse>();
	shapes.add(new Ellipse(200, 100, 100, 80, Math.PI / 4));
	shapes.add(new Ellipse(200, 300, 50, 30, -Math.PI / 4));
	shapes.add(new Ellipse(100, 300, 30, 50, -Math.PI / 3));
	for (final Ellipse shape : shapes)
		painter.drawShapeFilled(shape, RGBColour.BLACK);

	// Detect MSERs on the NTSC intensity version of the image
	final MSERFeatureGenerator detector = new MSERFeatureGenerator(MomentFeature.class);
	final List<Component> regions = detector.generateMSERs(Transforms
			.calculateIntensityNTSC(canvas));

	// Overlay each region's ellipse (red) and oriented box (green)
	for (final Component region : regions) {
		final MomentFeature moments = region.getFeature(MomentFeature.class);
		painter.drawShape(moments.getEllipse(2), RGBColour.RED);
		painter.drawShape(moments.getEllipse(2)
				.calculateOrientedBoundingBox(), RGBColour.GREEN);
	}

	DisplayUtilities.display(canvas);
}
示例2: analyseImage
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
@Override
public void analyseImage(FImage image) {
	// Forward FFT of the image (no centering); inspect the magnitude spectrum
	final FourierTransform ft = new FourierTransform(image, false);
	final FImage mag = ft.getMagnitude();

	// Count spectral coefficients whose magnitude exceeds the threshold
	int above = 0;
	for (int row = 0; row < mag.height; row++) {
		for (int col = 0; col < mag.width; col++) {
			if (Math.abs(mag.pixels[row][col]) > threshold)
				above++;
		}
	}

	// bpp = fraction of coefficients above threshold; shown as the window title
	bpp = (double) above / (double) (mag.height * mag.width);
	DisplayUtilities.display(image, "" + bpp);
}
示例3: drawPositivePatches
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Render the nearest-neighbour classifier's positive patches as a grid of
 * 50x50 thumbnails in a window named "patches".
 */
private void drawPositivePatches() {
	final NNClassifier nn = tldMain.tld.detectorCascade.getNNClassifier();
	final List<NormalizedPatch> positives = nn.getPositivePatches();

	// Grid geometry: 5 columns, at least 6 rows, each cell 50x50 px
	final Rectangle patchBounds = new Rectangle(0, 0, NormalizedPatch.TLD_PATCH_SIZE, NormalizedPatch.TLD_PATCH_SIZE);
	final int cols = 5;
	final int rows = Math.max(6, (positives.size() / cols) + 1);

	final FImage grid = new FImage(50 * cols, 50 * rows);
	grid.fill(1f); // white background

	final Rectangle cell = new Rectangle(0, 0, 50, 50);
	int idx = 0;
	for (final NormalizedPatch patch : positives) {
		cell.x = (idx % cols) * 50;
		cell.y = (idx / cols) * 50;
		// Stop once the grid is full
		if ((idx / cols) >= rows)
			break;
		ResizeProcessor.zoom(patch.normalisedPatch, patchBounds, grid, cell);
		idx++;
	}
	DisplayUtilities.displayName(grid, "patches", true);
}
示例4: regenAndDisplay
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Rebuild the output image by sampling the source through the distortion
 * model, accumulate the round-trip (distort then undistort) error, and
 * display the result, reusing the frame after the first call.
 */
private void regenAndDisplay() {
	double roundTripError = 0;

	for (float y = 0; y < outImage.getHeight(); y++) {
		for (float x = 0; x < outImage.getWidth(); x++) {
			final Point2dImpl target = new Point2dImpl(x, y);
			final Point2d warped = getDistortedPoint(target);

			// Only points whose distorted position lands inside the source
			// image contribute to the error measure
			if (image.getBounds().isInside(warped)) {
				final Point2d recovered = getUndistortedPoint(warped);
				roundTripError += new Line2d(target, recovered)
						.calculateLength();
			}

			// Sample the source (black outside bounds) into the output
			outImage.setPixel((int) x, (int) y, image.getPixelInterp(
					warped.getX(), warped.getY(), RGBColour.BLACK));
		}
	}
	System.out.println("Sum difference: " + roundTripError);

	if (this.outFrame == null) {
		outFrame = DisplayUtilities.display(outImage);
	} else {
		DisplayUtilities.display(outImage, outFrame);
	}
}
示例5: main
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Test the distance transform
 * @param args
 * @throws IOException
 */
public static void main(String args[]) throws IOException {
	FImage img = ImageUtilities.readF(new File("/Users/ss/Desktop/tache.jpg"));
	final EuclideanDistanceTransform transform = new EuclideanDistanceTransform();

	// i.processInplace(new CannyEdgeDetector());
	img.inverse();

	// Mark fully-white pixels as "infinitely far" seeds for the transform
	for (int x = 0; x < img.width; x++) {
		for (int y = 0; y < img.height; y++) {
			if (img.pixels[y][x] == 1.0f)
				img.setPixel(x, y, Float.MAX_VALUE);
		}
	}
	DisplayUtilities.display(img);

	// Compute, normalise and show the distance map
	img.analyseWith(transform);
	img = transform.getDistances();
	img.normalise();
	DisplayUtilities.display(img);
}
示例6: tryDuelingCarl
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Stream the same YouTube video twice: first at the default quality just
 * counting frames, then at 144p while displaying every frame.
 */
private void tryDuelingCarl() throws MalformedURLException {
	final String testVideo = "https://www.youtube.com/watch?v=t-7mQhSZRgM";

	// Pass 1: default quality, count frames only
	VGetVideo video = new VGetVideo(testVideo);
	int defaultQualityFrames = 0;
	while (video.hasNextFrame()) {
		defaultQualityFrames++;
		video.getNextFrame();
	}
	System.out.println("Seen frames: " + defaultQualityFrames);

	// Pass 2: force 144p and render each frame to a window
	final VideoInfoUser quality = new VideoInfoUser();
	quality.setUserQuality(VideoQuality.p144);
	video = new VGetVideo(testVideo, quality);

	int lowQualityFrames = 0;
	while (video.hasNextFrame()) {
		lowQualityFrames++;
		DisplayUtilities.displayName(video.getNextFrame(), "frame");
	}
	System.out.println("Low Quality frames: " + lowQualityFrames);
}
示例7: run
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
@Override
public void run() {
	// Keep redrawing while render mode is active
	while (renderMode) {
		final MBFImage roi = this.image.extractROI(this.visibleArea);

		// A pending clear request blanks the visible area once
		if (clear) {
			roi.fill(RGBColour.WHITE);
			this.clear = false;
		}

		// Let the current mode paint, composite back, and show the result
		this.mode.drawToImage(roi);
		this.image.drawImage(roi, 0, 0);
		DisplayUtilities.display(this.image, this);
	}
}
示例8: extractFeatures
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Extract oriented gradient features from an image. The stroke-width
 * transform is used to find connected components; each component is fitted
 * with an ellipse (3-sigma covariance fit around its centroid) and gradient
 * features are extracted over that ellipse.
 *
 * @param img the input image
 * @return a map from fitted ellipse to extracted feature vector
 */
private static Map<Ellipse, OrientedFeatureVector> extractFeatures(FImage img) {
	final StrokeWidthTransform swt = new StrokeWidthTransform(false, 1.0f);
	final FImage swtImage = img.process(swt);
	DisplayUtilities.display(StrokeWidthTransform.normaliseImage(swtImage));

	final EllipseGradientFeatureExtractor extractor = new EllipseGradientFeatureExtractor();
	final Map<Ellipse, OrientedFeatureVector> features = new HashMap<Ellipse, OrientedFeatureVector>();

	for (final ConnectedComponent component : findComponents(swtImage)) {
		final double[] centre = component.calculateCentroid();
		final Matrix cov = computeCovariance(component, centre);
		final Ellipse fitted = EllipseUtilities.ellipseFromCovariance(
				(float) centre[0], (float) centre[1], cov, 3f);

		for (final OrientedFeatureVector vector : extractor.extract(img, fitted))
			features.put(fitted, vector);
	}
	return features;
}
示例9: PowerCepstrumVis
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Construct a scrolling power-cepstrum visualisation that consumes the
 * given audio stream, painting each new cepstrum into the right-most
 * column and scrolling the image left by one pixel per chunk.
 *
 * @param as the audio stream to visualise
 * @throws Exception
 */
public PowerCepstrumVis( AudioStream as ) throws Exception
{
	final FImage img = new FImage( 1000, 600 );
	final PowerCepstrumTransform pct = new PowerCepstrumTransform();

	SampleChunk chunk;
	while( (chunk = as.nextSampleChunk()) != null )
	{
		pct.process( chunk );
		final float[][] cepstrum = pct.getLastCepstrum();

		// Newest cepstrum goes into the right-most column (scaled down)
		for( int bin = 0; bin < cepstrum[0].length; bin++ )
			img.setPixel( img.getWidth()-1, bin, cepstrum[0][bin]/50f );

		// Scroll one pixel left, then refresh the display
		img.shiftLeftInplace();
		DisplayUtilities.displayName( img, "Power Cepstrum" );
	}
}
示例10: main
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Load a set of point-list shapes with their connections and source images,
 * then render each shape (red connection lines, one fixed random colour per
 * model point) over its image.
 */
public static void main(String[] args) throws IOException {
	final List<PointList> shapes = loadData();
	final PointListConnections connections = loadConnections();
	final List<FImage> images = loadImages();

	System.out.println(shapes.size());
	System.out.println(images.size());

	// One random colour per model point, reused across every rendering
	final Float[][] pointColours = new Float[shapes.get(0).size()][];
	for (int i = 0; i < pointColours.length; i++)
		pointColours[i] = RGBColour.randomColour();

	for (int j = 0; j < shapes.size(); j++) {
		final PointList shape = shapes.get(j);
		final MBFImage canvas = images.get(j).toRGB();

		final List<Line2d> edges = connections.getLines(shape);
		canvas.drawLines(edges, 1, RGBColour.RED);

		for (int i = 0; i < shape.size(); i++) {
			final Point2d pt = shape.get(i);
			canvas.drawPoint(pt, pointColours[i], 3);
		}
		DisplayUtilities.display(canvas);
	}
}
示例11: main
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Detect faces in a test image, display the (20%-grown) detection boxes on
 * an RGB copy, then CLM-fit and display the aligned first face.
 */
public static void main(String[] args) throws IOException {
	final FImage image = ImageUtilities.readF(new File("/Users/jsh2/Desktop/test-images/A7K9ZlZCAAA9VoL.jpg"));

	final CLMFaceDetector detector = new CLMFaceDetector();
	final List<Rectangle> detections = detector.getConfiguration().faceDetector.detect(image);

	// Visualise detections: grow each box 20% about its centroid, draw red
	final MBFImage visualisation = new MBFImage(image.clone(), image.clone(), image.clone());
	for (final Rectangle box : detections) {
		box.scaleCentroid(1.2f);
		visualisation.drawShape(box, RGBColour.RED);
	}
	DisplayUtilities.display(visualisation);

	// Fit the CLM model within the detections and show the aligned first face
	final List<CLMDetectedFace> faces = detector.detectFaces(image, detections);
	final CLMAligner aligner = new CLMAligner();
	DisplayUtilities.display(aligner.align(faces.get(0)));
}
示例12: redraw
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Repaint the composite display: the first image in the list fills the main
 * slot; the rest are laid out as a thumbnail grid to its right.
 */
private void redraw() {
	this.display.fill(RGBColour.WHITE);

	boolean mainSlotFree = true;
	int thumbIndex = 0;
	for (final MBFImage img : this.displayList) {
		if (mainSlotFree) {
			// Most recent image occupies the large slot at the origin
			mainSlotFree = false;
			this.display.drawImage(img.process(this.mainResizer), 0, 0);
		} else {
			// Remaining images tile the thumbnail grid, row-major
			final int row = thumbIndex / GRID_NX;
			final int col = thumbIndex - (row * GRID_NX);
			this.display.drawImage(img.process(this.thumbResizer),
					this.thumbXOffset + (col * GRID_W), row * GRID_H);
			thumbIndex++;
		}
	}
	DisplayUtilities.displayName(display, "Pics, slurped!");
}
示例13: main
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Compare SIFT keypoint matching between two UKBench images using the full
 * descriptor sets versus entropy-filtered subsets, printing counts and
 * displaying the matches for each.
 */
public static void main(String[] args) throws IOException {
	final FImage image1 = ImageUtilities.readF(new File("/Users/jsh2/Data/ukbench/full/ukbench00000.jpg"));
	final FImage image2 = ImageUtilities.readF(new File("/Users/jsh2/Data/ukbench/full/ukbench00001.jpg"));

	// Extract DoG-SIFT features from 150px-max-dimension versions
	final DoGSIFTEngine engine = new DoGSIFTEngine();
	final LocalFeatureList<Keypoint> keys1 = engine.findFeatures(ResizeProcessor.resizeMax(image1, 150));
	final LocalFeatureList<Keypoint> keys2 = engine.findFeatures(ResizeProcessor.resizeMax(image2, 150));

	// Entropy-filtered subsets of the same descriptors
	final List<Keypoint> keys1f = FilterUtils.filter(keys1, new ByteEntropyFilter());
	final List<Keypoint> keys2f = FilterUtils.filter(keys2, new ByteEntropyFilter());
	System.out.println(keys1.size() + " " + keys1f.size());
	System.out.println(keys2.size() + " " + keys2f.size());

	final FastEuclideanKeypointMatcher<Keypoint> matcher = new FastEuclideanKeypointMatcher<Keypoint>(8000);

	// Round 1: match with the unfiltered features
	matcher.setModelFeatures(keys1);
	matcher.findMatches(keys2);
	System.out.println(matcher.getMatches().size());
	DisplayUtilities.display(MatchingUtilities.drawMatches(image1, image2, matcher.getMatches(), 1F));

	// Round 2: match with the filtered features only
	matcher.setModelFeatures(keys1f);
	matcher.findMatches(keys2f);
	System.out.println(matcher.getMatches().size());
	DisplayUtilities.display(MatchingUtilities.drawMatches(image1, image2, matcher.getMatches(), 1F));
}
示例14: KinectDepthSnapshot
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
/**
 * Default constructor
 * @param id of kinect controller
 * @throws KinectException
 */
public KinectDepthSnapshot(int id) throws KinectException {
	controller = new KinectController(id, irmode, true);

	final GraphicsEnvironment graphicsEnv = GraphicsEnvironment.getLocalGraphicsEnvironment();
	final GraphicsDevice[] screens = graphicsEnv.getScreenDevices();

	// Frame sized to the current screen dimensions
	final JFrame sizingFrame = new JFrame("Full Screen JFrame");
	sizingFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
	screenWidth = Toolkit.getDefaultToolkit().getScreenSize().width;
	screenHeight = Toolkit.getDefaultToolkit().getScreenSize().height;
	fullScreenResizeProcessor = new ResizeProcessor(screenWidth, screenHeight);
	sizingFrame.setBounds(0, 0, screenWidth, screenHeight);

	// Wrap this video source in a display component
	videoFrame = VideoDisplay.createVideoDisplay(this, new DisplayUtilities.ImageComponent(true));

	// Undecorated always-on-top window shown full-screen on the first device
	final JFrame fullScreenWindow = new JFrame();
	fullScreenWindow.setUndecorated(true);
	fullScreenWindow.setAlwaysOnTop(true);
	fullScreenWindow.getContentPane().add(videoFrame.getScreen());
	screens[0].setFullScreenWindow(fullScreenWindow);

	// Close the app when the video window closes; listen for key presses
	((JFrame) SwingUtilities.getRoot(videoFrame.getScreen())).setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
	SwingUtilities.getRoot(videoFrame.getScreen()).addKeyListener(this);
}
示例15: beforeUpdate
import org.openimaj.image.DisplayUtilities; //导入依赖的package包/类
@Override
public void beforeUpdate(MBFImage frame) {
	DisplayUtilities.displayName(frame, "video");

	// Lazily create the renderer on first frame
	if (renderer == null) {
		this.renderer = frame.createRenderer();
	}

	// this.renderer.drawShapeFilled(targetArea, RGBColour.RED);
	updatePolygon();

	// Project the capture frame into video space
	final ProjectionProcessor<Float[], MBFImage> projector = new MBFProjectionProcessor();
	projector.setMatrix(captureToVideo);
	projector.accumulate(nextCaptureFrame);

	// If a target region is set, also warp a copy of the current frame onto it
	if (this.targetArea != null) {
		final Matrix homography = TransformUtilities.homographyMatrixNorm(pointList);
		projector.setMatrix(homography);
		projector.accumulate(frame.clone());
	}

	// Render the accumulated projections back into the frame
	synchronized (this) {
		projector.performProjection(0, 0, frame);
	}
}