This article collects typical usage examples of the Java method org.openimaj.image.DisplayUtilities.displayName. If you are unsure what DisplayUtilities.displayName does, how to call it, or where it is typically used, the curated examples below should help. You can also read more about the enclosing class, org.openimaj.image.DisplayUtilities.
The following shows 15 code examples of the DisplayUtilities.displayName method, ordered by popularity by default.
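Before the examples, a minimal sketch of the call itself may be useful (an illustrative snippet, not taken from any of the projects below; it assumes OpenIMAJ is on the classpath and uses a placeholder image path). DisplayUtilities.displayName(image, name) shows the image in a window identified by name and returns the JFrame; calling it again with the same name updates that window instead of opening a new one, which is why most of the examples below call it inside a loop.

import java.io.File;
import java.io.IOException;

import org.openimaj.image.DisplayUtilities;
import org.openimaj.image.ImageUtilities;
import org.openimaj.image.MBFImage;

public class DisplayNameDemo {
	public static void main(String[] args) throws IOException {
		// Load an image to display (placeholder path).
		final MBFImage image = ImageUtilities.readMBF(new File("/path/to/image.png"));

		// Open (or update) the window named "demo" and show the image in it.
		DisplayUtilities.displayName(image, "demo");
	}
}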
Example 1: PowerCepstrumVis
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
/**
 * Construct the visualisation and start processing the given audio stream.
 *
 * @param as the audio stream to visualise
 * @throws Exception if the stream cannot be processed
 */
public PowerCepstrumVis( AudioStream as ) throws Exception
{
	FImage img = new FImage( 1000, 600 );
	PowerCepstrumTransform pct = new PowerCepstrumTransform();
	SampleChunk sc = null;
	while( (sc = as.nextSampleChunk()) != null )
	{
		pct.process( sc );
		float[][] c = pct.getLastCepstrum();

		// Draw the latest cepstrum into the right-most column, then scroll the image left
		for( int i = 0; i < c[0].length; i++ )
			img.setPixel( img.getWidth()-1, i, c[0][i]/50f );
		img.shiftLeftInplace();

		DisplayUtilities.displayName( img, "Power Cepstrum" );
	}
}
Example 2: beforeUpdate
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
@Override
public void beforeUpdate(MBFImage frame) {
	DisplayUtilities.displayName(frame, "video");
	if (renderer == null) {
		this.renderer = frame.createRenderer();
	}
	// this.renderer.drawShapeFilled(targetArea, RGBColour.RED);
	updatePolygon();
	final ProjectionProcessor<Float[], MBFImage> proc = new MBFProjectionProcessor();
	proc.setMatrix(captureToVideo);
	proc.accumulate(nextCaptureFrame);
	if (this.targetArea != null) {
		final Matrix transform = TransformUtilities.homographyMatrixNorm(pointList);
		proc.setMatrix(transform);
		proc.accumulate(frame.clone());
	}
	synchronized (this) {
		proc.performProjection(0, 0, frame);
	}
}
Example 3: main
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
public static void main(String[] args) throws IOException {
	final XuggleVideo xv = new XuggleVideo(new File("/Users/jon/Desktop/merlin/tunnel.mp4"));
	final FImage bg = ResizeProcessor.halfSize(
			ImageUtilities.readF(new File("/Users/jon/Desktop/merlin/tunnel-background.png")));
	// final XuggleVideoWriter xvw = new XuggleVideoWriter("/Users/jon/Desktop/merlin/tunnel-proc.mp4",
	//		bg.width, bg.height, xv.getFPS());

	for (final MBFImage frc : xv) {
		final FImage fr = ResizeProcessor.halfSize(frc.flatten());
		final MBFImage diff = diff(bg, fr);

		// xvw.addFrame(diff);
		DisplayUtilities.displayName(diff, "");
	}
	// xvw.close();
}
Example 4: HandWritingInputDisplay
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
public HandWritingInputDisplay(MLDataSet training) {
	this.imageValues = new double[(int) training.getRecordCount()][];
	this.numberValues = new int[(int) training.getRecordCount()];

	int index = 0;
	for (MLDataPair mlDataPair : training) {
		this.imageValues[index] = mlDataPair.getInputArray();

		// The ideal output is one-hot encoded; find the set index and map it back to a digit
		int yIndex = 0;
		while (mlDataPair.getIdealArray()[yIndex] != 1)
			yIndex++;
		this.numberValues[index] = (yIndex + 1) % 10;
		index++;
	}
	this.currentImageIndex = 0;
	rp = new ResizeProcessor(200, 200);

	JFrame frame = DisplayUtilities.displayName(this.getCurrentImage(), "numbers");
	frame.addKeyListener(this);
}
Example 5: tryDuelingCarl
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
private void tryDuelingCarl() throws MalformedURLException {
	final String testVideo = "https://www.youtube.com/watch?v=t-7mQhSZRgM";
	VGetVideo v = new VGetVideo(testVideo);
	int frames = 0;
	while (v.hasNextFrame()) {
		frames++;
		v.getNextFrame();
	}
	System.out.println("Seen frames: " + frames);

	final VideoInfoUser user = new VideoInfoUser();
	user.setUserQuality(VideoQuality.p144);
	v = new VGetVideo(testVideo, user);
	int newframes = 0;
	while (v.hasNextFrame()) {
		newframes++;
		// v.getNextFrame();
		DisplayUtilities.displayName(v.getNextFrame(), "frame");
	}
	System.out.println("Low Quality frames: " + newframes);
}
Example 6: redraw
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
private void redraw() {
	this.display.fill(RGBColour.WHITE);

	boolean first = true;
	// Start at the end (i.e. most recent)
	int ind = 0;
	for (final MBFImage img : this.displayList) {
		if (first) {
			first = false;
			// main image!
			this.display.drawImage(img.process(this.mainResizer), 0, 0);
		} else {
			final int y = ind / GRID_NX;
			final int x = (ind - (y * GRID_NX));
			this.display.drawImage(img.process(this.thumbResizer),
					this.thumbXOffset + (x * GRID_W), y * GRID_H);
			ind++;
		}
	}
	DisplayUtilities.displayName(display, "Pics, slurped!");
}
Example 7: drawPointCloud
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
private void drawPointCloud(FImage depth, MBFImage frame, int xmin, int ymin, int xmax, int ymax, float xdiv,
		float ydiv)
{
	v3d.fill(RGBColour.BLACK);

	final List<Simple3D.Primative> points = new ArrayList<Simple3D.Primative>();
	final float stepx = 1; // (xmax - xmin) / xdiv;
	final float stepy = 1; // (ymax - ymin) / ydiv;

	float meanDepth = 0;
	int count = 0;
	final float[] xyz = new float[3];
	final double factor = controller.computeScalingFactor();
	for (int y = ymin; y < ymax; y += stepy) {
		for (int x = xmin; x < xmax; x += stepx) {
			final int d = (int) depth.pixels[y][x];
			if (d > 0) {
				// double[] xyz = controller.cameraToWorld(x, y, d);
				controller.cameraToWorld(x, y, d, factor, xyz);
				// writer.printf("%4.2f %4.2f %4.2f\n", xyz[0], xyz[1], xyz[2]);
				points.add(new Simple3D.Point3D(xyz[0], -xyz[1], -xyz[2], frame.getPixel(x, y), 1));
				meanDepth -= xyz[2];
				count++;
			}
		}
	}
	meanDepth /= count;

	final double ax = Math.PI / 4;
	final Simple3D.Scene scene = new Simple3D.Scene(points);
	scene.translate(0, (int) (Math.tan(ax) * meanDepth), 0);
	scene.renderOrtho(Simple3D.euler2Rot(ax, 0, 0), v3d);

	DisplayUtilities.displayName(v3d, "3d");
}
Example 8: drawLine
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
private static void drawLine(MBFImage img, double[] startD, double[] endD) {
	final Point2d lineStart = new Point2dImpl((float) startD[0], (float) startD[1]);
	final Point2d lineEnd = new Point2dImpl((float) endD[0], (float) endD[1]);
	final Line2d line = new Line2d(lineStart, lineEnd);
	// System.out.println("Drawing: " + line);
	img.drawLine(line, 3, RGBColour.GREEN);
	// img.drawPoint(new Point2dImpl((float) origin.get(0), (float) origin.get(1)), RGBColour.RED, 5);
	DisplayUtilities.displayName(img, "line");
}
Example 9: displayEllipsesZoomed
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
@SuppressWarnings("unused")
private static void displayEllipsesZoomed(Ellipse ellipse1, Ellipse ellipse2) {
	final int zoomHeight = 400;
	final int zoomWidth = 400;
	final int midzoomx = zoomWidth / 2;
	final int midzoomy = zoomHeight / 2;
	final double e1Radius = getRadius(ellipse1, 1);
	final double scale = (zoomWidth * 0.50) / e1Radius;
	final Matrix scaleMatrix = TransformUtilities.scaleMatrixAboutPoint(1 / scale, 1 / scale, 0, 0);
	final MBFImage zoomed = new MBFImage(zoomWidth, zoomHeight, ColourSpace.RGB);

	// Move ellipse1's centroid to the origin, scale, then translate to the centre of the zoomed image
	Matrix translateE1 = Matrix.identity(3, 3);
	translateE1 = translateE1.times(TransformUtilities.translateToPointMatrix(
			new Point2dImpl(0, 0), new Point2dImpl(midzoomx, midzoomy)));
	translateE1 = translateE1.times(scaleMatrix);
	translateE1 = translateE1.times(TransformUtilities.translateToPointMatrix(
			ellipse1.calculateCentroid(), new Point2dImpl(0, 0)));
	final Ellipse expandedTranslated1 = ellipse1.transformAffine(translateE1);
	final Ellipse expandedTranslated2 = ellipse2.transformAffine(translateE1);

	zoomed.drawShape(expandedTranslated1, RGBColour.RED);
	zoomed.drawShape(expandedTranslated2, RGBColour.BLUE);

	DisplayUtilities.displayName(zoomed, "zoomed image");
	System.out.println();
}
Example 10: performInpainting
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
protected void performInpainting(FImage image) {
	this.template = image.newInstance(windowHalfSize * 2 + 1, windowHalfSize * 2 + 1);

	float maxErrThreshold = INITIAL_MAX_ERR_THRESHOLD;
	while (true) {
		final List<FValuePixel> pixelList = getUnfilledNeighbours();
		if (pixelList.size() == 0)
			return;

		boolean progress = false;
		for (final Pixel p : pixelList) {
			// template = getNeighborhoodWindow(Pixel);
			setTemplate(p.x, p.y, image);
			final List<FValuePixel> bestMatches = findMatches(image);
			final FValuePixel bestMatch = bestMatches.get((int) (Math.random() * bestMatches.size()));
			if (bestMatch.value < maxErrThreshold) {
				image.pixels[p.y][p.x] = image.pixels[bestMatch.y][bestMatch.x];
				mask.pixels[p.y][p.x] = 0;
				progress = true;

				DisplayUtilities.displayName(image, "");
				System.out.println(p);
			}
		}

		// If no pixel could be filled in this pass, relax the error threshold
		if (!progress)
			maxErrThreshold *= 1.1;
	}
}
Example 11: main
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
public static void main(String[] args) throws Exception {
	final Snap snap = new Snap();

	final VideoCapture vc = new VideoCapture(640, 480);
	while (true) {
		final FImage img = vc.getNextFrame().flatten();
		DisplayUtilities.displayName(img, "Live Video");

		final FImage patch = ResizeProcessor.resample(img, 160, 120); // .extractCenter(120, 120);

		final String res = snap.getGridRef(patch);
		if (!res.contains("Not"))
			System.out.println(res);
	}
}
Example 12: main
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
public static void main(String[] args) throws IOException {
	// final XuggleVideo xv = new XuggleVideo(new File("/Users/jon/Desktop/merlin/tunnel480.mov"));
	final XuggleVideo xv = new XuggleVideo(new File("/Users/jon/Downloads/ewap_dataset/seq_hotel/seq_hotel.avi"));
	final AdaptiveMoGBackgroundEstimator<MBFImage> proc = new AdaptiveMoGBackgroundEstimator<MBFImage>(xv);
	for (final MBFImage img : proc) {
		DisplayUtilities.displayName(img, "video");
	}
}
Example 13: main
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
public static void main(String[] args) throws InterruptedException {
	final OnlineBackpropOneHidden bp = new OnlineBackpropOneHidden(2, 2, 1);
	FImage img = new FImage(200, 200);
	img = imagePredict(bp, img);
	final ColourMap m = ColourMap.Hot;
	DisplayUtilities.displayName(m.apply(img), "xor");

	final int npixels = img.width * img.height;
	final int half = img.width / 2;
	final int[] pixels = RandomData.getUniqueRandomInts(npixels, 0, npixels);
	while (true) {
		// for (int i = 0; i < pixels.length; i++) {
		//	int pixel = pixels[i];
		//	int y = pixel / img.width;
		//	int x = pixel - (y * img.width);
		//	bp.update(new double[] { x < half ? -1 : 1, y < half ? -1 : 1 },
		//			new double[] { xorValue(half, x, y) });
		//	// Thread.sleep(5);
		// }
		bp.update(new double[] { 0, 0 }, new double[] { 0 });
		bp.update(new double[] { 1, 1 }, new double[] { 0 });
		bp.update(new double[] { 0, 1 }, new double[] { 1 });
		bp.update(new double[] { 1, 0 }, new double[] { 1 });
		imagePredict(bp, img);
		DisplayUtilities.displayName(m.apply(img), "xor");
	}
}
Example 14: main
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
public static void main(String[] args) throws IOException, InterruptedException {
	final List<HaarFeature> features = HaarFeatureType.generateFeatures(20, 20, HaarFeatureType.BASIC);
	final FImage img = loadPositive();

	for (int i = 0; i < features.size(); i++) {
		final HaarFeature f = features.get(i);
		DisplayUtilities.displayName(drawRects(f.rects, img.clone()), "foo");
		Thread.sleep(100);
	}
}
Example 15: displayEllipsesFull
import org.openimaj.image.DisplayUtilities; // import the package/class the method depends on
@SuppressWarnings("unused")
private void displayEllipsesFull(Ellipse ellipse1, Ellipse ellipse2) {
	final MBFImage debugDisplay = new MBFImage(this.imageWidth, this.imageHeight, ColourSpace.RGB);
	debugDisplay.drawShape(ellipse1, RGBColour.RED);
	debugDisplay.drawShape(ellipse2, RGBColour.BLUE);
	debugDisplay.drawShape(
			ellipse2.calculateRegularBoundingBox().union(ellipse1.calculateRegularBoundingBox()),
			RGBColour.BLUE);
	DisplayUtilities.displayName(debugDisplay, "debug display full");
}