This page collects typical usage examples of the Java method ij.process.ByteProcessor.setMinAndMax. If you are wondering what ByteProcessor.setMinAndMax does, how to call it, or what real-world uses look like, the curated examples below may help. You can also read more about the enclosing class, ij.process.ByteProcessor.
Three code examples of ByteProcessor.setMinAndMax are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code examples.
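Before the examples, here is a minimal, self-contained sketch of the call itself. It is not taken from the examples below; the image contents and the class name SetMinAndMaxDemo are illustrative only. The sketch assumes the usual ImageJ behavior: setMinAndMax adjusts the display range (the contrast mapping) of the processor without modifying its pixel data.

import ij.ImagePlus;
import ij.process.ByteProcessor;

public class SetMinAndMaxDemo {
    public static void main(String[] args) {
        // Create a small 8-bit image whose pixel values are label indices 0..4.
        final ByteProcessor labels = new ByteProcessor(64, 64);
        for (int y = 0; y < 64; y++) {
            for (int x = 0; x < 64; x++) {
                labels.set(x, y, x / 13); // five vertical bands with values 0..4
            }
        }
        // Stretch the display range to the label values so all five bands are
        // clearly visible when the image is shown; pixel values stay 0..4.
        labels.setMinAndMax(0, 4);
        new ImagePlus("Labels", labels).show();
    }
}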
Example 1: actionSeedImage

import ij.process.ByteProcessor; // import the class the method depends on

public void actionSeedImage() {
    final ImagePlus imp = UIUtils.getImage();
    if (imp == null) {
        return;
    }

    final List<Region> regions = multiRegionManagerModel.getRegions();
    if (regions.size() < 1) {
        IJ.error(CAPTION, "Cannot create seed image, at least one region is required.");
        return;
    }

    final ByteProcessor seeds = createSeedImage(regions, imp.getWidth(), imp.getHeight());
    if (seeds == null) {
        return;
    }

    seeds.setMinAndMax(0, regions.size());

    new ImagePlus("Seeds", seeds).show();
}
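Note that setMinAndMax(0, regions.size()) here only changes how the seed image is displayed: the pixels keep their raw label values, and the display mapping is stretched over the 0..regions.size() range so the individual region labels remain distinguishable on screen.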
Example 2: run

import ij.process.ByteProcessor; // import the class the method depends on

@Override
public void run(String arg) {
    if ("about".equalsIgnoreCase(arg)) {
        IJ.showMessage("About " + TITLE, ABOUT);
        return;
    }

    final Pair<List<ResultsTable>, List<String>> resultTables = listTextWindows();
    if (resultTables.getFirst().size() < 1) {
        IJ.error("Expecting at least one open Result Table window.");
        return;
    }

    final Pair<List<ImagePlus>, List<String>> images = listSupportedImages();
    if (images.getFirst().size() < 1) {
        IJ.error("Expecting at least one open image (that is not indexed color).");
        return;
    }

    // Ask user for image, results table, and other options
    if (!showOptionsDialog(resultTables.getSecond(), images.getSecond())) {
        return;
    }

    if (CONFIG.interpretStackAs3D) {
        IJ.error(TITLE, "Interpreting stacks as 3D images is not yet supported.");
        return;
    }

    final ResultsTable rt = resultTables.getFirst().get(CONFIG.tableIndex);
    final ImagePlus imp = images.getFirst().get(CONFIG.imageIndex);

    //
    // Verify that table headings match image bands
    //
    final ImagePlus stack = KMeansClusteringPlugin.convertToFloatStack(imp);
    final int stackSize = stack.getStackSize();
    final String[] bandLabels = stack.getStack().getSliceLabels();
    final String[] expectedHeadings = new String[stackSize + 1];
    expectedHeadings[0] = "Cluster";
    System.arraycopy(bandLabels, 0, expectedHeadings, 1, stackSize);

    final String[] tableHeadings = rt.getHeadings();
    if (tableHeadings.length < expectedHeadings.length) {
        IJ.error(TITLE, "Not enough headings, expecting: " + Arrays.toString(expectedHeadings));
        return;
    }
    for (int i = 0; i < expectedHeadings.length; i++) {
        if (!expectedHeadings[i].equals(tableHeadings[i])) {
            IJ.error(TITLE, "Expecting heading " + (i + 1) + " to be " + expectedHeadings[i]
                    + ", but got: " + tableHeadings[i] + ".");
            return;
        }
    }

    // Read cluster centers from the table
    final int nbClusters = rt.getCounter();
    final float[][] clusterCenters = new float[nbClusters][expectedHeadings.length - 1];
    for (int clusterIndex = 0; clusterIndex < nbClusters; clusterIndex++) {
        for (int bandIndex = 1; bandIndex < expectedHeadings.length; bandIndex++) {
            clusterCenters[clusterIndex][bandIndex - 1] = (float) rt.getValueAsDouble(bandIndex, clusterIndex);
        }
    }

    // Apply clustering to the input image
    final VectorProcessor vp = new VectorProcessor(stack);
    final ByteProcessor bp = KMeans2D.encodeSegmentedImage(vp, clusterCenters);

    // Apply default color map
    if (KMeansClusteringPlugin.APPLY_LUT) {
        bp.setColorModel(KMeansClusteringPlugin.defaultColorModel());
    }
    if (KMeansClusteringPlugin.AUTO_BRIGHTNESS) {
        bp.setMinAndMax(0, nbClusters);
    }
    new ImagePlus("Clusters", bp).show();

    // Show centroid value image
    if (CONFIG.showCentroidImage) {
        final ImageStack clustered = KMeansUtils.encodeCentroidValueImage(clusterCenters, new VectorProcessor(stack));
        final ImagePlus cvImp = KMeansUtils.createCentroidImage(imp.getType(), clustered);
        cvImp.show();
    }
}
Example 3: run

import ij.process.ByteProcessor; // import the class the method depends on

private void run(final ImagePlus imp) {
    // Convert to a stack of float images
    final ImagePlus stack = convertToFloatStack(imp);

    // Run clustering
    final KMeans2D kMeans = new KMeans2D(CONFIG);
    // Roi roi = imp.getRoi();
    // ByteProcessor mask = (ByteProcessor) imp.getMask();
    // kMeans.setRoi(roi.getBoundingRect());
    // kMeans.setMask(mask);
    final long startTime = System.currentTimeMillis();
    final ByteProcessor bp = kMeans.run(stack.getStack());
    final long endTime = System.currentTimeMillis();

    // Apply default color map
    if (APPLY_LUT) {
        bp.setColorModel(defaultColorModel());
    }
    if (AUTO_BRIGHTNESS) {
        bp.setMinAndMax(0, CONFIG.getNumberOfClusters());
    }

    // Show result image
    final ImagePlus r = new ImagePlus("Clusters", bp);
    r.show();

    // Show animation
    if (CONFIG.isClusterAnimationEnabled()) {
        final ImageStack animationStack = kMeans.getClusterAnimation();
        if (APPLY_LUT) {
            animationStack.setColorModel(defaultColorModel());
        }
        final ImagePlus animation = new ImagePlus("Cluster animation", animationStack);
        animation.show();
        if (AUTO_BRIGHTNESS) {
            for (int i = 0; i < animationStack.getSize(); i++) {
                animation.setSlice(i + 1);
                animation.getProcessor().setMinAndMax(0, CONFIG.getNumberOfClusters());
            }
            animation.setSlice(1);
            animation.updateAndDraw();
        }
    }

    // Show centroid image
    if (showCentroidImage) {
        final ImagePlus cvImp = KMeansUtils.createCentroidImage(imp.getType(),
                kMeans.getCentroidValueImage());
        cvImp.show();
    }

    if (sendToResultTable) {
        sendToResultTable(kMeans.getClusterCenters(), stack.getStack().getSliceLabels());
    }

    IJ.showStatus("Clustering completed in " + (endTime - startTime) + " ms.");
}