This article collects typical usage examples of the Java method org.openimaj.image.FImage.getPixelInterp. If you are wondering what FImage.getPixelInterp does, how to call it, or what real-world usage looks like, the curated examples below should help; you can also look at the enclosing class org.openimaj.image.FImage for more context.
Four code examples of FImage.getPixelInterp are shown below, drawn from open-source projects.
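
Before the examples, here is a minimal, self-contained sketch (the class name and sample values are mine, not taken from any of the examples) of what getPixelInterp does: it samples the image at a non-integer coordinate and returns a value interpolated from the neighbouring pixels, rather than snapping to the nearest pixel.

import org.openimaj.image.FImage;

public class GetPixelInterpDemo {
    public static void main(String[] args) {
        // A tiny 2x2 image; note that FImage stores values as pixels[y][x].
        FImage img = new FImage(new float[][] {
                { 0f, 1f },
                { 1f, 0f }
        });

        // Sampling at a non-integer coordinate returns a value interpolated
        // from the four surrounding pixels.
        float v = img.getPixelInterp(0.5, 0.5);
        System.out.println("Interpolated value at (0.5, 0.5): " + v);
    }
}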
Example 1: calculateLBP
import org.openimaj.image.FImage; // import the package/class that the method depends on
/**
 * Calculate the extended LBP for a single point. The
 * point must be within the image.
 *
 * @param image the image
 * @param radius the radius of the sampling circle
 * @param samples the number of samples around the circle
 * @param x the x-coordinate of the point
 * @param y the y-coordinate of the point
 * @return the LBP code
 */
public static int calculateLBP(FImage image, float radius, int samples, int x, int y) {
    float centre = image.pixels[y][x];
    int pattern = 0;

    for (int i=0; i<samples; i++) {
        double dx = -radius * Math.sin(2 * Math.PI * i / samples);
        double dy = radius * Math.cos(2 * Math.PI * i / samples);

        float pix = image.getPixelInterp(x+dx, y+dy);

        if (pix - centre >= 0) {
            pattern += Math.pow(2, i);
        }
    }

    return pattern;
}
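
A hedged usage sketch for Example 1: it assumes the calculateLBP method above is OpenIMAJ's ExtendedLocalBinaryPattern.calculateLBP (the import path below is my assumption; if you have copied the method into your own class, call it there instead).

import org.openimaj.image.FImage;
// Assumed location of the method shown in Example 1.
import org.openimaj.image.feature.dense.binarypattern.ExtendedLocalBinaryPattern;

public class LBPDemo {
    public static void main(String[] args) {
        // Small synthetic image so the example is self-contained.
        FImage image = new FImage(32, 32);
        image.fill(0.5f);
        image.pixels[16][17] = 1f; // a brighter neighbour of the centre pixel

        // Classic 8-sample LBP on a circle of radius 1 around (16, 16);
        // each sample position is read with getPixelInterp.
        int code = ExtendedLocalBinaryPattern.calculateLBP(image, 1f, 8, 16, 16);
        System.out.println("LBP code: " + code);
    }
}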
Example 2: calculateLTP
import org.openimaj.image.FImage; // import the package/class that the method depends on
/**
 * Calculate the LTP for a single point. The
 * point must be within the image.
 *
 * @param image the image
 * @param radius the radius of the sampling circle
 * @param samples the number of samples around the circle
 * @param threshold the threshold
 * @param x the x-coordinate of the point
 * @param y the y-coordinate of the point
 * @return the LTP code (positive and negative binary code and ternary code)
 */
public static int[] calculateLTP(FImage image, float radius, int samples, float threshold, int x, int y) {
    float centre = image.pixels[y][x];
    int[] pattern = new int[3];

    for (int i=0; i<samples; i++) {
        double xx = -radius * Math.sin(2 * Math.PI * i / samples);
        double yy = radius * Math.cos(2 * Math.PI * i / samples);

        float pix = image.getPixelInterp(x+xx, y+yy);
        float d = pix - centre;

        if (d >= threshold) {
            pattern[0] += Math.pow(2, i);
            pattern[2] += Math.pow(3, i);
        }
        if (d <= -threshold) {
            pattern[1] += Math.pow(2, i);
            pattern[2] += 2 * Math.pow(3, i);
        }
    }

    return pattern;
}
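
Each sample i contributes to the ternary code: 3^i if the sampled value is at least threshold above the centre, 2 * 3^i if it is at least threshold below, and nothing otherwise; the two binary codes record the positive and negative halves separately. A hedged usage sketch, assuming the method above is OpenIMAJ's LocalTernaryPattern.calculateLTP (the import path is my assumption):

import org.openimaj.image.FImage;
// Assumed location of the method shown in Example 2.
import org.openimaj.image.feature.dense.binarypattern.LocalTernaryPattern;

public class LTPDemo {
    public static void main(String[] args) {
        FImage image = new FImage(32, 32);
        image.fill(0.5f);
        image.pixels[16][17] = 0.9f; // clearly brighter than the centre
        image.pixels[16][15] = 0.1f; // clearly darker than the centre

        // 8 samples, radius 1, ternary threshold of 0.2 around the centre value.
        int[] codes = LocalTernaryPattern.calculateLTP(image, 1f, 8, 0.2f, 16, 16);
        System.out.println("positive binary code: " + codes[0]);
        System.out.println("negative binary code: " + codes[1]);
        System.out.println("ternary code:         " + codes[2]);
    }
}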
Example 3: processImage
import org.openimaj.image.FImage; // import the package/class that the method depends on
@Override
public void processImage(FImage image) {
    FImage newimage = image.newInstance(width, height);

    for (int y=0; y<height; y++)
        for (int x=0; x<width; x++)
            newimage.pixels[y][x] = image.getPixelInterp(x*scale, y*scale);

    image.internalAssign(newimage);
}
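
The snippet above relies on width, height and scale fields declared elsewhere in its enclosing class. A minimal, self-contained sketch of such a processor (the class, constructor and field names are mine, not the original OpenIMAJ class; only the processImage body matches the example) might look like this:

import org.openimaj.image.FImage;
import org.openimaj.image.processor.ImageProcessor;

/**
 * Resizes an FImage to a fixed size by sampling the source at
 * (x*scale, y*scale) with getPixelInterp.
 */
public class SimpleInterpolatedResize implements ImageProcessor<FImage> {
    private final int width;   // target width
    private final int height;  // target height
    private final float scale; // source pixels per target pixel

    public SimpleInterpolatedResize(int srcWidth, int width, int height) {
        this.width = width;
        this.height = height;
        // Assumes uniform scaling driven by the width ratio; a real
        // implementation might keep separate x/y scale factors.
        this.scale = (float) srcWidth / width;
    }

    @Override
    public void processImage(FImage image) {
        FImage newimage = image.newInstance(width, height);

        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++)
                newimage.pixels[y][x] = image.getPixelInterp(x * scale, y * scale);

        image.internalAssign(newimage);
    }
}

An image could then be resized in place with image.processInplace(new SimpleInterpolatedResize(image.width, 64, 64)), or copied and resized with image.process(...).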
Example 4: extractFeatures
import org.openimaj.image.FImage; // import the package/class that the method depends on
protected void extractFeatures(KEDetectedFace face) {
    final Matrix T0 = AffineAligner.estimateAffineTransform(face);
    final Matrix T = T0.copy();
    final FImage J = FKEFaceDetector.pyramidResize(face.getFacePatch(), T);
    final FacialKeypoint[] pts = face.getKeypoints();
    faceParts.clear();

    final float pyrScale = (float) (T0.get(0, 2) / T.get(0, 2));

    // build a list of the center of each patch wrt image J
    final Point2dImpl[] P0 = new Point2dImpl[VP.length];
    for (int j = 0; j < P0.length; j++) {
        final int[] vp = VP[j];
        final int vp0 = vp[0];

        P0[j] = new Point2dImpl(0, 0);
        if (vp.length == 1) {
            P0[j].x = pts[vp0].position.x / pyrScale;
            P0[j].y = pts[vp0].position.y / pyrScale;
        } else {
            final int vp1 = vp[1];
            P0[j].x = ((pts[vp0].position.x + pts[vp1].position.x) / 2.0f) / pyrScale;
            P0[j].y = ((pts[vp0].position.y + pts[vp1].position.y) / 2.0f) / pyrScale;
        }
    }

    // Prebuild transform
    final List<Point2dImpl> transformed = new ArrayList<Point2dImpl>();
    final List<Pixel> nontransformed = new ArrayList<Pixel>();
    for (int rr = -radius; rr <= radius; rr++) {
        for (int cc = -radius; cc <= radius; cc++) {
            final float r2 = rr * rr + cc * cc;
            if (r2 <= radius * radius) { // inside circle
                // Note: do transform without the translation!!!
                final float px = (float) (cc * scl * T.get(0, 0) + rr * scl * T.get(0, 1));
                final float py = (float) (cc * scl * T.get(1, 0) + rr * scl * T.get(1, 1));

                transformed.add(new Point2dImpl(px, py));
                nontransformed.add(new Pixel(cc, rr));
            }
        }
    }

    for (int j = 0; j < VP.length; j++) {
        final DetectedFacePart pd = new DetectedFacePart(FacialKeypointType.valueOf(j),
                new Point2dImpl(P0[j].x * pyrScale, P0[j].y * pyrScale));
        faceParts.add(pd);
        pd.featureVector = new float[transformed.size()];

        int n = 0;
        float mean = 0;
        float m2 = 0;

        for (int i = 0; i < transformed.size(); i++) {
            final Point2dImpl XYt = transformed.get(i);

            final double xt = XYt.x + P0[j].x;
            final double yt = XYt.y + P0[j].y;
            final float val = J.getPixelInterp(xt, yt);

            pd.featureVector[i] = val;

            n++;
            final float delta = val - mean;
            mean = mean + delta / n;
            m2 = m2 + delta * (val - mean);
        }

        float std = (float) Math.sqrt(m2 / (n - 1));
        if (std <= 0)
            std = 1;

        for (int i = 0; i < transformed.size(); i++) {
            pd.featureVector[i] = (pd.featureVector[i] - mean) / std;
        }
    }
}
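
The per-patch loop in Example 4 is Welford's online algorithm: as each interpolated value is read with J.getPixelInterp, a running mean and sum of squared deviations (m2) are updated, and the feature vector is then z-normalised. A standalone sketch of just that normalisation step (the class and method names are illustrative):

public class WelfordNormalise {
    /** Z-normalise a feature vector in place using a running mean/variance. */
    public static void normalise(float[] featureVector) {
        int n = 0;
        float mean = 0;
        float m2 = 0;

        for (final float val : featureVector) {
            n++;
            final float delta = val - mean;
            mean = mean + delta / n;          // running mean
            m2 = m2 + delta * (val - mean);   // running sum of squared deviations
        }

        float std = (float) Math.sqrt(m2 / (n - 1)); // sample standard deviation
        if (std <= 0)
            std = 1; // guard against constant (zero-variance) patches

        for (int i = 0; i < featureVector.length; i++) {
            featureVector[i] = (featureVector[i] - mean) / std;
        }
    }
}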