This page collects typical usage examples of the C# method INyARRgbRaster.getRgbPixelReader. If you are unsure how to call INyARRgbRaster.getRgbPixelReader in C#, or what it is used for, the hand-picked examples below should help; you can also read further about the containing class, INyARRgbRaster.
Five code examples of INyARRgbRaster.getRgbPixelReader are shown below, ordered by popularity.
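All five examples follow the same pattern: obtain an INyARRgbPixelReader once via getRgbPixelReader(), then query source pixels either one at a time with getPixel or in batches with getPixelSet. As a minimal sketch of that pattern, the helper below averages a whole raster into one 0xRRGGBB value. The helper itself (AverageColor) is hypothetical and not part of NyARToolkitCS; only the raster and reader calls it uses (getRgbPixelReader, getWidth, getHeight, getPixel) appear in the examples on this page.

// Hypothetical helper (a sketch, not library code): average every pixel of a raster
// into a single 0xRRGGBB value using the reader returned by getRgbPixelReader().
public static int AverageColor(INyARRgbRaster raster)
{
    INyARRgbPixelReader reader = raster.getRgbPixelReader();
    int w = raster.getWidth();
    int h = raster.getHeight();
    int[] rgb = new int[3];            // getPixel fills R, G, B into a 3-element buffer
    long r = 0, g = 0, b = 0;
    for (int y = 0; y < h; y++)
    {
        for (int x = 0; x < w; x++)
        {
            reader.getPixel(x, y, rgb);
            r += rgb[0];
            g += rgb[1];
            b += rgb[2];
        }
    }
    long n = (long)w * h;              // assumes a non-empty raster
    return (int)((((r / n) & 0xff) << 16) | (((g / n) & 0xff) << 8) | ((b / n) & 0xff));
}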
Example 1: pickFromRaster
/**
 * Reads an id marker from the given image.
 * The marker data is returned in o_data, the marker parameters in o_param.
 * @param image
 * @param i_vertex
 * @param o_data
 * @param o_param
 * @return
 * @throws NyARException
 */
public bool pickFromRaster(INyARRgbRaster image, NyARIntPoint2d[] i_vertex, NyIdMarkerPattern o_data, NyIdMarkerParam o_param)
{
    // Compute the perspective transform parameters from the square vertices.
    if (!this._perspective_reader.setSourceSquare(i_vertex))
    {
        return false;
    }
    INyARRgbPixelReader reader = image.getRgbPixelReader();
    NyARIntSize raster_size = image.getSize();
    PerspectivePixelReader.TThreshold th = this.__pickFromRaster_th;
    MarkerPattEncoder encoder = this.__pickFromRaster_encoder;
    // Determine the marker threshold, then read the data bits.
    this._perspective_reader.detectThresholdValue(reader, raster_size, th);
    if (!this._perspective_reader.readDataBits(reader, raster_size, th, encoder))
    {
        return false;
    }
    int d = encoder.encode(o_data);
    if (d < 0)
    {
        return false;
    }
    o_param.direction = d;
    o_param.threshold = th.th;
    return true;
}
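A call site for this method is not shown on this page, but it would look roughly like the sketch below. The square detection that produces the vertex array is omitted, and the parameterless constructors of NyIdMarkerPattern and NyIdMarkerParam are assumptions, not something the example above confirms.

// Hedged usage sketch: 'picker' is an instance of the class defining pickFromRaster above,
// 'raster' an INyARRgbRaster, and 'vertices' the four corners of a detected square.
NyIdMarkerPattern data = new NyIdMarkerPattern();   // assumed parameterless constructor
NyIdMarkerParam param = new NyIdMarkerParam();      // assumed parameterless constructor
if (picker.pickFromRaster(raster, vertices, data, param))
{
    // On success the marker bits are in 'data'; pickFromRaster fills in direction and threshold.
    int direction = param.direction;
}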
Example 2: pickFromRaster
public void pickFromRaster(double[] i_cpara, INyARRgbRaster image, int[] o_patt)
{
    int i2x, i2y;    // sub-sample loop indexes
    int x, y;
    int w;
    int r, g, b;
    int resolution = this._resolution;
    int res_pix = resolution * resolution;
    int img_x = image.getWidth();
    int img_y = image.getHeight();
    int[] rgb_tmp = this._rgb_temp;
    int[] rgb_px = this._rgb_px;
    int[] rgb_py = this._rgb_py;
    double[] cp1cy_cp2 = this._cp1cy_cp2;
    double[] cp4cy_cp5 = this._cp4cy_cp5;
    double[] cp7cy_1 = this._cp7cy_1;
    double cp0 = i_cpara[0];
    double cp3 = i_cpara[3];
    double cp6 = i_cpara[6];
    double cp1 = i_cpara[1];
    double cp2 = i_cpara[2];
    double cp4 = i_cpara[4];
    double cp5 = i_cpara[5];
    double cp7 = i_cpara[7];
    int pick_y = this._lt_ref.y;
    int pick_x = this._lt_ref.x;
    // Get the pixel reader.
    INyARRgbPixelReader reader = image.getRgbPixelReader();
    int p = 0;
    for (int iy = 0; iy < this._size_ref.h * resolution; iy += resolution)
    {
        w = pick_y + iy;
        cp1cy_cp2[0] = cp1 * w + cp2;
        cp4cy_cp5[0] = cp4 * w + cp5;
        cp7cy_1[0] = cp7 * w + 1.0;
        for (i2y = 1; i2y < resolution; i2y++)
        {
            cp1cy_cp2[i2y] = cp1cy_cp2[i2y - 1] + cp1;
            cp4cy_cp5[i2y] = cp4cy_cp5[i2y - 1] + cp4;
            cp7cy_1[i2y] = cp7cy_1[i2y - 1] + cp7;
        }
        // Take resolution x resolution sample points per output pixel.
        for (int ix = 0; ix < this._size_ref.w * resolution; ix += resolution)
        {
            int n = 0;
            w = pick_x + ix;
            for (i2y = resolution - 1; i2y >= 0; i2y--)
            {
                double cp0cx = cp0 * w + cp1cy_cp2[i2y];
                double cp6cx = cp6 * w + cp7cy_1[i2y];
                double cp3cx = cp3 * w + cp4cy_cp5[i2y];
                double m = 1 / (cp6cx);
                double d = -cp6 / (cp6cx * (cp6cx + cp6));
                double m2 = cp0cx * m;
                double m3 = cp3cx * m;
                double d2 = cp0cx * d + cp0 * (m + d);
                double d3 = cp3cx * d + cp3 * (m + d);
                for (i2x = resolution - 1; i2x >= 0; i2x--)
                {
                    // Compute one sample coordinate, clamped to the raster bounds.
                    x = rgb_px[n] = (int)(m2);
                    y = rgb_py[n] = (int)(m3);
                    if (x < 0 || x >= img_x || y < 0 || y >= img_y)
                    {
                        if (x < 0) { rgb_px[n] = 0; } else if (x >= img_x) { rgb_px[n] = img_x - 1; }
                        if (y < 0) { rgb_py[n] = 0; } else if (y >= img_y) { rgb_py[n] = img_y - 1; }
                    }
                    n++;
                    m2 += d2;
                    m3 += d3;
                }
            }
            reader.getPixelSet(rgb_px, rgb_py, res_pix, rgb_tmp);
            r = g = b = 0;
            for (int i = res_pix * 3 - 1; i > 0; )
            {
                b += rgb_tmp[i--];
                g += rgb_tmp[i--];
                r += rgb_tmp[i--];
            }
            r /= res_pix;
            g /= res_pix;
            b /= res_pix;
            o_patt[p] = ((r & 0xff) << 16) | ((g & 0xff) << 8) | ((b & 0xff));
            p++;
        }
    }
    return;
}
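The inner loops above avoid a division per sample by stepping the perspective transform incrementally: for a step of 1 along x, the reciprocal of the denominator changes by -cp6 / (A * (A + cp6)), which is exactly what d, d2 and d3 precompute before m2 and m3 are advanced. For reference, this is the direct (per-sample division) form of the same mapping, written as a standalone sketch with plain arrays; the method name MapRow is illustrative and not part of NyARToolkitCS.

// Sketch: map a row of destination x coordinates through the same 8-element
// parameter layout as i_cpara above (the ninth homography element is fixed at 1).
static void MapRow(double[] cpara, int y, int xStart, int count, int[] outX, int[] outY)
{
    double cp1cy_cp2 = cpara[1] * y + cpara[2];   // x-numerator term that depends only on y
    double cp4cy_cp5 = cpara[4] * y + cpara[5];   // y-numerator term that depends only on y
    double cp7cy_1 = cpara[7] * y + 1.0;          // denominator term that depends only on y
    for (int i = 0; i < count; i++)
    {
        int x = xStart + i;
        double d = cpara[6] * x + cp7cy_1;
        // Example 2 replaces this per-sample division with the m2/m3 += d2/d3 stepping.
        outX[i] = (int)((cpara[0] * x + cp1cy_cp2) / d);
        outY[i] = (int)((cpara[3] * x + cp4cy_cp5) / d);
    }
}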
Example 3: pickFromRaster
/**
 * @see INyARColorPatt#pickFromRaster
 */
public virtual bool pickFromRaster(INyARRgbRaster image, NyARIntPoint2d[] i_vertexs)
{
    // Compute the perspective transform parameters.
    double[] cpara = this.__pickFromRaster_cpara;
    if (!this._perspective_gen.getParam(i_vertexs, cpara))
    {
        return false;
    }
    int resolution = this._resolution;
    int img_x = image.getWidth();
    int img_y = image.getHeight();
    int res_pix = resolution * resolution;
    int[] rgb_tmp = this.__pickFromRaster_rgb_tmp;
    // Get the pixel reader.
    INyARRgbPixelReader reader = image.getRgbPixelReader();
    int p = 0;
    for (int iy = 0; iy < this._size.h * resolution; iy += resolution)
    {
        // Take resolution x resolution sample points per output pixel.
        for (int ix = 0; ix < this._size.w * resolution; ix += resolution)
        {
            int r, g, b;
            r = g = b = 0;
            for (int i2y = iy; i2y < iy + resolution; i2y++)
            {
                int cy = this._pickup_lt.y + i2y;
                for (int i2x = ix; i2x < ix + resolution; i2x++)
                {
                    // Map one sample coordinate through the perspective transform.
                    int cx = this._pickup_lt.x + i2x;
                    double d = cpara[6] * cx + cpara[7] * cy + 1.0;
                    int x = (int)((cpara[0] * cx + cpara[1] * cy + cpara[2]) / d);
                    int y = (int)((cpara[3] * cx + cpara[4] * cy + cpara[5]) / d);
                    // Clamp to the raster bounds.
                    if (x < 0) { x = 0; }
                    if (x >= img_x) { x = img_x - 1; }
                    if (y < 0) { y = 0; }
                    if (y >= img_y) { y = img_y - 1; }
                    reader.getPixel(x, y, rgb_tmp);
                    r += rgb_tmp[0];
                    g += rgb_tmp[1];
                    b += rgb_tmp[2];
                }
            }
            // Average the samples and pack them as 0xRRGGBB.
            r /= res_pix;
            g /= res_pix;
            b /= res_pix;
            this._patdata[p] = ((r & 0xff) << 16) | ((g & 0xff) << 8) | ((b & 0xff));
            p++;
        }
    }
    return true;
}
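Example 3 super-samples each pattern cell (resolution x resolution source pixels per output pixel), averages the samples, and packs the result into a 0xRRGGBB integer, the same format _patdata uses above. The averaging and packing step in isolation looks like the sketch below (plain C#, no library types; the method name PackAverage is illustrative).

// Sketch: average 'n' RGB samples (stored as consecutive R, G, B ints, the same
// layout getPixelSet fills in Examples 2 and 4) and pack them as 0xRRGGBB.
static int PackAverage(int[] rgbSamples, int n)
{
    int r = 0, g = 0, b = 0;
    for (int i = 0; i < n; i++)
    {
        r += rgbSamples[i * 3 + 0];
        g += rgbSamples[i * 3 + 1];
        b += rgbSamples[i * 3 + 2];
    }
    r /= n; g /= n; b /= n;
    return ((r & 0xff) << 16) | ((g & 0xff) << 8) | (b & 0xff);
}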
Example 4: updateExtpat
// May be somewhat slower when the number of divisions drops below 16.
private void updateExtpat(INyARRgbRaster image, NyARMat i_cpara, int i_xdiv2, int i_ydiv2)
{
    int i, j;
    int r, g, b;
    int pat_size_w = this._size.w;
    int xdiv = i_xdiv2 / pat_size_w;     // xdiv = xdiv2/Config.AR_PATT_SIZE_X;
    int ydiv = i_ydiv2 / this._size.h;   // ydiv = ydiv2/Config.AR_PATT_SIZE_Y;
    int xdiv_x_ydiv = xdiv * ydiv;
    double reciprocal;
    double[][] para = i_cpara.getArray();
    double para00 = para[0 * 3 + 0][0];
    double para01 = para[0 * 3 + 1][0];
    double para02 = para[0 * 3 + 2][0];
    double para10 = para[1 * 3 + 0][0];
    double para11 = para[1 * 3 + 1][0];
    double para12 = para[1 * 3 + 2][0];
    double para20 = para[2 * 3 + 0][0];
    double para21 = para[2 * 3 + 1][0];
    // Get the pixel reader.
    INyARRgbPixelReader reader = image.getRgbPixelReader();
    int img_width = image.getWidth();
    int img_height = image.getHeight();
    // Prepare the work buffers.
    reservWorkBuffers(xdiv, ydiv);
    double[] xw = this.__updateExtpat_xw;
    double[] yw = this.__updateExtpat_yw;
    int[] xc = this.__updateExtpat_xc;
    int[] yc = this.__updateExtpat_yc;
    int[] rgb_set = this.__updateExtpat_rgbset;
    for (int iy = this._size.h - 1; iy >= 0; iy--)
    {
        for (int ix = pat_size_w - 1; ix >= 0; ix--)
        {
            // Build the xw/yw coordinate maps.
            reciprocal = 1.0 / i_xdiv2;
            for (i = xdiv - 1; i >= 0; i--)
            {
                xw[i] = LT_POS + SQ_SIZE * (ix * xdiv + i + 0.5) * reciprocal;
            }
            reciprocal = 1.0 / i_ydiv2;
            for (i = ydiv - 1; i >= 0; i--)
            {
                yw[i] = LT_POS + SQ_SIZE * (iy * ydiv + i + 0.5) * reciprocal;
            }
            // Collect the set of source coordinates that make up one output pixel into xc/yc.
            int number_of_pix = 0;
            for (i = ydiv - 1; i >= 0; i--)
            {
                double para01_x_yw_para02 = para01 * yw[i] + para02;
                double para11_x_yw_para12 = para11 * yw[i] + para12;
                double para12_x_yw_para22 = para21 * yw[i] + 1.0;
                for (j = xdiv - 1; j >= 0; j--)
                {
                    double d = para20 * xw[j] + para12_x_yw_para22;
                    if (d == 0)
                    {
                        throw new NyARException();
                    }
                    int xcw = (int)((para00 * xw[j] + para01_x_yw_para02) / d);
                    int ycw = (int)((para10 * xw[j] + para11_x_yw_para12) / d);
                    if (xcw < 0 || xcw >= img_width || ycw < 0 || ycw >= img_height)
                    {
                        // Out-of-range samples are skipped (not clamped).
                        continue;
                    }
                    xc[number_of_pix] = xcw;
                    yc[number_of_pix] = ycw;
                    number_of_pix++;
                }
            }
            // Fetch the RGB values for this output pixel in one batch.
            reader.getPixelSet(xc, yc, number_of_pix, rgb_set);
            r = g = b = 0;
            for (i = number_of_pix * 3 - 1; i >= 0; i -= 3)
            {
                r += rgb_set[i - 2];   // R
                g += rgb_set[i - 1];   // G
                b += rgb_set[i];       // B
            }
            // Finalize one output pixel.
            this._patdata[iy * pat_size_w + ix] = (((r / xdiv_x_ydiv) & 0xff) << 16) | (((g / xdiv_x_ydiv) & 0xff) << 8) | (((b / xdiv_x_ydiv) & 0xff));
        }
    }
    return;
}
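Examples 2 and 4 both batch their source reads with getPixelSet instead of calling getPixel per sample, but they treat out-of-range coordinates differently: Example 2 clamps them to the raster border, while Example 4 skips them (and still divides by the full cell size). A clamping helper in the style of Example 2 could look like the sketch below (plain C#; the method name ClampCoords is illustrative).

// Sketch: clamp a batch of sample coordinates to the raster bounds before
// handing them to getPixelSet, mirroring the clamping branch in Example 2.
static void ClampCoords(int[] xs, int[] ys, int count, int width, int height)
{
    for (int i = 0; i < count; i++)
    {
        if (xs[i] < 0) { xs[i] = 0; } else if (xs[i] >= width) { xs[i] = width - 1; }
        if (ys[i] < 0) { ys[i] = 0; } else if (ys[i] >= height) { ys[i] = height - 1; }
    }
}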
Example 5: pickFromRaster
/**
 * @see INyARColorPatt#pickFromRaster
 */
public bool pickFromRaster(INyARRgbRaster image, NyARIntPoint2d[] i_vertexs)
{
    double[] conv_param = this._convparam;
    int rx2, ry2;
    rx2 = this._size.w;
    ry2 = this._size.h;
    int[] rgb_tmp = new int[3];
    INyARRgbPixelReader reader = image.getRgbPixelReader();
    // From the destination-region vertices, build the parameters that map
    // pattern coordinates into the current (source) coordinate system.
    calcPara(i_vertexs, conv_param);
    for (int y = 0; y < ry2; y++)
    {
        for (int x = 0; x < rx2; x++)
        {
            int ttx = (int)((conv_param[0] * x * y + conv_param[1] * x + conv_param[2] * y + conv_param[3]) + 0.5);
            int tty = (int)((conv_param[4] * x * y + conv_param[5] * x + conv_param[6] * y + conv_param[7]) + 0.5);
            reader.getPixel(ttx, tty, rgb_tmp);
            this._patdata[x + y * rx2] = (rgb_tmp[0] << 16) | (rgb_tmp[1] << 8) | rgb_tmp[2];
        }
    }
    return true;
}
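Unlike the perspective division used in Examples 2 through 4, Example 5 maps each destination coordinate through a bilinear form a*x*y + b*x + c*y + d, using the eight values that calcPara writes into conv_param. Evaluating that mapping for a single pixel looks like the sketch below (plain C#; the method name BilinearMap is illustrative, the conv_param layout is taken from the loop above).

// Sketch: evaluate the bilinear mapping of Example 5 for one destination pixel.
// convParam = {a0,a1,a2,a3, b0,b1,b2,b3}: ttx = a0*x*y + a1*x + a2*y + a3, tty likewise.
static void BilinearMap(double[] convParam, int x, int y, out int ttx, out int tty)
{
    ttx = (int)(convParam[0] * x * y + convParam[1] * x + convParam[2] * y + convParam[3] + 0.5);
    tty = (int)(convParam[4] * x * y + convParam[5] * x + convParam[6] * y + convParam[7] + 0.5);
}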