本文整理汇总了C++中cvMatMul函数的典型用法代码示例。如果您正苦于以下问题:C++ cvMatMul函数的具体用法?C++ cvMatMul怎么用?C++ cvMatMul使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了cvMatMul函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: main
int main()
{
CvMat* A = cvCreateMat( 3, 3, CV_64FC1 );
CvMat* B = cvCreateMat( 3, 1, CV_64FC1 );
CvMat* X = cvCreateMat( 3, 1, CV_64FC1 );
A->data.db[0] = 11;
A->data.db[1] = 5;
A->data.db[2] = 15;
A->data.db[3] = 5;
A->data.db[4] = 3;
A->data.db[5] = 1;
A->data.db[6] = 15;
A->data.db[7] = 1;
A->data.db[8] = 31;
B->data.db[0] = 1;
B->data.db[1] = 2;
B->data.db[2] = 3;
printf("A :\n[%lf, %lf, %lf]\n[%lf, %lf, %lf]\n[%lf, %lf, %lf]\n", A->data.db[0], A->data.db[1], A->data.db[2], A->data.db[3], A->data.db[4], A->data.db[5], A->data.db[6], A->data.db[7], A->data.db[8]);
cvCGSolve( icvMatMulOps, A, B, X );
printf("x : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
cvMatMul( A, X, X );
printf("Ax : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
cvSolve( A, B, X );
printf("x : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
cvMatMul( A, X, X );
printf("Ax : [%lf, %lf, %lf]\n", X->data.db[0], X->data.db[1], X->data.db[2]);
return 0;
}
示例2: cvTranspose
void Kalman::predict_P() {
// P_pred = F*P*trans(F) + Q
cvTranspose(F, F_trans);
cvMatMul(P, F_trans, P_pred);
cvMatMul(F, P_pred, P_pred);
cvScaleAdd(P_pred, cvScalar(1), Q, P_pred);
}
示例3: cvMatMul
// Kalman measurement update of the error covariance: P = (I - K*H) * P_pred.
void KalmanSensor::update_P(CvMat *P_pred, CvMat *P) {
	// P_tmp = K*H
	cvMatMul(K, H, P_tmp);
	cvSetIdentity(P);
	// P_tmp = I - K*H. Keeping the factor in P_tmp (instead of P, as the
	// original did) avoids the final in-place cvMatMul(P, P_pred, P):
	// cvGEMM does not support aliasing of a source with the destination.
	cvScaleAdd(P_tmp, cvScalar(-1), P, P_tmp);
	// P = (I - K*H) * P_pred
	cvMatMul(P_tmp, P_pred, P);
}
示例4: sin
/**
 * Builds the 3x4 extrinsic matrix [R|T], where R = Rz(g) * Rx(b) * Rz(a)
 * (Z-X-Z Euler angle composition as written in the products below) and
 * T = (tX, tY, tZ). The caller owns the returned matrix.
 */
CvMat* AbstractCamera::computeRtMatrix(double a, double b, double g, double tX, double tY, double tZ) {
	//--- Represent 3d rotation with euler angles
	double sinG = sin(g);
	double cosG = cos(g);
	CvMat* rZg = cvCreateMat(3, 3, CV_32FC1); // rotation about Z by g
	cvSetZero(rZg);
	cvmSet(rZg, 0, 0, cosG);
	cvmSet(rZg, 0, 1, -sinG);
	cvmSet(rZg, 1, 0, sinG);
	cvmSet(rZg, 1, 1, cosG);
	cvmSet(rZg, 2, 2, 1.0f);
	double sinB = sin(b);
	double cosB = cos(b);
	CvMat* rXb = cvCreateMat(3, 3, CV_32FC1); // rotation about X by b
	cvSetZero(rXb);
	cvmSet(rXb, 0, 0, 1.0f);
	cvmSet(rXb, 1, 1, cosB);
	cvmSet(rXb, 1, 2, -sinB);
	cvmSet(rXb, 2, 1, sinB);
	cvmSet(rXb, 2, 2, cosB);
	double sinA = sin(a);
	double cosA = cos(a);
	CvMat* rZa = cvCreateMat(3, 3, CV_32FC1); // rotation about Z by a
	cvSetZero(rZa);
	cvmSet(rZa, 0, 0, cosA);
	cvmSet(rZa, 0, 1, -sinA);
	cvmSet(rZa, 1, 0, sinA);
	cvmSet(rZa, 1, 1, cosA);
	cvmSet(rZa, 2, 2, 1.0f);
	// cvMatMul (cvGEMM) does not support in-place operation, so each product
	// gets its own destination (the original wrote rotationMatrix * rZa back
	// into rotationMatrix, which is undefined behavior for cvGEMM).
	CvMat* rZgXb = cvCreateMat(3, 3, CV_32FC1);
	cvMatMul(rZg, rXb, rZgXb);
	CvMat* rotationMatrix = cvCreateMat(3, 3, CV_32FC1);
	cvMatMul(rZgXb, rZa, rotationMatrix);
	cvReleaseMat(&rZg);
	cvReleaseMat(&rXb);
	cvReleaseMat(&rZa);
	cvReleaseMat(&rZgXb);
	//--- [R|T] ; First camera rotation and translation matrix
	CvMat* rtMatrix = cvCreateMat(3, 4, CV_32FC1);
	for (int i = 0; i < 3; i++) {
		cvmSet(rtMatrix, i, 0, cvmGet(rotationMatrix, i, 0));
		cvmSet(rtMatrix, i, 1, cvmGet(rotationMatrix, i, 1));
		cvmSet(rtMatrix, i, 2, cvmGet(rotationMatrix, i, 2));
	}
	cvmSet(rtMatrix, 0, 3, tX);
	cvmSet(rtMatrix, 1, 3, tY);
	cvmSet(rtMatrix, 2, 3, tZ);
	cvReleaseMat(&rotationMatrix);
	return rtMatrix;
}
示例5: projectImg
/* Warps src into dst using tmatrix shifted by (TRANS_X, TRANS_Y):
 * a 2-row matrix is treated as an affine transform, a 3-row matrix as a
 * perspective homography. Any other row count leaves dst untouched. */
static void projectImg(IplImage *src, int64_t TRANS_X, int64_t TRANS_Y,
		IplImage *dst, CvMat *tmatrix) {
	if (tmatrix->rows == 2) {
		/* Affine case: copy the 2x2 part and add the translation to the
		 * last column. */
		CvMat *shifted = cvCreateMat(2, 3, CV_32FC1);
		for (int r = 0; r < 2; r++)
			for (int c = 0; c < 2; c++)
				cvSetReal2D(shifted, r, c, cvGetReal2D(tmatrix, r, c));
		cvSetReal2D(shifted, 0, 2, cvGetReal2D(tmatrix, 0, 2) + TRANS_X);
		cvSetReal2D(shifted, 1, 2, cvGetReal2D(tmatrix, 1, 2) + TRANS_Y);
		cvWarpAffine(src, dst, shifted, CV_INTER_LINEAR, cvScalarAll(0));
		cvReleaseMat(&shifted);
	} else if (tmatrix->rows == 3) {
		/* Perspective case: left-multiply by a pure-translation homography. */
		CvMat *shift = cvCreateMat(3, 3, CV_32FC1);
		cvSetIdentity(shift);
		cvSetReal2D(shift, 0, 2, TRANS_X);
		cvSetReal2D(shift, 1, 2, TRANS_Y);
		CvMat *shifted = cvCreateMat(3, 3, CV_32FC1);
		cvMatMul(shift, tmatrix, shifted);
		cvWarpPerspective(src, dst, shifted, CV_INTER_LINEAR, cvScalarAll(0));
		cvReleaseMat(&shift);
		cvReleaseMat(&shifted);
	}
}
示例6: cvCreateMat
/**
 * Calculates the 3x4 projection matrix K * P * [R|T].
 * @param rtMat 3x4 extrinsic [R|T] matrix.
 * @return newly allocated 3x4 projection matrix; the caller owns it.
 */
CvMat* MultipleViewGeomOld::calculateProjectionMatrix(CvMat *rtMat) {
	// kpMat = K * P (3 rows, 4 columns)
	CvMat* kpMat = cvCreateMat(3, 4, CV_32FC1);
	cvMatMul(kMat, pMat, kpMat);
	// projMat = (K*P) * [R|T]
	CvMat* projMat = cvCreateMat(3, 4, CV_32FC1);
	cvMatMul(kpMat, rtMat, projMat);
	// The original code cloned projMat for no reason (leaking the original)
	// and never released kpMat; return the computed matrix directly and free
	// the intermediate.
	cvReleaseMat(&kpMat);
	LOG4CPLUS_DEBUG(myLogger,"Projection K P R|T matrix" << endl << printCvMat(projMat));
	return projMat;
}
示例7: cvCreateMat
// Projects all model vertices through the 3x4 camera matrix CamMat,
// dehomogenizes the results, and draws every wireframe edge into img with
// the given color and thickness.
// NOTE(review): there is no guard against a zero homogeneous (z) component;
// a vertex on the camera plane would divide by zero — confirm callers keep
// the model in front of the camera.
void CModel::Draw(IplImage* img, CvMat* CamMat, CvScalar color, int thickness)
{
int nEdges = Edges.size();
int nVertexes = Vertexes.size();
// Allocate one 3x1 matrix per projected vertex.
vector<CvMat*> ProjectedVertex;
ProjectedVertex.resize(Vertexes.size());
for (int i = 0; i < nVertexes; i++)
ProjectedVertex[i] = cvCreateMat(3,1,CV_32FC1);
// Project each vertex and divide x and y by the homogeneous coordinate.
for (int i = 0; i < nVertexes; i++)
{
cvMatMul(CamMat,Vertexes[i],ProjectedVertex[i]);
CV_MAT_ELEM(*ProjectedVertex[i],float,0,0) =
CV_MAT_ELEM(*ProjectedVertex[i],float,0,0)/CV_MAT_ELEM(*ProjectedVertex[i],float,2,0);
CV_MAT_ELEM(*ProjectedVertex[i],float,1,0) =
CV_MAT_ELEM(*ProjectedVertex[i],float,1,0)/CV_MAT_ELEM(*ProjectedVertex[i],float,2,0);
}
// Draw one line per edge between its two projected endpoints.
for (int i = 0; i < nEdges; i++)
{
cvLine(img,
cvPoint(CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxFrm],float,0,0), CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxFrm],float,1,0)),
cvPoint(CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxTo], float,0,0), CV_MAT_ELEM(*ProjectedVertex[Edges[i].idxTo], float,1,0)),
color, thickness);
}
// Free the temporary projected-vertex matrices.
for (int i = 0; i < nVertexes; i++)
cvReleaseMat(&ProjectedVertex[i]);
ProjectedVertex.clear();
}
示例8: getOptFlow
/**
 * Lucas-Kanade-style optical flow at a single point: accumulates the 2x2
 * structure matrix M and the temporal-mismatch vector b over a square window
 * of side OPTICAL_FLOW_POINT_AREA around p, then solves M*v = -b (SVD-based
 * inversion, which tolerates a near-singular M).
 * @return the flow vector as Point2D(vy, vx).
 */
Point2D getOptFlow(IplImage* currentFrame, Point2D p, IplImage* preFrame)
{
	Point2D temp;
	double b[2] = {0, 0};
	double M11 = 0, M12 = 0, M22 = 0;
	for (int i = -OPTICAL_FLOW_POINT_AREA/2; i < OPTICAL_FLOW_POINT_AREA/2; i++)
	{
		for (int j = -OPTICAL_FLOW_POINT_AREA/2; j < OPTICAL_FLOW_POINT_AREA/2; j++)
		{
			// Spatial gradients at this window pixel.
			temp = partial(currentFrame, Point2D(p.row+i, p.col+j));
			// Temporal difference, computed once instead of twice.
			double dt = pixval8U(currentFrame, p.row+i, p.col+j) - pixval8U(preFrame, p.row+i, p.col+j);
			M11 += temp.dcol*temp.dcol;
			M12 += temp.dcol*temp.drow;
			M22 += temp.drow*temp.drow;
			b[0] += temp.dcol*dt;
			b[1] += temp.drow*dt;
		}
	}
	double a[] = {M11, M12, M12, M22};
	CvMat M = cvMat(2, 2, CV_64FC1, a);
	// Allocate the result matrices directly: the original cvCloneMat copies
	// were overwritten immediately (wasted work) and never released (leak).
	CvMat *Mi = cvCreateMat(2, 2, CV_64FC1);
	cvInvert(&M, Mi, CV_SVD);
	// Solve for v = M^{-1} * (-b).
	b[0] = -b[0];
	b[1] = -b[1];
	CvMat Mb = cvMat(2, 1, CV_64FC1, b);
	CvMat *Mr = cvCreateMat(2, 1, CV_64FC1);
	cvMatMul(Mi, &Mb, Mr);
	double vy = cvmGet(Mr, 1, 0);
	double vx = cvmGet(Mr, 0, 0);
	cvReleaseMat(&Mi);
	cvReleaseMat(&Mr);
	return Point2D(vy, vx);
}
示例9: cvCreateMat
/** Computes the optical center of a camera from its projection matrix
 * (http://en.wikipedia.org/wiki/Camera_matrix ).
 * Splitting pM = [A | b], the center is C = -A^{-1} * b.
 * @param pM : Camera projection matrix (3x4).
 * @return : Optical center of camera.
 */
Point3D Utility::getOpticalCenter( CvMat* pM )
{
	CvMat *A    = cvCreateMat(3, 3, CV_64FC1);
	CvMat *Ainv = cvCreateMat(3, 3, CV_64FC1);
	CvMat *b    = cvCreateMat(3, 1, CV_64FC1);
	// Copy the left 3x3 block into A and the last column into b.
	for (int row = 0; row < 3; ++row)
	{
		cvmSet(b, row, 0, cvmGet(pM, row, 3));
		for (int col = 0; col < 3; ++col)
			cvmSet(A, row, col, cvmGet(pM, row, col));
	}
	cvInvert(A, Ainv);
	CvMat *center = cvCreateMat(3, 1, CV_64FC1);
	cvMatMul(Ainv, b, center); // A^{-1} b, negated below
	Point3D toRet;
	toRet.x = -1 * cvmGet(center, 0, 0); //NULL SPACE OF MATRIX pM
	toRet.y = -1 * cvmGet(center, 1, 0);
	toRet.z = -1 * cvmGet(center, 2, 0);
	cvReleaseMat(&A);
	cvReleaseMat(&Ainv);
	cvReleaseMat(&b);
	cvReleaseMat(&center);
	return toRet;
}
示例10: cvMatMul
/*! Transforms the mesh vertices with the given homography (or copies them
 * unchanged when _homography is NULL), dehomogenizes them, and averages the
 * image colors over every mesh triangle into the per-triangle `avg` buffer.
 * Also accumulates the total number of pixels visited in `nbPix`.
 */
void LightCollector::averageImage(IplImage *im, CvMat *_homography)
{
// (Re)size the average buffer when the channel count changes. Note the
// buffer is only freed when it must grow (avgChannels < im->nChannels);
// an existing larger buffer is deliberately reused as-is.
if (avgChannels != im->nChannels) {
if (avgChannels < im->nChannels) {
delete[] avg;
avg = 0;
}
avgChannels = im->nChannels;
}
if (!avg) avg = new float[avgChannels*nbTri];
// apply the homography to every mesh vertex
if (_homography)
cvMatMul(_homography, vertices, transformed);
else
cvCopy(vertices, transformed);
// Dehomogenize in place: divide the x and y rows by the w row.
CvMat r1,r2,r3;
cvGetRow(transformed, &r1, 0);
cvGetRow(transformed, &r2, 1);
cvGetRow(transformed, &r3, 2);
cvDiv(&r1,&r3,&r1);
cvDiv(&r2,&r3,&r2);
// Average the image over each triangle; stat_triangle returns the number
// of pixels it visited.
nbPix=0;
for (int t=0; t<nbTri;t++) {
int pts[3][2];
for (int i=0; i<3; i++) {
assert(triangles[t*3+i] < transformed->cols);
pts[i][0] = cvRound(CV_MAT_ELEM(*transformed, float, 0, triangles[t*3+i]));
pts[i][1] = cvRound(CV_MAT_ELEM(*transformed, float, 1, triangles[t*3+i]));
}
nbPix+=stat_triangle(im, pts, avg+t*avgChannels);
}
}
示例11: calculateWidthInPixels
/**
 * Computes the road width in pixels at ground depth Y: projects the two
 * ground-plane points X_1 = (W, Y, 0, 1) and X_2 = (0, Y, 0, 1) through the
 * 3x4 projection matrix P (rows P_1 and P_3) and differences their image
 * x-coordinates: w = P_1.X_1/P_3.X_1 - P_1.X_2/P_3.X_2.
 * @param P 3x4 camera projection matrix (CV_32FC1).
 * @param Y depth on the ground plane at which to measure.
 * @return road width in pixels, rounded to the nearest integer.
 */
int calculateWidthInPixels(CvMat* P, float Y){
	// NOTE(review): the original comment claimed "20cm ~ 0.2m" but the value
	// is 0.10 — confirm the intended physical road width.
	float W = 0.10; //width of road in meters
	float w = 0.0;  //width of the road in pixels
	CvMat tmp;
	// P_1 and P_3 are copies of rows 0 and 2 of P.
	CvMat *P_1 = cvCreateMat(1,4,CV_32FC1);
	cvGetRow(P,&tmp,0); //row 0
	cvCopy(&tmp,P_1,NULL);
	CvMat *P_3 = cvCreateMat(1,4,CV_32FC1);
	cvGetRow(P,&tmp,2); //row 2
	cvCopy(&tmp,P_3,NULL);
	CvMat* X_1 = cvCreateMat(4,1,CV_32FC1);
	CvMat* X_2 = cvCreateMat(4,1,CV_32FC1);
	CvMat* P_1_times_X_1 = cvCreateMat(1,1,CV_32FC1);
	CvMat* P_3_times_X_1 = cvCreateMat(1,1,CV_32FC1);
	CvMat* P_1_times_X_2 = cvCreateMat(1,1,CV_32FC1);
	CvMat* P_3_times_X_2 = cvCreateMat(1,1,CV_32FC1);
	cvmSet(X_1,0,0,W);
	cvmSet(X_1,1,0,Y);
	cvmSet(X_1,2,0,0.0);
	cvmSet(X_1,3,0,1.0);
	cvmSet(X_2,0,0,0);
	cvmSet(X_2,1,0,Y);
	cvmSet(X_2,2,0,0);
	cvmSet(X_2,3,0,1);
	cvMatMul(P_1,X_1,P_1_times_X_1);
	cvMatMul(P_3,X_1,P_3_times_X_1);
	cvMatMul(P_1,X_2,P_1_times_X_2);
	cvMatMul(P_3,X_2,P_3_times_X_2);
	w = ((cvmGet(P_1_times_X_1,0,0) / cvmGet(P_3_times_X_1,0,0))
	   - (cvmGet(P_1_times_X_2,0,0) / cvmGet(P_3_times_X_2,0,0)));
	// Release every matrix allocated above (the original leaked all eight).
	cvReleaseMat(&P_1);
	cvReleaseMat(&P_3);
	cvReleaseMat(&X_1);
	cvReleaseMat(&X_2);
	cvReleaseMat(&P_1_times_X_1);
	cvReleaseMat(&P_3_times_X_1);
	cvReleaseMat(&P_1_times_X_2);
	cvReleaseMat(&P_3_times_X_2);
	return int(w+0.5);
}
示例12: mcvGetVanishingPoint
/**
 * Computes the vanishing point of the ground plane in image coordinates by
 * rotating the world-frame VP direction through the yaw and pitch matrices
 * and applying the intrinsic matrix. The original code multiplied matrices
 * in-place (cvMatMul with the destination aliased to a source), which
 * cvGEMM does not support; each product now has its own backing buffer.
 */
FLOAT_POINT2D mcvGetVanishingPoint(const CameraInfo *cameraInfo)
{
	//get the vp in world coordinates
	FLOAT_MAT_ELEM_TYPE vpp[] = {sin(cameraInfo->yaw)/cos(cameraInfo->pitch),
		cos(cameraInfo->yaw)/cos(cameraInfo->pitch), 0};
	CvMat vp = cvMat(3, 1, FLOAT_MAT_TYPE, vpp);
	//rotation matrix for yaw
	FLOAT_MAT_ELEM_TYPE tyawp[] = {cos(cameraInfo->yaw), -sin(cameraInfo->yaw), 0,
		sin(cameraInfo->yaw), cos(cameraInfo->yaw), 0,
		0, 0, 1};
	CvMat tyaw = cvMat(3, 3, FLOAT_MAT_TYPE, tyawp);
	//rotation matrix for pitch
	FLOAT_MAT_ELEM_TYPE tpitchp[] = {1, 0, 0,
		0, -sin(cameraInfo->pitch), -cos(cameraInfo->pitch),
		0, cos(cameraInfo->pitch), -sin(cameraInfo->pitch)};
	CvMat tpitch = cvMat(3, 3, FLOAT_MAT_TYPE, tpitchp);
	//combined world-to-camera rotation: pitch * yaw
	FLOAT_MAT_ELEM_TYPE rotp[9];
	CvMat rot = cvMat(3, 3, FLOAT_MAT_TYPE, rotp);
	cvMatMul(&tpitch, &tyaw, &rot);
	//matrix to shift optical center and focal length (camera -> image frame)
	FLOAT_MAT_ELEM_TYPE t1p[] = {
		cameraInfo->focalLength.x, 0, cameraInfo->opticalCenter.x,
		0, cameraInfo->focalLength.y, cameraInfo->opticalCenter.y,
		0, 0, 1};
	CvMat t1 = cvMat(3, 3, FLOAT_MAT_TYPE, t1p);
	//full world-to-image transform: K * rotation
	FLOAT_MAT_ELEM_TYPE transformp[9];
	CvMat transform = cvMat(3, 3, FLOAT_MAT_TYPE, transformp);
	cvMatMul(&t1, &rot, &transform);
	//transform the world vanishing point into the image frame
	FLOAT_MAT_ELEM_TYPE vpImgp[3];
	CvMat vpImg = cvMat(3, 1, FLOAT_MAT_TYPE, vpImgp);
	cvMatMul(&transform, &vp, &vpImg);
	//clean and return
	FLOAT_POINT2D ret;
	ret.x = cvGetReal1D(&vpImg, 0);
	ret.y = cvGetReal1D(&vpImg, 1);
	return ret;
}
示例13: printf
//============================================================================
// Estimates the update matrices used by the basic AAM search from numerical
// gradient experiments:
//   __Rc = (Gc*Gc^T)^{-1} * Gc  — maps texture residuals to model-parameter
//   updates; __Rq is the analogous matrix for the 4 pose parameters.
// Each R is the Moore-Penrose pseudo-inverse of an estimated gradient matrix.
void AAM_Basic::CalcGradientMatrix(const CvMat* CParams,
const CvMat* vCDisps,
const CvMat* vPoseDisps,
const std::vector<AAM_Shape>& AllShapes,
const std::vector<IplImage*>& AllImages)
{
int npixels = __cam.__texture.nPixels();
int np = __cam.nModes();
// do model parameter experiments
{
printf("Calculating parameter gradient matrix...\n");
CvMat* GParam = cvCreateMat(np, npixels, CV_64FC1);cvZero(GParam);
CvMat* GtG = cvCreateMat(np, np, CV_64FC1);
CvMat* GtGInv = cvCreateMat(np, np, CV_64FC1);
// estimate Rc
EstCParamGradientMatrix(GParam, CParams, AllShapes, AllImages, vCDisps);
__Rc = cvCreateMat(np, npixels, CV_64FC1);
// GtG = GParam * GParam^T (CV_GEMM_B_T transposes the second operand)
cvGEMM(GParam, GParam, 1, NULL, 0, GtG, CV_GEMM_B_T);
// SVD-based inversion tolerates a near-singular GtG
cvInvert(GtG, GtGInv, CV_SVD );
cvMatMul(GtGInv, GParam, __Rc);
cvReleaseMat(&GtG);
cvReleaseMat(&GtGInv);
cvReleaseMat(&GParam);
}
// do pose experiments, this is for global shape normalization
// (4 pose parameters — presumably scale/rotation/translation; confirm
// against EstPoseGradientMatrix)
{
printf("Calculating pose gradient matrix...\n");
CvMat* GtG = cvCreateMat(4, 4, CV_64FC1);
CvMat* GtGInv = cvCreateMat(4, 4, CV_64FC1);
CvMat* GPose = cvCreateMat(4, npixels, CV_64FC1); cvZero(GPose);
// estimate Rt
EstPoseGradientMatrix(GPose, CParams, AllShapes, AllImages, vPoseDisps);
__Rq = cvCreateMat(4, npixels, CV_64FC1);
cvGEMM(GPose, GPose, 1, NULL, 0, GtG, CV_GEMM_B_T);
cvInvert(GtG, GtGInv, CV_SVD);
cvMatMul(GtGInv, GPose, __Rq);
cvReleaseMat(&GtG);
cvReleaseMat(&GtGInv);
cvReleaseMat(&GPose);
}
}
示例14: assert
/**
 * Multiplies A * B into a newly allocated matrix.
 * @param A left operand (m x k).
 * @param B right operand (k x n); must have the same element type as A.
 * @return newly created m x n matrix of A's type; the caller owns it.
 */
CvMat* camcalib::matMul(const CvMat* A, const CvMat* B)
{
	assert(A->cols == B->rows);
	// cvMatMul requires identical element types for both operands; catch a
	// mismatch here instead of inside OpenCV's error handler.
	assert(CV_MAT_TYPE(A->type) == CV_MAT_TYPE(B->type));
	CvMat* M = cvCreateMat(A->rows, B->cols, A->type);
	cvMatMul(A, B, M);
	return M;
}
示例15: cvCopy
/*
 * Converts BazAR's projection matrix into the ARToolKit-style camera pose
 * (matCameraRT4_4) and, in debug mode, draws the coordinate-system axes
 * into *dst.
 */
void BazARTracker::show_result(CamAugmentation &augment, IplImage *video, IplImage **dst)
{
	if (getDebugMode()){
		if (*dst==0) *dst=cvCloneImage(video);
		else cvCopy(video, *dst);
	}
	CvMat *m = augment.GetProjectionMatrix(0);
	// Flip...(This occured from OpenGL origin / camera origin)
	CvMat *coordinateTrans = cvCreateMat(3, 3, CV_64F);
	cvmSetIdentity(coordinateTrans);
	cvmSet(coordinateTrans, 1, 1, -1);
	cvmSet(coordinateTrans, 1, 2, m_cparam->cparam.ysize);
	// cvMatMul does not support in-place operation; the original
	// cvMatMul(coordinateTrans, m, m) also mutated the matrix owned by
	// `augment`. Write the flipped matrix into a separate buffer instead.
	CvMat *flipped = cvCreateMat(m->rows, m->cols, cvGetElemType(m));
	cvMatMul(coordinateTrans, m, flipped);
	// extract intrinsic camera parameters from bazar's projection matrix..
	GetARToolKitRTfromBAZARProjMat(g_matIntrinsic, flipped, matCameraRT4_4);
	cvTranspose(matCameraRT4_4, matCameraRT4_4);
	cvReleaseMat(&coordinateTrans);
	// Debug: project and draw the coordinate-system axes
	if (getDebugMode()) {
		double w =video->width/2.0;
		double h =video->height/2.0;
		// 3D coordinates of an object
		double pts[4][4] = {
			{w,h,0, 1}, // 0,0,0,1
			{w*2,h,0, 1}, // w, 0
			{w,h*2,0, 1}, // 0, h
			{w,h,-w-h, 1} // 0, 0, -
		};
		CvMat ptsMat, projectedMat;
		cvInitMatHeader(&ptsMat, 4, 4, CV_64FC1, pts);
		cvInitMatHeader(&projectedMat, 3, 4, CV_64FC1, projected);
		// projected = flipped * pts^T (CV_GEMM_B_T transposes pts)
		cvGEMM(flipped, &ptsMat, 1, 0, 0, &projectedMat, CV_GEMM_B_T );
		for (int i=0; i<4; i++)
		{
			projected[0][i] /= projected[2][i];
			projected[1][i] /= projected[2][i];
		}
		// draw the projected axis lines (x = red, y = green, z = blue)
		cvLine(*dst, cvPoint((int)projected[0][0], (int)projected[1][0]),
			cvPoint((int)projected[0][1], (int)projected[1][1]), CV_RGB(255,0,0), 2);
		cvLine(*dst, cvPoint((int)projected[0][0], (int)projected[1][0]),
			cvPoint((int)projected[0][2], (int)projected[1][2]), CV_RGB(0,255,0), 2);
		cvLine(*dst, cvPoint((int)projected[0][0], (int)projected[1][0]),
			cvPoint((int)projected[0][3], (int)projected[1][3]), CV_RGB(0,0,255), 2);
	}
	cvReleaseMat(&flipped);
}