本文整理汇总了C++中QMatrix4x4::mapVector方法的典型用法代码示例。如果您正苦于以下问题:C++ QMatrix4x4::mapVector方法的具体用法?C++ QMatrix4x4::mapVector怎么用?C++ QMatrix4x4::mapVector使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类QMatrix4x4
的用法示例。
在下文中一共展示了QMatrix4x4::mapVector方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: calcInvRot
void SurfaceSet::calcInvRot(){
float mat[16];
glGetFloatv(GL_MODELVIEW_MATRIX, mat);
//This one is tricky: the matrix below is the GL matrix (different row/column convention than QT) with the 3x3 part transposed.
//This inverts the rotation and does weird stuff to the scale...
QMatrix4x4* invRotMat = new QMatrix4x4(mat[0],mat[1],mat[2],mat[12],mat[4],mat[5],mat[6],mat[13],mat[8],mat[9],mat[10],mat[14],mat[3],mat[7],mat[11],mat[15]);
//QMatrix4x4* invRotMat = new QMatrix4x4(mat[0],mat[1],mat[2],mat[3],mat[4],mat[5],mat[6],mat[7],mat[8],mat[9],mat[10],mat[11],mat[12],mat[13],mat[14],mat[15]);
const QVector3D xVec(1,0,0);
const QVector3D yVec(0,1,0);
const QVector3D zVec(0,0,1);
//mapVector ignores translation and such...
invRotX = invRotMat->mapVector(xVec);
invRotY = invRotMat->mapVector(yVec);
invRotZ = invRotMat->mapVector(zVec);
//normalization ignores the scale
invRotX.normalize();
invRotY.normalize();
invRotZ.normalize();
}
示例2: QFETCH
void tst_QRay3D::transform()
{
QFETCH(QVector3D, point);
QFETCH(QVector3D, direction);
QMatrix4x4 m;
m.translate(-1.0f, 2.5f, 5.0f);
m.rotate(45.0f, 1.0f, 1.0f, 1.0f);
m.scale(23.5f);
Qt3DRender::RayCasting::QRay3D ray1(point, direction);
Qt3DRender::RayCasting::QRay3D ray2(ray1);
Qt3DRender::RayCasting::QRay3D ray3;
ray1.transform(m);
ray3 = ray2.transformed(m);
QVERIFY(fuzzyCompare(ray1.origin(), ray3.origin()));
QVERIFY(fuzzyCompare(ray1.direction(), ray3.direction()));
QVERIFY(fuzzyCompare(ray1.origin(), m * point));
QVERIFY(fuzzyCompare(ray1.direction(), m.mapVector(direction)));
}
示例3: QFETCH
void tst_QRay3D::transform()
{
QFETCH(QVector3D, point);
QFETCH(QVector3D, direction);
QMatrix4x4 m;
m.translate(-1.0f, 2.5f, 5.0f);
m.rotate(45.0f, 1.0f, 1.0f, 1.0f);
m.scale(23.5f);
QRay3D ray1(point, direction);
QRay3D ray2(ray1);
QRay3D ray3;
ray1.transform(m);
ray3 = ray2.transformed(m);
QCOMPARE(ray1.origin(), ray3.origin());
QCOMPARE(ray1.direction(), ray3.direction());
QCOMPARE(ray1.origin(), m * point);
QCOMPARE(ray1.direction(), m.mapVector(direction));
}
示例4: spotDirection
/*!
    Returns the spotDirection() for this light after transforming it
    from world co-ordinates to eye co-ordinates using the top-left
    3x3 submatrix within \a transform.
    The returned result is suitable to be applied to the GL_SPOT_DIRECTION
    property of \c{glLight()}, assuming that the modelview transformation
    in the GL context is set to the identity.
    \sa eyePosition()
*/
QVector3D QGLLightParameters::eyeSpotDirection
    (const QMatrix4x4& transform) const
{
    Q_D(const QGLLightParameters);
    // mapVector() applies only the rotation/scale part of the matrix,
    // so the transform's translation cannot skew the direction.
    const QVector3D worldDirection = d->spotDirection;
    return transform.mapVector(worldDirection);
}
示例5: QPoint
QPoint CubeItem::cubeIntersection
(QWidget *widget, const QPoint &point, int *actualFace) const
{
// Bail out if no scene.
if (!mScene) {
*actualFace = -1;
return QPoint();
}
// Get the combined matrix for the projection.
int dpiX = widget->logicalDpiX();
int dpiY = widget->logicalDpiY();
QRectF bounds = boundingRect();
qreal aspectRatio = (bounds.width() * dpiY) / (bounds.height() * dpiX);
QMatrix4x4 mv = camera()->modelViewMatrix();
QMatrix4x4 proj = camera()->projectionMatrix(aspectRatio);
QMatrix4x4 combined = proj * mv;
// Find the relative position of the point within (-1, -1) to (1, 1).
QPointF relativePoint =
QPointF((point.x() - bounds.center().x()) * 2 / bounds.width(),
-(point.y() - bounds.center().y()) * 2 / bounds.height());
// Determine which face of the cube contains the point.
QVector3D pt1, pt2, pt3, pt4;
bool singleFace = (pressedFace != -1);
for (int face = 0; face < 6; ++face) {
if (singleFace && face != pressedFace)
continue;
// Create a polygon from the projected version of the face
// so that we can test for point membership.
pt1 = QVector3D(vertexData[face * 4 * 3],
vertexData[face * 4 * 3 + 1],
vertexData[face * 4 * 3 + 2]);
pt2 = QVector3D(vertexData[face * 4 * 3 + 3],
vertexData[face * 4 * 3 + 4],
vertexData[face * 4 * 3 + 5]);
pt3 = QVector3D(vertexData[face * 4 * 3 + 6],
vertexData[face * 4 * 3 + 7],
vertexData[face * 4 * 3 + 8]);
pt4 = QVector3D(vertexData[face * 4 * 3 + 9],
vertexData[face * 4 * 3 + 10],
vertexData[face * 4 * 3 + 11]);
QVector<QPointF> points2d;
points2d.append((combined * pt1).toPointF());
points2d.append((combined * pt2).toPointF());
points2d.append((combined * pt3).toPointF());
points2d.append((combined * pt4).toPointF());
QPolygonF polygon(points2d);
if (!singleFace) {
if (!polygon.containsPoint(relativePoint, Qt::OddEvenFill))
continue;
}
// We want the face that is pointing towards the user.
QVector3D v = mv.mapVector
(QVector3D::crossProduct(pt2 - pt1, pt3 - pt1));
if (!singleFace && v.z() <= 0.0f)
continue;
// Determine the intersection between the cube face and
// the ray coming from the eye position.
QVector3D eyept = proj.inverted().map
(QVector3D(relativePoint.x(), relativePoint.y(), -1.0f));
QLine3D ray(QVector3D(0, 0, 0), eyept);
QPlane3D plane(mv * pt1, v);
QResult<QVector3D> intersection = plane.intersection(ray);
if (!intersection.isValid())
continue;
QVector3D worldpt = mv.inverted().map(intersection.value());
// Map the world point to the range 0..1.
worldpt = (worldpt / CubeSize) + QVector3D(0.5f, 0.5f, 0.5f);
// Figure out the texture co-ordinates on the face that
// correspond to the point.
qreal xtex, ytex;
switch (face) {
case 0:
xtex = 1.0f - worldpt.y();
ytex = 1.0f - worldpt.z();
break;
case 1:
xtex = 1.0f - worldpt.x();
ytex = 1.0f - worldpt.z();
break;
case 2:
xtex = worldpt.y();
ytex = 1.0f - worldpt.z();
break;
case 3:
xtex = worldpt.x();
ytex = 1.0f - worldpt.z();
break;
case 4:
xtex = worldpt.x();
ytex = 1.0f - worldpt.y();
break;
case 5: default:
//.........这里部分代码省略.........
示例6: paintNodes
// Draws a glyph for every visible node of the current surface using
// immediate-mode OpenGL. `ns` selects the glyph style: 4 = pie charts,
// 6 = billboarded vectors; other values draw plain points or lines.
// NOTE(review): the full meaning of other `ns` values is not visible in
// this chunk — confirm against the callers.
void SurfaceSet::paintNodes(int ns){
// Recompute the camera-aligned axes invRotX/Y/Z used for billboarding.
calcInvRot();
SConnections* ccs = scons.at(cs);
glPointSize(qMax(size,0.1)); //does not like 0 for pointsize...
glLineWidth(size);
//for all nodes in the current surface...
for (int i = 0; i < ccs->dn.length(); i++){
Node* p = (Node*)(&ccs->dn.at(i));
Node* mlp = (Node*)(&scons.at(minSpace)->dn.at(i));
QVector3D nnormal = p->normal.normalized();
QMatrix4x4* view = viewMatrix();
// mapVector() rotates the normal into eye space without translating it.
QVector3D mapped = view->mapVector(nnormal);
QVector3D mappedp = view->map(p->p);
bool visible = mapped.z() > 0; //normal points to camera
//TODO: poor guys clipping, should take ar into account...
// Reject nodes whose eye-space position falls outside the unit square.
double clip = 1;
visible &= (mappedp.x()>-clip)&&(mappedp.x()<clip)&&(mappedp.y()>-clip)&&(mappedp.y()<clip);
if (visible) {
//How many connections have a value above the threshold?
//TODO: Change p to whatever makes sense, make conditional on pies? move?
int cOver = 0;
for (int count = 0; count < p->ncs.length(); count++){
if ((p->ncs.at(count)->v > threshold) && (mlp->ncs.at(count)->length()>minlength)) cOver++;
}
int nth = 0; //the how-manieth drawn connection for the pie chart...
// Offset toward the camera so glyphs are not z-fought by the surface.
QVector3D zshift = glyphRadius*invRotZ;
if (ns==4) {
//pie charts
// The fan's center vertex; the per-connection vertices follow below.
glShadeModel(GL_FLAT);
glBegin(GL_TRIANGLE_FAN);
glVertex3f(p->p.x()+zshift.x(),p->p.y()+zshift.y(),p->p.z()+zshift.z());
}
// pieClosePoint/cr/cg/cb remember the first drawn wedge so the fan can
// be closed with the same point and color at the end.
QVector3D pieClosePoint;
float cr,cg,cb;
//TODO: Wouldn't iterating through ALL nodes and indexing be easier, taking the number of nodes now into account?
for (int j=0; j<p->ncs.length();j++){
//scaled vector from current point to point on the other side: edges are now connected to the nodes with fn == n.p
Connection* diffc = ((Node*)(&scons.at(geo)->dn.at(i)))->ncs.at(j);
QVector3D diff = (diffc->tn-diffc->fn) * glyphRadius/100.0;
Node* colorNode = (Node*)(&scons.at(colorsFrom)->dn.at(i));
Connection* c = colorNode->ncs.at(j);
glColor4f(c->r,c->g,c->b,glyphAlpha);
bool draw = ((c->v > threshold) && (c->length()>minlength));//TODO: use minSpace
if (billboarding && (ns==6)) {
// Billboarded vectors: re-express the connection in the camera's
// invRotX/invRotY plane so it always faces the viewer.
diff = diffc->tn;
QVector2D xy(diff.x(),diff.y());
xy /= 100;
xy.normalize();
double l = diff.z()/2.0+0.5;
xy *= l*glyphRadius/100;
diff = xy.x()*invRotX + xy.y()*invRotY;
}
Connection* pieEdge = colorNode->sncs.at(j);
if (ns==4) {
//pie charts
draw = ((pieEdge->v > threshold) && (mlp->ncs.at(pieEdge->origInd)->length()>minlength)); //my brain hurts...
if (draw) {
// Remember the first wedge's color to close the fan later.
if (nth==1) {
cr = pieEdge->r;
cg = pieEdge->g;
cb = pieEdge->b;
}
glColor4f(pieEdge->r,pieEdge->g,pieEdge->b,glyphAlpha);
// Spread the cOver wedges evenly around the full circle, in the
// camera-aligned plane spanned by invRotX/invRotY.
float t = (nth/(float)cOver)*2*M_PI;
nth++;
float rad = norm*glyphRadius/3 + (1-norm)*glyphRadius*qSqrt(cOver)/30.0;
diff = rad*qSin(t)*invRotX + rad*qCos(t)*invRotY;
}
}
QVector3D p_shifted = p->p + diff;
if ((nth==1) && draw && (ns==4)) pieClosePoint = QVector3D(p_shifted.x()+zshift.x(),p_shifted.y()+zshift.y(),p_shifted.z()+zshift.z());
// Non-pie styles open their own primitive per connection; the pie fan
// (ns==4) stays open across the whole loop.
if (!vectors){
glBegin(GL_POINTS);
} else if (ns!=4){
glBegin(GL_LINES);
if (draw) glVertex3d(p->p.x()+zshift.x(),p->p.y()+zshift.y(),p->p.z()+zshift.z());
}
if (draw) glVertex3d(p_shifted.x()+zshift.x(),p_shifted.y()+zshift.y(),p_shifted.z()+zshift.z());
if (ns!=4) glEnd();
}
//TODO: deal with two/one point issue...
if (ns==4) {
// Close the triangle fan by repeating the first wedge's vertex/color.
glColor4f(cr,cg,cb,glyphAlpha);
glVertex3f(pieClosePoint.x(),pieClosePoint.y(),pieClosePoint.z());
glEnd();
}
}
}
}