This article collects typical usage examples of the Eigen::Matrix4f::transpose method in C++. If you are wondering how Matrix4f::transpose is used in practice, what it does, or what real calls look like, the hand-picked code examples below may help. You can also explore further usage of the containing class Eigen::Matrix4f.
Five code examples of Matrix4f::transpose are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
Example 1:
Eigen::Matrix4f Helper::convertToEigenMatrix(const Matrix4 &mat)
{
    Eigen::Matrix4f M;
    M << mat.M11, mat.M12, mat.M13, mat.M14,
         mat.M21, mat.M22, mat.M23, mat.M24,
         mat.M31, mat.M32, mat.M33, mat.M34,
         mat.M41, mat.M42, mat.M43, mat.M44;
    return M.transpose();
}
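Eigen::Matrix4f stores its coefficients column-major, while the Matrix4 fields are listed row by row (M11 is row 1, column 1), so the final transpose() flips the comma-initialised matrix into the orientation the caller expects. Assuming the Matrix4 type additionally lays out its sixteen floats contiguously in row-major order starting at M11 (the XNA/DirectXMath convention), the same result can be obtained with an Eigen::Map, because reading row-major data through a column-major view is itself a transpose. This is only a sketch under that layout assumption, not part of the original helper:

    // Hypothetical variant; assumes &mat.M11 points at 16 contiguous row-major floats.
    Eigen::Matrix4f convertToEigenMatrixSketch(const Matrix4 &mat)
    {
        return Eigen::Map<const Eigen::Matrix4f>(&mat.M11);
    }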
Example 2: reductionSystemCPU
HRESULT DX11BuildLinearSystem::applyBL( ID3D11DeviceContext* context, ID3D11ShaderResourceView* inputSRV, ID3D11ShaderResourceView* correspondenceSRV, ID3D11ShaderResourceView* correspondenceNormalsSRV, D3DXVECTOR3& mean, float meanStDev, Eigen::Matrix4f& deltaTransform, unsigned int imageWidth, unsigned int imageHeight, unsigned int level, Matrix6x7f& res, LinearSystemConfidence& conf )
{
    HRESULT hr = S_OK;

    unsigned int dimX = (unsigned int)ceil(((float)imageWidth*imageHeight)/(GlobalCameraTrackingState::getInstance().s_localWindowSize[level]*m_blockSizeBL));

    // Transpose so that Eigen's column-major coefficients are copied as a row-major float[16]
    Eigen::Matrix4f deltaTransformT = deltaTransform.transpose();

    // Initialize constant buffer
    D3D11_MAPPED_SUBRESOURCE mappedResource;
    V_RETURN(context->Map(m_constantBufferBL, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource));
    CBufferBL *cbuffer = (CBufferBL*)mappedResource.pData;
    cbuffer->imageWidth = (int)imageWidth;
    cbuffer->imageHeigth = (int)imageHeight;
    memcpy(cbuffer->deltaTransform, deltaTransformT.data(), 16*sizeof(float));
    memcpy(cbuffer->mean, (float*)mean, 3*sizeof(float));
    cbuffer->meanStDevInv = 1.0f/meanStDev;
    context->Unmap(m_constantBufferBL, 0);

    // Setup pipeline
    context->CSSetShaderResources(0, 1, &inputSRV);
    context->CSSetShaderResources(1, 1, &correspondenceSRV);
    context->CSSetShaderResources(2, 1, &correspondenceNormalsSRV);
    context->CSSetUnorderedAccessViews(0, 1, &m_pOutputFloatUAV, 0);
    context->CSSetConstantBuffers(0, 1, &m_constantBufferBL);
    context->CSSetShader(m_pComputeShaderBL[level], 0, 0);

    // Start compute shader
    context->Dispatch(dimX, 1, 1);
    assert(dimX <= D3D11_CS_DISPATCH_MAX_THREAD_GROUPS_PER_DIMENSION);

    // De-initialize pipeline
    ID3D11ShaderResourceView* nullSAV[1] = { NULL };
    ID3D11UnorderedAccessView* nullUAV[1] = { NULL };
    ID3D11Buffer* nullCB[1] = { NULL };
    context->CSSetShaderResources(0, 1, nullSAV);
    context->CSSetShaderResources(1, 1, nullSAV);
    context->CSSetShaderResources(2, 1, nullSAV);
    context->CSSetUnorderedAccessViews(0, 1, nullUAV, 0);
    context->CSSetConstantBuffers(0, 1, nullCB);
    context->CSSetShader(0, 0, 0);

    // Copy the partially reduced output to the CPU and finish the reduction there
    context->CopyResource(m_pOutputFloatCPU, m_pOutputFloat);
    V_RETURN(context->Map(m_pOutputFloatCPU, 0, D3D11_MAP_READ, 0, &mappedResource));
    res = reductionSystemCPU((float*)mappedResource.pData, dimX, conf);
    context->Unmap(m_pOutputFloatCPU, 0);

    return hr;
}
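The transpose on deltaTransform is what turns Eigen's column-major coefficient array into the row-major float[16] copied into the constant buffer: the data() of A.transpose() contains exactly the coefficients of A in row-major order. A minimal sketch of just that copy, assuming only that the destination expects row-major floats (the HLSL-side declaration of CBufferBL is not shown here):

    #include <Eigen/Dense>

    // Writes the 16 coefficients of m into out[] in row-major order; equivalent to
    // memcpy'ing the data() of m.transpose() as applyBL() does above.
    void copyRowMajorSketch(const Eigen::Matrix4f& m, float out[16])
    {
        Eigen::Map<Eigen::Matrix<float, 4, 4, Eigen::RowMajor>>(out) = m;
    }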
Example 3: paintGL
void GLWidget::paintGL (void)
{
    makeCurrent();

    // Clear the screen
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    Eigen::Vector4f viewport = camera->getViewport();
    glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);

    shader.bind();

    if (height_map[currentMap] != nullptr)
        shader.setUniform("in_HeightMap", height_map[currentMap]->bind());

    // sets all uniform variables for the phong shader
    Eigen::Matrix4f modelViewMatrix = (camera->getViewMatrix() * terrainMesh.getModelMatrix()).matrix();
    shader.setUniform("modelViewMatrix", modelViewMatrix);
    Eigen::Matrix3f normalMatrix = modelViewMatrix.transpose().inverse().matrix().block<3, 3>(0, 0);
    shader.setUniform("normalMatrix", normalMatrix);
    shader.setUniform("projectionMatrix", camera->getProjectionMatrix());
    shader.setUniform("lightViewMatrix", light_trackball->getViewMatrix());
    shader.setUniform("tessLevelInner", tessInnerLevel);
    shader.setUniform("tessLevelOuter", tessOuterLevel);
    shader.setUniform("depthLevel", depthLevel);
    shader.setUniform("MinTessLevel", minTessLevel);
    shader.setUniform("MaxTessLevel", maxTessLevel);
    shader.setUniform("MinDepth", minDepth);
    shader.setUniform("MaxDepth", maxDepth);
    shader.setUniform("wireframeEnabled", wireframeEnabled);
    shader.setUniform("showNormals", showNormals);

    terrainMesh.setAttributeLocation(shader);

    glEnable(GL_DEPTH_TEST);
    terrainMesh.render();

    shader.unbind();

    if (height_map[currentMap] != nullptr)
        height_map[currentMap]->unbind();
}
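Because (M^T)^-1 equals (M^-1)^T, the transpose().inverse() chain above is the standard inverse-transpose normal matrix, and taking its upper-left 3x3 block is valid whenever the model-view matrix is affine (bottom row 0, 0, 0, 1). A small sketch under that assumption, computing the same normal matrix directly from the linear block; this is not the project's own helper:

    #include <Eigen/Dense>

    // Normal matrix of an affine model-view matrix: (R^-1)^T of its 3x3 linear part.
    Eigen::Matrix3f normalMatrixSketch(const Eigen::Matrix4f& modelView)
    {
        Eigen::Matrix3f linear = modelView.block<3, 3>(0, 0);
        return linear.inverse().transpose();
    }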
Example 4: estimateCameraPose
bool CameraPoseFinderICP::estimateCameraPose(const DepthFrameData& depth_frame,const ColorFrameData& color_frame)
{
    if (depth_frame.frameId() == 0)
    {
        return true;
    }

    // Prepare data
    cudaDownSampleNewVertices();
    cudaDownSampleNewNormals();
    cudaDownSampleModelVertices();
    cudaDownSampleModelNormals();

    Mat44 cur_transform = _pose;
    Mat44 last_transform_inv = _pose.getInverse();
    Eigen::Matrix<float, 6, 1, 0, 6, 1> delta_dof;

    // Iterate over the levels from last to first (coarse-to-fine ICP)
    for (int l = _iter_nums.size() - 1; l >= 0; l--)
    {
        int it_nums = _iter_nums[l];
        while (it_nums--)
        {
            // findCorresSet(l, cur_transform, last_transform_inv);
            if (false == minimizePointToPlaneErrFunc(l, delta_dof, cur_transform, last_transform_inv))
                return false;

            Eigen::Matrix4f t = Eigen::Matrix4f::Identity();
            if (false == vector6ToTransformMatrix(delta_dof, t))
            {
                cout << "camera shaking detected" << endl;
                return false;
            }

            // Convert the Eigen matrix to Mat44 via its transpose (presumably row-major storage)
            Eigen::Matrix4f tt = t.transpose();
            Mat44 mat_t(tt.data());
            cur_transform = mat_t * cur_transform;
        }
    }
    _pose = cur_transform;
    return true;
}
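The vector6ToTransformMatrix() used above is not shown in this snippet. In point-to-plane ICP the 6-vector usually holds three small rotation angles plus a translation, and the update is rejected when it is implausibly large (the "camera shaking" branch). The following is a purely hypothetical sketch of such a helper, with the parameter ordering (alpha, beta, gamma, tx, ty, tz) and the rejection threshold assumed, not taken from the original code:

    #include <Eigen/Dense>
    #include <Eigen/Geometry>

    bool vector6ToTransformMatrixSketch(const Eigen::Matrix<float, 6, 1>& x, Eigen::Matrix4f& t)
    {
        // Assumed ordering: x = (alpha, beta, gamma, tx, ty, tz).
        t.setIdentity();
        t.block<3, 3>(0, 0) = (Eigen::AngleAxisf(x(2), Eigen::Vector3f::UnitZ()) *
                               Eigen::AngleAxisf(x(1), Eigen::Vector3f::UnitY()) *
                               Eigen::AngleAxisf(x(0), Eigen::Vector3f::UnitX())).toRotationMatrix();
        t.block<3, 1>(0, 3) = x.tail<3>();
        // Hypothetical sanity check: reject large jumps between ICP iterations.
        return x.head<3>().norm() < 0.5f && x.tail<3>().norm() < 0.5f;
    }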
Example 5:
template <typename GraphT, typename PointT> void
pcl::PairwiseGraphRegistration<GraphT, PointT>::computeRegistration ()
{
  if (!registration_method_)
  {
    PCL_ERROR ("[pcl::PairwiseGraphRegistration::computeRegistration] No registration method set!\n");
    return;
  }
  typename std::vector<GraphHandlerVertex>::iterator last_vx_it = last_vertices_.begin ();
  if (last_aligned_vertex_ == boost::graph_traits<GraphT>::null_vertex ())
  {
    last_aligned_vertex_ = *last_vx_it;
    ++last_vx_it;
  }
  pcl::PointCloud<PointT> fake_cloud;
  registration_method_->setInputTarget (boost::get_cloud<PointT> (last_aligned_vertex_, *(graph_handler_->getGraph ())));
  for (; last_vx_it < last_vertices_.end (); ++last_vx_it)
  {
    registration_method_->setInputCloud (boost::get_cloud<PointT> (*last_vx_it, *(graph_handler_->getGraph ())));
    const Eigen::Matrix4f last_aligned_vertex_pose = boost::get_pose (last_aligned_vertex_, *(graph_handler_->getGraph ()));
    if (!incremental_)
    {
      const Eigen::Matrix4f guess = last_aligned_vertex_pose.transpose () * boost::get_pose (*last_vx_it, *(graph_handler_->getGraph ()));
      registration_method_->align (fake_cloud, guess);
    }
    else
      registration_method_->align (fake_cloud);
    const Eigen::Matrix4f global_ref_final_tr = last_aligned_vertex_pose * registration_method_->getFinalTransformation ();
    boost::set_estimate<PointT> (*last_vx_it, global_ref_final_tr, *(graph_handler_->getGraph ()));
    last_aligned_vertex_ = *last_vx_it;
    registration_method_->setInputTarget (boost::get_cloud<PointT> (last_aligned_vertex_, *(graph_handler_->getGraph ())));
  }
}
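When incremental_ is false, the loop seeds align() with a guess derived from the pose of the last aligned vertex; note that the snippet uses the pose's transpose there, which coincides with its inverse only for a pure rotation. Below is a self-contained sketch of the same seed-the-alignment pattern written with an explicit inverse, using pcl::IterativeClosestPoint as a stand-in for the configured registration method (an assumption for illustration, not the PCL source):

    #include <pcl/point_types.h>
    #include <pcl/registration/icp.h>

    void alignWithGuessSketch(pcl::IterativeClosestPoint<pcl::PointXYZ, pcl::PointXYZ>& icp,
                              pcl::PointCloud<pcl::PointXYZ>& output,
                              const Eigen::Matrix4f& target_pose,
                              const Eigen::Matrix4f& source_pose)
    {
        // Relative transform that carries the source pose into the target pose's frame.
        const Eigen::Matrix4f guess = target_pose.inverse() * source_pose;
        icp.align(output, guess);
    }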