This article collects typical usage examples of the C++ method UTimer::ticks. If you have been wondering what UTimer::ticks does, how to use it, or where to find examples of it, the hand-picked code examples below should help. You can also explore further usage examples of the UTimer class that this method belongs to.
The following shows 15 code examples of the UTimer::ticks method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
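Before diving into the examples, here is a minimal self-contained sketch of the pattern they all share. It assumes UTimer from RTAB-Map's UtiLite utility library (the header path below is an assumption), where ticks() returns the elapsed time in seconds since construction or since the previous start()/ticks() call and then restarts the timer, which makes it convenient for timing consecutive stages of a pipeline:
// Minimal sketch; the header location and the exact semantics are inferred from the examples below.
#include <rtabmap/utilite/UTimer.h>  // assumed UTimer header path in RTAB-Map

#include <cstdio>
#include <numeric>
#include <vector>

int main()
{
    UTimer timer;  // starts counting on construction

    // Stage 1: allocate some work data.
    std::vector<double> values(1000000, 0.5);
    printf("Time stage 1 = %f s\n", timer.ticks());  // elapsed since construction; the timer restarts

    // Stage 2: process the data.
    double sum = std::accumulate(values.begin(), values.end(), 0.0);
    printf("Time stage 2 = %f s (sum=%f)\n", timer.ticks(), sum);  // elapsed since the end of stage 1

    return 0;
}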
Example 1: addData
void DataRecorder::addData(const rtabmap::SensorData & data, const Transform & pose, const cv::Mat & covariance)
{
memoryMutex_.lock();
if(memory_)
{
if(memory_->getStMem().size() == 0 && data.id() > 0)
{
ParametersMap customParameters;
customParameters.insert(ParametersPair(Parameters::kMemGenerateIds(), "false")); // use id from data
memory_->parseParameters(customParameters);
}
//save to database
UTimer time;
memory_->update(data, pose, covariance);
const Signature * s = memory_->getLastWorkingSignature();
totalSizeKB_ += (int)s->sensorData().imageCompressed().total()/1000;
totalSizeKB_ += (int)s->sensorData().depthOrRightCompressed().total()/1000;
totalSizeKB_ += (int)s->sensorData().laserScanCompressed().total()/1000;
memory_->cleanup();
if(++count_ % 30)
{
memory_->emptyTrash();
}
UDEBUG("Time to process a message = %f s", time.ticks());
}
memoryMutex_.unlock();
}
Example 2: node
// The returned OcTree must be deleted by the caller.
// Since RTAB-Map optimizes the graph at almost every iteration, the octomap
// cannot be updated online; it is only available through the service. To get an
// "online" octomap published as a topic, you may want to subscribe an
// octomap_server to the /rtabmap/cloud topic. (A caller-side usage sketch
// follows this example.)
//
octomap::OcTree * MapsManager::createOctomap(const std::map<int, Transform> & poses)
{
octomap::OcTree * octree = new octomap::OcTree(gridCellSize_);
UTimer time;
for(std::map<int, Transform>::const_iterator posesIter = poses.begin(); posesIter!=poses.end(); ++posesIter)
{
std::map<int, pcl::PointCloud<pcl::PointXYZRGB>::Ptr >::iterator cloudsIter = clouds_.find(posesIter->first);
if(cloudsIter != clouds_.end() && cloudsIter->second->size())
{
octomap::Pointcloud * scan = new octomap::Pointcloud();
//octomap::pointcloudPCLToOctomap(*cloudsIter->second, *scan); // Not anymore in Indigo!
scan->reserve(cloudsIter->second->size());
for(pcl::PointCloud<pcl::PointXYZRGB>::const_iterator it = cloudsIter->second->begin();
it != cloudsIter->second->end();
++it)
{
// Check if the point is invalid
if(pcl::isFinite(*it))
{
scan->push_back(it->x, it->y, it->z);
}
}
float x,y,z, r,p,w;
posesIter->second.getTranslationAndEulerAngles(x,y,z,r,p,w);
octomap::ScanNode node(scan, octomap::pose6d(x,y,z, r,p,w), posesIter->first);
octree->insertPointCloud(node, cloudMaxDepth_, true, true);
ROS_INFO("inserted %d pt=%d (%fs)", posesIter->first, (int)scan->size(), time.ticks());
}
}
octree->updateInnerOccupancy();
ROS_INFO("updated inner occupancy (%fs)", time.ticks());
// clear memory if no one subscribed
if(mapCacheCleanup_ && cloudMapPub_.getNumSubscribers() == 0)
{
clouds_.clear();
}
return octree;
}
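As the comment at the top of this example notes, the returned OcTree is owned by the caller and must be deleted. Below is a small, hypothetical caller-side sketch; the MapsManager header name, the optimized poses map, and the output file name are assumptions made for illustration, while writeBinary() is the standard octomap binary serialization call:
#include <octomap/OcTree.h>
#include <rtabmap/core/Transform.h>
#include <map>
#include <string>
#include "MapsManager.h"  // assumed header for the rtabmap_ros MapsManager class

// Hypothetical helper: build the octree on demand, persist it, then free it.
void saveOctomapOnce(MapsManager & mapsManager,
                     const std::map<int, rtabmap::Transform> & optimizedPoses,
                     const std::string & outputFile)
{
    octomap::OcTree * octree = mapsManager.createOctomap(optimizedPoses);
    if(octree)
    {
        octree->writeBinary(outputFile); // standard octomap serialization to a .bt file
        delete octree;                   // ownership is on the caller, as noted above
    }
}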
Example 3: takeImage
SensorData Camera::takeImage(CameraInfo * info)
{
bool warnFrameRateTooHigh = false;
float actualFrameRate = 0;
if(_imageRate>0)
{
int sleepTime = (1000.0f/_imageRate - 1000.0f*_frameRateTimer->getElapsedTime());
if(sleepTime > 2)
{
uSleep(sleepTime-2);
}
else if(sleepTime < 0)
{
warnFrameRateTooHigh = true;
actualFrameRate = 1.0/(_frameRateTimer->getElapsedTime());
}
// Add precision at the cost of a small overhead
while(_frameRateTimer->getElapsedTime() < 1.0/double(_imageRate)-0.000001)
{
//
}
double slept = _frameRateTimer->getElapsedTime();
_frameRateTimer->start();
UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(_imageRate));
}
UTimer timer;
SensorData data = this->captureImage(info);
double captureTime = timer.ticks();
if(warnFrameRateTooHigh)
{
UWARN("Camera: Cannot reach target image rate %f Hz, current rate is %f Hz and capture time = %f s.",
_imageRate, actualFrameRate, captureTime);
}
else
{
UDEBUG("Time capturing image = %fs", captureTime);
}
if(info)
{
info->id = data.id();
info->stamp = data.stamp();
info->timeCapture = captureTime;
}
return data;
}
Example 4: mapDataReceivedCallback
void mapDataReceivedCallback(const rtabmap_ros::MapDataConstPtr & msg)
{
UTimer timer;
std::map<int, Transform> poses;
std::multimap<int, Link> constraints;
Transform mapOdom;
rtabmap_ros::mapGraphFromROS(msg->graph, poses, constraints, mapOdom);
for(unsigned int i=0; i<msg->nodes.size(); ++i)
{
if(msg->nodes[i].image.size() ||
msg->nodes[i].depth.size() ||
msg->nodes[i].laserScan.size())
{
uInsert(nodes_, std::make_pair(msg->nodes[i].id, rtabmap_ros::nodeDataFromROS(msg->nodes[i])));
}
}
// create a tmp signature with latest sensory data
if(poses.size() && nodes_.find(poses.rbegin()->first) != nodes_.end())
{
Signature tmpS = nodes_.at(poses.rbegin()->first);
SensorData tmpData = tmpS.sensorData();
tmpData.setId(-1);
uInsert(nodes_, std::make_pair(-1, Signature(-1, -1, 0, tmpS.getStamp(), "", tmpS.getPose(), Transform(), tmpData)));
poses.insert(std::make_pair(-1, poses.rbegin()->second));
}
// Update maps
poses = mapsManager_.updateMapCaches(
poses,
0,
false,
false,
false,
false,
false,
nodes_);
mapsManager_.publishMaps(poses, msg->header.stamp, msg->header.frame_id);
ROS_INFO("map_assembler: Publishing data = %fs", timer.ticks());
}
Example 5: mainLoop
void CameraThread::mainLoop()
{
UTimer totalTime;
UDEBUG("");
CameraInfo info;
SensorData data = _camera->takeImage(&info);
if(!data.imageRaw().empty() || (dynamic_cast<DBReader*>(_camera) != 0 && data.id()>0)) // intermediate nodes may not have an image set
{
postUpdate(&data, &info);
info.cameraName = _camera->getSerial();
info.timeTotal = totalTime.ticks();
this->post(new CameraEvent(data, info));
}
else if(!this->isKilled())
{
UWARN("no more images...");
this->kill();
this->post(new CameraEvent());
}
}
Example 6: mapDataReceivedCallback
//......... part of the code omitted here .........
}
else
{
ROS_ERROR("Odometry pose of node %d not found in cache!", msg->graph.posesId[i]);
return;
}
}
}
std::map<int, Transform> poses;
for(std::map<int, Signature>::iterator iter=nodeInfos.begin(); iter!=nodeInfos.end(); ++iter)
{
poses.insert(std::make_pair(iter->first, iter->second.getPose()));
}
// Optimize only if there is a subscriber
if(mapDataPub_.getNumSubscribers() || mapGraphPub_.getNumSubscribers())
{
UTimer timer;
std::map<int, Transform> optimizedPoses;
Transform mapCorrection = Transform::getIdentity();
std::map<int, rtabmap::Transform> posesOut;
std::multimap<int, rtabmap::Link> linksOut;
if(poses.size() > 1 && constraints.size() > 0)
{
int fromId = optimizeFromLastNode_?poses.rbegin()->first:poses.begin()->first;
optimizer_->getConnectedGraph(
fromId,
poses,
constraints,
posesOut,
linksOut);
optimizedPoses = optimizer_->optimize(fromId, posesOut, linksOut);
mapToOdomMutex_.lock();
mapCorrection = optimizedPoses.at(posesOut.rbegin()->first) * posesOut.rbegin()->second.inverse();
mapToOdom_ = mapCorrection;
mapToOdomMutex_.unlock();
}
else if(poses.size() == 1 && constraints.size() == 0)
{
optimizedPoses = poses;
}
else if(poses.size() || constraints.size())
{
ROS_ERROR("map_optimizer: Poses=%d and edges=%d (poses must "
"not be null if there are edges, and edges must be null if poses <= 1)",
(int)poses.size(), (int)constraints.size());
}
rtabmap_ros::MapData outputDataMsg;
rtabmap_ros::MapGraph outputGraphMsg;
rtabmap_ros::mapGraphToROS(optimizedPoses,
linksOut,
mapCorrection,
outputGraphMsg);
if(mapGraphPub_.getNumSubscribers())
{
outputGraphMsg.header = msg->header;
mapGraphPub_.publish(outputGraphMsg);
}
if(mapDataPub_.getNumSubscribers())
{
outputDataMsg.header = msg->header;
outputDataMsg.graph = outputGraphMsg;
outputDataMsg.nodes = msg->nodes;
if(posesOut.size() > msg->nodes.size())
{
std::set<int> addedNodes;
for(unsigned int i=0; i<msg->nodes.size(); ++i)
{
addedNodes.insert(msg->nodes[i].id);
}
std::list<int> toAdd;
for(std::map<int, Transform>::iterator iter=posesOut.begin(); iter!=posesOut.end(); ++iter)
{
if(addedNodes.find(iter->first) == addedNodes.end())
{
toAdd.push_back(iter->first);
}
}
if(toAdd.size())
{
int oi = outputDataMsg.nodes.size();
outputDataMsg.nodes.resize(outputDataMsg.nodes.size()+toAdd.size());
for(std::list<int>::iterator iter=toAdd.begin(); iter!=toAdd.end(); ++iter)
{
UASSERT(cachedNodeInfos_.find(*iter) != cachedNodeInfos_.end());
rtabmap_ros::nodeDataToROS(cachedNodeInfos_.at(*iter), outputDataMsg.nodes[oi]);
++oi;
}
}
}
mapDataPub_.publish(outputDataMsg);
}
ROS_INFO("Time graph optimization = %f s", timer.ticks());
}
}
Example 7: Render
//......... part of the code omitted here .........
UERROR("Not found mesh %d !?!?", id);
}
}
else if(uContains(bufferedSensorData, id))
{
rtabmap::SensorData & data = bufferedSensorData.at(id);
if(!data.imageRaw().empty() && !data.depthRaw().empty())
{
// Voxelize and filter depending on the previous cloud?
pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
pcl::IndicesPtr indices(new std::vector<int>);
LOGI("Creating node cloud %d (image size=%dx%d)", id, data.imageRaw().cols, data.imageRaw().rows);
cloud = rtabmap::util3d::cloudRGBFromSensorData(data, data.imageRaw().rows/data.depthRaw().rows, maxCloudDepth_, 0, indices.get());
if(cloud->size() && indices->size())
{
UTimer time;
// pcl::organizedFastMesh doesn't take indices, so set the points we don't need to mesh to NaN
pcl::PointCloud<pcl::PointXYZRGB>::Ptr output(new pcl::PointCloud<pcl::PointXYZRGB>);
pcl::ExtractIndices<pcl::PointXYZRGB> filter;
filter.setIndices(indices);
filter.setKeepOrganized(true);
filter.setInputCloud(cloud);
filter.filter(*output);
std::vector<pcl::Vertices> polygons = rtabmap::util3d::organizedFastMesh(output, meshAngleToleranceDeg_*M_PI/180.0, false, meshTrianglePix_);
pcl::PointCloud<pcl::PointXYZRGB>::Ptr outputCloud(new pcl::PointCloud<pcl::PointXYZRGB>);
std::vector<pcl::Vertices> outputPolygons;
outputCloud = output;
outputPolygons = polygons;
LOGI("Creating mesh, %d polygons (%fs)", (int)outputPolygons.size(), time.ticks());
if(outputCloud->size() && outputPolygons.size())
{
totalPolygons_ += outputPolygons.size();
main_scene_.addCloud(id, outputCloud, outputPolygons, iter->second, data.imageRaw());
// protect createdMeshes_ used also by exportMesh() method
std::pair<std::map<int, Mesh>::iterator, bool> inserted = createdMeshes_.insert(std::make_pair(id, Mesh()));
UASSERT(inserted.second);
inserted.first->second.cloud = outputCloud;
inserted.first->second.polygons = outputPolygons;
inserted.first->second.pose = iter->second;
inserted.first->second.texture = data.imageCompressed();
}
else
{
LOGE("Not mesh could be created for node %d", id);
}
}
totalPoints_+=indices->size();
}
}
}
}
}
//filter poses?
if(poses.size() > 2)
{
if(nodesFiltering_)
Example 8: main
//......... part of the code omitted here .........
Transform lastLocalizationPose = info.odomPose;
while(data.isValid() && g_loopForever)
{
UTimer iterationTime;
std::string status;
if(!odometryIgnored && info.odomPose.isNull())
{
printf("Skipping node %d as it doesn't have odometry pose set.\n", data.id());
}
else
{
if(!odometryIgnored && !info.odomCovariance.empty() && info.odomCovariance.at<double>(0,0)>=9999)
{
printf("High variance detected, triggering a new map...\n");
if(!incrementalMemory && processed>0)
{
showLocalizationStats();
lastLocalizationPose = info.odomPose;
}
if(incrementalMemory)
{
rtabmap.triggerNewMap();
}
}
UTimer t;
if(!rtabmap.process(data, info.odomPose, info.odomCovariance, info.odomVelocity, globalMapStats))
{
printf("Failed processing node %d.\n", data.id());
globalMapStats.clear();
}
else if(assemble2dMap || assemble3dMap || assemble2dOctoMap || assemble3dOctoMap)
{
globalMapStats.clear();
double timeRtabmap = t.ticks();
double timeUpdateInit = 0.0;
double timeUpdateGrid = 0.0;
#ifdef RTABMAP_OCTOMAP
double timeUpdateOctoMap = 0.0;
#endif
const rtabmap::Statistics & stats = rtabmap.getStatistics();
if(stats.poses().size() && stats.getLastSignatureData().id())
{
int id = stats.poses().rbegin()->first;
if(id == stats.getLastSignatureData().id() &&
stats.getLastSignatureData().sensorData().gridCellSize() > 0.0f)
{
bool updateGridMap = false;
bool updateOctoMap = false;
if((assemble2dMap || assemble3dMap) && grid.addedNodes().find(id) == grid.addedNodes().end())
{
updateGridMap = true;
}
#ifdef RTABMAP_OCTOMAP
if((assemble2dOctoMap || assemble3dOctoMap) && octomap.addedNodes().find(id) == octomap.addedNodes().end())
{
updateOctoMap = true;
}
#endif
if(updateGridMap || updateOctoMap)
{
cv::Mat ground, obstacles, empty;
stats.getLastSignatureData().sensorData().uncompressDataConst(0, 0, 0, 0, &ground, &obstacles, &empty);
timeUpdateInit = t.ticks();
if(updateGridMap)
Example 9: process
//......... part of the code omitted here .........
kpts[i].pt.x *= _imageDecimation;
kpts[i].pt.y *= _imageDecimation;
kpts[i].size *= _imageDecimation;
kpts[i].octave += log2value;
}
data.setFeatures(kpts, decimatedData.descriptors());
if(info)
{
UASSERT(info->newCorners.size() == info->refCorners.size());
for(unsigned int i=0; i<info->newCorners.size(); ++i)
{
info->refCorners[i].x *= _imageDecimation;
info->refCorners[i].y *= _imageDecimation;
info->newCorners[i].x *= _imageDecimation;
info->newCorners[i].y *= _imageDecimation;
}
for(std::multimap<int, cv::KeyPoint>::iterator iter=info->words.begin(); iter!=info->words.end(); ++iter)
{
iter->second.pt.x *= _imageDecimation;
iter->second.pt.y *= _imageDecimation;
iter->second.size *= _imageDecimation;
iter->second.octave += log2value;
}
}
}
else
{
t = this->computeTransform(data, guess, info);
}
if(info)
{
info->timeEstimation = time.ticks();
info->lost = t.isNull();
info->stamp = data.stamp();
info->interval = dt;
info->transform = t;
if(!data.groundTruth().isNull())
{
if(!previousGroundTruthPose_.isNull())
{
info->transformGroundTruth = previousGroundTruthPose_.inverse() * data.groundTruth();
}
previousGroundTruthPose_ = data.groundTruth();
}
}
if(!t.isNull())
{
_resetCurrentCount = _resetCountdown;
float vx,vy,vz, vroll,vpitch,vyaw;
t.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
// transform to velocity
if(dt)
{
vx /= dt;
vy /= dt;
vz /= dt;
vroll /= dt;
vpitch /= dt;
vyaw /= dt;
}
Example 10: takeImage
cv::Mat Camera::takeImage(cv::Mat & descriptors, std::vector<cv::KeyPoint> & keypoints)
{
descriptors = cv::Mat();
keypoints.clear();
float imageRate = _imageRate==0.0f?33.0f:_imageRate; // limit to 33Hz if infinity
if(imageRate>0)
{
int sleepTime = (1000.0f/imageRate - 1000.0f*_frameRateTimer->getElapsedTime());
if(sleepTime > 2)
{
uSleep(sleepTime-2);
}
// Add precision at the cost of a small overhead
while(_frameRateTimer->getElapsedTime() < 1.0/double(imageRate)-0.000001)
{
//
}
double slept = _frameRateTimer->getElapsedTime();
_frameRateTimer->start();
UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(imageRate));
}
cv::Mat img;
UTimer timer;
img = this->captureImage();
UDEBUG("Time capturing image = %fs", timer.ticks());
if(!img.empty())
{
if(img.depth() != CV_8U)
{
UWARN("Images should have already 8U depth !?");
cv::Mat tmp = img;
img = cv::Mat();
tmp.convertTo(img, CV_8U);
UDEBUG("Time converting image to 8U = %fs", timer.ticks());
}
if(_featuresExtracted && _keypointDetector && _keypointDescriptor)
{
keypoints = _keypointDetector->generateKeypoints(img);
descriptors = _keypointDescriptor->generateDescriptors(img, keypoints);
UDEBUG("Post treatment time = %fs", timer.ticks());
}
if(_framesDropped)
{
unsigned int count = 0;
while(count++ < _framesDropped)
{
cv::Mat tmp = this->captureImage();
if(!tmp.empty())
{
UDEBUG("frame dropped (%d/%d)", (int)count, (int)_framesDropped);
}
else
{
break;
}
}
UDEBUG("Frames dropped time = %fs", timer.ticks());
}
}
return img;
}
Example 11: uKeys
const std::map<int, float> & BayesFilter::computePosterior(const Memory * memory, const std::map<int, float> & likelihood)
{
ULOGGER_DEBUG("");
if(!memory)
{
ULOGGER_ERROR("Memory is Null!");
return _posterior;
}
if(!likelihood.size())
{
ULOGGER_ERROR("likelihood is empty!");
return _posterior;
}
if(_predictionLC.size() < 2)
{
ULOGGER_ERROR("Prediction is not valid!");
return _posterior;
}
UTimer timer;
timer.start();
cv::Mat prior;
cv::Mat posterior;
float sum = 0;
int j=0;
// Recursive Bayes estimation...
// STEP 1 - Prediction : Prior*lastPosterior
_prediction = this->generatePrediction(memory, uKeys(likelihood));
ULOGGER_DEBUG("STEP1-generate prior=%fs, rows=%d, cols=%d", timer.ticks(), _prediction.rows, _prediction.cols);
//std::cout << "Prediction=" << _prediction << std::endl;
// Adjust the last posterior if some images were
// reactivated or removed from the working memory
posterior = cv::Mat(likelihood.size(), 1, CV_32FC1);
this->updatePosterior(memory, uKeys(likelihood));
j=0;
for(std::map<int, float>::const_iterator i=_posterior.begin(); i!= _posterior.end(); ++i)
{
((float*)posterior.data)[j++] = (*i).second;
}
ULOGGER_DEBUG("STEP1-update posterior=%fs, posterior=%d, _posterior size=%d", posterior.rows, _posterior.size());
//std::cout << "LastPosterior=" << posterior << std::endl;
// Multiply prediction matrix with the last posterior
// (m,m) X (m,1) = (m,1)
prior = _prediction * posterior;
ULOGGER_DEBUG("STEP1-matrix mult time=%fs", timer.ticks());
//std::cout << "ResultingPrior=" << prior << std::endl;
ULOGGER_DEBUG("STEP1-matrix mult time=%fs", timer.ticks());
std::vector<float> likelihoodValues = uValues(likelihood);
//std::cout << "Likelihood=" << cv::Mat(likelihoodValues) << std::endl;
// STEP 2 - Update : Multiply with observations (likelihood)
j=0;
for(std::map<int, float>::const_iterator i=likelihood.begin(); i!= likelihood.end(); ++i)
{
std::map<int, float>::iterator p =_posterior.find((*i).first);
if(p!= _posterior.end())
{
(*p).second = (*i).second * ((float*)prior.data)[j++];
sum+=(*p).second;
}
else
{
ULOGGER_ERROR("Problem1! can't find id=%d", (*i).first);
}
}
ULOGGER_DEBUG("STEP2-likelihood time=%fs", timer.ticks());
//std::cout << "Posterior (before normalization)=" << _posterior << std::endl;
// Normalize
ULOGGER_DEBUG("sum=%f", sum);
if(sum != 0)
{
for(std::map<int, float>::iterator i=_posterior.begin(); i!= _posterior.end(); ++i)
{
(*i).second /= sum;
}
}
ULOGGER_DEBUG("normalize time=%fs", timer.ticks());
//std::cout << "Posterior=" << _posterior << std::endl;
return _posterior;
}
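For reference, the two steps above implement the standard recursive Bayes update that RTAB-Map uses for its loop-closure hypotheses; the notation below is mine and only mirrors the code. With the prediction matrix $P$ built in STEP 1, the previous posterior vector $p_{t-1}$ and the likelihood vector $\ell_t$,
$$ p_t(i) \;=\; \eta\,\ell_t(i)\,\big(P\,p_{t-1}\big)(i), \qquad \eta \;=\; \Big(\sum_j \ell_t(j)\,\big(P\,p_{t-1}\big)(j)\Big)^{-1} $$
where the matrix product $P\,p_{t-1}$ is the prior computed in STEP 1 (prior = _prediction * posterior), the element-wise multiplication by $\ell_t$ is STEP 2, and $\eta$ is the final normalization by sum.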
Example 12: emptyTrashes
void DBDriver::emptyTrashes(bool async)
{
if(async)
{
ULOGGER_DEBUG("Async emptying, start the trash thread");
this->start();
return;
}
UTimer totalTime;
totalTime.start();
std::map<int, Signature*> signatures;
std::map<int, VisualWord*> visualWords;
_trashesMutex.lock();
{
ULOGGER_DEBUG("signatures=%d, visualWords=%d", _trashSignatures.size(), _trashVisualWords.size());
signatures = _trashSignatures;
visualWords = _trashVisualWords;
_trashSignatures.clear();
_trashVisualWords.clear();
_dbSafeAccessMutex.lock();
}
_trashesMutex.unlock();
if(signatures.size() || visualWords.size())
{
this->beginTransaction();
UTimer timer;
timer.start();
if(signatures.size())
{
if(this->isConnected())
{
//Only one query to the database
this->saveOrUpdate(uValues(signatures));
}
for(std::map<int, Signature *>::iterator iter=signatures.begin(); iter!=signatures.end(); ++iter)
{
delete iter->second;
}
signatures.clear();
ULOGGER_DEBUG("Time emptying memory signatures trash = %f...", timer.ticks());
}
if(visualWords.size())
{
if(this->isConnected())
{
//Only one query to the database
this->saveOrUpdate(uValues(visualWords));
}
for(std::map<int, VisualWord *>::iterator iter=visualWords.begin(); iter!=visualWords.end(); ++iter)
{
delete (*iter).second;
}
visualWords.clear();
ULOGGER_DEBUG("Time emptying memory visualWords trash = %f...", timer.ticks());
}
this->commit();
}
_emptyTrashesTime = totalTime.ticks();
ULOGGER_DEBUG("Total time emptying trashes = %fs...", _emptyTrashesTime);
_dbSafeAccessMutex.unlock();
}
Example 13: postUpdate
void CameraThread::postUpdate(SensorData * dataPtr, CameraInfo * info) const
{
UASSERT(dataPtr!=0);
SensorData & data = *dataPtr;
if(_colorOnly && !data.depthRaw().empty())
{
data.setDepthOrRightRaw(cv::Mat());
}
if(_distortionModel && !data.depthRaw().empty())
{
UTimer timer;
if(_distortionModel->getWidth() == data.depthRaw().cols &&
_distortionModel->getHeight() == data.depthRaw().rows )
{
cv::Mat depth = data.depthRaw().clone();// make sure we are not modifying data in cached signatures.
_distortionModel->undistort(depth);
data.setDepthOrRightRaw(depth);
}
else
{
UERROR("Distortion model size is %dx%d but dpeth image is %dx%d!",
_distortionModel->getWidth(), _distortionModel->getHeight(),
data.depthRaw().cols, data.depthRaw().rows);
}
if(info) info->timeUndistortDepth = timer.ticks();
}
if(_bilateralFiltering && !data.depthRaw().empty())
{
UTimer timer;
data.setDepthOrRightRaw(util2d::fastBilateralFiltering(data.depthRaw(), _bilateralSigmaS, _bilateralSigmaR));
if(info) info->timeBilateralFiltering = timer.ticks();
}
if(_imageDecimation>1 && !data.imageRaw().empty())
{
UDEBUG("");
UTimer timer;
if(!data.depthRaw().empty() &&
!(data.depthRaw().rows % _imageDecimation == 0 && data.depthRaw().cols % _imageDecimation == 0))
{
UERROR("Decimation of depth images should be exact (decimation=%d, size=(%d,%d))! "
"Images won't be resized.", _imageDecimation, data.depthRaw().cols, data.depthRaw().rows);
}
else
{
data.setImageRaw(util2d::decimate(data.imageRaw(), _imageDecimation));
data.setDepthOrRightRaw(util2d::decimate(data.depthOrRightRaw(), _imageDecimation));
std::vector<CameraModel> models = data.cameraModels();
for(unsigned int i=0; i<models.size(); ++i)
{
if(models[i].isValidForProjection())
{
models[i] = models[i].scaled(1.0/double(_imageDecimation));
}
}
data.setCameraModels(models);
StereoCameraModel stereoModel = data.stereoCameraModel();
if(stereoModel.isValidForProjection())
{
stereoModel.scale(1.0/double(_imageDecimation));
data.setStereoCameraModel(stereoModel);
}
}
if(info) info->timeImageDecimation = timer.ticks();
}
if(_mirroring && !data.imageRaw().empty() && data.cameraModels().size() == 1)
{
UDEBUG("");
UTimer timer;
cv::Mat tmpRgb;
cv::flip(data.imageRaw(), tmpRgb, 1);
data.setImageRaw(tmpRgb);
UASSERT_MSG(data.cameraModels().size() <= 1 && !data.stereoCameraModel().isValidForProjection(), "Only single RGBD cameras are supported for mirroring.");
if(data.cameraModels().size() && data.cameraModels()[0].cx())
{
CameraModel tmpModel(
data.cameraModels()[0].fx(),
data.cameraModels()[0].fy(),
float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
data.cameraModels()[0].cy(),
data.cameraModels()[0].localTransform());
data.setCameraModel(tmpModel);
}
if(!data.depthRaw().empty())
{
cv::Mat tmpDepth;
cv::flip(data.depthRaw(), tmpDepth, 1);
data.setDepthOrRightRaw(tmpDepth);
}
if(info) info->timeMirroring = timer.ticks();
}
if(_stereoToDepth && !data.imageRaw().empty() && data.stereoCameraModel().isValidForProjection() && !data.rightRaw().empty())
{
UDEBUG("");
UTimer timer;
cv::Mat depth = util2d::depthFromDisparity(
_stereoDense->computeDisparity(data.imageRaw(), data.rightRaw()),
data.stereoCameraModel().left().fx(),
//......... part of the code omitted here .........
Example 14: main
int main(int argc, char** argv)
{
ros::init(argc, argv, "data_player");
//ULogger::setType(ULogger::kTypeConsole);
//ULogger::setLevel(ULogger::kDebug);
//ULogger::setEventLevel(ULogger::kWarning);
ros::NodeHandle nh;
ros::NodeHandle pnh("~");
std::string frameId = "base_link";
std::string odomFrameId = "odom";
std::string cameraFrameId = "camera_optical_link";
std::string scanFrameId = "base_laser_link";
double rate = -1.0f;
std::string databasePath = "";
bool publishTf = true;
int startId = 0;
pnh.param("frame_id", frameId, frameId);
pnh.param("odom_frame_id", odomFrameId, odomFrameId);
pnh.param("camera_frame_id", cameraFrameId, cameraFrameId);
pnh.param("scan_frame_id", scanFrameId, scanFrameId);
pnh.param("rate", rate, rate); // Set -1 to use database stamps
pnh.param("database", databasePath, databasePath);
pnh.param("publish_tf", publishTf, publishTf);
pnh.param("start_id", startId, startId);
// based on URG-04LX
double scanHeight, scanAngleMin, scanAngleMax, scanAngleIncrement, scanTime, scanRangeMin, scanRangeMax;
pnh.param<double>("scan_height", scanHeight, 0.3);
pnh.param<double>("scan_angle_min", scanAngleMin, -M_PI / 2.0);
pnh.param<double>("scan_angle_max", scanAngleMax, M_PI / 2.0);
pnh.param<double>("scan_angle_increment", scanAngleIncrement, M_PI / 360.0);
pnh.param<double>("scan_time", scanTime, 1.0 / 10.0);
pnh.param<double>("scan_range_min", scanRangeMin, 0.02);
pnh.param<double>("scan_range_max", scanRangeMax, 6.0);
ROS_INFO("frame_id = %s", frameId.c_str());
ROS_INFO("odom_frame_id = %s", odomFrameId.c_str());
ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str());
ROS_INFO("scan_frame_id = %s", scanFrameId.c_str());
ROS_INFO("rate = %f", rate);
ROS_INFO("publish_tf = %s", publishTf?"true":"false");
ROS_INFO("start_id = %d", startId);
if(databasePath.empty())
{
ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database).");
return -1;
}
databasePath = uReplaceChar(databasePath, '~', UDirectory::homeDir());
if(databasePath.size() && databasePath.at(0) != '/')
{
databasePath = UDirectory::currentDir(true) + databasePath;
}
ROS_INFO("database = %s", databasePath.c_str());
rtabmap::DBReader reader(databasePath, rate, false, false, false, startId);
if(!reader.init())
{
ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str());
return -1;
}
ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback);
ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback);
image_transport::ImageTransport it(nh);
image_transport::Publisher imagePub;
image_transport::Publisher rgbPub;
image_transport::Publisher depthPub;
image_transport::Publisher leftPub;
image_transport::Publisher rightPub;
ros::Publisher rgbCamInfoPub;
ros::Publisher depthCamInfoPub;
ros::Publisher leftCamInfoPub;
ros::Publisher rightCamInfoPub;
ros::Publisher odometryPub;
ros::Publisher scanPub;
tf2_ros::TransformBroadcaster tfBroadcaster;
UTimer timer;
rtabmap::CameraInfo info;
rtabmap::SensorData data = reader.takeImage(&info);
rtabmap::OdometryEvent odom(data, info.odomPose, info.odomCovariance);
double acquisitionTime = timer.ticks();
while(ros::ok() && odom.data().id())
{
ROS_INFO("Reading sensor data %d...", odom.data().id());
ros::Time time = ros::Time::now();
sensor_msgs::CameraInfo camInfoA; //rgb or left
sensor_msgs::CameraInfo camInfoB; //depth or right
camInfoA.K.assign(0);
camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1;
//......... part of the code omitted here .........
Example 15: main
int main(int argc, char * argv[])
{
if(argc < 2)
{
showUsage();
}
ULogger::setType(ULogger::kTypeConsole);
ULogger::setLevel(ULogger::kDebug);
std::string dictionaryPath = argv[argc-1];
std::list<std::vector<float> > objectDescriptors;
//std::list<std::vector<float> > descriptors;
std::map<int, std::vector<float> > descriptors;
int dimension = 0;
UTimer timer;
int objectDescriptorsSize = 400;
std::ifstream file;
if(!dictionaryPath.empty())
{
file.open(dictionaryPath.c_str(), std::ifstream::in);
}
if(file.good())
{
UDEBUG("Loading the dictionary from \"%s\"", dictionaryPath.c_str());
// first line is the header
std::string str;
std::list<std::string> strList;
std::getline(file, str);
strList = uSplitNumChar(str);
for(std::list<std::string>::iterator iter = strList.begin(); iter != strList.end(); ++iter)
{
if(uIsDigit(iter->at(0)))
{
dimension = std::atoi(iter->c_str());
break;
}
}
if(dimension == 0 || dimension > 1000)
{
UERROR("Invalid dictionary file, visual word dimension (%d) is not valid, \"%s\"", dimension, dictionaryPath.c_str());
}
else
{
int descriptorsLoaded = 0;
// Process all words
while(file.good())
{
std::getline(file, str);
strList = uSplit(str);
if((int)strList.size() == dimension+1)
{
//first one is the visual word id
std::list<std::string>::iterator iter = strList.begin();
int id = atoi(iter->c_str());
++iter;
std::vector<float> descriptor(dimension);
int i=0;
//get descriptor
for(;i<dimension && iter != strList.end(); ++i, ++iter)
{
descriptor[i] = uStr2Float(*iter);
}
if(i != dimension)
{
UERROR("");
}
if(++descriptorsLoaded<=objectDescriptorsSize)
{
objectDescriptors.push_back(descriptor);
}
else
{
//descriptors.push_back(descriptor);
descriptors.insert(std::make_pair(id, descriptor));
}
}
else if(str.size())
{
UWARN("Cannot parse line \"%s\"", str.c_str());
}
}
}
UDEBUG("Time loading dictionary = %fs, dimension=%d", timer.ticks(), dimension);
}
else
{
UERROR("Cannot open dictionary file \"%s\"", dictionaryPath.c_str());
}
file.close();
if(descriptors.size() && objectDescriptors.size() && dimension)
{
cv::Mat dataTree;
//......... part of the code omitted here .........