This page collects typical usage examples of the C++ CLOG function. If you are wondering what CLOG does, how to call it, or what real CLOG code looks like, the hand-picked examples below should help.
It presents 15 CLOG code examples, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code samples.
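Note that the examples come from unrelated codebases and show at least three different CLOG macros: a component-logging variant called as CLOG(LTRACE) << "...", a two-argument variant called as CLOG(DEBUG, "History") << "...", and a Symbian-style variant called as CLOG(( EParse, 2, _L("...") )). The following is only a minimal sketch of what a stream-style CLOG macro might look like; it is not the implementation behind any of the examples, and every name in it is illustrative.

#include <iostream>
#include <sstream>

// Illustrative only: not the CLOG used by any of the projects in the examples below.
// Log levels matching the names that appear in Examples 1-7.
enum LogLevel { LTRACE, LDEBUG, LINFO, LWARNING, LERROR };

// Collects everything streamed into it and flushes one line on destruction.
class LogLine {
public:
    explicit LogLine(LogLevel level) : level_(level) {}
    ~LogLine() { std::cerr << "[" << level_ << "] " << stream_.str() << std::endl; }
    template <typename T>
    LogLine& operator<<(const T& value) { stream_ << value; return *this; }
private:
    LogLevel level_;
    std::ostringstream stream_;
};

#define CLOG(level) LogLine(level)

int main() {
    int points = 42;
    CLOG(LINFO) << "Saved " << points << " cloud points";  // same call shape as Examples 1-7
    return 0;
}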
Example 1: CLOG
void CvBayesClassifier::onClearDataset() {
CLOG(LTRACE) << "CvBayesClassifier::onClearDataset\n";
training_dataset.clear();
training_responses.clear();
CLOG(LINFO) << "Training dataset cleared";
}
Example 2: LOG
void SIFTNOMWriter::WriteNormals() {
LOG(LTRACE) << "SIFTNOMWriter::WriteNormals";
// Try to save the model retrieved from the SOM data stream.
ptree ptree_file;
//if(in_cloud_xyzrgb_normals.empty()&&in_cloud_xyzsift.empty()&&in_mean_viewpoint_features_number.empty()){
// CLOG(LWARNING) << "There are no required datastreams enabling save of the SOM to file.";
// return;
//}
if (!in_som.empty()) {
LOG(LDEBUG) << "!in_som.empty()";
// Get SOM.
SIFTObjectModel* som = in_som.read();
// Save point cloud.
std::string name_cloud_xyzrgb_normals = std::string(dir) + std::string("/") + std::string(SOMname) + std::string("_xyzrgb_normals.pcd");
pcl::io::savePCDFileASCII (name_cloud_xyzrgb_normals, *(som->cloud_xyzrgb_normals));
CLOG(LTRACE) << "Write: saved " << som->cloud_xyzrgb_normals->points.size () << " cloud points to "<< name_cloud_xyzrgb_normals;
// Save feature cloud.
std::string name_cloud_xyzsift = std::string(dir) + std::string("/") + std::string(SOMname) + std::string("_xyzsift.pcd");
pcl::io::savePCDFileASCII (name_cloud_xyzsift, *(som->cloud_xyzsift));
CLOG(LTRACE) << "Write: saved " << som->cloud_xyzsift->points.size () << " feature points to "<< name_cloud_xyzsift;
// Save JSON model description.
ptree_file.put("name", SOMname);
ptree_file.put("type", "SIFTObjectModel");
ptree_file.put("mean_viewpoint_features_number", som->mean_viewpoint_features_number);
ptree_file.put("cloud_xyzsift", name_cloud_xyzsift);
}
// Try to save the model retrieved from the three separate data streams.
if (!in_cloud_xyzsift.empty() && !in_mean_viewpoint_features_number.empty()) {
LOG(LDEBUG) << "!in_cloud_xyzsift.empty() && !in_mean_viewpoint_features_number.empty()";
// Get model from datastreams.
pcl::PointCloud<PointXYZSIFT>::Ptr cloud_xyzsift = in_cloud_xyzsift.read();
int mean_viewpoint_features_number = in_mean_viewpoint_features_number.read();
// Save feature cloud.
std::string name_cloud_xyzsift = std::string(dir) + std::string("/") + std::string(SOMname) + std::string("_xyzsift.pcd");
pcl::io::savePCDFileASCII (name_cloud_xyzsift, *(cloud_xyzsift));
CLOG(LTRACE) << "Write: saved " << cloud_xyzsift->points.size () << " feature points to "<< name_cloud_xyzsift;
// Save JSON model description.
ptree_file.put("name", SOMname);
ptree_file.put("type", "SIFTObjectModel");
ptree_file.put("mean_viewpoint_features_number", mean_viewpoint_features_number);
ptree_file.put("cloud_xyzsift", name_cloud_xyzsift);
}
if (!in_cloud_xyzrgb_normals.empty()) {
LOG(LDEBUG) << "!in_cloud_xyzrgb_normals.empty()";
// Get model from datastreams.
pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_xyzrgb_normals = in_cloud_xyzrgb_normals.read();
// Save point cloud.
std::string name_cloud_xyzrgb_normals = std::string(dir) + std::string("/") + std::string(SOMname) + std::string("_xyzrgb_normals.pcd");
pcl::io::savePCDFileASCII (name_cloud_xyzrgb_normals, *(cloud_xyzrgb_normals));
CLOG(LTRACE) << "Write: saved " << cloud_xyzrgb_normals->points.size () << " cloud points to "<< name_cloud_xyzrgb_normals;
// Save JSON model description.
//ptree ptree_file;
ptree_file.put("name", SOMname);
ptree_file.put("type", "SIFTObjectModel");
ptree_file.put("cloud_xyzrgb_normals", name_cloud_xyzrgb_normals);
}
write_json (std::string(dir) + std::string("/") + std::string(SOMname) + std::string(".json"), ptree_file);
}
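WriteNormals stores the model description with boost::property_tree and write_json. Purely as a hypothetical counterpart (not part of SIFTNOMWriter), reading such a description back could look like the sketch below; the helper name and the default values are assumptions.

#include <boost/property_tree/ptree.hpp>
#include <boost/property_tree/json_parser.hpp>
#include <iostream>
#include <string>

// Hypothetical reader counterpart: loads the JSON description written above and
// retrieves the fields stored with ptree_file.put(). Fields that are present only
// for some writer configurations are read with defaults.
void readModelDescription(const std::string& json_path) {
    boost::property_tree::ptree pt;
    boost::property_tree::read_json(json_path, pt);
    std::string name = pt.get<std::string>("name");
    std::string type = pt.get<std::string>("type");
    std::string sift_cloud = pt.get<std::string>("cloud_xyzsift", "");
    int mean_features = pt.get<int>("mean_viewpoint_features_number", 0);
    std::cout << name << " (" << type << "): " << sift_cloud
              << ", mean viewpoint features: " << mean_features << std::endl;
}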
Example 3: CLOG
void CalcStatistics::calculate() {
CLOG(LDEBUG)<<"in calculate()";
Types::HomogMatrix homogMatrix;
cv::Mat_<double> rvec;
cv::Mat_<double> tvec;
cv::Mat_<double> axis;
cv::Mat_<double> rotMatrix;
float fi;
rotMatrix= cv::Mat_<double>::zeros(3,3);
tvec = cv::Mat_<double>::zeros(3,1);
axis = cv::Mat_<double>::zeros(3,1);
// first homogMatrix on InputStream
if(counter == 0) {
homogMatrix = in_homogMatrix.read();
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
rotMatrix(i,j)=homogMatrix(i, j);
}
tvec(i, 0) = homogMatrix(i, 3);
}
Rodrigues(rotMatrix, rvec);
cumulatedHomogMatrix = homogMatrix;
cumulatedTvec = tvec;
cumulatedRvec = rvec;
fi = sqrt((pow(rvec(0,0), 2) + pow(rvec(1,0), 2)+pow(rvec(2,0),2)));
cumulatedFi = fi;
for(int k=0;k<3;k++) {
axis(k,0)=rvec(k,0)/fi;
}
cumulatedAxis = axis;
counter=1;
return;
}
homogMatrix=in_homogMatrix.read();
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
rotMatrix(i,j)=homogMatrix(i, j);
}
tvec(i, 0) = homogMatrix(i, 3);
}
Rodrigues(rotMatrix, rvec);
fi = sqrt((pow(rvec(0,0), 2) + pow(rvec(1,0), 2)+pow(rvec(2,0),2)));
for(int k=0;k<3;k++) {
axis(k,0)=rvec(k,0)/fi;
}
cumulatedFi += fi;
cumulatedTvec += tvec;
cumulatedRvec += rvec;
cumulatedAxis += axis;
counter++;
avgFi = cumulatedFi/counter;
avgAxis = cumulatedAxis/counter;
avgRvec = avgAxis * avgFi;
avgTvec = cumulatedTvec/counter;
Types::HomogMatrix hm;
cv::Mat_<double> rottMatrix;
Rodrigues(avgRvec, rottMatrix);
CLOG(LINFO)<<"Uśredniona macierz z "<<counter<<" macierzy \n";
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
hm(i, j) = rottMatrix(i, j);
CLOG(LINFO) << hm(i, j) << " ";
}
hm(i, 3) = avgTvec(i, 0);
CLOG(LINFO) << hm(i, 3) <<" \n";
}
out_homogMatrix.write(hm);
CLOG(LINFO)<<"Uśredniony kąt: " << avgFi;
float standardDeviationFi = sqrt(pow(avgFi - fi, 2));
CLOG(LINFO)<<"Odchylenie standardowe kąta: "<<standardDeviationFi;
}
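A note on the last two lines above: sqrt(pow(avgFi - fi, 2)) is simply |avgFi - fi| for the most recent frame, not a standard deviation over all accumulated angles. A minimal, hypothetical running-statistics helper (not part of CalcStatistics) that tracks the mean and standard deviation of every observed angle might look like this:

#include <cmath>

// Hypothetical running-statistics helper; accumulates the sum and sum of squares.
class RunningStats {
public:
    void add(double x) { ++n_; sum_ += x; sumSq_ += x * x; }
    double mean() const { return n_ ? sum_ / n_ : 0.0; }
    double stddev() const {
        if (n_ == 0) return 0.0;
        double m = mean();
        double var = sumSq_ / n_ - m * m;   // population variance, E[x^2] - E[x]^2
        return var > 0.0 ? std::sqrt(var) : 0.0;
    }
private:
    unsigned n_ = 0;
    double sum_ = 0.0;
    double sumSq_ = 0.0;
};

With such a helper, calculate() could call stats.add(fi) on every frame and report stats.stddev() instead of the single-sample deviation.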
Example 4: CLOG
void Projection::project_xyzsift() {
CLOG(LTRACE) << "Projection::project_xyzsift";
std::vector<Types::HomogMatrix> rototranslations = in_poses.read();
pcl::PointCloud<PointXYZSIFT>::Ptr scene = in_cloud_xyzsift_scene.read();
pcl::PointCloud<PointXYZSIFT>::Ptr model = in_cloud_xyzsift_model.read();
/**
 * Generate one cloud for each instance found.
 */
std::vector<pcl::PointCloud<PointXYZSIFT>::ConstPtr> instances;
std::vector<pcl::PointCloud<PointXYZSIFT>::ConstPtr> parts_of_scene;
for (size_t i = 0; i < rototranslations.size (); ++i)
{
//rotate model
pcl::PointCloud<PointXYZSIFT>::Ptr rotated_model (new pcl::PointCloud<PointXYZSIFT> ());
pcl::transformPointCloud (*model, *rotated_model, rototranslations[i]);
instances.push_back (rotated_model);
//cut part of scene
PointXYZSIFT minPt, maxPt;
pcl::getMinMax3D(*rotated_model, minPt, maxPt);
minPt.x -= bounding_box_epsilon;
minPt.y -= bounding_box_epsilon;
minPt.z -= bounding_box_epsilon;
maxPt.x += bounding_box_epsilon;
maxPt.y += bounding_box_epsilon;
maxPt.z += bounding_box_epsilon;
pcl::PointCloud<PointXYZSIFT>::Ptr part_of_scene (new pcl::PointCloud<PointXYZSIFT> ());
*part_of_scene = *scene;
pcl::PassThrough<PointXYZSIFT> pass;
pass.setInputCloud (part_of_scene);
pass.setFilterFieldName ("x");
pass.setFilterLimits (minPt.x, maxPt.x);
pass.setFilterLimitsNegative (false);
pass.filter (*part_of_scene);
pass.setFilterFieldName ("y");
pass.setFilterLimits (minPt.y, maxPt.y);
pass.setFilterLimitsNegative (false);
pass.filter (*part_of_scene);
pass.setFilterFieldName ("z");
pass.setFilterLimits (minPt.z, maxPt.z);
pass.setFilterLimitsNegative (false);
pass.filter (*part_of_scene);
parts_of_scene.push_back(part_of_scene);
CLOG(LDEBUG) << "part of scene " << i << " points " << part_of_scene->size();
}
/**
* ICP
*/
if(use_icp){
std::vector<pcl::PointCloud<PointXYZSIFT>::ConstPtr> registered_instances;
CLOG(LINFO) << "ICP";
for (size_t i = 0; i < rototranslations.size (); ++i)
{
pcl::IterativeClosestPoint<PointXYZSIFT, PointXYZSIFT> icp;
icp.setMaximumIterations (icp_max_iter);
icp.setMaxCorrespondenceDistance (icp_corr_distance);
icp.setInputTarget (scene);
icp.setInputSource (instances[i]);
pcl::PointCloud<PointXYZSIFT>::Ptr registered (new pcl::PointCloud<PointXYZSIFT>);
icp.align (*registered);
registered_instances.push_back (registered);
CLOG(LINFO) << "Instance " << i << " ";
if (icp.hasConverged ())
{
CLOG(LINFO) << "Aligned!" << endl;
}
else
{
CLOG(LINFO) << "Not Aligned!" << endl;
}
}
out_registered_instances_xyzsift.write(registered_instances);
}
else
out_registered_instances_xyzsift.write(instances);
//link parts to one cloud
pcl::PointCloud<PointXYZSIFT>::Ptr scene1(new pcl::PointCloud<PointXYZSIFT>);
for(int i = 0; i < parts_of_scene.size(); i++){
*scene1 += *(parts_of_scene[i]);
}
out_parts_of_scene_xyzsift.write(scene1);
}
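The three chained PassThrough filters above implement an axis-aligned bounding-box crop around the rotated model. As an illustration only (this is not what Projection uses), the same crop can be expressed with a single pcl::CropBox filter; the sketch uses pcl::PointXYZ for self-containedness, since PointXYZSIFT is a project-specific point type, and the helper name is hypothetical.

#include <Eigen/Core>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/filters/crop_box.h>

// Hypothetical helper: crop a scene to the epsilon-padded bounding box of a model.
pcl::PointCloud<pcl::PointXYZ>::Ptr cropToBox(
        const pcl::PointCloud<pcl::PointXYZ>::ConstPtr& scene,
        const pcl::PointXYZ& minPt, const pcl::PointXYZ& maxPt, float epsilon) {
    pcl::CropBox<pcl::PointXYZ> crop;
    crop.setInputCloud(scene);
    crop.setMin(Eigen::Vector4f(minPt.x - epsilon, minPt.y - epsilon, minPt.z - epsilon, 1.0f));
    crop.setMax(Eigen::Vector4f(maxPt.x + epsilon, maxPt.y + epsilon, maxPt.z + epsilon, 1.0f));
    pcl::PointCloud<pcl::PointXYZ>::Ptr cropped(new pcl::PointCloud<pcl::PointXYZ>);
    crop.filter(*cropped);
    return cropped;
}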
Example 5: CLOG
void SIFTNOMWriter::onSOMNameChanged(const std::string & old_SOMname, const std::string & new_SOMname) {
SOMname = new_SOMname;
CLOG(LTRACE) << "onSOMNameChanged: " << std::string(SOMname) << std::endl;
}
Example 6: CLOG
CvWindow_Sink::~CvWindow_Sink() {
CLOG(LTRACE) << "Good bye CvWindow_Sink";
}
Example 7: rvec
void DrawCoordinateSystem::projectPoints(){
cv::Mat_<double> rvec(3,1);
cv::Mat_<double> tvec(3,1);
Types::HomogMatrix homogMatrix;
cv::Mat_<double> rotMatrix;
rotMatrix.create(3,3);
// Try to read rotation and translation from rvec and tvec or homogenous matrix.
if(!in_rvec.empty()&&!in_tvec.empty()){
rvec = in_rvec.read();
tvec = in_tvec.read();
}
else if(!in_homogMatrix.empty()){
homogMatrix = in_homogMatrix.read();
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
rotMatrix(i,j)=homogMatrix(i, j);
}
tvec(i, 0) = homogMatrix(i, 3);
}
Rodrigues(rotMatrix, rvec);
}
else
return;
// Get camera info properties.
Types::CameraInfo camera_matrix = in_camera_matrix.read();
vector<cv::Point3f> object_points;
vector<cv::Point2f> image_points;
// Create four points constituting ends of three lines.
object_points.push_back(cv::Point3f(0,0,0));
object_points.push_back(cv::Point3f(0.1,0,0));
object_points.push_back(cv::Point3f(0,0.1,0));
object_points.push_back(cv::Point3f(0,0,0.1));
// Project points taking into account the camera properties.
if (rectified) {
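// For rectified images the distortion has already been removed, so the camera
// matrix is recovered from the projection matrix and empty distortion
// coefficients are passed to projectPoints.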
cv::Mat K, _r, _t;
cv::decomposeProjectionMatrix(camera_matrix.projectionMatrix(), K, _r, _t);
cv::projectPoints(object_points, rvec, tvec, K, cv::Mat(), image_points);
} else {
cv::projectPoints(object_points, rvec, tvec, camera_matrix.cameraMatrix(), camera_matrix.distCoeffs(), image_points);
}
// Draw lines between projected points.
Types::Line ax(image_points[0], image_points[1]);
ax.setCol(cv::Scalar(255, 0, 0));
Types::Line ay(image_points[0], image_points[2]);
ay.setCol(cv::Scalar(0, 255, 0));
Types::Line az(image_points[0], image_points[3]);
az.setCol(cv::Scalar(0, 0, 255));
// Add lines to container.
Types::DrawableContainer ctr;
ctr.add(ax.clone());
ctr.add(ay.clone());
ctr.add(az.clone());
CLOG(LINFO)<<"Image Points: "<<image_points;
// Write output to dataports.
out_csystem.write(ctr);
out_impoints.write(image_points);
}
Example 8: attr
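// Reads a single attribute-name token from the buffer and maps it to the
// corresponding TCodAttr value; an empty token reports KErrCodInvalidDescriptor
// via Error(), and unrecognised names are returned as ECodUnknownAttr.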
TCodParser::TCodAttr TCodParser::AttrName()
{
TCodAttr attr( ECodUnknownAttr );
const TText* start = iCurP;
while
(
iCurP < iEndP &&
!IsControl() &&
!IsSeparator() &&
*iCurP != KCodCarriageRet &&
*iCurP != KCodLineFeed
)
{
iCurP++;
}
TPtrC token( start, iCurP - start );
if ( !token.Length() )
{
Error( KErrCodInvalidDescriptor );
}
else if ( !token.Compare( KCodName ) )
{
attr = ECodName;
}
else if ( !token.Compare( KCodVendor ) )
{
attr = ECodVendor;
}
else if ( !token.Compare( KCodDescription ) )
{
attr = ECodDescription;
}
else if ( !token.Compare( KCodUrl ) )
{
attr = ECodUrl;
}
else if ( !token.Compare( KCodSize ) )
{
attr = ECodSize;
}
else if ( !token.Compare( KCodType ) )
{
attr = ECodType;
}
else if ( !token.Compare( KCodInstallNotify ) )
{
attr = ECodInstallNotify;
}
else if ( !token.Compare( KCodNextUrl ) )
{
attr = ECodNextUrl;
}
else if ( !token.Compare( KCodNextUrlAtError ) )
{
attr = ECodNextUrlAtError;
}
else if ( !token.Compare( KCodInfoUrl ) )
{
attr = ECodInfoUrl;
}
else if ( !token.Compare( KCodPrice ) )
{
attr = ECodPrice;
}
else if ( !token.Compare( KCodIcon ) )
{
attr = ECodIcon;
}
CLOG(( EParse, 4, _L("TCodParser::AttrName token<%S> attr(%d)"), \
&token, attr ));
return attr;
}
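AttrName is a plain token-to-enum mapping implemented as a chain of Compare() calls. Purely as an illustration in standard C++ (the original is Symbian code operating on TDesC descriptors and KCod* literals), the same mapping could be written as a table lookup; the key strings below are placeholders for the actual KCod* literal values, which are defined elsewhere.

#include <map>
#include <string>

enum TCodAttr {
    ECodUnknownAttr, ECodName, ECodVendor, ECodDescription, ECodUrl,
    ECodSize, ECodType, ECodInstallNotify, ECodNextUrl,
    ECodNextUrlAtError, ECodInfoUrl, ECodPrice, ECodIcon
};

// Hypothetical standard-C++ equivalent of the Compare() chain; the key strings
// stand in for the KCod* literals and are not taken from the original source.
TCodAttr attrFromToken(const std::string& token) {
    static const std::map<std::string, TCodAttr> kTable = {
        {"COD-Name", ECodName},           {"COD-Vendor", ECodVendor},
        {"COD-Description", ECodDescription}, {"COD-URL", ECodUrl},
        {"COD-Size", ECodSize},           {"COD-Type", ECodType},
        {"COD-Install-Notify", ECodInstallNotify}, {"COD-Next-URL", ECodNextUrl},
        {"COD-Next-URLatError", ECodNextUrlAtError}, {"COD-Info-URL", ECodInfoUrl},
        {"COD-Price", ECodPrice},         {"COD-Icon", ECodIcon}
    };
    std::map<std::string, TCodAttr>::const_iterator it = kTable.find(token);
    return it != kTable.end() ? it->second : ECodUnknownAttr;
}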
Example 9: CLOG
// ---------------------------------------------------------
// TCodParser::ParseL()
// ---------------------------------------------------------
//
void TCodParser::ParseL( const TDesC& aBuf, CCodData& aData )
{
CLOG(( EParse, 2, _L("-> TCodParser::ParseL") ));
CDUMP(( EParse, 2, _S("Buf:"), _S(" "), \
(const TUint8*)aBuf.Ptr(), aBuf.Size() ));
iError = KErrNone;
iData = &aData;
iBuf = &aBuf;
iCurP = iBuf->Ptr();
iEndP = iCurP + iBuf->Length();
CMediaObjectData *mediaObject = CMediaObjectData::NewL();
// Processing lines (attribute and value) until there is more lines to read.
while ( AttrLineL(mediaObject) )
{
// Some compilers require empty controlled statement block instead of
// just a semicolon.
}
iData->AppendMediaObjectL( mediaObject );
#ifdef __TEST_COD_LOG
TPtrC ptr16;
TPtrC8 ptr8;
CLOG(( EParse, 3, _L("TCodParser::ParseL data:") ));
ptr16.Set( aData.Name() );
CLOG(( EParse, 3, _L(" Name<%S>"), &ptr16 ));
ptr16.Set( aData.Vendor() );
CLOG(( EParse, 3, _L(" Vendor<%S>"), &ptr16 ));
ptr16.Set( aData.Description() );
CLOG(( EParse, 3, _L(" Desc<%S>"), &ptr16 ));
CLOG(( EParse, 3, _L(" Size(%d)"), aData.Size() ));
ptr8.Set( aData.InstallNotify() );
CLOG(( EParse, 3, _L8(" InstNotif<%S>"), &ptr8 ));
ptr8.Set( aData.NextUrl() );
CLOG(( EParse, 3, _L8(" NextUrl<%S>"), &ptr8 ));
ptr8.Set( aData.NextUrlAtError() );
CLOG(( EParse, 3, _L8(" NextUrlAtErr<%S>"), &ptr8 ));
ptr8.Set( aData.InfoUrl() );
CLOG(( EParse, 3, _L8(" InfoUrl<%S>"), &ptr8 ));
ptr16.Set( aData.Price() );
CLOG(( EParse, 3, _L(" Price<%S>"), &ptr16 ));
ptr8.Set( aData.Icon() );
CLOG(( EParse, 3, _L8(" Icon<%S>"), &ptr8 ));
#endif /* def __TEST_COD_LOG */
// NULL data for clarity. These are never used later, but don't keep
// pointers to objects which are out of reach.
iBuf = NULL;
iData = NULL;
iCurP = NULL;
iEndP = NULL;
User::LeaveIfError( iError );
CLOG(( EParse, 2, _L("<- TCodParser::ParseL") ));
}
Example 10: CLOG
void StereoSequence::onTriggeredLoadNextImage(){
CLOG(LDEBUG) << "Sequence::onTriggeredLoadNextImage - next image from the sequence will be loaded";
in_load_next_image_trigger.read();
frame++;
}
Example 11: CLOG
bool
ApplyLedgerChainWork::applyHistoryOfSingleLedger()
{
LedgerHeaderHistoryEntry hHeader;
LedgerHeader& header = hHeader.header;
if (!mHdrIn || !mHdrIn.readOne(hHeader))
{
return false;
}
auto& lm = mApp.getLedgerManager();
auto const& lclHeader = lm.getLastClosedLedgerHeader();
// If we are >1 before LCL, skip
if (header.ledgerSeq + 1 < lclHeader.header.ledgerSeq)
{
CLOG(DEBUG, "History")
<< "Catchup skipping old ledger " << header.ledgerSeq;
return true;
}
// If we are one before LCL, check that we knit up with it
if (header.ledgerSeq + 1 == lclHeader.header.ledgerSeq)
{
if (hHeader.hash != lclHeader.header.previousLedgerHash)
{
throw std::runtime_error(
fmt::format("replay of {:s} failed to connect on hash of LCL "
"predecessor {:s}",
LedgerManager::ledgerAbbrev(hHeader),
LedgerManager::ledgerAbbrev(
lclHeader.header.ledgerSeq - 1,
lclHeader.header.previousLedgerHash)));
}
CLOG(DEBUG, "History") << "Catchup at 1-before LCL ("
<< header.ledgerSeq << "), hash correct";
return true;
}
// If we are at LCL, check that we knit up with it
if (header.ledgerSeq == lclHeader.header.ledgerSeq)
{
if (hHeader.hash != lm.getLastClosedLedgerHeader().hash)
{
mApplyLedgerFailure.Mark();
throw std::runtime_error(
fmt::format("replay of {:s} at LCL {:s} disagreed on hash",
LedgerManager::ledgerAbbrev(hHeader),
LedgerManager::ledgerAbbrev(lclHeader)));
}
CLOG(DEBUG, "History")
<< "Catchup at LCL=" << header.ledgerSeq << ", hash correct";
return true;
}
// If we are past current, we can't catch up: fail.
if (header.ledgerSeq != lclHeader.header.ledgerSeq + 1)
{
mApplyLedgerFailure.Mark();
throw std::runtime_error(
fmt::format("replay overshot current ledger: {:d} > {:d}",
header.ledgerSeq, lclHeader.header.ledgerSeq + 1));
}
// If we do not agree about LCL hash, we can't catch up: fail.
if (header.previousLedgerHash != lm.getLastClosedLedgerHeader().hash)
{
mApplyLedgerFailure.Mark();
throw std::runtime_error(fmt::format(
"replay at current ledger {:s} disagreed on LCL hash {:s}",
LedgerManager::ledgerAbbrev(header.ledgerSeq - 1,
header.previousLedgerHash),
LedgerManager::ledgerAbbrev(lclHeader)));
}
auto txset = getCurrentTxSet();
CLOG(DEBUG, "History") << "Ledger " << header.ledgerSeq << " has "
<< txset->size() << " transactions";
// We've verified the ledgerHeader (in the "trusted part of history"
// sense) in CATCHUP_VERIFY phase; we now need to check that the
// txhash we're about to apply is the one denoted by that ledger
// header.
if (header.scpValue.txSetHash != txset->getContentsHash())
{
mApplyLedgerFailure.Mark();
throw std::runtime_error(fmt::format(
"replay txset hash differs from txset hash in replay ledger: hash "
"for txset for {:d} is {:s}, expected {:s}",
header.ledgerSeq, hexAbbrev(txset->getContentsHash()),
hexAbbrev(header.scpValue.txSetHash)));
}
LedgerCloseData closeData(header.ledgerSeq, txset, header.scpValue);
lm.closeLedger(closeData);
CLOG(DEBUG, "History") << "LedgerManager LCL:\n"
<< xdr::xdr_to_string(
//......... rest of this example omitted .........
Example 12: CLOG
void CvFlann::onNewImage()
{
CLOG(LTRACE) << "CvFlann::onNewImage\n";
try {
// Read input features.
Types::Features features_1 = in_features0.read();
Types::Features features_2 = in_features1.read();
// Read input images.
cv::Mat img_1 = in_img0.read();
cv::Mat img_2 = in_img1.read();
// Read input descriptors.
cv::Mat descriptors_1 = in_descriptors0.read();
cv::Mat descriptors_2 = in_descriptors1.read();
// Matching descriptor vectors using FLANN matcher.
FlannBasedMatcher matcher;
std::vector< DMatch > matches;
matcher.match( descriptors_1, descriptors_2, matches );
if (distance_recalc) {
double max_dist = 0;
double min_dist = 100;
//-- Quick calculation of max and min distances between keypoints.
for( int i = 0; i < descriptors_1.rows; i++ )
{
double dist = matches[i].distance;
if( dist < min_dist ) min_dist = dist;
if( dist > max_dist ) max_dist = dist;
}
dist = 2*min_dist;
CLOG(LINFO) << " Max dist : " << (double)max_dist;
CLOG(LINFO) << " Min dist : " << (double)min_dist;
CLOG(LINFO) << " Dist : " << (double)dist << std::endl;
}
//Draw only "good" matches (i.e. whose distance is less than 2*min_dist ).
//PS.- radiusMatch can also be used here.
std::vector< DMatch > good_matches;
for( int i = 0; i < descriptors_1.rows; i++ )
{
if( matches[i].distance < dist )
good_matches.push_back( matches[i]);
}
//-- Draw only "good" matches
Mat img_matches;
drawMatches( img_1, features_1.features, img_2, features_2.features,
good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
// Print stats.
if (print_stats) {
for( int i = 0; i < good_matches.size(); i++ )
{
CLOG(LINFO) << " Good Match [" << i <<"] Keypoint 1: " << good_matches[i].queryIdx << " -- Keypoint 2: " << good_matches[i].trainIdx;
}
CLOG(LINFO) << std::endl;
}
// Write the result to the output.
out_img.write(img_matches);
} catch (...) {
CLOG(LERROR) << "CvFlann::onNewImage failed\n";
}
}
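CvFlann filters matches with a fixed threshold of 2*min_dist. A common alternative (not what this component does) is Lowe's ratio test over the two nearest neighbours returned by knnMatch; the sketch below is hypothetical and the 0.75 ratio is an arbitrary, commonly used default.

#include <opencv2/opencv.hpp>
#include <vector>

// Hypothetical alternative filter using Lowe's ratio test over the two nearest
// neighbours instead of an absolute distance threshold.
std::vector<cv::DMatch> ratioTestFilter(const cv::Mat& descriptors_1,
                                        const cv::Mat& descriptors_2,
                                        float ratio = 0.75f) {
    cv::FlannBasedMatcher matcher;
    std::vector<std::vector<cv::DMatch> > knn_matches;
    matcher.knnMatch(descriptors_1, descriptors_2, knn_matches, 2);
    std::vector<cv::DMatch> good_matches;
    for (size_t i = 0; i < knn_matches.size(); ++i) {
        // Keep a match only if it is clearly better than the second-best candidate.
        if (knn_matches[i].size() == 2 &&
            knn_matches[i][0].distance < ratio * knn_matches[i][1].distance)
            good_matches.push_back(knn_matches[i][0]);
    }
    return good_matches;
}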
Example 13: CLOG
void Sequence::onLoadImage() {
CLOG(LDEBUG) << "Sequence::onLoadImage";
if(reload_flag) {
// Try to reload sequence.
if (!findFiles()) {
CLOG(LERROR) << name() << ": There are no files matching the regular expression "
<< prop_pattern << " in " << prop_directory;
}
frame = -1;
reload_flag = false;
}
// Check whether there are any images loaded.
if(files.empty())
return;
// Check streaming
if(!prop_auto_streaming && !streaming_flag)
return;
streaming_flag = false;
// Check triggering mode.
if ((prop_auto_next_image) || (!prop_auto_next_image && next_image_flag))
frame++;
// Anyway, reset flag.
next_image_flag = false;
// Check frame number.
if (frame <0)
frame = 0;
// Check the size of the dataset.
if (frame >= files.size()) {
if (prop_loop) {
frame = 0;
CLOG(LINFO) << name() << ": loop";
// TODO: endOfSequence->raise();
} else {
frame = files.size() -1;
CLOG(LINFO) << name() << ": end of sequence";
// TODO: endOfSequence->raise();
}
}
CLOG(LINFO) << "Sequence: reading image " << files[frame];
try {
// Get file extension.
std::string ext = files[frame].substr(files[frame].rfind(".")+1);
CLOG(LDEBUG) << "Extracted file Extension " << ext;
// Read depth from yaml.
if ((ext == "yaml") || (ext == "yml")){
cv::FileStorage file(files[frame], cv::FileStorage::READ);
file["img"] >> img;
}
else
img = cv::imread(files[frame], CV_LOAD_IMAGE_ANYDEPTH | CV_LOAD_IMAGE_ANYCOLOR);
} catch (...) {
//......... rest of this example omitted .........
Example 14: err
void
ApplicationImpl::start()
{
mDatabase->upgradeToCurrentSchema();
if (mPersistentState->getState(PersistentState::kForceSCPOnNextLaunch) ==
"true")
{
mConfig.FORCE_SCP = true;
}
if (mConfig.NETWORK_PASSPHRASE.empty())
{
throw std::invalid_argument("NETWORK_PASSPHRASE not configured");
}
if (mConfig.QUORUM_SET.threshold == 0)
{
throw std::invalid_argument("Quorum not configured");
}
if (!mHerder->isQuorumSetSane(mConfig.QUORUM_SET, !mConfig.UNSAFE_QUORUM))
{
std::string err("Invalid QUORUM_SET: duplicate entry or bad threshold "
"(should be between ");
if (mConfig.UNSAFE_QUORUM)
{
err = err + "1";
}
else
{
err = err + "51";
}
err = err + " and 100)";
throw std::invalid_argument(err);
}
bool done = false;
mLedgerManager->loadLastKnownLedger(
[this, &done](asio::error_code const& ec)
{
if (ec)
{
throw std::runtime_error("Unable to restore last-known ledger state");
}
// restores the SCP state before starting overlay
mHerder->restoreSCPState();
// perform maintenance tasks if configured to do so
// for now, we only perform it when CATCHUP_COMPLETE is not set
if (mConfig.MAINTENANCE_ON_STARTUP && !mConfig.CATCHUP_COMPLETE)
{
maintenance();
}
mOverlayManager->start();
auto npub = mHistoryManager->publishQueuedHistory();
if (npub != 0)
{
CLOG(INFO, "Ledger") << "Restarted publishing " << npub
<< " queued snapshots";
}
if (mConfig.FORCE_SCP)
{
std::string flagClearedMsg = "";
if (mPersistentState->getState(
PersistentState::kForceSCPOnNextLaunch) == "true")
{
flagClearedMsg = " (`force scp` flag cleared in the db)";
mPersistentState->setState(
PersistentState::kForceSCPOnNextLaunch, "false");
}
LOG(INFO) << "* ";
LOG(INFO) << "* Force-starting scp from the current db state."
<< flagClearedMsg;
LOG(INFO) << "* ";
mHerder->bootstrap();
}
done = true;
});
while (!done)
{
mVirtualClock.crank(true);
}
}
Example 15: CLOG
void CalcObjectLocation::calculate() {
CLOG(LDEBUG)<<"in calculate()";
Types::HomogMatrix homogMatrix;
vector<cv::Mat_<double> > rvec;
vector<cv::Mat_<double> > tvec;
vector<cv::Mat_<double> > axis;
vector<double> fi;
cv::Mat_<double> tvectemp;
cv::Mat_<double> rotMatrix;
rotMatrix = cv::Mat_<double>::zeros(3,3);
tvectemp = cv::Mat_<double>::zeros(3,1);
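// Read every pose waiting on the input stream; identity matrices are skipped and
// each rotation is converted to its axis-angle (Rodrigues) form, with the
// corresponding translation stored alongside it.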
while (!in_homogMatrix.empty()) {
cv::Mat_<double> rvectemp;
homogMatrix=in_homogMatrix.read();
if (homogMatrix.getElements() == Eigen::Matrix4f::Identity()) {
continue;
}
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
rotMatrix(i,j)=homogMatrix.getElement(i, j);
}
tvectemp(i, 0) = homogMatrix.getElement(i, 3);
}
Rodrigues(rotMatrix, rvectemp);
CLOG(LINFO) << rvectemp << "\n";
rvec.push_back(rvectemp);
tvec.push_back(tvectemp);
}
if (rvec.size() == 1) {
out_homogMatrix.write(homogMatrix);
return;
}
float fi_sum=0, fi_avg;
cv::Mat_<double> axis_sum, axis_avg;
cv::Mat_<double> rvec_avg;
cv::Mat_<double> tvec_avg, tvec_sum;
axis_sum = cv::Mat_<double>::zeros(3,1);
tvec_sum = cv::Mat_<double>::zeros(3,1);
for(int i = 0; i < rvec.size(); i++) {
float fitmp = sqrt((pow(rvec.at(i)(0,0), 2) + pow(rvec.at(i)(1,0), 2)+pow(rvec.at(i)(2,0),2)));
fi.push_back(fitmp);
fi_sum+=fitmp;
cv::Mat_<double> axistemp;
axistemp.create(3,1);
for(int k=0; k<3; k++) {
axistemp(k,0)=rvec.at(i)(k,0)/fitmp;
}
axis.push_back(axistemp);
axis_sum+=axistemp;
tvec_sum+=tvec.at(i);
}
fi_avg = fi_sum/fi.size();
axis_avg = axis_sum/axis.size();
rvec_avg = axis_avg * fi_avg;
tvec_avg = tvec_sum/tvec.size();
Types::HomogMatrix hm;
cv::Mat_<double> rottMatrix;
Rodrigues(rvec_avg, rottMatrix);
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
hm.setElement(i, j, rottMatrix(i, j));
CLOG(LINFO) << hm.getElement(i, j) << " ";
}
hm.setElement(i, 3, tvec_avg(i, 0));
CLOG(LINFO) << hm.getElement(i, 3) << "\n";
}
out_homogMatrix.write(hm);
}