This article collects typical usage examples of the C++ method CActionCollection::insert. If you are wondering what CActionCollection::insert does, how it is used in practice, or where to find usage examples, the curated code samples below may help. You can also explore further examples for the containing class, CActionCollection.
The following shows 9 code examples of the CActionCollection::insert method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
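Before the full examples, here is a minimal, self-contained sketch of the basic pattern: build an odometry action and insert it into a CActionCollection. This is only an illustrative sketch, assuming MRPT 1.3+ header paths (mrpt/obs/...); older MRPT releases provide the same classes under mrpt/slam/, so adapt the includes and smart-pointer typedefs to your version.
#include <mrpt/obs/CActionCollection.h>
#include <mrpt/obs/CActionRobotMovement2D.h>
#include <mrpt/poses/CPose2D.h>
#include <iostream>
int main()
{
    using namespace mrpt::obs;
    using namespace mrpt::poses;
    // An empty collection of robot actions (odometry increments, etc.):
    CActionCollection acts;
    // Build a 2D odometry action from an incremental pose (10 cm forward, no rotation):
    CActionRobotMovement2D act;
    CActionRobotMovement2D::TMotionModelOptions opts; // default motion-model parameters
    act.computeFromOdometry(CPose2D(0.10, 0.0, 0.0), opts);
    // insert() stores the action inside the collection:
    acts.insert(act);
    std::cout << "Actions in collection: " << acts.size() << std::endl;
    return 0;
}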
Example 1: addAction
/*---------------------------------------------------------------
---------------------------------------------------------------*/
void CRawlog::addAction( CAction &action )
{
CActionCollection *temp = new CActionCollection();
temp->insert( action );
m_seqOfActObs.push_back( CSerializablePtr(temp) );
}
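A usage note on the pattern above (based on the MRPT versions these examples come from): insert() stores its own duplicate of the passed action, so the caller's action object does not need to outlive the collection; the collection itself is then handed to the rawlog wrapped in a smart pointer.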
Example 2: main
//......... part of the code omitted here .........
landmarkMap.simulateRangeBearingReadings(
realPose,
sensorPoseOnRobot,
*obs,
sensorDetectsIDs, // whether to identify landmarks
stdRange,
stdYaw,
stdPitch );
// Keep the GT of the robot pose:
GT_path.setSize(i+1,6);
for (size_t k=0; k<6; k++)
GT_path(i,k)=realPose[k];
cout << obs->sensedData.size() << " landmarks in sight";
if (obs->sensedData.empty()) nWarningsNoSight++;
SF.push_back( obs );
// Simulate odometry, from "incPose3D" with noise:
if (!incPose_is_3D)
{ // 2D odometry:
CActionRobotMovement2D act;
CPose2D incOdo( incPose3D );
if (incPose3D.x()!=0 || incPose3D.y()!=0 || incPose3D.yaw()!=0)
{
incOdo.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
incOdo.y_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
incOdo.phi_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoisePhi_std );
}
act.computeFromOdometry(incOdo, opts);
acts.insert( act );
}
else
{ // 3D odometry:
CActionRobotMovement3D act;
act.estimationMethod = CActionRobotMovement3D::emOdometry;
CPose3D noisyIncPose = incPose3D;
if (incPose3D.x()!=0 || incPose3D.y()!=0 || incPose3D.yaw()!=0)
{
noisyIncPose.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
noisyIncPose.y_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
noisyIncPose.z_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
noisyIncPose.setYawPitchRoll(
noisyIncPose.yaw()+ randomGenerator.drawGaussian1D_normalized() * odometryNoisePhi_std,
noisyIncPose.pitch()+ randomGenerator.drawGaussian1D_normalized() * odometryNoisePitch_std,
noisyIncPose.roll()+ randomGenerator.drawGaussian1D_normalized() * odometryNoiseRoll_std );
}
act.poseChange.mean = noisyIncPose;
act.poseChange.cov.eye();
act.poseChange.cov(0,0) =
act.poseChange.cov(1,1) =
act.poseChange.cov(2,2) = square(odometryNoiseXY_std);
act.poseChange.cov(3,3) = square(odometryNoisePhi_std);
act.poseChange.cov(4,4) = square(odometryNoisePitch_std);
act.poseChange.cov(5,5) = square(odometryNoiseRoll_std);
acts.insert( act );
}
Example 3: OnMenuConvertSF
// Convert from observations only to actions-SF format:
void xRawLogViewerFrame::OnMenuConvertSF(wxCommandEvent& event)
{
WX_START_TRY
bool onlyOnePerLabel =
(wxYES == wxMessageBox(
_("Keep only one observation of each label within each "
"sensoryframe?"),
_("Convert to sensoryframe's"), wxYES_NO, this));
wxString strMaxL = wxGetTextFromUser(
_("Maximum length of each sensoryframe (seconds):"),
_("Convert to sensoryframe's"), _("1.0"));
double maxLengthSF;
strMaxL.ToDouble(&maxLengthSF);
// Process:
CRawlog new_rawlog;
new_rawlog.setCommentText(rawlog.getCommentText());
wxBusyCursor waitCursor;
unsigned int nEntries = (unsigned int)rawlog.size();
wxProgressDialog progDia(
wxT("Progress"), wxT("Parsing rawlog..."),
nEntries, // range
this, // parent
wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_SMOOTH | wxPD_AUTO_HIDE |
wxPD_ELAPSED_TIME | wxPD_ESTIMATED_TIME | wxPD_REMAINING_TIME);
wxTheApp->Yield(); // Let the app. process messages
CSensoryFrame SF_new;
set<string> SF_new_labels;
TTimeStamp SF_new_first_t = INVALID_TIMESTAMP;
CObservationOdometry::Ptr last_sf_odo, cur_sf_odo;
for (unsigned int countLoop = 0; countLoop < nEntries; countLoop++)
{
if (countLoop % 20 == 0)
{
if (!progDia.Update(
countLoop,
wxString::Format(
wxT("Parsing rawlog... %u objects"), countLoop)))
{
return;
}
wxTheApp->Yield(); // Let the app. process messages
}
switch (rawlog.getType(countLoop))
{
case CRawlog::etSensoryFrame:
case CRawlog::etActionCollection:
{
THROW_EXCEPTION(
"The rawlog already has sensory frames and/or actions!");
}
break;
case CRawlog::etObservation:
{
CObservation::Ptr o = rawlog.getAsObservation(countLoop);
// Update stats:
bool label_existed =
SF_new_labels.find(o->sensorLabel) != SF_new_labels.end();
double SF_len =
SF_new_first_t == INVALID_TIMESTAMP
? 0
: timeDifference(SF_new_first_t, o->timestamp);
// Decide:
// End SF and start a new one?
if (SF_len > maxLengthSF && SF_new.size() != 0)
{
new_rawlog.addObservations(SF_new);
// Odometry increments:
CActionCollection acts;
if (last_sf_odo && cur_sf_odo)
{
CActionRobotMovement2D act;
act.timestamp = cur_sf_odo->timestamp;
CActionRobotMovement2D::TMotionModelOptions opts;
act.computeFromOdometry(
cur_sf_odo->odometry - last_sf_odo->odometry, opts);
acts.insert(act);
}
new_rawlog.addActions(acts);
last_sf_odo = cur_sf_odo;
cur_sf_odo.reset();
SF_new.clear();
SF_new_labels.clear();
SF_new_first_t = INVALID_TIMESTAMP;
}
//......... part of the code omitted here .........
Example 4: processActionObservation
/*---------------------------------------------------------------
processActionObservation
---------------------------------------------------------------*/
void CMetricMapBuilderRBPF::processActionObservation(
CActionCollection &action,
CSensoryFrame &observations )
{
MRPT_START
// Enter critical section (updating map)
enterCriticalSection();
// Update the traveled distance estimations:
{
CActionRobotMovement3DPtr act3D = action.getActionByClass<CActionRobotMovement3D>();
CActionRobotMovement2DPtr act2D = action.getActionByClass<CActionRobotMovement2D>();
if (act3D)
{
odoIncrementSinceLastMapUpdate += act3D->poseChange.getMeanVal();
odoIncrementSinceLastLocalization += act3D->poseChange;
}
else if (act2D)
{
odoIncrementSinceLastMapUpdate += act2D->poseChange->getMeanVal();
odoIncrementSinceLastLocalization.mean += act2D->poseChange->getMeanVal();
}
else
{
std::cerr << "[CMetricMapBuilderRBPF] WARNING: action contains no odometry." << std::endl;
}
}
// Execute particle filter:
// But only if the traveled distance since the last update is long enough,
// or this is the first observation, etc...
// ----------------------------------------------------------------------------
bool do_localization = (
!mapPDF.SFs.size() || // This is the first observation!
options.debugForceInsertion ||
odoIncrementSinceLastLocalization.mean.norm()>localizeLinDistance ||
std::abs(odoIncrementSinceLastLocalization.mean.yaw())>localizeAngDistance);
bool do_map_update = (
!mapPDF.SFs.size() || // This is the first observation!
options.debugForceInsertion ||
odoIncrementSinceLastMapUpdate.norm()>insertionLinDistance ||
std::abs(odoIncrementSinceLastMapUpdate.yaw())>insertionAngDistance);
// Used any "options.alwaysInsertByClass" ??
for (CListOfClasses::const_iterator itCl=options.alwaysInsertByClass.begin();!do_map_update && itCl!=options.alwaysInsertByClass.end();++itCl)
for ( CSensoryFrame::iterator it=observations.begin();it!=observations.end();++it)
if ((*it)->GetRuntimeClass()==*itCl)
{
do_map_update = true;
do_localization = true;
break;
}
if (do_map_update)
do_localization = true;
if (do_localization)
{
// Create an artificial action object, since
// we've been collecting them until a threshold:
// ------------------------------------------------
CActionCollection fakeActs;
{
CActionRobotMovement3DPtr act3D = action.getActionByClass<CActionRobotMovement3D>();
if (act3D)
{
CActionRobotMovement3D newAct;
newAct.estimationMethod = act3D->estimationMethod;
newAct.poseChange = odoIncrementSinceLastLocalization;
newAct.timestamp = act3D->timestamp;
fakeActs.insert(newAct);
}
else
{
// It must be 2D odometry:
CActionRobotMovement2DPtr act2D = action.getActionByClass<CActionRobotMovement2D>();
ASSERT_(act2D)
CActionRobotMovement2D newAct;
newAct.computeFromOdometry( CPose2D(odoIncrementSinceLastLocalization.mean), act2D->motionModelConfiguration );
newAct.timestamp = act2D->timestamp;
fakeActs.insert(newAct);
}
}
// Reset distance counters:
odoIncrementSinceLastLocalization.mean.setFromValues(0,0,0,0,0,0);
odoIncrementSinceLastLocalization.cov.zeros();
CParticleFilter pf;
pf.m_options = m_PF_options;
pf.executeOn( mapPDF, &fakeActs, &observations );
//......... part of the code omitted here .........
Example 5: OnMenuLossLessDecimate
//......... part of the code omitted here .........
// For each entry:
for (i = 0; i < N; i++)
{
CSerializable::Ptr obj = rawlog.getAsGeneric(i);
if (rawlog.getType(i) == CRawlog::etActionCollection)
{
// Accumulate Actions
// ----------------------
CActionCollection::Ptr curActs =
std::dynamic_pointer_cast<CActionCollection>(obj);
CActionRobotMovement2D::Ptr mov =
curActs->getBestMovementEstimation();
if (mov)
{
CPose2D incrPose = mov->poseChange->getMeanVal();
// If we have previous observations, shift them according to
// this new increment:
if (accum_sf)
{
CPose3D inv_incrPose3D(
CPose3D(0, 0, 0) - CPose3D(incrPose));
for (CSensoryFrame::iterator it = accum_sf->begin();
it != accum_sf->end(); ++it)
{
CPose3D tmpPose;
(*it)->getSensorPose(tmpPose);
tmpPose = inv_incrPose3D + tmpPose;
(*it)->setSensorPose(tmpPose);
}
}
// Accumulate from odometry:
accumMovement = accumMovement + incrPose;
// Copy the probabilistic options from the first entry we find:
if (!cummMovementInit)
{
odometryOptions = mov->motionModelConfiguration;
cummMovementInit = true;
}
}
}
else if (rawlog.getType(i) == CRawlog::etSensoryFrame)
{
// Decimate Observations
// ---------------------------
// Add observations to the accum. SF:
if (!accum_sf)
accum_sf = mrpt::make_aligned_shared<CSensoryFrame>();
// Copy pointers to observations only (fast):
accum_sf->moveFrom(*std::dynamic_pointer_cast<CSensoryFrame>(obj));
if (++SF_counter >= DECIMATE_RATIO)
{
SF_counter = 0;
// INSERT OBSERVATIONS:
newRawLog.addObservationsMemoryReference(accum_sf);
accum_sf.reset();
// INSERT ACTIONS:
CActionCollection actsCol;
if (cummMovementInit)
{
CActionRobotMovement2D cummMovement;
cummMovement.computeFromOdometry(
accumMovement, odometryOptions);
actsCol.insert(cummMovement);
// Reset odometry accumulation:
accumMovement = CPose2D(0, 0, 0);
}
newRawLog.addActions(actsCol);
}
}
else
THROW_EXCEPTION(
"This operation implemented for SF-based rawlogs only.");
// Delete object
obj.reset();
} // end for i each entry
// Clear the list only (objects already deleted)
rawlog.clear();
// Copy as new log:
rawlog = newRawLog;
rebuildTreeView();
WX_END_TRY
}
Example 6: main
//......... part of the code omitted here .........
// Simulate:
// ---------------------------------------------
CActionRobotMovement2D::TMotionModelOptions opts;
CPoint3D null3D(0,0,0);
opts.modelSelection = CActionRobotMovement2D::mmGaussian;
opts.gausianModel.a1=0;
opts.gausianModel.a2=0;
opts.gausianModel.a3=0;
opts.gausianModel.a4=0;
opts.gausianModel.minStdXY = odometryNoiseXY_std;
opts.gausianModel.minStdPHI = DEG2RAD( 0.002f );
os::sprintf(auxStr,sizeof(auxStr),"%s/%s",outDir.c_str(),outFile.c_str());
CFileOutputStream fil(auxStr);
CPose2D realPose;
CPose2D incPose;
int stopSteps = 4;
for (i=0;i<nSteps;i++)
{
printf("Generating step %i...",i);
CSensoryFrame SF;
CActionCollection acts;
CActionRobotMovement2D act;
CPose3D pose3D( realPose );
if (i>=stopSteps)
{
if (circularPath)
{
// Circular path:
float Ar = DEG2RAD(5);
incPose = CPose2D(0.20f*cos(Ar),0.20f*sin(Ar),Ar);
}
else
{
// Square path:
if ( (i % squarePathLength) > (squarePathLength-5) )
incPose = CPose2D(0,0,DEG2RAD(90.0f/4));
else incPose = CPose2D(0.20f,0,0);
}
}
else incPose = CPose2D(0,0,0);
// Simulate observations:
CObservationBeaconRangesPtr obs=CObservationBeaconRanges::Create();
obs->minSensorDistance=minSensorDistance;
obs->maxSensorDistance=maxSensorDistance;
obs->stdError=stdError;
beaconMap.simulateBeaconReadings( pose3D,null3D,*obs );
// Corrupt with outliers:
float probability_corrupt = i==0 ? ratio_outliers_first_step : ratio_outliers;
for (size_t q=0;q<obs->sensedData.size();q++)
{
if ( randomGenerator.drawUniform(0.0f,1.0f) < probability_corrupt )
{
obs->sensedData[q].sensedDistance += randomGenerator.drawUniform( outlier_uniform_min,outlier_uniform_max );
if (obs->sensedData[q].sensedDistance<0)
obs->sensedData[q].sensedDistance = 0;
}
}
std::cout << obs->sensedData.size() << " beacons at range";
SF.push_back( obs );
// Simulate actions:
CPose2D incOdo( incPose );
if (incPose.x()!=0 || incPose.y()!=0 || incPose.phi()!=0)
{
incOdo.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
incOdo.y_incr( odometryBias_Y + randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
}
act.computeFromOdometry( incOdo, opts);
acts.insert( act );
// Save:
fil << SF << acts;
// Next pose:
realPose = realPose + incPose;
printf("\n");
}
cout << "Data saved to directory: " << outDir << endl;
}
catch(std::exception &e)
{
std::cout << e.what();
}
catch(...)
{
std::cout << "Untyped exception!";
}
}
Example 7: processActionObservation
/*---------------------------------------------------------------
processActionObservation
---------------------------------------------------------------*/
void CMetricMapBuilderRBPF::processActionObservation(
CActionCollection& action, CSensoryFrame& observations)
{
MRPT_START
std::lock_guard<std::mutex> csl(
critZoneChangingMap); // Enter critical section (updating map)
// Update the traveled distance estimations:
{
CActionRobotMovement3D::Ptr act3D =
action.getActionByClass<CActionRobotMovement3D>();
CActionRobotMovement2D::Ptr act2D =
action.getActionByClass<CActionRobotMovement2D>();
if (act3D)
{
MRPT_LOG_DEBUG_STREAM(
"processActionObservation(): Input action is "
"CActionRobotMovement3D="
<< act3D->poseChange.getMeanVal().asString());
odoIncrementSinceLastMapUpdate += act3D->poseChange.getMeanVal();
odoIncrementSinceLastLocalization += act3D->poseChange;
}
else if (act2D)
{
MRPT_LOG_DEBUG_STREAM(
"processActionObservation(): Input action is "
"CActionRobotMovement2D="
<< act2D->poseChange->getMeanVal().asString());
odoIncrementSinceLastMapUpdate +=
mrpt::poses::CPose3D(act2D->poseChange->getMeanVal());
odoIncrementSinceLastLocalization.mean +=
mrpt::poses::CPose3D(act2D->poseChange->getMeanVal());
}
else
{
MRPT_LOG_WARN("Action contains no odometry.\n");
}
}
// Execute particle filter:
// But only if the traveled distance since the last update is long enough,
// or this is the first observation, etc...
// ----------------------------------------------------------------------------
bool do_localization =
(!mapPDF.SFs.size() || // This is the first observation!
options.debugForceInsertion ||
odoIncrementSinceLastLocalization.mean.norm() > localizeLinDistance ||
std::abs(odoIncrementSinceLastLocalization.mean.yaw()) >
localizeAngDistance);
bool do_map_update =
(!mapPDF.SFs.size() || // This is the first observation!
options.debugForceInsertion ||
odoIncrementSinceLastMapUpdate.norm() > insertionLinDistance ||
std::abs(odoIncrementSinceLastMapUpdate.yaw()) > insertionAngDistance);
// Used any "options.alwaysInsertByClass" ??
for (auto itCl = options.alwaysInsertByClass.data.begin();
!do_map_update && itCl != options.alwaysInsertByClass.data.end();
++itCl)
for (auto& o : observations)
if (o->GetRuntimeClass() == *itCl)
{
do_map_update = true;
do_localization = true;
break;
}
if (do_map_update) do_localization = true;
MRPT_LOG_DEBUG(mrpt::format(
"do_map_update=%s do_localization=%s", do_map_update ? "YES" : "NO",
do_localization ? "YES" : "NO"));
if (do_localization)
{
// Create an artificial action object, since
// we've been collecting them until a threshold:
// ------------------------------------------------
CActionCollection fakeActs;
{
CActionRobotMovement3D::Ptr act3D =
action.getActionByClass<CActionRobotMovement3D>();
if (act3D)
{
CActionRobotMovement3D newAct;
newAct.estimationMethod = act3D->estimationMethod;
newAct.poseChange = odoIncrementSinceLastLocalization;
newAct.timestamp = act3D->timestamp;
fakeActs.insert(newAct);
}
else
{
// It must be 2D odometry:
CActionRobotMovement2D::Ptr act2D =
action.getActionByClass<CActionRobotMovement2D>();
ASSERT_(act2D);
//......... part of the code omitted here .........
Example 8: main
//......... part of the code omitted here .........
TThreadParams threParms;
threParms.cfgFile = &iniFile;
threParms.sensor_label = *it;
TThreadHandle thre = createThread(SensorThread, threParms);
lstThreads.push_back(thre);
sleep(time_between_launches);
}
// ----------------------------------------------
// Run:
// ----------------------------------------------
CFileGZOutputStream out_file;
out_file.open( rawlog_filename, rawlog_GZ_compress_level );
CSensoryFrame curSF;
CGenericSensor::TListObservations copy_of_global_list_obs;
cout << endl << "Press any key to exit program" << endl;
while (!os::kbhit() && !allThreadsMustExit)
{
// See if we have observations and process them:
{
synch::CCriticalSectionLocker lock (&cs_global_list_obs);
copy_of_global_list_obs.clear();
if (!global_list_obs.empty())
{
CGenericSensor::TListObservations::iterator itEnd = global_list_obs.begin();
std::advance( itEnd, global_list_obs.size() / 2 );
copy_of_global_list_obs.insert(global_list_obs.begin(),itEnd );
global_list_obs.erase(global_list_obs.begin(), itEnd);
}
} // End of cs lock
if (use_sensoryframes)
{
// -----------------------
// USE SENSORY-FRAMES
// -----------------------
for (CGenericSensor::TListObservations::iterator it=copy_of_global_list_obs.begin();it!=copy_of_global_list_obs.end();++it)
{
// If we have an action, save the SF and start a new one:
if (IS_DERIVED(it->second, CAction))
{
CActionPtr act = CActionPtr( it->second);
out_file << curSF;
cout << "[" << dateTimeToString(now()) << "] Saved SF with " << curSF.size() << " objects." << endl;
curSF.clear();
CActionCollection acts;
acts.insert(*act);
act.clear_unique();
out_file << acts;
}
else
if (IS_CLASS(it->second,CObservationOdometry) )
{
CObservationOdometryPtr odom = CObservationOdometryPtr( it->second );
CActionRobotMovement2DPtr act = CActionRobotMovement2D::Create();
Example 9: CActionCollection_insert2
void CActionCollection_insert2(CActionCollection &self, CActionRobotMovement3D &action)
{
self.insert(action);
}
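Finally, a short companion sketch for the reverse direction: reading back actions that were previously insert()-ed. It assumes the same MRPT API used in Examples 4 and 7 (getActionByClass, the pointer-valued poseChange of CActionRobotMovement2D versus the value-typed poseChange of CActionRobotMovement3D); treat it as an illustration rather than a drop-in snippet for every MRPT version.
#include <mrpt/obs/CActionCollection.h>
#include <mrpt/obs/CActionRobotMovement2D.h>
#include <mrpt/obs/CActionRobotMovement3D.h>
#include <iostream>
void dumpOdometry(const mrpt::obs::CActionCollection& acts)
{
    using namespace mrpt::obs;
    // getActionByClass<T>() returns the first stored action of type T, or an empty pointer:
    if (CActionRobotMovement2D::Ptr act2D = acts.getActionByClass<CActionRobotMovement2D>())
        std::cout << "2D odometry increment: " << act2D->poseChange->getMeanVal().asString() << "\n";
    else if (CActionRobotMovement3D::Ptr act3D = acts.getActionByClass<CActionRobotMovement3D>())
        std::cout << "3D odometry increment: " << act3D->poseChange.getMeanVal().asString() << "\n";
    else
        std::cout << "Collection contains no odometry action.\n";
}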