

C++ CActionRobotMovement2D::computeFromOdometry Method Code Examples

This article collects typical usage examples of the C++ method CActionRobotMovement2D::computeFromOdometry. If you are wondering what this method does, how to call it, or what real code that uses it looks like, the hand-picked examples below should help. You can also explore further usage examples of CActionRobotMovement2D, the class this method belongs to.


Eight code examples of CActionRobotMovement2D::computeFromOdometry are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
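Before the examples, here is a minimal, self-contained sketch of the typical call pattern. It only uses members that appear in the examples below (TMotionModelOptions, modelSelection, gausianModel, computeFromOdometry, CActionCollection::insert); the include paths, namespaces, and numeric values are assumptions and may need adjusting for your MRPT version:

#include <mrpt/obs/CActionCollection.h>
#include <mrpt/obs/CActionRobotMovement2D.h>
#include <mrpt/poses/CPose2D.h>

using mrpt::obs::CActionCollection;
using mrpt::obs::CActionRobotMovement2D;
using mrpt::poses::CPose2D;

// Turn a raw 2D odometry increment into a probabilistic action and store it.
void buildOdometryAction(CActionCollection& acts, const CPose2D& odoIncrement)
{
    // Configure the probabilistic motion model (illustrative values only):
    CActionRobotMovement2D::TMotionModelOptions opts;
    opts.modelSelection = CActionRobotMovement2D::mmGaussian;
    opts.gausianModel.minStdXY  = 0.01f;    // minimum XY std. dev. [m]
    opts.gausianModel.minStdPHI = 0.0035f;  // minimum heading std. dev. [rad] (~0.2 deg)

    // Build the pose-change PDF from the raw odometry increment:
    CActionRobotMovement2D act;
    act.computeFromOdometry(odoIncrement, opts);

    // Keep it together with the other actions of this time step:
    acts.insert(act);
}

Older MRPT releases expose the same classes under the mrpt::slam namespace with smart-pointer typedefs such as CActionRobotMovement2DPtr, which is the style used in some of the examples below.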

Example 1: main


//......... some code omitted here .........
            obs->sensorLocationOnRobot = sensorPoseOnRobot;

            landmarkMap.simulateRangeBearingReadings(
                realPose,
                sensorPoseOnRobot,
                *obs,
                sensorDetectsIDs, // whether to identify landmarks
                stdRange,
                stdYaw,
                stdPitch );

            // Keep the GT of the robot pose:
            GT_path.setSize(i+1,6);
            for (size_t k=0; k<6; k++)
                GT_path(i,k)=realPose[k];

            cout << obs->sensedData.size() << " landmarks in sight";

            if (obs->sensedData.empty()) nWarningsNoSight++;

            SF.push_back( obs );

            // Simulate odometry, from "incPose3D" with noise:
            if (!incPose_is_3D)
            {   // 2D odometry:
                CActionRobotMovement2D    act;
                CPose2D     incOdo( incPose3D );
                if (incPose3D.x()!=0 || incPose3D.y()!=0 || incPose3D.yaw()!=0)
                {
                    incOdo.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    incOdo.y_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    incOdo.phi_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoisePhi_std );
                }
                act.computeFromOdometry(incOdo, opts);
                acts.insert( act );
            }
            else
            {   // 3D odometry:
                CActionRobotMovement3D    act;
                act.estimationMethod	= CActionRobotMovement3D::emOdometry;

                CPose3D   noisyIncPose = incPose3D;

                if (incPose3D.x()!=0 || incPose3D.y()!=0 || incPose3D.yaw()!=0)
                {
                    noisyIncPose.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    noisyIncPose.y_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    noisyIncPose.z_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
                    noisyIncPose.setYawPitchRoll(
                        noisyIncPose.yaw()+ randomGenerator.drawGaussian1D_normalized() * odometryNoisePhi_std,
                        noisyIncPose.pitch()+ randomGenerator.drawGaussian1D_normalized() * odometryNoisePitch_std,
                        noisyIncPose.roll()+ randomGenerator.drawGaussian1D_normalized() * odometryNoiseRoll_std );
                }

                act.poseChange.mean = noisyIncPose;
                act.poseChange.cov.eye();

                act.poseChange.cov(0,0) =
                    act.poseChange.cov(1,1) =
                        act.poseChange.cov(2,2) = square(odometryNoiseXY_std);
                act.poseChange.cov(3,3) = square(odometryNoisePhi_std);
                act.poseChange.cov(4,4) = square(odometryNoisePitch_std);
                act.poseChange.cov(5,5) = square(odometryNoiseRoll_std);

                acts.insert( act );
            }
Developer: MTolba, Project: mrpt, Lines: 67, Source: simul-landmarks-main.cpp
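Example 1 fills the opts motion-model options in code omitted above (Example 7 shows a complete configuration). Once computeFromOdometry() has run, the resulting action carries a pose-change PDF that downstream code queries or samples. A hedged sketch of that consuming side, continuing with the same headers and usings as the sketch above: poseChange->getMeanVal() appears in several examples below, while drawSingleSample() is assumed to be available as the simpler counterpart of the fastDrawSingleSample() call in Example 3.

// Build an action from a 20 cm forward odometry step:
CActionRobotMovement2D::TMotionModelOptions opts;   // defaults; tune as in Example 7
CActionRobotMovement2D act;
act.computeFromOdometry(CPose2D(0.20, 0.0, 0.0), opts);

// Mean of the estimated pose change:
const CPose2D meanIncr = act.poseChange->getMeanVal();

// Draw one random sample from the motion model (e.g. to propagate one particle):
CPose2D sampledIncr;
act.drawSingleSample(sampledIncr);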

Example 2: processActionObservation

/*---------------------------------------------------------------
					processActionObservation
  ---------------------------------------------------------------*/
void  CMetricMapBuilderRBPF::processActionObservation(
					CActionCollection	&action,
					CSensoryFrame		&observations )
{
	MRPT_START

	// Enter critical section (updating map)
	enterCriticalSection();

	// Update the traveled distance estimations:
	{
		CActionRobotMovement3DPtr act3D = action.getActionByClass<CActionRobotMovement3D>();
		CActionRobotMovement2DPtr act2D = action.getActionByClass<CActionRobotMovement2D>();
		if (act3D)
		{
			odoIncrementSinceLastMapUpdate += act3D->poseChange.getMeanVal();
			odoIncrementSinceLastLocalization += act3D->poseChange;
		}
		else if (act2D)
		{
			odoIncrementSinceLastMapUpdate += act2D->poseChange->getMeanVal();
			odoIncrementSinceLastLocalization.mean += act2D->poseChange->getMeanVal();
		}
		else
		{
			std::cerr << "[CMetricMapBuilderRBPF] WARNING: action contains no odometry." << std::endl;
		}
	}

	// Execute particle filter:
	//   But only if the traveled distance since the last update is long enough,
	//    or this is the first observation, etc...
	// ----------------------------------------------------------------------------
	bool do_localization = (
			!mapPDF.SFs.size() ||	// This is the first observation!
			options.debugForceInsertion ||
			odoIncrementSinceLastLocalization.mean.norm()>localizeLinDistance ||
			std::abs(odoIncrementSinceLastLocalization.mean.yaw())>localizeAngDistance);

	bool do_map_update = (
			!mapPDF.SFs.size() ||	// This is the first observation!
			options.debugForceInsertion ||
			odoIncrementSinceLastMapUpdate.norm()>insertionLinDistance ||
			std::abs(odoIncrementSinceLastMapUpdate.yaw())>insertionAngDistance);

	// Used any "options.alwaysInsertByClass" ??
	for (CListOfClasses::const_iterator itCl=options.alwaysInsertByClass.begin();!do_map_update && itCl!=options.alwaysInsertByClass.end();++itCl)
		for ( CSensoryFrame::iterator it=observations.begin();it!=observations.end();++it)
			if ((*it)->GetRuntimeClass()==*itCl)
			{
				do_map_update = true;
				do_localization = true;
				break;
			}


	if (do_map_update)
		do_localization = true;


	if (do_localization)
	{
		// Create an artificial action object, since
		//  we've been collecting them until a threshold:
		// ------------------------------------------------
		CActionCollection	fakeActs;
		{
			CActionRobotMovement3DPtr act3D = action.getActionByClass<CActionRobotMovement3D>();
			if (act3D)
			{
				CActionRobotMovement3D newAct;
				newAct.estimationMethod = act3D->estimationMethod;
				newAct.poseChange = odoIncrementSinceLastLocalization;
				newAct.timestamp = act3D->timestamp;
				fakeActs.insert(newAct);
			}
			else
			{
				// It must be 2D odometry:
				CActionRobotMovement2DPtr act2D = action.getActionByClass<CActionRobotMovement2D>();
				ASSERT_(act2D)
				CActionRobotMovement2D newAct;
				newAct.computeFromOdometry( CPose2D(odoIncrementSinceLastLocalization.mean), act2D->motionModelConfiguration );
				newAct.timestamp = act2D->timestamp;
				fakeActs.insert(newAct);
			}
		}

		// Reset distance counters:
		odoIncrementSinceLastLocalization.mean.setFromValues(0,0,0,0,0,0);
		odoIncrementSinceLastLocalization.cov.zeros();

		CParticleFilter	pf;
		pf.m_options = m_PF_options;

		pf.executeOn( mapPDF, &fakeActs, &observations );

//......... some code omitted here .........
Developer: MTolba, Project: mrpt, Lines: 101, Source: CMetricMapBuilderRBPF.cpp

Example 3: pose

/** The PF algorithm implementation for "optimal sampling for non-parametric observation models"
  */
void  CLSLAM_RBPF_2DLASER::prediction_and_update_pfAuxiliaryPFOptimal(
	CLocalMetricHypothesis	*LMH,
	const mrpt::slam::CActionCollection	* actions,
	const mrpt::slam::CSensoryFrame		* sf,
	const bayes::CParticleFilter::TParticleFilterOptions &PF_options )
{
	MRPT_START

		// Get the current robot pose estimation:
	TPoseID		currentPoseID = LMH->m_currentRobotPose;

	size_t				i,k,N,M = LMH->m_particles.size();

	// ----------------------------------------------------------------------
	//	  We can execute optimal PF only when we have both, an action, and
	//     a valid observation from which to compute the likelihood:
	//   Accumulate odometry/actions until we have a valid observation, then
	//    process them simultaneously.
	// ----------------------------------------------------------------------
//	static CActionRobotMovement2D	accumRobotMovement;
//	static bool						accumRobotMovementIsValid = false;
	bool  SFhasValidObservations = false;
	// A valid action?
	if (actions!=NULL)
	{
		CActionRobotMovement2DPtr act = actions->getBestMovementEstimation();	// Find a robot movement estimation:
		if (!act) THROW_EXCEPTION("Action list does not contain any CActionRobotMovement2D derived object!");

		if (!LMH->m_accumRobotMovementIsValid) // Reset accum.
		{
			act->poseChange->getMean( LMH->m_accumRobotMovement.rawOdometryIncrementReading );
			LMH->m_accumRobotMovement.motionModelConfiguration = act->motionModelConfiguration;
		}
		else
			LMH->m_accumRobotMovement.rawOdometryIncrementReading =
				LMH->m_accumRobotMovement.rawOdometryIncrementReading +
				act->poseChange->getMeanVal();

		LMH->m_accumRobotMovementIsValid = true;
	}

	if (sf!=NULL)
	{
		ASSERT_(LMH->m_particles.size()>0);
		SFhasValidObservations = (*LMH->m_particles.begin()).d->metricMaps.canComputeObservationsLikelihood( *sf );
	}

	// All the needed things?
	if (!LMH->m_accumRobotMovementIsValid || !SFhasValidObservations)
		return; // Nothing we can do here...

	// OK, we have all we need, let's start!

	// Take the accum. actions as input:
	CActionRobotMovement2D		theResultingRobotMov;

	// Enforce a minimum odometry uncertainty:
	keep_max(
		LMH->m_accumRobotMovement.motionModelConfiguration.gausianModel.minStdXY,
		LMH->m_parent->m_options.MIN_ODOMETRY_STD_XY);
	keep_max(
		LMH->m_accumRobotMovement.motionModelConfiguration.gausianModel.minStdPHI,
		LMH->m_parent->m_options.MIN_ODOMETRY_STD_PHI);

	theResultingRobotMov.computeFromOdometry( LMH->m_accumRobotMovement.rawOdometryIncrementReading, LMH->m_accumRobotMovement.motionModelConfiguration );

	const CActionRobotMovement2D		*robotMovement = &theResultingRobotMov;

	LMH->m_accumRobotMovementIsValid = false; // To reset odometry at next iteration!


	// ----------------------------------------------------------------------
	//		0) Common part:  Prepare m_particles "draw" and compute
	// ----------------------------------------------------------------------
	// Precompute a list of "random" samples from the movement model:
	LMH->m_movementDraws.clear();

	// Fast pseudorandom generator of poses...
	//m_movementDraws.resize( max(2000,(int)(PF_options.pfAuxFilterOptimal_MaximumSearchSamples * 5.6574) ) );
	LMH->m_movementDraws.resize( PF_options.pfAuxFilterOptimal_MaximumSearchSamples * M );
	size_t			size_movementDraws = LMH->m_movementDraws.size();
	LMH->m_movementDrawsIdx = (unsigned int)floor(randomGenerator.drawUniform(0.0f,((float)size_movementDraws)-0.01f));

	robotMovement->prepareFastDrawSingleSamples();
	for (size_t i=0;i<LMH->m_movementDraws.size();i++)
		robotMovement->fastDrawSingleSample( LMH->m_movementDraws[i] );

	LMH->m_pfAuxiliaryPFOptimal_estimatedProb.resize(M);
	LMH->m_maxLikelihood.clear();
	LMH->m_maxLikelihood.resize(M,0);
	LMH->m_movementDrawMaximumLikelihood.resize(M);

	// Prepare data for executing "fastDrawSample"
	CTicTac		tictac;
	tictac.Tic();
	LMH->prepareFastDrawSample(
		PF_options,
		particlesEvaluator_AuxPFOptimal,
//......... some code omitted here .........
Developer: Insane-neal, Project: mrpt, Lines: 101, Source: CHMTSLAM_LSLAM_RBPF_2DLASER.cpp

Example 4: OnMenuConvertSF

// Convert from observations only to actions-SF format:
void xRawLogViewerFrame::OnMenuConvertSF(wxCommandEvent& event)
{
	WX_START_TRY

	bool onlyOnePerLabel =
		(wxYES == wxMessageBox(
					  _("Keep only one observation of each label within each "
						"sensoryframe?"),
					  _("Convert to sensoryframe's"), wxYES_NO, this));

	wxString strMaxL = wxGetTextFromUser(
		_("Maximum length of each sensoryframe (seconds):"),
		_("Convert to sensoryframe's"), _("1.0"));
	double maxLengthSF;
	strMaxL.ToDouble(&maxLengthSF);

	// Process:
	CRawlog new_rawlog;
	new_rawlog.setCommentText(rawlog.getCommentText());

	wxBusyCursor waitCursor;
	unsigned int nEntries = (unsigned int)rawlog.size();

	wxProgressDialog progDia(
		wxT("Progress"), wxT("Parsing rawlog..."),
		nEntries,  // range
		this,  // parent
		wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_SMOOTH | wxPD_AUTO_HIDE |
			wxPD_ELAPSED_TIME | wxPD_ESTIMATED_TIME | wxPD_REMAINING_TIME);

	wxTheApp->Yield();  // Let the app. process messages

	CSensoryFrame SF_new;
	set<string> SF_new_labels;
	TTimeStamp SF_new_first_t = INVALID_TIMESTAMP;
	CObservationOdometry::Ptr last_sf_odo, cur_sf_odo;

	for (unsigned int countLoop = 0; countLoop < nEntries; countLoop++)
	{
		if (countLoop % 20 == 0)
		{
			if (!progDia.Update(
					countLoop,
					wxString::Format(
						wxT("Parsing rawlog... %u objects"), countLoop)))
			{
				return;
			}
			wxTheApp->Yield();  // Let the app. process messages
		}

		switch (rawlog.getType(countLoop))
		{
			case CRawlog::etSensoryFrame:
			case CRawlog::etActionCollection:
			{
				THROW_EXCEPTION(
					"The rawlog already has sensory frames and/or actions!");
			}
			break;

			case CRawlog::etObservation:
			{
				CObservation::Ptr o = rawlog.getAsObservation(countLoop);

				// Update stats:
				bool label_existed =
					SF_new_labels.find(o->sensorLabel) != SF_new_labels.end();
				double SF_len =
					SF_new_first_t == INVALID_TIMESTAMP
						? 0
						: timeDifference(SF_new_first_t, o->timestamp);

				// Decide:
				// End SF and start a new one?
				if (SF_len > maxLengthSF && SF_new.size() != 0)
				{
					new_rawlog.addObservations(SF_new);

					// Odometry increments:
					CActionCollection acts;
					if (last_sf_odo && cur_sf_odo)
					{
						CActionRobotMovement2D act;
						act.timestamp = cur_sf_odo->timestamp;
						CActionRobotMovement2D::TMotionModelOptions opts;
						act.computeFromOdometry(
							cur_sf_odo->odometry - last_sf_odo->odometry, opts);
						acts.insert(act);
					}
					new_rawlog.addActions(acts);

					last_sf_odo = cur_sf_odo;
					cur_sf_odo.reset();

					SF_new.clear();
					SF_new_labels.clear();
					SF_new_first_t = INVALID_TIMESTAMP;
				}
//......... some code omitted here .........
Developer: Triocrossing, Project: mrpt, Lines: 101, Source: main_convert_ops.cpp

Example 5: OnMenuLossLessDecFILE


//......... some code omitted here .........
		try
		{
			fil >> newObj;
			entryIndex++;

			// Check type:
			if (newObj->GetRuntimeClass() == CLASS_ID(CSensoryFrame))
			{
				// Decimate Observations
				// ---------------------------

				// Add observations to the accum. SF:
				if (!accum_sf)
					accum_sf = mrpt::make_aligned_shared<CSensoryFrame>();

				// Copy pointers to observations only (fast):
				accum_sf->moveFrom(
					*std::dynamic_pointer_cast<CSensoryFrame>(newObj));

				if (++SF_counter >= DECIMATE_RATIO)
				{
					SF_counter = 0;

					// INSERT OBSERVATIONS:
					f_out << *accum_sf;
					accum_sf.reset();

					// INSERT ACTIONS:
					CActionCollection::Ptr actsCol =
						mrpt::make_aligned_shared<CActionCollection>();
					if (cummMovementInit)
					{
						CActionRobotMovement2D cummMovement;
						cummMovement.computeFromOdometry(
							accumMovement, odometryOptions);
						cummMovement.timestamp = timestamp_lastAction;
						actsCol->insert(cummMovement);
						// Reset odometry accumulation:
						accumMovement = CPose2D(0, 0, 0);
					}
					f_out << *actsCol;
					actsCol.reset();
				}
			}
			else if (newObj->GetRuntimeClass() == CLASS_ID(CActionCollection))
			{
				// Accumulate Actions
				// ----------------------
				CActionCollection::Ptr curActs =
					std::dynamic_pointer_cast<CActionCollection>(newObj);
				CActionRobotMovement2D::Ptr mov =
					curActs->getBestMovementEstimation();
				if (mov)
				{
					timestamp_lastAction = mov->timestamp;
					CPose2D incrPose = mov->poseChange->getMeanVal();

					// If we have previous observations, shift them according to
					// this new increment:
					if (accum_sf)
					{
						CPose3D inv_incrPose3D(
							CPose3D(0, 0, 0) - CPose3D(incrPose));

						for (CSensoryFrame::iterator it = accum_sf->begin();
							 it != accum_sf->end(); ++it)
Developer: Triocrossing, Project: mrpt, Lines: 67, Source: main_convert_ops.cpp

Example 6: OnMenuLossLessDecimate


//......... some code omitted here .........

	// For each entry:
	for (i = 0; i < N; i++)
	{
		CSerializable::Ptr obj = rawlog.getAsGeneric(i);

		if (rawlog.getType(i) == CRawlog::etActionCollection)
		{
			// Accumulate Actions
			// ----------------------
			CActionCollection::Ptr curActs =
				std::dynamic_pointer_cast<CActionCollection>(obj);
			CActionRobotMovement2D::Ptr mov =
				curActs->getBestMovementEstimation();
			if (mov)
			{
				CPose2D incrPose = mov->poseChange->getMeanVal();

				// If we have previous observations, shift them according to
				// this new increment:
				if (accum_sf)
				{
					CPose3D inv_incrPose3D(
						CPose3D(0, 0, 0) - CPose3D(incrPose));

					for (CSensoryFrame::iterator it = accum_sf->begin();
						 it != accum_sf->end(); ++it)
					{
						CPose3D tmpPose;

						(*it)->getSensorPose(tmpPose);
						tmpPose = inv_incrPose3D + tmpPose;
						(*it)->setSensorPose(tmpPose);
					}
				}

				// Accumulate from odometry:
				accumMovement = accumMovement + incrPose;

				// Copy the probabilistic options from the first entry we find:
				if (!cummMovementInit)
				{
					odometryOptions = mov->motionModelConfiguration;
					cummMovementInit = true;
				}
			}
		}
		else if (rawlog.getType(i) == CRawlog::etSensoryFrame)
		{
			// Decimate Observations
			// ---------------------------

			// Add observations to the accum. SF:
			if (!accum_sf)
				accum_sf = mrpt::make_aligned_shared<CSensoryFrame>();

			// Copy pointers to observations only (fast):
			accum_sf->moveFrom(*std::dynamic_pointer_cast<CSensoryFrame>(obj));

			if (++SF_counter >= DECIMATE_RATIO)
			{
				SF_counter = 0;

				// INSERT OBSERVATIONS:
				newRawLog.addObservationsMemoryReference(accum_sf);
				accum_sf.reset();

				// INSERT ACTIONS:
				CActionCollection actsCol;
				if (cummMovementInit)
				{
					CActionRobotMovement2D cummMovement;
					cummMovement.computeFromOdometry(
						accumMovement, odometryOptions);
					actsCol.insert(cummMovement);
					// Reset odometry accumulation:
					accumMovement = CPose2D(0, 0, 0);
				}
				newRawLog.addActions(actsCol);
			}
		}
		else
			THROW_EXCEPTION(
				"This operation implemented for SF-based rawlogs only.");

		// Delete object
		obj.reset();

	}  // end for i each entry

	// Clear the list only (objects already deleted)
	rawlog.clear();

	// Copy as new log:
	rawlog = newRawLog;

	rebuildTreeView();

	WX_END_TRY
}
Developer: Triocrossing, Project: mrpt, Lines: 101, Source: main_convert_ops.cpp

Example 7: main


//......... some code omitted here .........
        // Simulate:
        // ---------------------------------------------
        CActionRobotMovement2D::TMotionModelOptions   opts;
        CPoint3D                null3D(0,0,0);
        opts.modelSelection = CActionRobotMovement2D::mmGaussian;
        opts.gausianModel.a1 = 0;
        opts.gausianModel.a2 = 0;
        opts.gausianModel.a3 = 0;
        opts.gausianModel.a4 = 0;
        opts.gausianModel.minStdXY  = odometryNoiseXY_std;
        opts.gausianModel.minStdPHI = DEG2RAD( 0.002f );

        os::sprintf(auxStr,sizeof(auxStr),"%s/%s",outDir.c_str(),outFile.c_str());
        CFileOutputStream     fil(auxStr);
        CPose2D         realPose;
        CPose2D         incPose;
        int             stopSteps = 4;

        for (i=0;i<nSteps;i++)
        {
            printf("Generating step %i...",i);
            CSensoryFrame           SF;
            CActionCollection         acts;
            CActionRobotMovement2D    act;
            CPose3D                 pose3D( realPose );


			if (i>=stopSteps)
			{
				if (circularPath)
				{
					// Circular path:
					float Ar = DEG2RAD(5);
					incPose = CPose2D(0.20f*cos(Ar),0.20f*sin(Ar),Ar);
				}
				else
				{
					// Square path:
					if ( (i % squarePathLength) > (squarePathLength-5) )
							incPose = CPose2D(0,0,DEG2RAD(90.0f/4));
					else		incPose = CPose2D(0.20f,0,0);
				}
			}
			else	incPose = CPose2D(0,0,0);

            // Simulate observations:
            CObservationBeaconRangesPtr obs=CObservationBeaconRanges::Create();
            obs->minSensorDistance=minSensorDistance;
            obs->maxSensorDistance=maxSensorDistance;
            obs->stdError=stdError;

            beaconMap.simulateBeaconReadings( pose3D,null3D,*obs );

            // Corrupt with outliers:
            float probability_corrupt = i==0 ? ratio_outliers_first_step : ratio_outliers;
            for (size_t q=0;q<obs->sensedData.size();q++)
            {
            	if ( randomGenerator.drawUniform(0.0f,1.0f) < probability_corrupt )
            	{
					obs->sensedData[q].sensedDistance += randomGenerator.drawUniform( outlier_uniform_min,outlier_uniform_max );
					if (obs->sensedData[q].sensedDistance<0)
						obs->sensedData[q].sensedDistance = 0;
            	}
            }

			std::cout << obs->sensedData.size() << " beacons at range";

            SF.push_back( obs );

            // Simulate actions:
            CPose2D     incOdo( incPose );
            if (incPose.x()!=0 || incPose.y()!=0 || incPose.phi()!=0)
			{
	            incOdo.x_incr( randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
        	    incOdo.y_incr( odometryBias_Y + randomGenerator.drawGaussian1D_normalized() * odometryNoiseXY_std );
			}
            act.computeFromOdometry( incOdo, opts);
            acts.insert( act );

            // Save:
            fil << SF << acts;

            // Next pose:
            realPose = realPose + incPose;

	    printf("\n");
        }


		cout << "Data saved to directory: " << outDir << endl;
    }
    catch(std::exception &e)
    {
        std::cout << e.what();
    }
    catch(...)
    {
        std::cout << "Untyped exception!";
    }
}
Developer: Aharobot, Project: mrpt, Lines: 101, Source: simul-beacons-main.cpp

Example 8: processActionObservation


//......... some code omitted here .........
				break;
			}

	if (do_map_update) do_localization = true;

	MRPT_LOG_DEBUG(mrpt::format(
		"do_map_update=%s do_localization=%s", do_map_update ? "YES" : "NO",
		do_localization ? "YES" : "NO"));

	if (do_localization)
	{
		// Create an artificial action object, since
		//  we've been collecting them until a threshold:
		// ------------------------------------------------
		CActionCollection fakeActs;
		{
			CActionRobotMovement3D::Ptr act3D =
				action.getActionByClass<CActionRobotMovement3D>();
			if (act3D)
			{
				CActionRobotMovement3D newAct;
				newAct.estimationMethod = act3D->estimationMethod;
				newAct.poseChange = odoIncrementSinceLastLocalization;
				newAct.timestamp = act3D->timestamp;
				fakeActs.insert(newAct);
			}
			else
			{
				// It must be 2D odometry:
				CActionRobotMovement2D::Ptr act2D =
					action.getActionByClass<CActionRobotMovement2D>();
				ASSERT_(act2D);
				CActionRobotMovement2D newAct;
				newAct.computeFromOdometry(
					CPose2D(odoIncrementSinceLastLocalization.mean),
					act2D->motionModelConfiguration);
				newAct.timestamp = act2D->timestamp;
				fakeActs.insert(newAct);
			}
		}

		MRPT_LOG_DEBUG_STREAM(
			"odoIncrementSinceLastLocalization before resetting = "
			<< odoIncrementSinceLastLocalization.mean);
		// Reset distance counters:
		odoIncrementSinceLastLocalization.mean.setFromValues(0, 0, 0, 0, 0, 0);
		odoIncrementSinceLastLocalization.cov.zeros();

		CParticleFilter pf;
		pf.m_options = m_PF_options;
		pf.setVerbosityLevel(this->getMinLoggingLevel());

		pf.executeOn(mapPDF, &fakeActs, &observations);

		if (isLoggingLevelVisible(mrpt::system::LVL_INFO))
		{
			// Get current pose estimation:
			CPose3DPDFParticles poseEstimation;
			CPose3D meanPose;
			mapPDF.getEstimatedPosePDF(poseEstimation);
			poseEstimation.getMean(meanPose);

			CPose3D estPos;
			CMatrixDouble66 cov;
			poseEstimation.getCovarianceAndMean(cov, estPos);
Developer: jiapei100, Project: mrpt, Lines: 66, Source: CMetricMapBuilderRBPF.cpp


注:本文中的CActionRobotMovement2D::computeFromOdometry方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。