This article collects and organizes typical usage examples of the C++ method CVector2::GetX. If you have been wondering what CVector2::GetX does and how it is used in practice, the curated code examples below may help. You can also explore further usage examples of the containing class CVector2.
The following shows 15 code examples of CVector2::GetX, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
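The CVector2 type in these snippets comes from more than one codebase (several examples are from the ARGoS robot simulator, others from a game engine), so its exact declaration varies; all the examples rely on is a pair of read-only component accessors. The following is a minimal illustrative sketch of that assumed interface, not any library's actual header, and the float return type stands in for whatever scalar type (e.g. ARGoS's Real) the real class uses.
// Illustrative sketch of the accessor interface assumed by the examples below.
class CVector2 {
public:
   CVector2(float f_x, float f_y) : m_fX(f_x), m_fY(f_y) {}
   float GetX() const { return m_fX; } /* first (x) component */
   float GetY() const { return m_fY; } /* second (y) component */
private:
   float m_fX;
   float m_fY;
};
// Typical read-only use, as in the examples below:
// CVector2 cPos(1.5f, -0.5f);
// float fHalfX = cPos.GetX() * 0.5f;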
Example 1: GetPixelLengthVectors
void CCameraData::GetPixelLengthVectors( const CVector3& Pos, const CVector2& vp, CVector3& OutX, CVector3& OutY ) const
{
/////////////////////////////////////////////////////////////////
int ivpw = int(vp.GetX());
int ivph = int(vp.GetY());
/////////////////////////////////////////////////////////////////
CVector4 va = Pos;
CVector4 va_xf = va.Transform(mVPMatrix);
va_xf.PerspectiveDivide();
va_xf = va_xf*CVector4(vp.GetX(),vp.GetY(),0.0f);
/////////////////////////////////////////////////////////////////
CVector4 vdx = Pos+mCamXNormal;
CVector4 vdx_xf = vdx.Transform(mVPMatrix);
vdx_xf.PerspectiveDivide();
vdx_xf = vdx_xf*CVector4(vp.GetX(),vp.GetY(),0.0f);
float MagX = (vdx_xf-va_xf).Mag(); // magnitude in pixels of mBillboardRight
/////////////////////////////////////////////////////////////////
CVector4 vdy = Pos+mCamYNormal;
CVector4 vdy_xf = vdy.Transform(mVPMatrix);
vdy_xf.PerspectiveDivide();
vdy_xf = vdy_xf*CVector4(vp.GetX(),vp.GetY(),0.0f);
float MagY = (vdy_xf-va_xf).Mag(); // magnitude in pixels of mBillboardUp
/////////////////////////////////////////////////////////////////
OutX = mCamXNormal*(2.0f/MagX);
OutY = mCamYNormal*(2.0f/MagY);
/////////////////////////////////////////////////////////////////
}
Example 2: CloseToArenaEnd
/**
* Returns true if the given vector is near the arena end.
*/
bool CCombinedLoopFunctions::CloseToArenaEnd(const CVector2& pos){
if(pos.GetX() > arenaSize.GetX()/2.0f - CLOSE_TO_ARENA_END_PARAMETER ||
pos.GetX() < -arenaSize.GetX()/2.0f + CLOSE_TO_ARENA_END_PARAMETER ||
pos.GetY() > arenaSize.GetY()/2.0f - CLOSE_TO_ARENA_END_PARAMETER ||
pos.GetY() < -arenaSize.GetY()/2.0f + CLOSE_TO_ARENA_END_PARAMETER) {
return true;
}
return false;
}
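Because the test is symmetric about the arena center, the four comparisons above can be folded with an absolute value. Below is a hedged, self-contained sketch of the same check using plain floats; halfX/halfY stand in for half the arena dimensions and margin for CLOSE_TO_ARENA_END_PARAMETER (the names are illustrative, not from the original class).
#include <cmath>
// Same arena-end test, folded into the first quadrant with fabs (illustrative sketch).
bool CloseToArenaEnd(float x, float y, float halfX, float halfY, float margin) {
   return std::fabs(x) > halfX - margin ||
          std::fabs(y) > halfY - margin;
}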
Example 3: CloseToNest
/**
* Returns true if the given vector is near the nest, relative to food patch size.
*/
bool CCombinedLoopFunctions::CloseToNest(const CVector2& pos){
/*if(pos.GetX() > -(nestSize/2.0f)-foodPatchSize/1 && pos.GetX() < (nestSize/2.0f) + foodPatchSize/1 && pos.GetY() > -(nestSize/2.0f) - foodPatchSize/1 && pos.GetY() < (nestSize/2.0f) + foodPatchSize/1) {
return true;
}
return false;*/
if(pos.GetX() > -(nestSize/2.0f) - CLOSE_TO_NEST_PARAMETER &&
pos.GetX() < (nestSize/2.0f) + CLOSE_TO_NEST_PARAMETER &&
pos.GetY() > -(nestSize/2.0f) - CLOSE_TO_NEST_PARAMETER &&
pos.GetY() < (nestSize/2.0f) + CLOSE_TO_NEST_PARAMETER) {
return true;
}
return false;
}
Example 4: Update
void CFootBotMotorGroundSensor::Update() {
/* We make the assumption that the foot-bot is rotated only with respect to Z */
CFloorEntity& cFloorEntity = m_cSpace.GetFloorEntity();
const CVector3& cEntityPos = GetEntity().GetEmbodiedEntity().GetPosition();
const CQuaternion& cEntityRot = GetEntity().GetEmbodiedEntity().GetOrientation();
CRadians cRotZ, cRotY, cRotX;
cEntityRot.ToEulerAngles( cRotZ, cRotY, cRotX );
CVector2 cCenterPos(cEntityPos.GetX(), cEntityPos.GetY());
CVector2 cSensorPos;
for(UInt32 i = 0; i < CCI_FootBotMotorGroundSensor::NUM_READINGS; ++i) {
cSensorPos = m_tReadings[i].Offset;
cSensorPos.Rotate(cRotZ);
cSensorPos *= 0.01;
cSensorPos += cCenterPos;
const CColor& cColor = cFloorEntity.GetColorAtPoint(cSensorPos.GetX(),cSensorPos.GetY());
m_tReadings[i].Value = cColor.ToGrayScale()/255*FOOTBOT_MOTOR_GROUND_SENSOR_READING_RANGE.GetSpan();
if( m_fNoiseLevel > 0.0f ) {
AddNoise(i);
}
/* Normalize reading between 0 and 1, only if calibration has been performed */
if( m_bCalibrated ) {
m_tReadings[i].Value = FOOTBOT_MOTOR_GROUND_SENSOR_READING_RANGE.NormalizeValue(m_tReadings[i].Value);
}
}
}
Example 5: Update
void CFootBotBaseGroundRotZOnlySensor::Update() {
/*
* We make the assumption that the robot is rotated only with respect to Z
*/
/* Get robot position and orientation */
const CVector3& cEntityPos = m_pcEmbodiedEntity->GetPosition();
const CQuaternion& cEntityRot = m_pcEmbodiedEntity->GetOrientation();
CRadians cRotZ, cRotY, cRotX;
cEntityRot.ToEulerAngles(cRotZ, cRotY, cRotX);
/* Set robot center */
CVector2 cCenterPos(cEntityPos.GetX(), cEntityPos.GetY());
/* Position of sensor on the ground after rototranslation */
CVector2 cSensorPos;
/* Go through the sensors */
for(UInt32 i = 0; i < m_tReadings.size(); ++i) {
/* Calculate sensor position on the ground */
cSensorPos = m_pcGroundSensorEntity->GetSensor(i+4).Offset;
cSensorPos.Rotate(cRotZ);
cSensorPos += cCenterPos;
/* Get the color */
const CColor& cColor = m_pcFloorEntity->GetColorAtPoint(cSensorPos.GetX(),
cSensorPos.GetY());
/* Set the reading */
m_tReadings[i].Value = cColor.ToGrayScale() / 255.0f;
/* Apply noise to the sensor */
if(m_bAddNoise) {
m_tReadings[i].Value += m_pcRNG->Uniform(m_cNoiseRange);
}
/* Set the final reading */
m_tReadings[i].Value = m_tReadings[i].Value < 0.5f ? 0.0f : 1.0f;
}
}
Example 6: GetFloorColor
CColor CFloorPowerFunctions::GetFloorColor(const CVector2& c_position_on_plane) {
if(c_position_on_plane.GetX() < -1.0f) {
return CColor::GRAY50;
}
return CColor::GRAY20;
}
Example 7: SetTargetInBounds
void iAnt_controller::SetTargetInBounds(CVector2 t) {
/* Bound the X value based on the forage range. */
if(t.GetX() > data->ForageRangeX.GetMax())
t = CVector2(data->ForageRangeX.GetMax(), t.GetY());
if(t.GetX() < data->ForageRangeX.GetMin())
t = CVector2(data->ForageRangeX.GetMin(), t.GetY());
/* Bound the Y value based on the forage range. */
if(t.GetY() > data->ForageRangeY.GetMax())
t = CVector2(t.GetX(), data->ForageRangeY.GetMax());
if(t.GetY() < data->ForageRangeY.GetMin())
t = CVector2(t.GetX(), data->ForageRangeY.GetMin());
/* Set the robot's target to the bounded t position. */
target = t;
}
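The same bounding can also be written as a component-wise clamp, which avoids constructing intermediate CVector2 objects. A short self-contained sketch follows, using plain floats and C++17 std::clamp; minX/maxX and minY/maxY play the role of data->ForageRangeX/ForageRangeY in the example above (illustrative names, not the author's code).
#include <algorithm>
// Clamp a target point into a rectangular forage range (illustrative sketch).
void ClampTargetInBounds(float& x, float& y,
                         float minX, float maxX,
                         float minY, float maxY) {
   x = std::clamp(x, minX, maxX);
   y = std::clamp(y, minY, maxY);
}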
Example 8: addObstacleAtPoint
void HRVOAgent::addObstacleAtPoint(CVector2 p,CVector2 v,Real r)
{
HRVO::Agent *a=new HRVO::Agent();
a->velocity_=HRVO::Vector2((float)v.GetX(),(float)v.GetY());
a->position_=HRVO::Vector2((float)p.GetX(),(float)p.GetY());
Real distance;
CVector2 relativePosition=relativePositionOfObstacleAt(p,r,distance);
//a->radius_=r+marginForObstacleAtDistance(distance,r,safetyMargin,socialMargin);
a->radius_=r+fmin(distance-r-_HRVOAgent->radius_-0.001,marginForObstacleAtDistance(distance,r,safetyMargin,socialMargin));
// printf("Obstacle radius %.3f\n",a->radius_);
//a->radius_=r+marginForObstacleAtDistance(p.Length(),r,safetyMargin,socialMargin);
a->prefVelocity_=a->velocity_;
_HRVOAgent->agents_.push_back(a);
_HRVOAgent->insertAgentNeighbor(agentIndex, rangeSq);
agentIndex++;
}
Example 9: return
bool CKinematics2DEngine::CheckCollisions( const CKinematics2DCollisionCircle* pc_circle, const CKinematics2DCollisionRectangle* pc_rectangle ) {
/* Rototranslate the plane so that the rectangle is axis aligned and centered in O */
CVector2 center = pc_circle->GetPosition() - pc_rectangle->GetPosition();
center.Rotate(pc_rectangle->GetOrientation() );
center.Absolute();
/* Find the Voronoi Region that the circle is in, exploiting the symmetries */
CVector2 c_half_size = pc_rectangle->GetHalfSize();
Real f_radius = pc_circle->GetRadius();
if( center.GetX() <= c_half_size.GetX() ) {
/* The circle is in the top or bottom region */
return (center.GetY() <= c_half_size.GetY() + f_radius);
}
if( center.GetY() <= c_half_size.GetY() ) {
/* The circle is in the left or right region */
return (center.GetX() <= c_half_size.GetX() + f_radius);
}
/* The circle is in one of the four corner regions */
return (SquareDistance( c_half_size, center ) <= f_radius*f_radius);
}
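Stripped of the engine types, the test above reduces to three Voronoi-region cases once the circle center is folded into the first quadrant. The following is a self-contained sketch of that idea with plain floats; it illustrates the technique and is not part of the physics engine.
#include <cmath>
// Circle vs. axis-aligned rectangle centered at the origin.
// hx, hy: rectangle half extents; cx, cy: circle center; r: circle radius.
bool CircleIntersectsCenteredRect(float hx, float hy,
                                  float cx, float cy, float r) {
   cx = std::fabs(cx);                 // exploit the symmetry: fold into the first quadrant
   cy = std::fabs(cy);
   if(cx <= hx) return cy <= hy + r;   // top/bottom region
   if(cy <= hy) return cx <= hx + r;   // left/right region
   float dx = cx - hx;                 // corner region: squared distance to the corner
   float dy = cy - hy;
   return dx * dx + dy * dy <= r * r;
}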
Example 10: checkDistanceY
bool iAnt_controller::checkDistanceY() {
cout << "checkDistanceY\n";
CVector2 curPosition = GetPosition();
cout << "curPositionY" << curPosition << '\n';
bool stop = false;
//cout << "curPosition" << curPosition.GetY() << '\n';
float distanceY = curPosition.GetX()- data->NestPosition.GetX();
cout << "distance: " << distanceY << "\n";
if (distanceY > 0.3)
{ cout << "line 115";
stop = true;
}
return stop;
}
Example 11: GetFloorColor
CColor CLandmarks::GetFloorColor(const CVector2& c_position_on_plane) {
if((c_position_on_plane.GetX() >= -2.0f &&
c_position_on_plane.GetX() <= 2.0f) &&
(c_position_on_plane.GetY() >= -5.0f &&
c_position_on_plane.GetY() <= -1.0f)) {
return CColor::GRAY90;
}
for(size_t i = 0; i < TARGETS; ++i) {
if(SquareDistance(c_position_on_plane, m_vecTargets[i]) < TARGET_RADIUS2) {
return CColor::BLACK;
}
}
return CColor::WHITE;
}
Example 12: SetTexel
void Texture::SetTexel( const CColor4 & color, const CVector2 & ST )
{
CReal Mul(255.0f);
U8* pu8data = (U8*) mpImageData;
u8 ur = (u8) (color.GetX()*Mul);
u8 ug = (u8) (color.GetY()*Mul);
u8 ub = (u8) (color.GetZ()*Mul);
u8 ua = (u8) (color.GetW()*Mul);
int ix = (int) (miWidth * fmod( (float) ST.GetY(), 1.0f ));
int iy = (int) (miHeight * fmod( (float) ST.GetX(), 1.0f ));
int idx = (int) (4 * (ix*miHeight + iy));
pu8data[idx+0] = ub;
pu8data[idx+1] = ug;
pu8data[idx+2] = ur;
pu8data[idx+3] = ua;
SetDirty(true);
}
Example 13: ComputeFeatureValues
void CProprioceptiveFeatureVector::ComputeFeatureValues()
{
CProprioceptiveFeatureVector::FEATURE_RANGE = 30.0f;
unsigned unCloseRangeNbrCount = CountNeighbors(FEATURE_RANGE/2.0f);
unsigned unFarRangeNbrCount = CountNeighbors(FEATURE_RANGE) - unCloseRangeNbrCount;
bool neighbours_present = ((unCloseRangeNbrCount + unFarRangeNbrCount) > 0) ? true : false;
int CurrentStepNumber = m_sSensoryData.m_rTime;
// Feature (from LS to MS bits in FV)
// Sensors
//1st: set if bot has at least one neighbor in range 0-30cm in the majority of past X time-steps
//2nd: set if bot has at least one neighbor in range 30-60cm in the majority of past X time-steps
if(CurrentStepNumber >= m_iEventSelectionTimeWindow)
{
// decision based on the last X time-steps
if(m_unSumTimeStepsNbrsRange0to30 > (unsigned)(0.5*(double)m_iEventSelectionTimeWindow))
m_pfAllFeatureValues[0] = 1.0;
else
m_pfAllFeatureValues[0] = 0.0;
if(m_unSumTimeStepsNbrsRange30to60 > (unsigned)(0.5*(double)m_iEventSelectionTimeWindow))
m_pfAllFeatureValues[1] = 1.0;
else
m_pfAllFeatureValues[1] = 0.0;
// removing the first entry of the moving time window from the sum
m_unSumTimeStepsNbrsRange0to30 -= m_punNbrsRange0to30AtTimeStep[m_unNbrsCurrQueueIndex];
m_unSumTimeStepsNbrsRange30to60 -= m_punNbrsRange30to60AtTimeStep[m_unNbrsCurrQueueIndex];
}
// adding new values into the queue
if (unCloseRangeNbrCount > 0)
{
m_punNbrsRange0to30AtTimeStep[m_unNbrsCurrQueueIndex] = 1;
m_unSumTimeStepsNbrsRange0to30++;
}
else
m_punNbrsRange0to30AtTimeStep[m_unNbrsCurrQueueIndex] = 0;
if (unFarRangeNbrCount > 0)
{
m_punNbrsRange30to60AtTimeStep[m_unNbrsCurrQueueIndex] = 1;
m_unSumTimeStepsNbrsRange30to60++;
}
else
m_punNbrsRange30to60AtTimeStep[m_unNbrsCurrQueueIndex] = 0;
m_unNbrsCurrQueueIndex = (m_unNbrsCurrQueueIndex + 1) % m_iEventSelectionTimeWindow;
// Sensors-motor interactions
// Set if the following event occurred at least once in time window X
// 3rd: distance to nbrs 0-6 && change in angular acceleration
// 4th: no neighbors detected && change in angular acceleration
if(neighbours_present &&
(m_sSensoryData.AngularAcceleration > m_tAngularAccelerationThreshold ||
m_sSensoryData.AngularAcceleration < -m_tAngularAccelerationThreshold))
{
m_piLastOccuranceEvent[2] = CurrentStepNumber;
}
if(!neighbours_present &&
(m_sSensoryData.AngularAcceleration > m_tAngularAccelerationThreshold ||
m_sSensoryData.AngularAcceleration < -m_tAngularAccelerationThreshold))
{
m_piLastOccuranceEvent[3] = CurrentStepNumber;
}
for(unsigned int featureindex = 2; featureindex <=3; featureindex++)
{
if ((CurrentStepNumber - m_piLastOccuranceEvent[featureindex]) <= m_iEventSelectionTimeWindow)
{
m_pfAllFeatureValues[featureindex] = 1.0;
}
else
{
m_pfAllFeatureValues[featureindex] = 0.0;
}
}
// Motors
//5th: distance travelled by the bot in the past 100 time-steps; more than 5% of the maximum possible distance sets feature=1.
CVector2 vecAgentPos = m_sSensoryData.pos;
m_fCumulativeDistTravelled += m_sSensoryData.dist;
if(CurrentStepNumber >= m_iDistTravelledTimeWindow)
{
// distance travelled in last 100 time-steps
m_fSquaredDistTravelled = (vecAgentPos.GetX() - m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex].GetX()) *
(vecAgentPos.GetX() - m_pvecCoordAtTimeStep[m_unCoordCurrQueueIndex].GetX()) +
//......... (part of the code omitted here) .........
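The first two features above are computed from a fixed-length boolean history with a running sum, i.e. a majority vote over the last m_iEventSelectionTimeWindow steps. The sketch below isolates that sliding-window pattern in a self-contained class; the names are illustrative and do not belong to CProprioceptiveFeatureVector.
#include <cstddef>
#include <vector>
// Majority vote over the last N boolean observations (illustrative sketch).
class CSlidingMajority {
public:
   explicit CSlidingMajority(std::size_t n) : m_history(n, 0u), m_sum(0u), m_index(0u) {}
   void Push(bool observed) {
      m_sum -= m_history[m_index];            // drop the oldest entry from the running sum
      m_history[m_index] = observed ? 1u : 0u;
      m_sum += m_history[m_index];            // add the newest entry
      m_index = (m_index + 1u) % m_history.size();
   }
   // Strict majority over the window, as in the feature code above.
   bool Majority() const { return m_sum > m_history.size() / 2; }
private:
   std::vector<unsigned> m_history;
   unsigned m_sum;
   std::size_t m_index;
};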
Example 14: SetTargetW
/*****
* Sets target West of the robot's current target.
*****/
void iAnt_controller::SetTargetW(char x) {
CVector2 position = GetTarget();
target = CVector2(position.GetX(),position.GetY()+stepSize);
}
Example 15: PreStep
void CRecruitmentLoopFunctions::PreStep() {
/* Logic to pick and drop food items */
/*
* If a robot is in the nest, drop the food item
* If a robot is on a food item, pick it
* Each robot can carry only one food item per time
*/
/* Check whether a robot is on a food item */
//CSpace::TMapPerType& m_cFootbots = m_cSpace.GetEntitiesByType("foot-bot");
CSpace::TMapPerType& m_cFootbots = GetSpace().GetEntitiesByType("foot-bot");
for(CSpace::TMapPerType::iterator it = m_cFootbots.begin();
it != m_cFootbots.end();
++it) {
/* Get handle to foot-bot entity and controller */
CFootBotEntity& cFootBot = *any_cast<CFootBotEntity*>(it->second);
CBTFootbotRecruitmentController& cController = dynamic_cast<CBTFootbotRecruitmentController&>(cFootBot.GetControllableEntity().GetController());
/* Get the position of the foot-bot on the ground as a CVector2 */
CVector2 cPos;
cPos.Set(cFootBot.GetEmbodiedEntity().GetPosition().GetX(),
cFootBot.GetEmbodiedEntity().GetPosition().GetY());
/* Get state data */
CBTFootbotRecruitmentController::SStateData* sStateData = &cController.GetStateData();
sStateData->CurrentPosition = cPos;
/* Get food data */
CBTFootbotRecruitmentController::SFoodData* sFoodData = &cController.GetFoodData();
/* The foot-bot has a food item */
if(sFoodData->HasFoodItem) {
/* Check whether the foot-bot is in the nest */
if(InNest(cPos)) {
/* Place a new food item on the ground */
CVector2 tmp = sFoodData->LastFoodPosition;
m_cFoodPos[sFoodData->FoodItemIdx].Set(tmp.GetX(), tmp.GetY());
//m_cFoodPos[sFoodData->FoodItemIdx].Set(m_pcRNG->Uniform(m_cForagingArenaSideX), m_pcRNG->Uniform(m_cForagingArenaSideY));
/* Drop the food item */
sFoodData->HasFoodItem = false;
sFoodData->FoodItemIdx = 0;
++sFoodData->TotalFoodItems;
m_nbCollectedFood++;
/* The floor texture must be updated */
m_pcFloor->SetChanged();
}
}
else {
/* The foot-bot has no food item */
/* Check whether the foot-bot is out of the nest */
if(!InNest(cPos)) {
/* Check whether the foot-bot is on a food item */
bool bDone = false;
for(size_t i = 0; i < m_cFoodPos.size() && !bDone; ++i) {
if((cPos - m_cFoodPos[i]).SquareLength() < m_fFoodSquareRadius) {
/* If so, we move that item out of sight */
m_cFoodPos[i].Set(100.0f, 100.f);
/* The foot-bot is now carrying an item */
sFoodData->HasFoodItem = true;
sFoodData->FoodItemIdx = i;
sFoodData->LastFoodPosition = cPos;
/* The floor texture must be updated */
m_pcFloor->SetChanged();
/* We are done */
bDone = true;
}
}
}
}
}
if(GetSpace().GetSimulationClock() % 1000 == 0){
m_cOutput << GetSpace().GetSimulationClock() << "\t"
<< m_nbCollectedFood << "\t" << m_nbCollectedFood / (GetSpace().GetSimulationClock()/1000)<< "\n";
}
}