本文整理汇总了C++中ros::Timer::stop方法的典型用法代码示例。如果您正苦于以下问题:C++ Timer::stop方法的具体用法?C++ Timer::stop怎么用?C++ Timer::stop使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类ros::Timer
的用法示例。
在下文中一共展示了Timer::stop方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: checkCopter
/**
 * Classify the copter's position relative to a target point (x, y, z).
 *
 * Sets `colour` to:
 *   'r' - default, copter far from the marker;
 *   'y' - copter inside the approach zone directly above the marker;
 *   'g' - copter close enough to count as a tap (function returns true).
 *
 * Returns false immediately if the copter reports (0,0,0), which is
 * treated as invalid data.
 *
 * NOTE(review): uses file-scope globals Radius, timer_5, timer_20,
 * count_5, count_20 and looprate.
 *
 * Fix: the original computed two identical sets of bounds
 * (markerZone_height/markerZone_*_x/y duplicated roomba_height/
 * bottom_x/top_x/...) with the exact same formulas; they are
 * consolidated into one set. Behavior is unchanged.
 */
bool checkCopter(double copter_x, double copter_y, double copter_z,
double x, double y, double z, char &colour){
    // Initially red. For when the QC is far away.
    colour = 'r';

    // Vertical threshold: half a radius plus 0.0625 m above the marker.
    // The radius term is needed so the copter is reliably detected.
    double zone_height = z + (Radius/2) + 0.0625;
    // Horizontal bounds the copter has to be in: +/- two radii.
    double bottom_x = x - Radius*2;
    double top_x = x + Radius*2;
    double bottom_y = y - Radius*2;
    double top_y = y + Radius*2;

    // If the copter coordinates are (0,0,0), something is probably wrong.
    if (copter_x == 0 && copter_y == 0 && copter_z == 0) return false;

    // Inside the horizontal window around the marker?
    if (bottom_x <= copter_x && copter_x <= top_x &&
        bottom_y <= copter_y && copter_y <= top_y)
    {
        // Close to the marker: turn yellow (may be upgraded to green below).
        if (copter_z <= zone_height)
            colour = 'y';

        // 0.182 is the length between copter centre of mass and the tip
        // of its legs (base); if the legs reach the threshold, it's a tap.
        if ((copter_z - 0.182) <= zone_height)
        {
            // Restart both timers so they count from zero again.
            timer_5.stop();
            timer_20.stop();
            timer_5.start();
            timer_20.start();
            count_5 = looprate*3; // 3 so it never goes lower than limit
            count_20 = looprate*3;
            colour = 'g';
            return true;
        }
    }
    return false;
}
示例2: handle_heartbeat
void handle_heartbeat(const mavlink_message_t *msg, uint8_t sysid, uint8_t compid) {
if (!uas->is_my_target(sysid)) {
ROS_DEBUG_NAMED("sys", "HEARTBEAT from [%d, %d] dropped.", sysid, compid);
return;
}
mavlink_heartbeat_t hb;
mavlink_msg_heartbeat_decode(msg, &hb);
// update context && setup connection timeout
uas->update_heartbeat(hb.type, hb.autopilot);
uas->update_connection_status(true);
timeout_timer.stop();
timeout_timer.start();
// build state message after updating uas
auto state_msg = boost::make_shared<mavros::State>();
state_msg->header.stamp = ros::Time::now();
state_msg->armed = hb.base_mode & MAV_MODE_FLAG_SAFETY_ARMED;
state_msg->guided = hb.base_mode & MAV_MODE_FLAG_GUIDED_ENABLED;
state_msg->mode = uas->str_mode_v10(hb.base_mode, hb.custom_mode);
state_pub.publish(state_msg);
hb_diag.tick(hb.type, hb.autopilot, state_msg->mode, hb.system_status);
}
示例3: autopilot_version_cb
/**
 * Timer callback: periodically request AUTOPILOT_VERSION through the
 * command plugin until an answer arrives or the retries run out.
 */
void autopilot_version_cb(const ros::TimerEvent &event) {
    bool called = false;

    // Ask the command plugin to send MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES.
    try {
        auto client = nh.serviceClient<mavros::CommandLong>("cmd/command");

        mavros::CommandLong cmd{};
        cmd.request.command = MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES;
        cmd.request.confirmation = false;
        cmd.request.param1 = 1.0;

        ROS_DEBUG_NAMED("sys", "VER: Sending request.");
        called = client.call(cmd);
    }
    catch (ros::InvalidNameException &ex) {
        ROS_ERROR_NAMED("sys", "VER: %s", ex.what());
    }

    ROS_ERROR_COND_NAMED(!called, "sys", "VER: command plugin service call failed!");

    if (version_retries > 0) {
        version_retries--;
        // Stay quiet on the very first attempt; warn on each retry after that.
        ROS_WARN_COND_NAMED(version_retries != RETRIES_COUNT - 1, "sys",
                "VER: request timeout, retries left %d", version_retries);
    }
    else {
        // Out of retries: assume the FCU simply doesn't implement it and
        // fall back to default capabilities.
        uas->update_capabilities(false);
        autopilot_version_timer.stop();
        ROS_WARN_NAMED("sys", "VER: your FCU don't support AUTOPILOT_VERSION, "
                "switched to default capabilities");
    }
}
示例4: connection_cb
/**
 * Connection-state change hook.
 *
 * On connect: resets the retry counter and starts the delayed
 * AUTOPILOT_VERSION request timer; on disconnect: stops it.
 * Also adds/removes the APM-specific diagnostic tasks.
 *
 * @param connected true when the FCU link came up, false when it dropped
 */
void connection_cb(bool connected) {
// if connection changes, start delayed version request
version_retries = RETRIES_COUNT;
if (connected)
autopilot_version_timer.start();
else
autopilot_version_timer.stop();
// add/remove APM diag tasks
// NOTE(review): this registers the extra diagnostics when `disable_diag`
// is TRUE, which looks inverted (upstream mavros checks `!disable_diag`)
// — confirm the intended meaning of the flag before relying on it.
if (connected && disable_diag && uas->is_ardupilotmega()) {
#ifdef MAVLINK_MSG_ID_MEMINFO
UAS_DIAG(uas).add(mem_diag);
#endif
#ifdef MAVLINK_MSG_ID_HWSTATUS
UAS_DIAG(uas).add(hwst_diag);
#endif
#if !defined(MAVLINK_MSG_ID_MEMINFO) || !defined(MAVLINK_MSG_ID_HWSTATUS)
// The compiled mavlink dialect lacks one of the APM messages.
ROS_INFO_NAMED("sys", "SYS: APM detected, but mavros uses different dialect. "
"Extra diagnostic disabled.");
#else
ROS_DEBUG_NAMED("sys", "SYS: APM extra diagnostics enabled.");
#endif
}
else {
// Disconnected (or condition not met): ensure the extra tasks are removed.
UAS_DIAG(uas).removeByName(mem_diag.getName());
UAS_DIAG(uas).removeByName(hwst_diag.getName());
ROS_DEBUG_NAMED("sys", "SYS: APM extra diagnostics disabled.");
}
}
示例5: init_caplist
/**
 * Timer callback: re-initialize the list of publishers that have been
 * inactive for a period T, by shutting each one down.
 * (Original comment, in French: "initialisation de la liste des
 * publishers inactif pendant une periode T".)
 */
void init_caplist(const ros::TimerEvent& ){
ROS_INFO("Initialisation des Publishers.......!");
// Pause the timer while tearing the publishers down, restart afterwards.
timer2.stop();
// NOTE(review): the bound is CAP_LIST.size()+1, i.e. indices 0..size()
// inclusive — possible off-by-one unless chatter_pub really holds one
// extra publisher; verify against chatter_pub's declaration.
for(int i=0; i<(int)CAP_LIST.size()+1; i++){
chatter_pub[i].shutdown();
}
//ros::spinOnce();
timer2.start();
ROS_INFO("Initialisation terminee *");
}
示例6: setVelocity
/**
 * Publish a new velocity command and re-arm the kill-switch watchdog.
 *
 * Stopping and restarting the timer makes it count from zero again,
 * so as long as this is called before the kill-switch timer reaches
 * killSwitchTimeout seconds, the rover's kill switch won't fire.
 *
 * @param linearVel  forward velocity (scaled by 1.5 before publishing)
 * @param angularVel turn rate (scaled by 8 for the sim; the scaling is
 *                   removed by the aBridge node)
 */
void setVelocity(double linearVel, double angularVel)
{
    // Fill the outgoing twist first; the scaling factors are sim-only.
    velocity.linear.x = linearVel * 1.5;
    velocity.angular.z = angularVel * 8; //scaling factor for sim; removed by aBridge node

    // Reset the kill-switch watchdog before publishing.
    killSwitchTimer.stop();
    killSwitchTimer.start();

    velocityPublish.publish(velocity);
}
示例7: sonarDisconnectCb
/**
 * Called when the last subscriber to the sonar topics disconnects:
 * turn the sonars off and stop publishing their TF frames.
 *
 * Fix: the skip-message previously said "sonarConnectCb", which pointed
 * the reader at the wrong callback; it now names this function.
 */
void RosAriaNode::sonarDisconnectCb()
{
  if (!robot->tryLock()) {
    ROS_ERROR("Skipping sonarDisconnectCb because could not lock");
    return;
  }
  if (robot->areSonarsEnabled())
  {
    robot->disableSonar();
    sonar_tf_timer.stop();
  }
  robot->unlock();
}
示例8: TimerCallback
/**
 * One-shot publisher: emits the configured test pose once, then stops
 * the timer so no further messages are sent.
 */
void TimerCallback(const ros::TimerEvent&) {
    aero_srr_msgs::ObjectLocationMsg location;

    // Translation from the file-scope position variables.
    location.pose.pose.position.x = x_pos;
    location.pose.pose.position.y = y_pos;
    location.pose.pose.position.z = z_pos;

    // Orientation from roll/pitch/yaw, converted to a quaternion message.
    tf::Quaternion orientation;
    orientation.setRPY(rx_pos, ry_pos, rz_pos);
    tf::quaternionTFToMsg(orientation, location.pose.pose.orientation);

    // Both headers carry the same frame id and are stamped now.
    location.header.frame_id = "/arm_mount";
    location.pose.header.frame_id = location.header.frame_id;
    location.header.stamp = ros::Time::now();
    location.pose.header.stamp = ros::Time::now();

    pub.publish(location);
    timer.stop();
}
示例9: openCamera
/**
 * Blocking open-retry loop for the PMD camera.
 *
 * Stops the supplied timer while trying, loops until the camera is
 * successfully opened (retrying every open_camera_retry_period
 * seconds), then restarts the timer.
 *
 * @param timer the ROS timer to suspend for the duration of the retries
 */
void openCamera(ros::Timer &timer)
{
timer.stop();
while (!camera_)
{
try
{
// Guard camera_ construction against concurrent reconfigure access.
boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
camera_ = boost::make_shared<PMDCamboardNano>(device_serial, plugin_dir, source_plugin, process_plugin);
// Record the serial actually opened (relevant when it was empty/any).
device_serial = camera_->getSerialNumber().c_str();
updater.setHardwareIDf("%s", device_serial.c_str());
NODELET_INFO("Opened PMD camera with serial number \"%s\"", camera_->getSerialNumber().c_str());
loadCalibrationData();
NODELET_INFO("Loaded calibration data");
// Prime the camera and cache its calibration info.
camera_->update();
camera_info_ = camera_->getCameraInfo();
}
catch (PMDCameraNotOpenedException& e)
{
camera_state_ = CAMERA_NOT_FOUND;
if (device_serial != "")
{
std::stringstream err;
err << "Unable to open PMD camera with serial number " << device_serial;
state_info_ = err.str();
NODELET_INFO("%s",state_info_.c_str());
}
else
{
state_info_ = "Unable to open PMD camera..";
NODELET_INFO("%s",state_info_.c_str());
}
// Drop any partially-constructed handle under the lock before retrying.
boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
camera_.reset();
}
// Publish diagnostics and wait before the next attempt.
updater.update();
boost::this_thread::sleep(boost::posix_time::seconds(open_camera_retry_period));
}
timer.start();
}
示例10: cmd_vel_callback
/**
 * Callback for the cmd velocity from the autonomy stack.
 *
 * Computes a feed-forward PD correction between the commanded and
 * measured body velocities, publishes the error/error-rate for
 * debugging, and re-arms the watchdog timer. The watchdog is stopped
 * on entry so it cannot fire mid-computation.
 */
void cmd_vel_callback(const geometry_msgs::Twist& msg)
{
watchdogTimer.stop();
// Velocity tracking error (commanded minus measured body velocity).
error.setValue(msg.linear.x - body_vel.linear.x, msg.linear.y - body_vel.linear.y, msg.linear.z - body_vel.linear.z);
//std::cout << "error x: " << error.getX() << " y: " << error.getY() << " z: " << error.getZ() << std::endl;
//std::cout << std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) << std::endl;
error_yaw = msg.angular.z - body_vel.angular.z;
//std::cout << "error yaw: " << error_yaw << std::endl;
// if some time has passed between the last body velocity time and the current body velocity time then will calculate the (feed forward PD)
if (std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) > 0.00001)
{
// Finite-difference derivative of the error over the elapsed interval.
errorDot = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error - last_error);
//std::cout << "errordot x: " << errorDot.getX() << " y: " << errorDot.getY() << " z: " << errorDot.getZ() << std::endl;
errorDot_yaw = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error_yaw - last_error_yaw);
//std::cout << "error dot yaw " << errorDot_yaw << std::endl;
// Feed-forward term (k*msg) plus PD correction, saturated by cap_vel_auton.
velocity_command.linear.x = cap_vel_auton(kx*msg.linear.x + (kp*error).getX() + (kd*errorDot).getX());
velocity_command.linear.y = cap_vel_auton(ky*msg.linear.y + (kp*error).getY() + (kd*errorDot).getY());
velocity_command.linear.z = cap_vel_auton(kz*msg.linear.z + (kp*error).getZ() + (kd*errorDot).getZ());
velocity_command.angular.z = -1*cap_vel_auton(kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw); // z must be switched because bebop driver http://bebop-autonomy.readthedocs.org/en/latest/piloting.html
}
last_body_vel_time = curr_body_vel_time;// update last time body velocity was recieved
last_error = error;
last_error_yaw = error_yaw;
// Mirror the error terms into geometry_msgs for the debug publishers.
error_gm.linear.x = error.getX(); error_gm.linear.y = error.getY(); error_gm.linear.z = error.getZ(); error_gm.angular.z = error_yaw;
errorDot_gm.linear.x = errorDot.getX(); errorDot_gm.linear.y = errorDot.getY(); errorDot_gm.linear.z = errorDot.getZ(); errorDot_gm.angular.z = kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw;
error_pub.publish(error_gm);
errorDot_pub.publish(errorDot_gm);
if (start_autonomous)
{
recieved_command_from_tracking = true;
}
// Re-arm the watchdog now that a fresh command has been processed.
watchdogTimer.start();
}
示例11: handle_autopilot_version
void handle_autopilot_version(const mavlink_message_t *msg, uint8_t sysid, uint8_t compid) {
mavlink_autopilot_version_t apv;
mavlink_msg_autopilot_version_decode(msg, &apv);
autopilot_version_timer.stop();
uas->update_capabilities(true, apv.capabilities);
// Note based on current APM's impl.
// APM uses custom version array[8] as a string
ROS_INFO_NAMED("sys", "VER: Capabilities 0x%016llx", (long long int)apv.capabilities);
ROS_INFO_NAMED("sys", "VER: Flight software: %08x (%*s)",
apv.flight_sw_version,
8, apv.flight_custom_version);
ROS_INFO_NAMED("sys", "VER: Middleware software: %08x (%*s)",
apv.middleware_sw_version,
8, apv.middleware_custom_version);
ROS_INFO_NAMED("sys", "VER: OS software: %08x (%*s)",
apv.os_sw_version,
8, apv.os_custom_version);
ROS_INFO_NAMED("sys", "VER: Board hardware: %08x", apv.board_version);
ROS_INFO_NAMED("sys", "VER: VID/PID: %04x:%04x", apv.vendor_id, apv.product_id);
ROS_INFO_NAMED("sys", "VER: UID: %016llx", (long long int)apv.uid);
}
示例12: cmdVelCallback
/*!
 * Forward an autonomy velocity command to the wheelchair and re-arm
 * the command watchdog. Ignored entirely outside autonomous mode.
 * @param cmd_vel_msg Received message from topic
 */
void cmdVelCallback(const geometry_msgs::Twist::ConstPtr& cmd_vel_msg)
{
    // Only act while in autonomous mode.
    if(!autonomus_mode_)
        return;

    // Lock proxy to avoid interfering with laser scan reading
    boost::mutex::scoped_lock lock(proxy_lock_);

    ROS_DEBUG("Sent (vx,vtheta) [%f %f]", cmd_vel_msg->linear.x, cmd_vel_msg->angular.z);

    // Zero linear velocity means the planner wants to stop: ramp down
    // smoothly rather than braking hard.
    if (cmd_vel_msg->linear.x == 0.0)
    {
        soft_stop(cmd_vel_msg->angular.z);
    }

    // Send command to wheelchair
    last_linear_vel_ = cmd_vel_msg->linear.x;
    proxy_.motionSetSpeed(cmd_vel_msg->linear.x, cmd_vel_msg->angular.z);

    // Re-arm the one-shot watchdog (when enabled) so it only fires if
    // commands stop arriving for watchdog_duration seconds.
    watchdog_timer.stop();
    if(watchdog_duration > 0)
        watchdog_timer = node_handle_.createTimer(ros::Duration(watchdog_duration), &BBRobotNode::watchdog_callback, this, true);
}
示例13: nhp
/**
 * EKF constructor: reads parameters, initializes the filter state and
 * matrices, blocks until camera intrinsics arrive, then wires up all
 * publishers, subscribers, and the switching/watchdog timers.
 */
EKF()
{
// Get Parameters
ros::NodeHandle nhp("~");
nhp.param<bool>("deadReckoning", deadReckoning, false);
nhp.param<bool>("artificialSwitching", artificialSwitching, false);
nhp.param<double>("visibilityTimeout", visibilityTimeout, 0.2);
nhp.param<string>("cameraName",cameraName,"camera");
nhp.param<string>("markerID",markerID,"100");
nhp.param<double>("q",q,4.0); // process noise
nhp.param<double>("r",r,4.0); // measurement noise
nhp.param<double>("delTon",delTon,4.0);
nhp.param<double>("delToff",delToff,1.0);
// Initialize states
xhat << 0,0,0.1;
xlast << 0,0,0.1;
lastImageTime = ros::Time::now().toSec();
lastVelTime = lastImageTime;
estimatorOn = true;
gotCamParam = false;
// Initialize EKF matrices (isotropic process/measurement noise).
Q = q*Matrix3d::Identity();
R = r*Matrix2d::Identity();
// H picks the first two states as the measurement.
H << 1,0,0,
0,1,0;
// Get camera parameters
cout << cameraName+"/camera_info" << endl;
camInfoSub = nh.subscribe(cameraName+"/camera_info",1,&EKF::camInfoCB,this);
ROS_DEBUG("Waiting for camera parameters on topic ...");
// Spin manually until camInfoCB sets gotCamParam (or ROS shuts down).
do {
ros::spinOnce();
ros::Duration(0.1).sleep();
} while (!(ros::isShuttingDown()) and !gotCamParam);
ROS_DEBUG("Got camera parameters");
// Output publishers
outputPub = nh.advertise<switch_vis_exp::Output>("output",10);
pointPub = nh.advertise<geometry_msgs::PointStamped>("output_point",10);
// Subscribers for feature and velocity data
// Target velocity source depends on the deadReckoning flag.
if (deadReckoning)
{
targetVelSub = nh.subscribe("ugv0/odom",1,&EKF::targetVelCBdeadReckoning,this);
}
else
{
targetVelSub = nh.subscribe("ugv0/body_vel",1,&EKF::targetVelCBmocap,this);
}
camVelSub = nh.subscribe("image/body_vel",1,&EKF::camVelCB,this);
featureSub = nh.subscribe("markerCenters",1,&EKF::featureCB,this);
// Switching
if (artificialSwitching)
{
// Artificial switching: one-shot timer toggles the estimator after delTon.
switchingTimer = nh.createTimer(ros::Duration(delTon),&EKF::switchingTimerCB,this,true);
}
else
{
// Initialize watchdog timer for feature visibility check
watchdogTimer = nh.createTimer(ros::Duration(visibilityTimeout),&EKF::timeout,this,true);
watchdogTimer.stop(); // Dont start watchdog until feature first visible
}
}
示例14: featureCB
/**
 * Callback for the estimator: runs one EKF measurement update per
 * detected marker center.
 *
 * Ignores tags that don't match the configured markerID. In artificial
 * switching mode, updates only run while the estimator is on; otherwise
 * the visibility watchdog is stopped on entry and restarted on exit.
 */
void featureCB(const aruco_ros::CenterConstPtr& center)
{
// Disregard erroneous tag tracks
if (markerID.compare(center->header.frame_id) != 0)
{
return;
}
// Switching
if (artificialSwitching)
{
if (!estimatorOn)
{
return;
}
}
else
{
// Feature in FOV
watchdogTimer.stop();
estimatorOn = true;
}
// Time elapsed since the previous image (used implicitly downstream).
ros::Time timeStamp = center->header.stamp;
double timeNow = timeStamp.toSec();
double delT = timeNow - lastImageTime;
lastImageTime = timeNow;
// Object trans w.r.t. image frame, for ground truth
Vector3d trans;
tf::StampedTransform transform;
tfl.waitForTransform("image","ugv0",timeStamp,ros::Duration(0.1));
tfl.lookupTransform("image","ugv0",timeStamp,transform);
tf::Vector3 temp_trans = transform.getOrigin();
trans << temp_trans.getX(),temp_trans.getY(),temp_trans.getZ();
// Object pose w.r.t. image frame
if (deadReckoning)
{
tfl.waitForTransform("image",string("marker")+markerID,timeStamp,ros::Duration(0.1));
tfl.lookupTransform("image",string("marker")+markerID,timeStamp,transform);
try
{
// Additional transforms for predictor
tf::StampedTransform tfWorld2Marker;
tf::StampedTransform tfMarker2Odom;
tfl.waitForTransform("world",string("marker")+markerID,timeStamp,ros::Duration(0.1));
tfl.lookupTransform("world",string("marker")+markerID,timeStamp,tfWorld2Marker);
tfl.waitForTransform("ugv0/base_footprint","ugv0/odom",timeStamp,ros::Duration(0.1));
tfl.lookupTransform("ugv0/base_footprint","ugv0/odom",timeStamp,tfMarker2Odom);
// Save transform (composed world->odom quaternion for the predictor).
tf::Quaternion temp_quat = tfWorld2Marker.getRotation();
Quaterniond qW2M = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
temp_quat = tfMarker2Odom.getRotation();
Quaterniond qM2O = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
qWorld2Odom = qW2M*qM2O;
}
catch (tf::TransformException e)
{
// Best-effort: predictor transforms are optional, keep going on failure.
}
}
// else, use quaternion from image to ugv0 transform
tf::Quaternion temp_quat = transform.getRotation();
Quaterniond quat = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
// Undistort image coordinates. Returns normalized Euclidean coordinates
double ptsArray[2] = {center->x,center->y};
cv::Mat pts(1,1,CV_64FC2,ptsArray);
cv::Mat undistPts;
cv::undistortPoints(pts,undistPts,camMat,distCoeffs);
// Measurement vector: normalized (u, v) plus inverse depth 1/z.
Vector3d x;
x << undistPts.at<double>(0,0),undistPts.at<double>(0,1),1/trans(2);
// Target velocities expressed in camera coordinates
Vector3d vTc = quat*vTt;
// Observer velocities
Vector3d b = vTc - vCc;
Vector3d w = -wGCc; //Vector3d::Zero();
// EKF update: Kalman gain, state correction from the 2D measurement,
// and covariance update.
Matrix<double,3,2> K = P*H.transpose()*(H*P*H.transpose()+R).inverse();
xhat += K*(x.head<2>()-xhat.head<2>());
P = (Matrix3d::Identity()-K*H)*P;
// Publish output
publishOutput(x,xhat,trans,timeStamp);
if (!artificialSwitching)
{
// Restart watchdog timer for feature visibility check
watchdogTimer.start();
}
}
示例15: Setup
//.........这里部分代码省略.........
RevCount = robot->getOrigRobotConfig()->getRevCount();
dynConf_default.TicksMM = TicksMM;
dynConf_default.DriftFactor = DriftFactor;
dynConf_default.RevCount = RevCount;
dynamic_reconfigure_server->setConfigDefault(dynConf_default);
for(int i = 0; i < 16; i++)
{
sonar_tf_array[i].header.frame_id = frame_id_base_link;
std::stringstream _frame_id;
_frame_id << "sonar" << i;
sonar_tf_array[i].child_frame_id = _frame_id.str();
ArSensorReading* _reading = NULL;
_reading = robot->getSonarReading(i);
sonar_tf_array[i].transform.translation.x = _reading->getSensorX() / 1000.0;
sonar_tf_array[i].transform.translation.y = _reading->getSensorY() / 1000.0;
sonar_tf_array[i].transform.translation.z = 0.19;
sonar_tf_array[i].transform.rotation = tf::createQuaternionMsgFromYaw(_reading->getSensorTh() * M_PI / 180.0);
}
for (int i=0;i<16;i++) {
sensor_msgs::Range r;
ranges.data.push_back(r);
}
int i=0,j=0;
if (sonars__crossed_the_streams) {
i=8;
j=8;
}
for(; i<16; i++) {
//populate the RangeArray msg
std::stringstream _frame_id;
_frame_id << "sonar" << i;
ranges.data[i].header.frame_id = _frame_id.str();
ranges.data[i].radiation_type = 0;
ranges.data[i].field_of_view = 0.2618f;
ranges.data[i].min_range = 0.03f;
ranges.data[i].max_range = 5.0f;
}
// Enable the motors
robot->enableMotors();
robot->disableSonar();
// Initialize bumpers with robot number of bumpers
bumpers.front_bumpers.resize(robot->getNumFrontBumpers());
bumpers.rear_bumpers.resize(robot->getNumRearBumpers());
robot->unlock();
pose_pub = n.advertise<nav_msgs::Odometry>("pose",1000);
bumpers_pub = n.advertise<rosaria::BumperState>("bumper_state",1000);
voltage_pub = n.advertise<std_msgs::Float64>("battery_voltage", 1000);
combined_range_pub = n.advertise<rosaria::RangeArray>("ranges", 1000,
boost::bind(&RosAriaNode::sonarConnectCb,this),
boost::bind(&RosAriaNode::sonarDisconnectCb, this));
for(int i =0; i < 16; i++) {
std::stringstream topic_name;
topic_name << "range" << i;
range_pub[i] = n.advertise<sensor_msgs::Range>(topic_name.str().c_str(), 1000,
boost::bind(&RosAriaNode::sonarConnectCb,this),
boost::bind(&RosAriaNode::sonarDisconnectCb, this));
}
recharge_state_pub = n.advertise<std_msgs::Int8>("battery_recharge_state", 5, true /*latch*/ );
recharge_state.data = -2;
state_of_charge_pub = n.advertise<std_msgs::Float32>("battery_state_of_charge", 100);
motors_state_pub = n.advertise<std_msgs::Bool>("motors_state", 5, true /*latch*/ );
motors_state.data = false;
published_motors_state = false;
// subscribe to services
cmdvel_sub = n.subscribe( "cmd_vel", 1, (boost::function <void(const geometry_msgs::TwistConstPtr&)>)
boost::bind(&RosAriaNode::cmdvel_cb, this, _1 ));
// advertise enable/disable services
enable_srv = n.advertiseService("enable_motors", &RosAriaNode::enable_motors_cb, this);
disable_srv = n.advertiseService("disable_motors", &RosAriaNode::disable_motors_cb, this);
veltime = ros::Time::now();
sonar_tf_timer = n.createTimer(ros::Duration(0.033), &RosAriaNode::sonarCallback, this);
sonar_tf_timer.stop();
dynamic_reconfigure_server->setCallback(boost::bind(&RosAriaNode::dynamic_reconfigureCB, this, _1, _2));
// callback will be called by ArRobot background processing thread for every SIP data packet received from robot
robot->addSensorInterpTask("ROSPublishingTask", 100, &myPublishCB);
// Run ArRobot background processing thread
robot->runAsync(true);
return 0;
}