This article compiles typical usage code examples of the C++ ros::Timer class. If you are wondering how the Timer class is used in C++, what it is for, or what real usage looks like, the hand-picked class code examples below may help.
The following presents 15 code examples of the Timer class, sorted by popularity by default.
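Before the individual examples, here is a minimal, self-contained sketch of the basic ros::Timer pattern they all build on: a timer is created from a NodeHandle with createTimer(), the callback takes a const ros::TimerEvent&, and the callback only fires while a spinner (e.g. ros::spin()) is running. Node and topic names here are illustrative.

#include <ros/ros.h>
#include <std_msgs/String.h>

ros::Publisher chatter_pub;

// Periodic callback; fires once per period while ros::spin() is running.
void timerCallback(const ros::TimerEvent& event)
{
  std_msgs::String msg;
  msg.data = "tick";
  chatter_pub.publish(msg);
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "timer_demo");
  ros::NodeHandle nh;
  chatter_pub = nh.advertise<std_msgs::String>("chatter", 10);
  // 1 Hz timer; it can later be paused/resumed with stop()/start().
  ros::Timer timer = nh.createTimer(ros::Duration(1.0), timerCallback);
  ros::spin();
  return 0;
}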
Example 1: init_caplist
/**
* Callback
* Re-initializes the list of publishers that have been inactive for a period T
*/
void init_caplist(const ros::TimerEvent& ){
ROS_INFO("Initialisation des Publishers.......!");
timer2.stop();
for(int i=0; i<(int)CAP_LIST.size()+1; i++){
chatter_pub[i].shutdown();
}
//ros::spinOnce();
timer2.start();
ROS_INFO("Initialisation terminee *");
}
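Example 1 depends on globals (timer2, chatter_pub, CAP_LIST) defined elsewhere in its project. A hedged sketch of how that state might be declared and wired up, meant to be compiled together with the init_caplist() above — the names come from the example, while the period and the placeholder topics are assumptions:

#include <ros/ros.h>
#include <std_msgs/String.h>
#include <string>
#include <vector>

// Assumed global state matching the names used in Example 1.
std::vector<std::string> CAP_LIST;        // capability/topic names
std::vector<ros::Publisher> chatter_pub;  // one publisher per entry (plus one spare)
ros::Timer timer2;                        // periodic re-initialization timer

void init_caplist(const ros::TimerEvent&); // body shown in Example 1

int main(int argc, char** argv)
{
  ros::init(argc, argv, "publisher_cleanup");
  ros::NodeHandle nh;
  CAP_LIST.push_back("cap_a");             // placeholder capability names
  CAP_LIST.push_back("cap_b");
  chatter_pub.resize(CAP_LIST.size() + 1); // Example 1 indexes up to size()
  for (size_t i = 0; i < chatter_pub.size(); ++i)
    chatter_pub[i] = nh.advertise<std_msgs::String>("chatter_" + std::to_string(i), 10);
  // Re-initialize the publisher list every T seconds (T assumed to be 10 here).
  timer2 = nh.createTimer(ros::Duration(10.0), init_caplist);
  ros::spin();
  return 0;
}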
Example 2: setVelocity
void setVelocity(double linearVel, double angularVel)
{
// Stopping and starting the timer causes it to start counting from 0 again.
// As long as this is called before the kill switch timer reaches killSwitchTimeout seconds,
// the rover's kill switch won't be called.
killSwitchTimer.stop();
killSwitchTimer.start();
velocity.linear.x = linearVel * 1.5;
velocity.angular.z = angularVel * 8; //scaling factor for sim; removed by aBridge node
velocityPublish.publish(velocity);
}
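Example 2 shows only the re-arming half of the kill-switch pattern; the timer callback it keeps postponing is not part of the excerpt. A plausible, self-contained sketch of that other half — the callback body, topic name, and timeout value are assumptions based on the identifiers above:

#include <ros/ros.h>
#include <geometry_msgs/Twist.h>

ros::Publisher velocityPublish;
ros::Timer killSwitchTimer;
const double killSwitchTimeout = 2.0;  // seconds without a command before stopping

// Fires only if setVelocity() has not reset the timer within the timeout:
// publish a zero Twist so the rover stops.
void killSwitchTimerEventHandler(const ros::TimerEvent&)
{
  geometry_msgs::Twist stop;  // all fields default to zero
  velocityPublish.publish(stop);
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "kill_switch_demo");
  ros::NodeHandle nh;
  velocityPublish = nh.advertise<geometry_msgs::Twist>("cmd_vel", 1);
  killSwitchTimer = nh.createTimer(ros::Duration(killSwitchTimeout),
                                   killSwitchTimerEventHandler);
  ros::spin();
  return 0;
}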
Example 3: FollowerFSM
FollowerFSM() : hold(true),
currentState(STOPPED),
nextState(STOPPED),
nPriv("~")
{
//hold = true;
//currentState = STOPPED;
//nextState = STOPPED;
nPriv.param<std::string>("world_frame", world_frame, "world_lol");
nPriv.param<std::string>("robot_frame", robot_frame, "robot");
nPriv.param<std::string>("fixed_frame", fixed_frame_id, "odom");
nPriv.param<double>("minimum_distance", minimum_distance, 2);
nPriv.param<double>("planner_activation_distance", planner_activation_distance, 1.7);
nPriv.param("minimum_planner_distance", minimum_planner_distance, 1.7);
nPriv.param("distance_to_target", distance_to_target, 1.5);
nPriv.param<double>("kv", kv, 0.03);
nPriv.param<double>("kalfa", kalfa, 0.08);
//nPriv.param<double>("rate", frequency, 10);
rate = new ros::Rate(10.0);
nPriv.param<std::string>("control_type", cType, "planner");
if(cType == "planner")
{ ac = new MoveBaseClient("move_base", true);}
notReceivedNumber = 0;
sub = n.subscribe("person_position", 1, &FollowerFSM::receiveInformation, this);
cmdPub = n.advertise<geometry_msgs::Twist>("twist_topic", 1);
timer = n.createTimer(ros::Duration(0.1), &FollowerFSM::checkInfo, this); // If we don't receive any position for 2 seconds, STOP the robot!
timer.start();
}
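Example 3 registers a member function as the timer callback (note the &FollowerFSM::checkInfo, this overload of createTimer). The checkInfo() body is not included in the excerpt; below is a minimal sketch of what such a staleness check could look like, using a counter in the spirit of notReceivedNumber — the 20-tick threshold and the class itself are assumptions:

#include <ros/ros.h>

class FollowerWatchdog
{
public:
  FollowerWatchdog() : not_received_(0)
  {
    // 10 Hz check; with a threshold of 20 ticks this amounts to ~2 s without data.
    timer_ = nh_.createTimer(ros::Duration(0.1), &FollowerWatchdog::checkInfo, this);
  }

  void dataReceived() { not_received_ = 0; }  // call from the subscriber callback

private:
  void checkInfo(const ros::TimerEvent& event)
  {
    // TimerEvent carries scheduling info, e.g. when the callback actually ran.
    ROS_DEBUG("checkInfo fired at %f", event.current_real.toSec());
    if (++not_received_ > 20)
      ROS_WARN_THROTTLE(1.0, "No person position for ~2 s, stopping the robot");
  }

  int not_received_;
  ros::NodeHandle nh_;
  ros::Timer timer_;
};

int main(int argc, char** argv)
{
  ros::init(argc, argv, "follower_watchdog_demo");
  FollowerWatchdog watchdog;
  ros::spin();
  return 0;
}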
Example 4: autopilot_version_cb
void autopilot_version_cb(const ros::TimerEvent &event) {
bool ret = false;
try {
auto client = nh.serviceClient<mavros::CommandLong>("cmd/command");
mavros::CommandLong cmd{};
cmd.request.command = MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES;
cmd.request.confirmation = false;
cmd.request.param1 = 1.0;
ROS_DEBUG_NAMED("sys", "VER: Sending request.");
ret = client.call(cmd);
}
catch (ros::InvalidNameException &ex) {
ROS_ERROR_NAMED("sys", "VER: %s", ex.what());
}
ROS_ERROR_COND_NAMED(!ret, "sys", "VER: command plugin service call failed!");
if (version_retries > 0) {
version_retries--;
ROS_WARN_COND_NAMED(version_retries != RETRIES_COUNT - 1, "sys",
"VER: request timeout, retries left %d", version_retries);
}
else {
uas->update_capabilities(false);
autopilot_version_timer.stop();
ROS_WARN_NAMED("sys", "VER: your FCU don't support AUTOPILOT_VERSION, "
"switched to default capabilities");
}
}
Example 5: openCamera
void openCamera(ros::Timer &timer)
{
timer.stop();
while (!camera_)
{
try
{
boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
camera_ = boost::make_shared<PMDCamboardNano>(device_serial, plugin_dir, source_plugin, process_plugin);
device_serial = camera_->getSerialNumber().c_str();
updater.setHardwareIDf("%s", device_serial.c_str());
NODELET_INFO("Opened PMD camera with serial number \"%s\"", camera_->getSerialNumber().c_str());
loadCalibrationData();
NODELET_INFO("Loaded calibration data");
camera_->update();
camera_info_ = camera_->getCameraInfo();
}
catch (PMDCameraNotOpenedException& e)
{
camera_state_ = CAMERA_NOT_FOUND;
if (device_serial != "")
{
std::stringstream err;
err << "Unable to open PMD camera with serial number " << device_serial;
state_info_ = err.str();
NODELET_INFO("%s",state_info_.c_str());
}
else
{
state_info_ = "Unable to open PMD camera..";
NODELET_INFO("%s",state_info_.c_str());
}
boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
camera_.reset();
}
updater.update();
boost::this_thread::sleep(boost::posix_time::seconds(open_camera_retry_period));
}
timer.start();
}
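Example 5 receives the timer by reference so it can pause the retry loop and resume it afterwards. The excerpt does not show how the function is hooked up to a timer; one hedged way to do it is a thin adapter callback that forwards a stored timer handle. The simplified openCamera() body here is a stand-in, not the nodelet's real code:

#include <ros/ros.h>

ros::Timer open_camera_timer;

// Simplified stand-in for the openCamera() shown in Example 5.
void openCamera(ros::Timer& timer)
{
  timer.stop();               // pause periodic retries while we work
  ROS_INFO("trying to open the camera...");
  timer.start();              // resume retries afterwards
}

// Adapter with the TimerCallback signature createTimer() expects.
void openCameraTimerCb(const ros::TimerEvent&)
{
  openCamera(open_camera_timer);
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "open_camera_demo");
  ros::NodeHandle nh;
  open_camera_timer = nh.createTimer(ros::Duration(3.0), openCameraTimerCb);
  ros::spin();
  return 0;
}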
Example 6: cmd_vel_callback
/********** callback for the cmd velocity from the autonomy **********/
void cmd_vel_callback(const geometry_msgs::Twist& msg)
{
watchdogTimer.stop();
error.setValue(msg.linear.x - body_vel.linear.x, msg.linear.y - body_vel.linear.y, msg.linear.z - body_vel.linear.z);
//std::cout << "error x: " << error.getX() << " y: " << error.getY() << " z: " << error.getZ() << std::endl;
//std::cout << std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) << std::endl;
error_yaw = msg.angular.z - body_vel.angular.z;
//std::cout << "error yaw: " << error_yaw << std::endl;
// if some time has passed between the last and current body velocity timestamps, compute the feed-forward PD terms
if (std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) > 0.00001)
{
errorDot = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error - last_error);
//std::cout << "errordot x: " << errorDot.getX() << " y: " << errorDot.getY() << " z: " << errorDot.getZ() << std::endl;
errorDot_yaw = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error_yaw - last_error_yaw);
//std::cout << "error dot yaw " << errorDot_yaw << std::endl;
velocity_command.linear.x = cap_vel_auton(kx*msg.linear.x + (kp*error).getX() + (kd*errorDot).getX());
velocity_command.linear.y = cap_vel_auton(ky*msg.linear.y + (kp*error).getY() + (kd*errorDot).getY());
velocity_command.linear.z = cap_vel_auton(kz*msg.linear.z + (kp*error).getZ() + (kd*errorDot).getZ());
velocity_command.angular.z = -1*cap_vel_auton(kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw); // z must be switched because bebop driver http://bebop-autonomy.readthedocs.org/en/latest/piloting.html
}
last_body_vel_time = curr_body_vel_time;// update the last time a body velocity was received
last_error = error;
last_error_yaw = error_yaw;
error_gm.linear.x = error.getX(); error_gm.linear.y = error.getY(); error_gm.linear.z = error.getZ(); error_gm.angular.z = error_yaw;
errorDot_gm.linear.x = errorDot.getX(); errorDot_gm.linear.y = errorDot.getY(); errorDot_gm.linear.z = errorDot.getZ(); errorDot_gm.angular.z = kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw;
error_pub.publish(error_gm);
errorDot_pub.publish(errorDot_gm);
if (start_autonomous)
{
recieved_command_from_tracking = true;
}
watchdogTimer.start();
}
Example 7: callback
void callback(octoflex::OctoFlexConfig &config, uint32_t level)
{
checkFullVolume_ = config.checkFullVolume;
simTime_ = config.forwardTime;
numSteps_ = config.numberOfSteps;
length_ = config.length;
width_ = config.width;
height_ = config.height;
originOffsetX_ = config.originOffsetX;
originOffsetY_ = config.originOffsetY;
originOffsetZ_ = config.originOffsetZ;
resolution_ = config.resolution;
rate_ = config.rate;
visPause_ = config.visualizationPause;
loopTimer_.setPeriod(ros::Duration(1.0/rate_));
}
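Example 7 adjusts a running timer from a dynamic_reconfigure callback with setPeriod(). The same call works outside of dynamic_reconfigure; here is a stripped-down sketch where the rate is changed from a plain topic instead (topic name and rates are made up):

#include <ros/ros.h>
#include <std_msgs/Float64.h>

ros::Timer loop_timer;

void loopCb(const ros::TimerEvent&)
{
  ROS_INFO_THROTTLE(5.0, "main loop tick");
}

// Change the loop rate at runtime, e.g. from a "set_rate" topic.
void rateCb(const std_msgs::Float64::ConstPtr& msg)
{
  if (msg->data > 0.0)
    loop_timer.setPeriod(ros::Duration(1.0 / msg->data));
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "set_period_demo");
  ros::NodeHandle nh;
  loop_timer = nh.createTimer(ros::Duration(0.1), loopCb);  // start at 10 Hz
  ros::Subscriber sub = nh.subscribe("set_rate", 1, rateCb);
  ros::spin();
  return 0;
}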
Example 8: TimerCallback
void TimerCallback(const ros::TimerEvent&) {
aero_srr_msgs::ObjectLocationMsg test_msg;
test_msg.pose.pose.position.x = x_pos;
test_msg.pose.pose.position.y = y_pos;
test_msg.pose.pose.position.z = z_pos;
tf::Quaternion q;
q.setRPY(rx_pos, ry_pos, rz_pos);
tf::quaternionTFToMsg(q, test_msg.pose.pose.orientation);
test_msg.header.frame_id = "/arm_mount";
test_msg.pose.header.frame_id = test_msg.header.frame_id;
test_msg.header.stamp = ros::Time::now();
test_msg.pose.header.stamp = ros::Time::now();
pub.publish(test_msg);
timer.stop();
}
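Example 8 emulates a one-shot timer by calling stop() on itself after the first firing. roscpp can also do this directly via the oneshot argument of createTimer(); a small sketch (node and callback names are illustrative):

#include <ros/ros.h>

void publishOnce(const ros::TimerEvent&)
{
  ROS_INFO("published the test message once; timer will not fire again");
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "oneshot_demo");
  ros::NodeHandle nh;
  // With oneshot = true the timer fires once after 1 s and then stops.
  ros::Timer timer = nh.createTimer(ros::Duration(1.0), publishOnce,
                                    /*oneshot=*/true);
  ros::spin();
  return 0;
}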
Example 9: cmdVelCallback
/*!
* @param cmd_vel_msg Received message from topic
*/
void cmdVelCallback(const geometry_msgs::Twist::ConstPtr& cmd_vel_msg)
{
if(autonomus_mode_) // Only if we are in autonomous mode
{
// Lock proxy to avoid interfering with laser scan reading
boost::mutex::scoped_lock lock(proxy_lock_);
ROS_DEBUG("Sent (vx,vtheta) [%f %f]", cmd_vel_msg->linear.x, cmd_vel_msg->angular.z);
if (cmd_vel_msg->linear.x == 0.0) //If stopping
{
soft_stop(cmd_vel_msg->angular.z);
}
//ROS_DEBUG("Received cmd_vel msg [linear.x linear.y angular.z] [%f %f %f]", cmd_vel_msg->linear.x , cmd_vel_msg->linear.y,cmd_vel_msg->angular.z);
// float vel_linear = cmd_vel_msg->linear.x;
// Send command to wheelchair
last_linear_vel_ = cmd_vel_msg->linear.x;
proxy_.motionSetSpeed(cmd_vel_msg->linear.x, cmd_vel_msg->angular.z);
watchdog_timer.stop();
if(watchdog_duration > 0)
watchdog_timer = node_handle_.createTimer(ros::Duration(watchdog_duration), &BBRobotNode::watchdog_callback, this, true);
}
}
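Example 9 re-arms its watchdog by recreating a one-shot timer on every command (the trailing true in createTimer() is the oneshot flag of the member-function overload); the watchdog_callback itself is not shown. A self-contained sketch of the full pattern — the class, topic, and the body of the expiry callback are assumptions, only the re-creation idiom is taken from the example:

#include <ros/ros.h>
#include <geometry_msgs/Twist.h>

class CmdVelWatchdogDemo
{
public:
  CmdVelWatchdogDemo() : watchdog_duration_(0.5)
  {
    cmd_sub_ = nh_.subscribe("cmd_vel", 1, &CmdVelWatchdogDemo::cmdVelCallback, this);
  }

private:
  void cmdVelCallback(const geometry_msgs::Twist::ConstPtr& msg)
  {
    // Re-arm the one-shot watchdog on every command, as in Example 9.
    watchdog_timer_.stop();
    watchdog_timer_ = nh_.createTimer(ros::Duration(watchdog_duration_),
                                      &CmdVelWatchdogDemo::watchdogCallback, this,
                                      /*oneshot=*/true);
  }

  void watchdogCallback(const ros::TimerEvent&)
  {
    // No command arrived within watchdog_duration_: stop the platform.
    ROS_WARN("cmd_vel watchdog expired, stopping");
  }

  ros::NodeHandle nh_;
  ros::Subscriber cmd_sub_;
  ros::Timer watchdog_timer_;
  double watchdog_duration_;
};

int main(int argc, char** argv)
{
  ros::init(argc, argv, "cmd_vel_watchdog_demo");
  CmdVelWatchdogDemo demo;
  ros::spin();
  return 0;
}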
Example 10: main
int main(int argc, char **argv) {
int mobot_count = 1; //counter for number of mobots working
if (argv[1] != NULL) {
if (atoi(argv[1]) == 2)
mobot_count = 2;
if (atoi(argv[1]) >= 3)
mobot_count = 3;
}
//initialising ROS
ros::init(argc, argv, "path_planner");
ros::NodeHandle nh;
nh_ = &nh;
//initialising Mobot 1
ros::Subscriber infraredScan_1 = nh.subscribe("/mobot0/infrared", 1, irCallback1);
ros::Subscriber userInput_1 = nh.subscribe("/mobot0/waypoint_user", 20, userCallback);
ros::Subscriber userInput = nh.subscribe("/path_planner/waypoint_user", 20, userInputCallback);
nextPoseRel_1 = nh.advertise<mobots_msgs::Pose2DPrio> ("/mobot0/waypoint_rel", 20);
mobots[0].id = 1;
mobots[0].theta = 0.0;
mobots[0].x = 0.0;
mobots[0].y = 0.0;
mobots[0].obstacle = false;
mobots[0].userControlled = 0;
mobots[0].timer = 0;
timerMobot_1 = nh.createTimer(ros::Duration(1), timerCallback1);
#if 0
if(mobot_count > 1){ //initialising Mobot 2
ros::Subscriber infraredScan_2 = nh.subscribe("/mobot2/infrared", 1, irCallback2);
ros::Subscriber userInput_2 = nh.subscribe("/mobot2/waypoint_user", 20, userCallback);
nextPoseRel_2 = nh.advertise<mobots_msgs::Pose2DPrio> ("/mobot2/waypoint_rel", 20);
mobots[1].id = 2;
mobots[1].theta = 0.0;
mobots[1].x = 0.0;
mobots[1].y = 0.0;
mobots[1].obstacle = false;
mobots[1].userControlled = 0;
mobots[1].timer = 0;
timerMobot_2 = nh.createTimer(ros::Duration(1), timerCallback2);
}
if(mobot_count > 2){ //initialising Mobot 3
ros::Subscriber infraredScan_3 = nh.subscribe("/mobot3/infrared", 1, irCallback3);
ros::Subscriber userInput_3 = nh.subscribe("/mobot3/waypoint_user", 20, userCallback);
nextPoseRel_3 = nh.advertise<mobots_msgs::Pose2DPrio> ("/mobot3/waypoint_rel", 20);
mobots[2].id = 3;
mobots[2].theta = 0.0;
mobots[2].x = 0.0;
mobots[2].y = 0.0;
mobots[2].obstacle = false;
mobots[2].userControlled = 0;
mobots[2].timer = 0;
timerMobot_3 = nh.createTimer(ros::Duration(1), timerCallback3);
}
keyReqServer = nh.advertiseService("/path_planner/keyboard_request", keyReqCallback);
if(argv[2] != NULL){
std::string str = std::string(argv[2]);
if(str == "square"){
exploreSquare(1, 1000);
}
}
/* Entering the event loop. If there is no obstacle or the user is not controlling the
* Mobot, it is driving straight till something occurs. */
while (ros::ok()) {
for (int i = 0; i <= mobot_count-1; i++) {
if (mobots[i].userControlled == 0 && !mobots[i].obstacle) {
moveMobot(mobots[i].id, 1);
}
}
wait(5000, true);
}
//stopping the timers
timerMobot_1.stop();
if(mobot_count > 1) timerMobot_2.stop();
if(mobot_count > 2) timerMobot_3.stop();
return 0;
#endif
ros::spin();
}
Example 11: joy_callback
/********** callback for the controller **********/
void joy_callback(const sensor_msgs::Joy& msg)
{
watchdogTimer.stop();
command_from_xbox = geometry_msgs::Twist();// cleaning the xbox twist message
a_button = msg.buttons[0];// a button lands
if (a_button > 0)
{
land_pub.publish(std_msgs::Empty());
}
x_button = msg.buttons[2];// x button sends constant
if (x_button > 0)
{
send_0 = true;
}
b_button = msg.buttons[1];// b button kills motors
if (b_button > 0)
{
reset_pub.publish(std_msgs::Empty());
}
y_button = msg.buttons[3];// y button takes off
if (y_button > 0)
{
takeoff_pub.publish(std_msgs::Empty());
}
dpad_l = msg.buttons[11];//dpad left value, fine adjustment of tilt angle negatively
dpad_r = msg.buttons[12];//dpad right value, fine adjustment of tilt angle positively
dpad_u = msg.buttons[13];//dpad up value, coarse adjustment of tilt angle positively
dpad_d = msg.buttons[14];//dpad down value, coarse adjustment of tilt angle negatively
int tilt_neg = -1*dpad_l - 10*dpad_d;// tilt negatively
int tilt_pos = 1*dpad_r + 10*dpad_u;// tilt positively
gimbal_state_desired.angular.y = gimbal_state_desired.angular.y + tilt_neg + tilt_pos;// adjust the gimbal
camera_state_pub.publish(gimbal_state_desired);// update the angle
left_bumper = msg.buttons[4];// left bumper for using the tracking as the controller output
right_bumper = msg.buttons[5];// right bumper for using the mocap as the controller output
start_autonomous = (left_bumper > 0) || (right_bumper > 0);// start the autonomous if either are greater than 0
right_stick_vert = joy_deadband(msg.axes[4]);// right thumbstick vert controls linear x
command_from_xbox.linear.x = joy_gain*right_stick_vert;
right_stick_horz = joy_deadband(msg.axes[3]);// right thumbstick horz controls linear y
command_from_xbox.linear.y = joy_gain*right_stick_horz;
left_stick_vert = joy_deadband(msg.axes[1]);// left thumbstick vert controls linear z
command_from_xbox.linear.z = joy_gain*left_stick_vert;
left_stick_horz = joy_deadband(msg.axes[0]);// left thumbstick horz controls angular z
command_from_xbox.angular.z = -1*joy_gain*left_stick_horz;
if (!start_autonomous)// if not using the autonomous velocities as output, indicate that a new control input was received
{
if (send_0)
{
//command_from_xbox.linear.x = 0;
//command_from_xbox.linear.y = 0;
//command_from_xbox.linear.z = 0;
//command_from_xbox.angular.z = 0;
send_0 = false;
}
recieved_command_from_xbox = true;
}
watchdogTimer.start();
}
Example 12: featureCB
// Callback for estimator
void featureCB(const aruco_ros::CenterConstPtr& center)
{
// Disregard erroneous tag tracks
if (markerID.compare(center->header.frame_id) != 0)
{
return;
}
// Switching
if (artificialSwitching)
{
if (!estimatorOn)
{
return;
}
}
else
{
// Feature in FOV
watchdogTimer.stop();
estimatorOn = true;
}
// Time
ros::Time timeStamp = center->header.stamp;
double timeNow = timeStamp.toSec();
double delT = timeNow - lastImageTime;
lastImageTime = timeNow;
// Object trans w.r.t. image frame, for ground truth
Vector3d trans;
tf::StampedTransform transform;
tfl.waitForTransform("image","ugv0",timeStamp,ros::Duration(0.1));
tfl.lookupTransform("image","ugv0",timeStamp,transform);
tf::Vector3 temp_trans = transform.getOrigin();
trans << temp_trans.getX(),temp_trans.getY(),temp_trans.getZ();
// Object pose w.r.t. image frame
if (deadReckoning)
{
tfl.waitForTransform("image",string("marker")+markerID,timeStamp,ros::Duration(0.1));
tfl.lookupTransform("image",string("marker")+markerID,timeStamp,transform);
try
{
// Additional transforms for predictor
tf::StampedTransform tfWorld2Marker;
tf::StampedTransform tfMarker2Odom;
tfl.waitForTransform("world",string("marker")+markerID,timeStamp,ros::Duration(0.1));
tfl.lookupTransform("world",string("marker")+markerID,timeStamp,tfWorld2Marker);
tfl.waitForTransform("ugv0/base_footprint","ugv0/odom",timeStamp,ros::Duration(0.1));
tfl.lookupTransform("ugv0/base_footprint","ugv0/odom",timeStamp,tfMarker2Odom);
// Save transform
tf::Quaternion temp_quat = tfWorld2Marker.getRotation();
Quaterniond qW2M = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
temp_quat = tfMarker2Odom.getRotation();
Quaterniond qM2O = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
qWorld2Odom = qW2M*qM2O;
}
catch (tf::TransformException e)
{
}
}
// else, use quaternion from image to ugv0 transform
tf::Quaternion temp_quat = transform.getRotation();
Quaterniond quat = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
// Undistort image coordinates. Returns normalized Euclidean coordinates
double ptsArray[2] = {center->x,center->y};
cv::Mat pts(1,1,CV_64FC2,ptsArray);
cv::Mat undistPts;
cv::undistortPoints(pts,undistPts,camMat,distCoeffs);
Vector3d x;
x << undistPts.at<double>(0,0),undistPts.at<double>(0,1),1/trans(2);
// Target velocities expressed in camera coordinates
Vector3d vTc = quat*vTt;
// Observer velocities
Vector3d b = vTc - vCc;
Vector3d w = -wGCc; //Vector3d::Zero();
// EKF update
Matrix<double,3,2> K = P*H.transpose()*(H*P*H.transpose()+R).inverse();
xhat += K*(x.head<2>()-xhat.head<2>());
P = (Matrix3d::Identity()-K*H)*P;
// Publish output
publishOutput(x,xhat,trans,timeStamp);
if (!artificialSwitching)
{
// Restart watchdog timer for feature visibility check
watchdogTimer.start();
}
}
Example 13: grab_vicon_pose_callback
bool grab_vicon_pose_callback(vicon_mocap::viconGrabPose::Request& req, vicon_mocap::viconGrabPose::Response& resp)
{
ROS_INFO("Got request for a VICON pose");
updateTimer.stop();
tf::StampedTransform transform;
//tf::Quaternion quat;
if (not segment_data_enabled)
{
MyClient.EnableSegmentData();
ROS_ASSERT(MyClient.IsSegmentDataEnabled().Enabled);
segment_data_enabled = true;
}
// Gather data:
int N = req.n_measurements;
double accum_trans[3] = {0, 0, 0};
double accum_quat[4] = {0, 0, 0, 0};
Result::Enum the_result;
int n_success = 0;
for (int k = 0; k < N; k++)
{
ros::Duration(1 / vicon_capture_rate).sleep(); // Wait at least as long as vicon needs to capture the next frame
if ((the_result = MyClient.GetFrame().Result) == Result::Success)
{
try
{
Output_GetSegmentGlobalTranslation trans = MyClient.GetSegmentGlobalTranslation(req.subject_name,
req.segment_name);
Output_GetSegmentGlobalRotationQuaternion quat =
MyClient.GetSegmentGlobalRotationQuaternion(req.subject_name, req.segment_name);
if ((!trans.Occluded) && (!quat.Occluded))
{
accum_trans[0] += trans.Translation[0] / 1000;
accum_trans[1] += trans.Translation[1] / 1000;
accum_trans[2] += trans.Translation[2] / 1000;
accum_quat[0] += quat.Rotation[3];
accum_quat[1] += quat.Rotation[0];
accum_quat[2] += quat.Rotation[1];
accum_quat[3] += quat.Rotation[2];
n_success++;
}
}
catch (tf::TransformException ex)
{
ROS_ERROR("%s", ex.what());
resp.success = false;
return false; // TODO: should we really bail here, or just try again?
}
}
else
{
if (the_result != Result::NoFrame)
{
ROS_ERROR_STREAM("GetFrame() returned " << (int)the_result);
}
}
}
ROS_INFO("Averaging %d measurements", n_success);
// Average the data:
double normalized_quat[4];
double quat_norm = sqrt(
accum_quat[0] * accum_quat[0] + accum_quat[1] * accum_quat[1] + accum_quat[2]
* accum_quat[2] + accum_quat[3] * accum_quat[3]);
for (int i = 0; i < 4; i++)
{
normalized_quat[i] = accum_quat[i] / quat_norm;
}
double normalized_vector[3];
// Copy to inputs:
for (int i = 0; i < 3; i++)
{
normalized_vector[i] = accum_trans[i] / n_success;
}
// copy what we used to service call response:
resp.success = true;
resp.pose.header.stamp = ros::Time::now();
resp.pose.header.frame_id = tf_ref_frame_id;
resp.pose.pose.position.x = normalized_vector[0];
resp.pose.pose.position.y = normalized_vector[1];
resp.pose.pose.position.z = normalized_vector[2];
resp.pose.pose.orientation.w = normalized_quat[0];
resp.pose.pose.orientation.x = normalized_quat[1];
resp.pose.pose.orientation.y = normalized_quat[2];
resp.pose.pose.orientation.z = normalized_quat[3];
updateTimer.start();
return true;
}
Example 14: handle_heartbeat
void handle_heartbeat(const mavlink_message_t *msg, uint8_t sysid, uint8_t compid) {
if (!uas->is_my_target(sysid)) {
ROS_DEBUG_NAMED("sys", "HEARTBEAT from [%d, %d] dropped.", sysid, compid);
return;
}
mavlink_heartbeat_t hb;
mavlink_msg_heartbeat_decode(msg, &hb);
// update context && setup connection timeout
uas->update_heartbeat(hb.type, hb.autopilot);
uas->update_connection_status(true);
timeout_timer.stop();
timeout_timer.start();
// build state message after updating uas
auto state_msg = boost::make_shared<mavros::State>();
state_msg->header.stamp = ros::Time::now();
state_msg->armed = hb.base_mode & MAV_MODE_FLAG_SAFETY_ARMED;
state_msg->guided = hb.base_mode & MAV_MODE_FLAG_GUIDED_ENABLED;
state_msg->mode = uas->str_mode_v10(hb.base_mode, hb.custom_mode);
state_pub.publish(state_msg);
hb_diag.tick(hb.type, hb.autopilot, state_msg->mode, hb.system_status);
}
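Example 14 postpones timeout_timer on every HEARTBEAT; the timeout callback it is racing against is not in the excerpt (in mavros it reports the connection as lost again). A generic, self-contained sketch of the same heartbeat/timeout pairing, with made-up topic and timeout values:

#include <ros/ros.h>
#include <std_msgs/Empty.h>

ros::Timer timeout_timer;
const double conn_timeout = 10.0;  // seconds without a heartbeat == connection lost

void timeout_cb(const ros::TimerEvent&)
{
  // No heartbeat arrived within conn_timeout: report the link as down
  // (mavros's counterpart does the equivalent of update_connection_status(false)).
  ROS_WARN("connection to FCU lost");
}

void heartbeat_cb(const std_msgs::Empty::ConstPtr&)
{
  // Re-arm the timeout on every heartbeat, exactly as in Example 14.
  timeout_timer.stop();
  timeout_timer.start();
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "heartbeat_timeout_demo");
  ros::NodeHandle nh;
  ros::Subscriber hb_sub = nh.subscribe("heartbeat", 10, heartbeat_cb);
  timeout_timer = nh.createTimer(ros::Duration(conn_timeout), timeout_cb);
  ros::spin();
  return 0;
}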
Example 15: connection_cb
void connection_cb(bool connected) {
// if connection changes, start delayed version request
version_retries = RETRIES_COUNT;
if (connected)
autopilot_version_timer.start();
else
autopilot_version_timer.stop();
// add/remove APM diag tasks
if (connected && disable_diag && uas->is_ardupilotmega()) {
#ifdef MAVLINK_MSG_ID_MEMINFO
UAS_DIAG(uas).add(mem_diag);
#endif
#ifdef MAVLINK_MSG_ID_HWSTATUS
UAS_DIAG(uas).add(hwst_diag);
#endif
#if !defined(MAVLINK_MSG_ID_MEMINFO) || !defined(MAVLINK_MSG_ID_HWSTATUS)
ROS_INFO_NAMED("sys", "SYS: APM detected, but mavros uses different dialect. "
"Extra diagnostic disabled.");
#else
ROS_DEBUG_NAMED("sys", "SYS: APM extra diagnostics enabled.");
#endif
}
else {
UAS_DIAG(uas).removeByName(mem_diag.getName());
UAS_DIAG(uas).removeByName(hwst_diag.getName());
ROS_DEBUG_NAMED("sys", "SYS: APM extra diagnostics disabled.");
}
}
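Example 15 starts autopilot_version_timer only once a connection exists, which suggests the timer is created without auto-starting. A hedged sketch of how such a timer could be constructed and driven — the period, retry count, and class are assumptions; mavros's actual values and wiring may differ:

#include <ros/ros.h>

constexpr int RETRIES_COUNT = 6;  // assumed retry budget

class VersionRequestDemo
{
public:
  VersionRequestDemo() : version_retries(RETRIES_COUNT)
  {
    // Created stopped (autostart = false); connection_cb() starts it later,
    // mirroring Example 15.
    autopilot_version_timer = nh.createTimer(ros::Duration(1.0),
        &VersionRequestDemo::autopilot_version_cb, this,
        /*oneshot=*/false, /*autostart=*/false);
  }

  void connection_cb(bool connected)
  {
    version_retries = RETRIES_COUNT;
    if (connected)
      autopilot_version_timer.start();
    else
      autopilot_version_timer.stop();
  }

private:
  void autopilot_version_cb(const ros::TimerEvent&)
  {
    ROS_DEBUG("VER: requesting capabilities, retries left %d", version_retries);
    if (version_retries > 0)
      version_retries--;
    else
      autopilot_version_timer.stop();  // give up, as in Example 4
  }

  ros::NodeHandle nh;
  ros::Timer autopilot_version_timer;
  int version_retries;
};

int main(int argc, char** argv)
{
  ros::init(argc, argv, "version_request_demo");
  VersionRequestDemo demo;
  demo.connection_cb(true);   // pretend a connection just came up
  ros::spin();
  return 0;
}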