This article collects typical usage examples of the Python method Session.Session.save_arm_pose. If you are wondering how to use Python's Session.save_arm_pose, what a concrete call looks like, or want to see it used in real code, the hand-picked example here may help. You can also explore further usage examples of its containing class, Session.Session.
Below, 1 code example of the Session.save_arm_pose method is shown; examples are sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
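Before the full example, here is a minimal sketch of what a call to Session.save_arm_pose might look like. The import mirrors the note in Example 1, and the constructor arguments follow the Session(...) call in that example; the object_list value and the save_arm_pose() signature are assumptions, since the actual call site falls in the omitted part of the excerpt.
# Minimal sketch only -- not taken from Example 1.
from Session import Session

session = Session(object_list=[], is_debug=True)  # object_list normally comes from World.get_frame_list()
session.save_arm_pose(None)  # assumed signature; the real call site is in the omitted code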
Example 1: __init__
# Required import: from Session import Session [as alias]
# Or: from Session.Session import save_arm_pose [as alias]
# Note: this excerpt also relies on rospy, time, visualization_msgs.msg.MarkerArray,
# and the package's own Arms, World, Response, RobotSpeech, Command, GuiCommand,
# GripperState, ArmMode, and GazeGoal definitions, which are not shown here.
class Interaction:
    '''Finite state machine for the human interaction'''

    _is_programming = True
    _is_recording_motion = False
    _arm_trajectory = None
    _trajectory_start_time = None

    def __init__(self):
        self.arms = Arms()
        self.world = World()
        self.session = Session(object_list=self.world.get_frame_list(),
                               is_debug=True)
        self._viz_publisher = rospy.Publisher('visualization_marker_array',
                                              MarkerArray)
        rospy.Subscriber('recognized_command', Command, self.speech_command_cb)
        rospy.Subscriber('gui_command', GuiCommand, self.gui_command_cb)
        self._undo_function = None
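        # Map each recognized Command to a Response: a callback plus its
        # default parameter (an arm index or None).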
        self.responses = {
            Command.TEST_MICROPHONE: Response(Interaction.empty_response,
                [RobotSpeech.TEST_RESPONSE, GazeGoal.NOD]),
            Command.NEW_DEMONSTRATION: Response(self.create_action, None),
            Command.TAKE_TOOL: Response(self.take_tool, 0),
            Command.RELEASE_TOOL: Response(self.open_hand, 0),
            Command.DETECT_SURFACE: Response(self.record_object_pose, None),
            Command.START_RECORDING: Response(self.start_recording, None),
            Command.STOP_RECORDING: Response(self.stop_recording, None),
            Command.REPLAY_DEMONSTRATION: Response(self.execute_action, None),
            Command.SAVE_ARM_POSE: Response(self.save_arm_pose, None)
        }
        rospy.loginfo('Will wait until arms ready to respond.')
        while ((self.arms.get_ee_state(0) is None) or
               (self.arms.get_ee_state(1) is None)):
            time.sleep(0.1)
        rospy.loginfo('Starting to move to the initial pose.')
        self._move_to_arm_pose('initial', 0)
        self._move_to_arm_pose('initial', 1)
        rospy.loginfo('Interaction initialized.')

    def load_known_arm_poses(self):
        '''This loads important poses from the hard drive'''
        # TODO
        pass

    def open_hand(self, arm_index):
        '''Opens gripper on the indicated side'''
        if self.arms.set_gripper_state(arm_index, GripperState.OPEN):
            speech_response = Response.open_responses[arm_index]
            if (Interaction._is_programming and self.session.n_actions() > 0):
                self.save_gripper_step(arm_index, GripperState.OPEN)
                speech_response = (speech_response + ' ' +
                                   RobotSpeech.STEP_RECORDED)
            return [speech_response, Response.glance_actions[arm_index]]
        else:
            return [Response.already_open_responses[arm_index],
                    Response.glance_actions[arm_index]]

    def take_tool(self, arm_index):
        self.close_hand(arm_index)
        self._move_to_arm_pose('initial', arm_index)

    def close_hand(self, arm_index):
        '''Closes gripper on the indicated side'''
        if self.arms.set_gripper_state(arm_index, GripperState.CLOSED):
            speech_response = Response.close_responses[arm_index]
            return [speech_response, Response.glance_actions[arm_index]]
        else:
            return [Response.already_closed_responses[arm_index],
                    Response.glance_actions[arm_index]]

    def relax_arm(self, arm_index):
        '''Relaxes arm on the indicated side'''
        if self.arms.set_arm_mode(arm_index, ArmMode.RELEASE):
            return [Response.release_responses[arm_index],
                    Response.glance_actions[arm_index]]
        else:
            return [Response.already_released_responses[arm_index],
                    Response.glance_actions[arm_index]]

    def freeze_arm(self, arm_index):
        '''Stiffens arm on the indicated side'''
        if self.arms.set_arm_mode(arm_index, ArmMode.HOLD):
            return [Response.hold_responses[arm_index],
                    Response.glance_actions[arm_index]]
        else:
            return [Response.already_holding_responses[arm_index],
                    Response.glance_actions[arm_index]]

    def edit_action(self, dummy=None):
        '''Goes back to edit mode'''
        if (self.session.n_actions() > 0):
            if (Interaction._is_programming):
                return [RobotSpeech.ALREADY_EDITING, GazeGoal.SHAKE]
#......... remainder of the code omitted .........
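For completeness, a hedged sketch of how an Interaction instance might be brought up in a ROS node; the node name 'interaction_node' is an assumption not taken from the excerpt, while the actual command handling is done by the speech_command_cb and gui_command_cb subscribers registered in __init__ once rospy.spin() is running.
# Hypothetical launcher sketch -- node name and file layout are assumptions.
import rospy

if __name__ == '__main__':
    rospy.init_node('interaction_node')
    interaction = Interaction()  # blocks until both end-effector states are available
    rospy.spin()                 # callbacks registered in __init__ handle incoming commands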