This page collects typical usage examples of the Python function tf_conversions.posemath.toMatrix. If you have been wondering what toMatrix does, exactly how to call it, and what real code that uses it looks like, the curated examples below should help.
Listed below are 15 code examples of the toMatrix function, sorted by popularity by default. You can upvote the examples you find useful; your ratings help the system surface better Python code samples.
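Before the examples, here is a minimal sketch (not taken from the examples below) of the basic round-trip: posemath.toMatrix converts a PyKDL.Frame into a 4x4 homogeneous NumPy matrix, and posemath.fromMatrix converts back.

import numpy as np
import PyKDL
from tf_conversions import posemath as pm

frame = PyKDL.Frame(PyKDL.Rotation.RotZ(0.5), PyKDL.Vector(1.0, 2.0, 3.0))
matrix = pm.toMatrix(frame)         # 4x4 numpy.ndarray (homogeneous transform)
frame_back = pm.fromMatrix(matrix)  # back to a PyKDL.Frame
assert np.allclose(matrix, pm.toMatrix(frame_back))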
Example 1: Rot_k_phi
def Rot_k_phi(self, k_vec, phi):
    """Rodrigues formula: rotation by angle phi about the axis k_vec
    (port of the original Eigen code; k_vec is assumed to be a unit vector)."""
    kx = k_vec[0]
    ky = k_vec[1]
    kz = k_vec[2]
    N = 3
    # Skew-symmetric cross-product matrix of k, stored as a KDL Rotation.
    K = Rotation(0.0, -kz, ky, kz, 0.0, -kx, -ky, kx, 0.0)
    Ksquare = K*K
    # Round-trip through a Frame to obtain numpy views of K and K^2.
    K_Frame = Frame(K)
    K_Frame_numpy = posemath.toMatrix(K_Frame)
    K_numpy = K_Frame_numpy[0:3, 0:3]
    K_square_Frame_numpy = posemath.toMatrix(Frame(Ksquare))
    K_square_numpy = K_square_Frame_numpy[0:3, 0:3]
    I_numpy = np.identity(N)
    # R = I + sin(phi)*K + (1-cos(phi))*K^2; note np.dot -- an elementwise K*K would be wrong here.
    R_k_phi_numpy = I_numpy + math.sin(phi)*K_numpy + (1-math.cos(phi))*np.dot(K_numpy, K_numpy)
    # Embed the 3x3 rotation in a homogeneous 4x4 matrix with a [0 0 0 1] bottom row.
    R_k_phi_numpy_FrameTemp = np.c_[R_k_phi_numpy, np.zeros(N)]
    R_k_phi_numpy_Frame = np.r_[R_k_phi_numpy_FrameTemp, [[0.0, 0.0, 0.0, 1.0]]]
    R_k_phi_Frame = posemath.fromMatrix(R_k_phi_numpy_Frame)
    R_k_phi = R_k_phi_Frame.M
    # Consistency check: the same rotation built from KDL's own K^2.
    R_k_phi_numpy_square = I_numpy + math.sin(phi)*K_numpy + (1-math.cos(phi))*K_square_numpy
    print "R_k_phi_numpy", R_k_phi_numpy
    print "R_k_phi_numpy_square (should match)", R_k_phi_numpy_square
    return R_k_phi
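For comparison, here is a compact sketch of the same Rodrigues rotation computed directly in NumPy, without the Frame round-trip. rot_k_phi is a hypothetical stand-alone helper, not part of the original class, and assumes k_vec is unit length:

import math
import numpy as np

def rot_k_phi(k_vec, phi):
    kx, ky, kz = k_vec
    K = np.array([[0.0, -kz,  ky],
                  [ kz, 0.0, -kx],
                  [-ky,  kx, 0.0]])
    # R = I + sin(phi)*K + (1 - cos(phi))*K^2 (matrix product, not elementwise)
    return np.identity(3) + math.sin(phi)*K + (1.0 - math.cos(phi))*np.dot(K, K)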
Example 2: optitrack_cb
def optitrack_cb(self, msg):
if msg.header.frame_id != self.global_frame:
return
for rigid_body in msg.bodies:
if rigid_body.tracking_valid:
if rigid_body.id == self.table_id:
frame = posemath.fromMsg(rigid_body.pose)
self.Ttable = posemath.toMatrix(frame)
if rigid_body.id == self.stick_id:
frame = posemath.fromMsg(rigid_body.pose)
self.Tshort = posemath.toMatrix(frame)
Example 3: align_pose
def align_pose(self, pose):
objectInCamera = pm.toMatrix(pm.fromMsg(pose))
ModelRecManager.tf_listener.waitForTransform("/world", "/camera_rgb_optical_frame", rospy.Time(0), rospy.Duration(5))
cameraInWorld = pm.toMatrix(pm.fromTf(ModelRecManager.tf_listener.lookupTransform("/world", "/camera_rgb_optical_frame",rospy.Time(0))))
objectInWorld = dot(cameraInWorld, objectInCamera)
"""45 degrees rotated around z axis"""
objectInWorld[0:3,0:3] = mat([[0.7071,-0.7071,0],
[0.7071,0.7071,0],
[0,0,1]]);
worldInCamera = linalg.inv(cameraInWorld)
objectInCameraModified = dot(worldInCamera, objectInWorld)
return pm.toMsg(pm.fromMatrix(objectInCameraModified))
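A side note on the hard-coded matrix above: the 0.7071 entries are sin/cos of 45 degrees. Here is a sketch of building the same rotation with tf.transformations instead of literal constants, assuming the intent really is a pure 45-degree rotation about z as the inline comment says (objectInWorld is the variable from the example):

import math
from tf.transformations import rotation_matrix

# rotation_matrix returns a 4x4 homogeneous matrix; take its 3x3 rotation block.
objectInWorld[0:3, 0:3] = rotation_matrix(math.radians(45.0), (0, 0, 1))[0:3, 0:3]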
Example 4: omni_callback
def omni_callback(joint_state):
global update_pub, last_button_state
sendTf(marker_tf, '/marker', fixed_frame)
sendTf(zero_tf, '/base', fixed_frame)
sendTf(marker_ref, '/marker_ref', fixed_frame)
sendTf(stylus_ref, '/stylus_ref', fixed_frame)
try:
update = InteractionCursorUpdate()
update.pose.header = std_msgs.msg.Header()
update.pose.header.stamp = rospy.Time.now()
update.pose.header.frame_id = 'marker_ref'
        if button_clicked and last_button_state in (update.GRAB, update.KEEP_ALIVE):
            update.button_state = update.KEEP_ALIVE
elif button_clicked:
update.button_state = update.GRAB
elif last_button_state == update.KEEP_ALIVE:
update.button_state = update.RELEASE
else:
update.button_state = update.NONE
updateRefs()
update.key_event = 0
control_tf = ((0, 0, 0), tf.transformations.quaternion_from_euler(0, 0, control_offset))
if button_clicked:
# Get pose corresponding to transform between stylus reference and current position.
stylus_tf = listener.lookupTransform('/stylus_ref', '/stylus', rospy.Time(0))
stylus_matrix = pm.toMatrix(pm.fromTf(stylus_tf))
control_matrix = pm.toMatrix(pm.fromTf(control_tf))
p = pm.toMsg(pm.fromMatrix(numpy.dot(control_matrix, stylus_matrix)))
else:
p = pm.toMsg(pm.fromTf(control_tf))
# Simply scale this up a bit to increase the workspace.
workspace = 4
p.position.x = p.position.x * workspace
p.position.y = p.position.y * workspace
p.position.z = p.position.z * workspace
update.pose.pose = p
last_button_state = update.button_state
# Publish feedback.
update_pub.publish(update)
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
rospy.logerr("Couldn't look up transform. These things happen...")
Example 5: processMarkerFeedback
def processMarkerFeedback(feedback):
global marker_tf, marker_name
marker_name = feedback.marker_name
marker_offset_tf = ((0, 0, marker_offset), tf.transformations.quaternion_from_euler(0, 0, 0))
    marker_tf = pm.toTf(pm.fromMatrix(numpy.dot(pm.toMatrix(pm.fromMsg(feedback.pose)),
                                                pm.toMatrix(pm.fromTf(marker_offset_tf)))))
if feedback.menu_entry_id:
marker_tf = zero_tf
Example 6: get_staubli_cartesian_as_tran
def get_staubli_cartesian_as_tran():
"""@brief - Read the staubli end effector's cartesian position as a 4x4 numpy matrix
Returns 4x4 numpy message on success, empty message on failure
"""
current_pose = get_staubli_cartesian_as_pose_msg()
return pm.toMatrix(pm.fromMsg(current_pose))
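A brief usage sketch (hypothetical calling code): the returned homogeneous matrix splits into a rotation block and a translation column.

tran = get_staubli_cartesian_as_tran()
rotation = tran[0:3, 0:3]   # 3x3 rotation of the end effector
position = tran[0:3, 3]     # end-effector origin in base coordinates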
Example 7: simple_horiz_kvec_motion_psm2
def simple_horiz_kvec_motion_psm2(self, O_needle, r_needle, kvec_yaw, gripper_affines_wrt_camera):
dphi = math.pi/40.0
bvec0 = Vector(1,0,0)
nvec = Vector(0,0,1)
bvec = self.Rotz(kvec_yaw)*bvec0
tvec = bvec*nvec
R0 = Rotation(nvec,tvec,bvec)
R0 = R0.Inverse()
tip_pos = O_needle - r_needle*tvec
self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
self.affine_gripper_frame_wrt_camera_frame_.M = R0
del gripper_affines_wrt_camera[:]
nsolns = 0
nphi = 0
print "nphi: "
for phi in range(0.0,-math.pi,-dphi):
R = self.Rot_k_phi(bvec, phi)*R0
self.affine_gripper_frame_wrt_camera_frame_.M=R
R_Frame = posemath.toMatrix(self.affine_gripper_frame_wrt_camera_frame_)
R_column2_numpy = R_Frame[0:3,1]
R_column2 = Vector(R_column2_numpy[0],R_column2_numpy[1],R_column2_numpy[2])
tip_pos = O_needle - r_needle*R_column2
self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
des_gripper1_wrt_base = self.default_affine_lcamera_to_psm_two_.Inverse()*self.affine_gripper_frame_wrt_camera_frame_
''' ERDEM
if (ik_solver_.ik_solve(des_gripper1_wrt_base))
{ nsolns++;
cout<<nphi<<",";
#cout<<": found IK; nsolns = "<<nsolns<<endl;
gripper_affines_wrt_camera.push_back(affine_gripper_frame_wrt_camera_frame_);
}'''
nphi += 1
print "\n"
Example 8: compute_needle_drive_gripper_affines
def compute_needle_drive_gripper_affines(self, gripper_affines_wrt_camera, gripper_affines_wrt_psm):
phi_insertion_ = 0.0
self.affine_needle_frame_wrt_tissue_ = self.affine_init_needle_frame_wrt_tissue_
dphi = math.pi/(2.0*(NSAMPS_DRIVE_PLAN-1))
self.R0_needle_wrt_tissue_ = self.affine_needle_frame_wrt_tissue_.M
self.affine_needle_frame_wrt_tissue_.M = self.Rotx(self.psi_needle_axis_tilt_wrt_tissue_)*self.R0_needle_wrt_tissue_
self.R0_needle_wrt_tissue_= self.affine_needle_frame_wrt_tissue_.M
kvec_needle_Frame = posemath.toMatrix(self.affine_needle_frame_wrt_tissue_)
kvec_needle_numpy = kvec_needle_Frame[0:3,2]
kvec_needle = Vector(kvec_needle_numpy[0],kvec_needle_numpy[1],kvec_needle_numpy[2])
    if debug_needle_print:
        print "kvec_needle =", kvec_needle
        print "R0 needle:"
        print self.affine_needle_frame_wrt_tissue_.M
    # Tilt the needle origin along with the axis tilt.
    needle_origin = self.affine_needle_frame_wrt_tissue_.p
    self.affine_needle_frame_wrt_tissue_.p = self.Rotx(self.psi_needle_axis_tilt_wrt_tissue_)*needle_origin
for ipose in range(0,NSAMPS_DRIVE_PLAN):
Rot_needle = self.Rot_k_phi(kvec_needle,phi_insertion_)
#R_needle_wrt_tissue_ = Roty_needle*R0_needle_wrt_tissue_; #update rotation of needle drive
self.R_needle_wrt_tissue_ = Rot_needle*self.R0_needle_wrt_tissue_ #update rotation of needle drive
        if debug_needle_print:
            print "R_needle w/rt tissue:"
            print self.R_needle_wrt_tissue_
#need to check these transforms...
self.affine_needle_frame_wrt_tissue_.M = self.R_needle_wrt_tissue_
self.affine_gripper_frame_wrt_tissue_ = self.affine_needle_frame_wrt_tissue_*self.affine_needle_frame_wrt_gripper_frame_.Inverse()
        if debug_needle_print:
            print("affine_gripper_frame_wrt_tissue_")
            print_affine(self.affine_gripper_frame_wrt_tissue_)
self.affine_gripper_frame_wrt_camera_frame_ = self.affine_tissue_frame_wrt_camera_frame_*self.affine_gripper_frame_wrt_tissue_
if(debug_needle_print):
print("affine_gripper_frame_wrt_camera_frame_")
print_affine(self.affine_gripper_frame_wrt_camera_frame_)
gripper_affines_wrt_camera.append(self.affine_gripper_frame_wrt_camera_frame_)
self.affine_gripper_frame_wrt_psm_frame_ = self.default_affine_lcamera_to_psm_one_.Inverse()*self.affine_gripper_frame_wrt_camera_frame_
gripper_affines_wrt_psm.append(self.affine_gripper_frame_wrt_psm_frame_)
phi_insertion_+=dphi
Example 9: process_grasp_msg
def process_grasp_msg(self, grasp_msg):
"""@brief - Attempt to make the adjustment specified by grasp_msg
1. release the hand
2. backup the hand
3. plan a path to the a place 15cm back from the new pose
4. use guarded motion to move forward
"""
print 'adjustment:'
print grasp_msg
#release the object
tp.open_barrett()
#get the robot's current location and calculate the absolute tran after the adjustment
#update the robot status
tp.update_robot(self.global_data.or_env.GetRobots()[0])
adjustInPalm = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
arm_goal = tp.convert_cartesian_relative_goal(self.global_data, adjustInPalm)
    backup = numpy.matrix([[1, 0, 0, 0],
                           [0, 1, 0, 0],
                           [0, 0, 1, -.05],
                           [0, 0, 0, 1]])
back_from_arm_goal = arm_goal * backup
#move back by 10cm
#move_forward(-.1)
#raw_input("Press enter to go to the target pose...")
#update the robot status
tp.update_robot(self.global_data.or_env.GetRobots()[0])
#just go to the new place without a OpenRave trajectory plan
#adjustInPalm = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
#blocking motion set to be true
#send_cartesian_relative_goal(self.global_data, adjustInPalm, True)
send_cartesian_goal(back_from_arm_goal, True)
raw_input("Press enter to use guarded motion to move forward...")
#guarded move forward
tp.MoveHandSrv(1, [0,0,0, grasp_msg.pre_grasp_dof[0]])
GuardedMoveUntilHit(self.global_data, array([0,0,1]), 'PALM', 0.05, 20)
raw_input("Press enter to close the hand...")
#close the hand
#tp.close_barrett()
#tp.move_hand_velocity([0.5,0.5,0.5,0.0])
GuardedCloseHand(self.global_data)
selection = int(raw_input('Lift up (1) or not (0): '))
if selection == 1:
print 'lift up the object'
success = tp.lift_arm(.1, True)
if not success:
grasp_status = graspit_msgs.GraspStatus.UNREACHABLE
grasp_status_msg = "Couldn't lift object"
else:
print 'not lift up the object'
Example 10: getposecb
def getposecb(self, a, b):
print "Setting Pose based on world model"
    if (rospy.Time.now().to_sec() - b.pose.header.stamp.to_sec()) > 4.0:
        print "time now =", rospy.Time.now().to_sec(), "time from pose =", b.pose.header.stamp.to_sec()
        print "Time stamp of detection too old: diff =", (rospy.Time.now().to_sec() - b.pose.header.stamp.to_sec())
if(self.detection_counter <= 4):
self.detection_counter += 1
return "preempted"
else:
self.detection_counter = 0
return "aborted"
self.config_over_object.header.stamp = rospy.Time.now()
self.config_over_object.pose.position.x = b.pose.pose.position.x
self.config_over_object.pose.position.y = b.pose.pose.position.y
    #ao = PyKDL.Rotation.Quaternion(self.config_over_object.pose.orientation.x, self.config_over_object.pose.orientation.y, self.config_over_object.pose.orientation.z, self.config_over_object.pose.orientation.w)
    #bo = PyKDL.Rotation.Quaternion(b.pose.pose.orientation.x, b.pose.pose.orientation.y, b.pose.pose.orientation.z, b.pose.pose.orientation.w)
    #c = ao #* bo#.Inverse()
m_manip = pm.toMatrix( pm.fromMsg(self.config_over_object.pose) )
m_obj = pm.toMatrix( pm.fromMsg(b.pose.pose) )
    import numpy
    from tf.transformations import quaternion_from_euler
    # m_ori is computed but currently unused; kept from the original for reference.
    m_ori = pm.toMatrix(pm.fromTf(((0, 0, 0), quaternion_from_euler(0, 0, math.radians(-20.0)))))
    box_pose = pm.toMsg(pm.fromMatrix(numpy.dot(m_manip, m_obj)))
    print box_pose
    print self.config_over_object.pose
#print c.GetQuaternion()
self.config_over_object.pose.orientation = box_pose.orientation
#self.config_over_object.pose.orientation.y = c.GetQuaternion()[1]
#self.config_over_object.pose.orientation.z = c.GetQuaternion()[2]
#self.config_over_object.pose.orientation.w = c.GetQuaternion()[3]
#self.config_over_object.pose.position.z = 0.30#0.430
self.config_object.header.stamp = rospy.Time.now()
self.config_object.pose.position.x = b.pose.pose.position.x
self.config_object.pose.position.y = b.pose.pose.position.y
#self.config_object.pose.position.z = 0.2 #0.095
self.config_object.pose.orientation = box_pose.orientation
Example 11: test_roundtrip
def test_roundtrip(self):
c = Frame()
d = pm.fromMsg(pm.toMsg(c))
self.assertEqual(repr(c), repr(d))
d = pm.fromMatrix(pm.toMatrix(c))
self.assertEqual(repr(c), repr(d))
d = pm.fromTf(pm.toTf(c))
self.assertEqual(repr(c), repr(d))
Example 12: analyze_demonstration_pose
def analyze_demonstration_pose(self, demo_grasp):
    if len(self.data) == 0:
        return 1.0
demo_pose = pm.toMatrix(pm.fromMsg(demo_grasp.final_grasp_pose))
demo_position = demo_pose[:3,3]
distances, indices = self.model.kneighbors(demo_position)
indices = unique(indices)
nbrs = [t for t in itertools.compress(self.data, indices)]
valid_nbrs = []
for n in nbrs:
pose = pm.toMatrix(pm.fromMsg(n[0].final_grasp_pose))
            # Keep neighbors whose approach axis (z column) is within ~0.52 rad (30 deg) of the demo's.
            if acos(dot(pose[:3,2], demo_pose[:3,2])) < .52:
valid_nbrs.append(n)
if len(valid_nbrs):
success_probability = len([n for n in valid_nbrs if n[1] & 1])/(1.0*len(valid_nbrs))
else:
success_probability = 0
self.demo_pose_analysis_publisher.publish(success_probability)
return success_probability
Example 13: model_state_publisher
def model_state_publisher(self):
    targetFrame = PyKDL.Frame(
        PyKDL.Rotation.Quaternion(self.target.pose.orientation.x, self.target.pose.orientation.y,
                                  self.target.pose.orientation.z, self.target.pose.orientation.w),
        PyKDL.Vector(self.target.pose.position.x, self.target.pose.position.y, self.target.pose.position.z))
    targetFrameMatrix = pm.toMatrix(targetFrame)
    sourceFrame = PyKDL.Frame(
        PyKDL.Rotation.Quaternion(self.source.pose.orientation.x, self.source.pose.orientation.y,
                                  self.source.pose.orientation.z, self.source.pose.orientation.w),
        PyKDL.Vector(self.source.pose.position.x, self.source.pose.position.y, self.source.pose.position.z))
    # Source pose expressed in the target frame: T_target^-1 * T_source.
    pose = pm.toMsg(pm.fromMatrix(linalg.inv(targetFrameMatrix)) * sourceFrame)
poseStamped = PoseStamped()
poseStamped.pose = pose
poseStamped.header.stamp = rospy.get_rostime()
    poseStamped.header.frame_id = '/' + self.targetLinkName[5:]  # strips 'pr2::' from the frame_id string
self.wPub.publish(poseStamped)
Example 14: pregrasp_object
def pregrasp_object(global_data, object_name, adjust_height, grasp_tran, object_height_col = 2,
pregrasp_dist = -.05, object_height_adjustment = 0.0, run_it = True):
"""@brief - Move the hand to the pregrasp position. This procedure is suprisingly intricate.
Given the object filename and the grasp transform, this transforms the grasp to world coordinates
then runs a planner to avoid collisions in moving the arm the long distance to the
object. This uses the scene graph, which is currently provided by a camera.
Returns whether the pregrasp location was reachable, the filename in which the trajectory from OpenRave
is stored, and the list of the dof values that are contained in the trajectory.
"""
""" Sets up object location in openrave -- this code is all to compensate for the current object localization
method only knowing the location of the base of the object - the height of the object's origin from the base
must be used to adjust it's location in openrave before planning.
"""
print "Pregrasping object: %s"%(object_name)
#Get the object transform in the world.
obj_tran = pm.toMatrix(pm.fromTf(global_data.listener.lookupTransform("/world",object_name, rospy.Time(0))))
    if obj_tran == []:
        warn("Scene graph in TF is broken! Most likely, the object transform could not be found.")
        return [], [], [], []
"""Set up and visualize the grasp transforms"""
pregrasp_tran = get_pregrasp_tran_from_tran(grasp_tran, pregrasp_dist)
publish_grasp_tran(pregrasp_tran)
final_tran = dot(obj_tran, pregrasp_tran)
publish_grasp_tran(final_tran)
success = False
"""Run the planner"""
plan = []
j = []
try:
success, trajectory_filename, dof_list, j = run_cbirrt_with_tran( global_data.or_env, final_tran )
    except Exception, e:
        warn("Failed running planner with error %s" % e)
Example 15: convert_cartesian_world_goal
def convert_cartesian_world_goal(global_data, world_tran):
"""@brief - Get armtip goal pose in the arms coordinate system from hand world goal pose.
This is useful to convert between a hand goal in the world coordinate system to a cartesian goal that
can be sent directly to the staubli controller.
@param world_tf - tf of hand goal pose in world coordinates
"""
try:
world_tf = pm.toTf(pm.fromMatrix(world_tran))
bc = tf.TransformBroadcaster()
now = rospy.Time.now()
bc.sendTransform(world_tf[0], world_tf[1], now, "hand_goal", "world")
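        # Note (assumption): 'arm_goal' is expected to be published elsewhere as a
        # fixed child frame of the 'hand_goal' broadcast above (armtip offset);
        # only 'hand_goal' is broadcast here.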
#listener = tf.TransformListener()
global_data.listener.waitForTransform("armbase", "arm_goal", now, rospy.Duration(1.0))
armtip_robot = global_data.listener.lookupTransform('armbase', 'arm_goal', now)
return pm.toMatrix(pm.fromTf(armtip_robot))
except Exception, e:
handle_fatal('convert_cartesian_world_goal failed: error %s.'%e)