

Python ALProxy.setState Method Code Examples

This article collects typical usage examples of the naoqi.ALProxy.setState method in Python. If you have been wondering exactly how ALProxy.setState is used and what calling it looks like in real code, the curated examples below should help. You can also explore further usage examples of the enclosing class, naoqi.ALProxy.


The following sections present 15 code examples of the ALProxy.setState method, sorted by popularity by default.
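Before the examples, here is a minimal sketch of the pattern most of them share: create an ALProxy to the ALAutonomousLife module and call setState to switch the robot's autonomous-life state, typically "disabled" before taking manual control of the joints. The IP address below is a placeholder rather than a value taken from the examples; 9559 is the default NAOqi port.

# Minimal sketch; assumes the NAOqi Python SDK is installed and a robot
# (real or simulated) is reachable at the placeholder address below.
from naoqi import ALProxy

ROBOT_IP = "192.168.1.10"   # placeholder: replace with your robot's IP
ROBOT_PORT = 9559           # default NAOqi port

life = ALProxy("ALAutonomousLife", ROBOT_IP, ROBOT_PORT)
if life.getState() != "disabled":
    life.setState("disabled")   # "solitary" re-enables autonomous behaviour, as in Example 9
print(life.getState())          # confirm the new state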

Example 1: main

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
def main(robot_IP, robot_PORT=9559):
	# ----------> Connect to robot <----------
	global tts, motion, autonomous
	tts = ALProxy("ALTextToSpeech", robot_IP, robot_PORT)
	motion = ALProxy("ALMotion", robot_IP, robot_PORT)
	autonomous = ALProxy("ALAutonomousLife", robot_IP, robot_PORT)
	autonomous.setState("disabled") # turn ALAutonomousLife off

	# ----------> <----------
	record()
	reappear()
Developer: axmtec0eric | Project: Nao-Robot | Lines: 13 | Source: record_posture.py

Example 2: __init__

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class NaoMonkey:
	PART = {
		'LShoulder': ['LShoulderPitch', 'LShoulderRoll'],
		'RShoulder': ['RShoulderPitch', 'RShoulderRoll'],
		'LElbow': ['LElbowYaw', 'LElbowRoll'],
		'RElbow': ['RElbowYaw', 'RElbowRoll'],
		'Head': ['HeadYaw', 'HeadPitch'],
	}
	LIMITS = {
		'Head': [[-2.0, 2.0], [-0.67, 0.51]],
		'LShoulder': [[-2.0, 2.0], [-0.31, 1.32]],
		'RShoulder': [[-2.0, 2.0], [-1.32, 0.31]],
		'LElbow': [[-2.0, 2.0], [-1.54, -0.03]],
		'RElbow': [[-2.0, 2.0], [0.03, 1.54]],
	}

	def __init__(self):
		rospy.init_node('nao_mykinect', anonymous=True)

		self.listener = rospy.Subscriber('nao', NaoCoords, self.move)
		ip = rospy.get_param('~ip', '10.104.16.141')
		port = int(rospy.get_param('~port', '9559'))

		self.al = ALProxy("ALAutonomousLife", ip, port)
		self.postureProxy = ALProxy("ALRobotPosture", ip, port)
		self.motionProxy = ALProxy("ALMotion", ip, port)
		
		self.al.setState("disabled")
		for part in ["Head", "LArm", "RArm"]:
			self.motionProxy.setStiffnesses(part, 1.0)
		rospy.loginfo(self.motionProxy.getSummary())

	def move(self, coords):
		part = coords.Part.data
		angles1 = coords.Angles1
		angles2 = coords.Angles2
		angles = [float(angles1.data), float(angles2.data)]
		speed = 1.0
		if part not in NaoMonkey.PART:
			error_msg = 'Wat? I Do not have ' + str(part)
			rospy.loginfo(error_msg)
			return
		if len(NaoMonkey.PART[part]) != len(angles):
			error_msg = 'Wat? What shall i do with rest joint?'
			rospy.loginfo(error_msg)
			return
		angles = map(lambda x: float(x)*math.pi/180.0, angles)
		for limit, angle in zip(NaoMonkey.LIMITS[part], angles):
			if angle < limit[0] or angle > limit[1]:
				error_msg = 'Wat? Limits man!'
				rospy.loginfo(error_msg)
		self.motionProxy.setAngles(NaoMonkey.PART[part], angles, speed);
Developer: PatrykBogd | Project: NaoMultimodalInteraction | Lines: 54 | Source: nao.py

Example 3: exe

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
 def exe(self, args=None, addr=None):
     
     # get proxies
     life = ALProxy("ALAutonomousLife", Settings.naoHostName, Settings.naoPort)
     
     # set life state
     if len(args) > 0:
         try:
             if str(args[0]) not in life.getState():
                 life.setState( str(args[0]) )
         except:
             print "can not set life state " + str(args[0]) + ". Set disabled instead."
             life.setState( 'disabled')
Developer: Adriencerbelaud | Project: NAO-Communication-server | Lines: 15 | Source: cmdSetLifeState.py

Example 4: __init__

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class InitPose:
	def __init__(self):
		ip = rospy.get_param('~ip', '10.104.16.141')
		port = int(rospy.get_param('~port', '9559'))

		self.al = ALProxy("ALAutonomousLife", ip, port)
		self.postureProxy = ALProxy("ALRobotPosture", ip, port)
		self.motionProxy = ALProxy("ALMotion", ip, port)
		self.al.setState("disabled")
		self.postureProxy.goToPosture("StandInit", 0.5)
		for part in ["Head", "LArm", "RArm"]:
			self.motionProxy.setStiffnesses(part, 1.0)
		self.init_kinnect_pose()
	
	def init_kinnect_pose(self):
		self.motionProxy.setAngles(['LShoulderRoll', 'RShoulderRoll'], self.to_rad([57.9, -57.9]), 1.0);
		self.motionProxy.setAngles(['LShoulderPitch', 'RShoulderPitch'], self.to_rad([-94.1, -94.1]), 1.0);
		self.motionProxy.setAngles(['LElbowYaw', 'RElbowYaw'], self.to_rad([-11.5, 11.5]), 1.0);
		self.motionProxy.setAngles(['LElbowRoll', 'RElbowRoll'], self.to_rad([-61.3, 61.3]), 1.0);
		self.motionProxy.setAngles(['LWristYaw', 'RWristYaw'], self.to_rad([7.0, -7.0]), 1.0);
		self.motionProxy.setAngles(['LHand', 'RHand'], self.to_rad([0.99, 0.99]), 1.0);

	def to_rad(self, angles):
		return map(lambda x: float(x)*math.pi/180.0, angles)
Developer: PatrykBogd | Project: NaoMultimodalInteraction | Lines: 26 | Source: init_pose.py

Example 5: Tracker

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class Tracker():

    def __init__(self):
	rospy.init_node('tracker_mykinect', anonymous=True)
	self.myFrame = 'kinect_link'
	self.r = rospy.Rate(20)		#rospy.Rate(1) = 1Hz
	
#Skeleton Listener
	self.tf = tf.TransformListener()
#Nao init and connect
	self.nao_init()
	self.startXPosition = 0.0;
	self.startYPosition = 0.0;
	self.naoXPosition = self.startXPosition
	self.naoYPosition = self.startYPosition
	rospy.loginfo("Start tracking for 3s...")
        rospy.sleep(3.0)
	rospy.loginfo("Tracking started!")
	self.hoh = 0.86 #Nao height in meters
	for i in range(1, 4):	
#Get all necessary joints. (Right side = Left side if you stand facing the sensor)
		try:
			self.j_head = "{0}_{1}".format(rospy.get_param('~target_joint', '/head'), i)
			self.j_l_hand = "{0}_{1}".format(rospy.get_param('~target_joint', '/left_hand'), i) 
			self.j_r_hand = "{0}_{1}".format(rospy.get_param('~target_joint', '/right_hand'), i)
			self.j_l_shoul = "{0}_{1}".format(rospy.get_param('~target_joint', '/left_shoulder'), i)
			self.j_torso = "{0}_{1}".format(rospy.get_param('~target_joint', '/torso'), i)
			self.j_l_ehand = "{0}_{1}".format(rospy.get_param('~target_joint', '/left_elbow'), i)
			self.j_r_ehand = "{0}_{1}".format(rospy.get_param('~target_joint', '/right_elbow'), i)
			break
		except:
			pass
    
    def toRAD(self,angle):
	return angle*almath.TO_RAD

    def nao_init(self):
	#NAO CONNECT AND INIT
	# PORT AND IP NAO ROBOT
	ip = rospy.get_param('~ip', '127.0.0.1')
	port = int(rospy.get_param('~port', '51888'))
	#ip = rospy.get_param('~ip', '10.104.16.53')
	#port = int(rospy.get_param('~port', '9559'))

	self.al = ALProxy("ALAutonomousLife", ip, port)
	self.postureProxy = ALProxy("ALRobotPosture", ip, port)
	self.tts = ALProxy("ALTextToSpeech", ip, port)
	self.motionProxy = ALProxy("ALMotion", ip, port)
	self.mistake = almath.Pose2D(self.motionProxy.getRobotPosition(False))
	#self.audioProxy = ALProxy("ALSpeechRecognition",ip,port)
	#self.audioProxy.setLanguage("English")
	#vocabulary = ["go"]
	#self.audioProxy.setVocabulary(vocabulary, False)
	self.al.setState("disabled")
	self.postureProxy.goToPosture("StandInit", 0.5)	
	#ALProxy("ALBasicAwareness", ip, port).stopAwareness()
	#self.motionProxy.setFallManagerEnabled(False)
	self.motionProxy.setExternalCollisionProtectionEnabled("All",False)

    def nao_hello(self):
#Nao hello module
	self.postureProxy.goToPosture("Standing",1.0)
	self.motionProxy.setAngles('RShoulderPitch',self.toRAD(-97.3),1.0)
	self.motionProxy.setAngles('RShoulderRoll',self.toRAD(-39.2),1.0)
	self.motionProxy.setAngles('RElbowRoll',self.toRAD(50.8),1.0)
	self.motionProxy.setAngles('RElbowYaw',self.toRAD(8.5),1.0)
	self.motionProxy.setAngles('RWristYaw',self.toRAD(-13.4),1.0)
	self.motionProxy.openHand('RHand')
	thread.start_new_thread((lambda:self.tts.say("Hello my friend")),())
	for i in range(0,2):
		self.motionProxy.angleInterpolation("RShoulderRoll",self.toRAD(-12.2),0.3,True)
		self.motionProxy.angleInterpolation("RShoulderRoll",self.toRAD(39.2),0.3,True)
		self.motionProxy.angleInterpolation("RShoulderRoll",self.toRAD(-50.4),0.3,True)
	
	self.postureProxy.goToPosture("Standing",1.0)



    def nao_sensei(self):
#Nao hello(sensei) module
	self.postureProxy.goToPosture("Standing",1.0)
	self.motionProxy.setAngles(['RShoulderPitch','LShoulderPitch'],[self.toRAD(52.7),self.toRAD(49.1)],1.0)
	self.motionProxy.setAngles(['RShoulderRoll','LShoulderRoll'],[self.toRAD(13.3),self.toRAD(-8.9)],1.0)
	self.motionProxy.setAngles(['RElbowRoll','LElbowRoll'],[self.toRAD(88.3),self.toRAD(-87.6)],1.0)
	self.motionProxy.setAngles(['RElbowYaw','LElbowYaw'],[self.toRAD(68.1),self.toRAD(-60.9)],1.0)
	self.motionProxy.setAngles(['RWristYaw','LWristYaw'],[self.toRAD(-10.3),self.toRAD(11.0)],1.0)
	self.motionProxy.setAngles(['RHand','LHand'],[self.toRAD(-10.3),self.toRAD(11.0)],1.0)
	self.motionProxy.openHand('RHand')
	self.motionProxy.openHand('LHand')
	#sequence head
	self.tts.say("Domo Arigato")
	#self.motionProxy.angleInterpolation("HeadPitch",0.0,1.0,True)
	self.motionProxy.angleInterpolation("HeadPitch",self.toRAD(26.5),1.0,True)
	self.motionProxy.angleInterpolation("HeadPitch",0.0,1.0,True)
	
	self.postureProxy.goToPosture("Standing",1)
	
    def hello_process(self):
#HELLO module
	self.count = 0
#......... remaining code omitted .........
Developer: PatrykBogd | Project: NaoMultimodalInteraction | Lines: 103 | Source: Gesty.py

Example 6: main

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
def main():
	# ----------> Command-line argument parsing <----------
	global ROBOT_IP
	parser = OptionParser()
	parser.add_option("--pip",
		help="Parent broker IP. The IP address of your robot",
		dest="pip")
	parser.add_option("--pport",
		help="Parent broker port. The port NAOqi is listening to",
		dest="pport",
		type="int")
	parser.set_defaults(
		pip=ROBOT_IP,
		pport=9559)

	(opts, args_) = parser.parse_args()
	pip   = opts.pip
	pport = opts.pport

	# If an IP is passed on the command line, update the ROBOT_IP global;
	# other modules rely on the ROBOT_IP variable.
	ROBOT_IP = pip

	# ----------> Create the Python broker <----------
	# We need this broker to be able to construct
	# NAOqi modules and subscribe to other modules.
	# The broker must stay alive until the program exits.
	myBroker = ALBroker("myBroker",
		"0.0.0.0",   # listen to anyone
		0,           # find a free port and use it
		pip,         # parent broker IP
		pport)       # parent broker port


	# ----------> Create robot ALProxy modules <----------
	global tts, motion, memory, battery, autonomous, posture, sonar
	global leds			# global variable shared with leds.py
	tts = ALProxy("ALTextToSpeech")
	# English language pack by default
	tts.setLanguage("English")
	motion = ALProxy("ALMotion")
	posture = ALProxy("ALRobotPosture")
	memory = ALProxy("ALMemory")
	battery = ALProxy("ALBattery")
	sonar = ALProxy("ALSonar")
	leds = ALProxy("ALLeds")
	# turn ALAutonomousLife off
	autonomous = ALProxy("ALAutonomousLife")
	autonomous.setState("disabled") 			

	# ----------> Custom helper classes <----------
	global avoid
	avoid = avoidance(ROBOT_IP, ROBOT_PORT) 	# ultrasonic obstacle-avoidance helper

	global mp3player
	mp3player = MP3player(ROBOT_IP, ROBOT_PORT)	# MP3 player module
	
	global touch
	touch = touchPasswd("touch")				# touch-login (password) module
	touch.setPassword('132312')	
	
	global video
	video = VideoSend(ROBOT_IP, ROBOT_PORT)

	# TopCamera:0 	/	BottomCamera:1
	# XtionCamera: 2 (optional)
#	video.addXtionCamera()
	video.setCamera(0)
	video.setFPS(30)
	video.start()	# start the video streaming server
	
	global chatrobot
	chatrobot = ChatRobot()						# Chat robot
#	chatrobot.setRobot('SIMSIMI')
	chatrobot.setRobot('TULING')

#	skip verification, handy when debugging
	touch.skipVerify()

	# ----------> Open the socket server and start listening <----------
	sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
	sock.bind((ROBOT_IP, LISTEN_PORT))
	sock.listen(10)
	# For now we only listen on the port without accepting client connections; a socket connection is accepted only after the verification module has passed.

	global connection
	global CONNECT_FLAG
	try:
		while True:		# main loop
			if touch.isVerify() == True: # once verified, start accepting socket connections
				setEarFlushFlag(False)
				setFaceFlushFlag(False)
				time.sleep(2)
				thread.start_new_thread(LED_face_SwitchColor, (leds, 'green', 2))
				# wait for a client connection; a single thread serves a single client
				connection,address = sock.accept()
				tts.say("OK, socket connected")
				print "socket connected, waiting for command"
				# start sending video
				video.setSendFlag(True)
#......... remaining code omitted .........
Developer: axmtec0eric | Project: Nao-Robot | Lines: 103 | Source: xpserver.py

Example 7: len

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
#!/usr/bin/env python
#-*- coding: utf-8 -*-

from naoqi import ALProxy
import sys

PORT = 9559

if len(sys.argv) > 1:
    IP = sys.argv[1]
else:
    print "Usage: python nao_kill.py nao_ip"
    exit()

autolife=ALProxy("ALAutonomousLife", IP, PORT)
autolife.setState("disabled")
Developer: kochigami | Project: science_agora | Lines: 18 | Source: nao_kill.py

Example 8: emotional_demo_module

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class emotional_demo_module(ALModule):
    """ A simple module to change the eye LEDs colour to represent emotions.

    """
    def __init__(self, name):
        ALModule.__init__(self, name)

        # Create proxies for the instance.
        global memory
        memory = ALProxy("ALMemory")

        self.tts = ALProxy("ALTextToSpeech")
        self.leds = ALProxy("ALLeds")
        self.motion = ALProxy("ALMotion")

        # Write empty valence and arousal values to memory.
        valence = 0
        arousal = 0
        param1 = 'null'
        current_emotion = [(valence, arousal), ("valence_mood", "arousal_mood"), ("personality"), (param1, "param2")]
        memory.insertData("Emotion/Current", current_emotion)

        # Disable ALAutonomousLife to better demonstrate emotional actions.
        self.autonomous_life = ALProxy("ALAutonomousLife")
        if (self.autonomous_life.getState() != "disabled"):
            self.autonomous_life.setState("disabled")
        time.sleep(1.0)
        self.motion.wakeUp()

        # Run behaviour when a tactile touched.
        memory.subscribeToEvent("VAChanged", self.getName(), "express_current_emotion")

    def express_current_emotion(self, *_args):
        """ Expresses the current emotion from the current valence and arousal values in ALMemory.
                        
        """

        # SETUP
        # Events.
        memory.unsubscribeToEvent("VAChanged", self.getName())

        # Motion.
        motion_names = list()
        motion_times = list()
        motion_keys = list()
        
        # Eyes.
        eye_colour_lookup_table = [[(0xF82C35),(0xF82C35),(0xD55528),(0xD55528),(0xFF622B),(0xFF622B),(0xFFB047),(0xFFB047),(0xFFB047),(0xFFB047),(0xFFB047)],
                                [(0xF82C35),(0xF82C35),(0xD5542A),(0xD5542A),(0xE96A37),(0xFF8232),(0xFF8232),(0xFEB340),(0xFEB340),(0xFEB340),(0xFFFF00)],
                                [(0xF62D35),(0xF62D35),(0xF62D35),(0xE96A37),(0xE96A37),(0xFF984D),(0xFF8232),(0xFDC147),(0xFFB144),(0xFFFF00),(0xFFFF00)],
                                [(0xF72C32),(0xF72C32),(0xFF4048),(0xFE5761),(0xED8659),(0xFEB278),(0xFECE6A),(0xFECE6A),(0xFEE566),(0xFFFF00),(0xFFFF00)],
                                [(0xF6255C),(0xF6255C),(0xF9386F),(0xFD585E),(0xF78C84),(0xFFB379),(0xFEDEA1),(0xFEE67C),(0xFFE564),(0xFFFF00),(0xFFFF00)],
                                [(0xF6255C),(0xF93871),(0xF93871),(0xFE9EB9),(0xFE9EB9),(0xFFFFFF),(0xD0E7B3),(0xA5D277),(0x85B957),(0x6EAB34),(0x6EAB34)],
                                [(0xA82C72),(0xA82C72),(0xC03381),(0xDB5CA1),(0xE8A1C3),(0xD1E5F0),(0xCFDADE),(0x73B8B3),(0x87B958),(0x6EAB34),(0x6EAB34)],
                                [(0xA82C72),(0xA82C72),(0xC03381),(0x9C3F74),(0xB36893),(0xD1E4F2),(0x91C3E6),(0x91C3E6),(0x219A95),(0x00948E),(0x6BAC34)],
                                [(0xA82C72),(0xA82C72),(0x86305D),(0x86305D),(0x94C8D6),(0x93C8D8),(0x92C2E6),(0x3196CE),(0x009591),(0x009591),(0x009591)],
                                [(0xA62D72),(0x692850),(0x692850),(0x692850),(0x2D9DB1),(0x2C9FB2),(0x2F96CE),(0x0085BE),(0x00968D),(0x00968D),(0x00968D)],
                                [(0x692850),(0x692850),(0x692850),(0x692850),(0x037F9B),(0x037F9B),(0x0085BE),(0x0085BE),(0x0085BE),(0x0085BE),(0x0085BE)]
                                ]

        # Speech.
        # Speech parameter lookup table. Format (pitch modifier, volume modifier)
        speech_parameter_lookup_table = [((1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00)),
                                        ((1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00),(1.00,1.00)),
                                        ((1.00,0.75),(0.81,0.75),(0.00,0.00),(0.00,0.00),(-0.25,0.00),(0.50,1.00),(0.62,0.50),(0.75,),(0.75,),(0.75,0.75),(1.00,0.75)),
                                        ((1.00,0.50),(0.63,0.50),(-0.20,-0.50),(-1.00,-1.00),(-0.25,-0.50),(0.25,0.50),(0.25,0.50),(0.50,),(0.50,0.50),(0.50,0.50),(0.00,0.50)),
                                        ((1.00,0.25),(0.44,0.25),(0.40,-0.50),(0.30,-0.50),(0.25,-0.50),(0.25,0.00),(0.25,0.00),(0.25,0.25),(0.25,0.25),(0.25,0.25),(0.00,0.25)),
                                        ((1.00,0.00),(0.25,0.00),(0.10,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.10,0.00),(0.10,0.00),(0.10,0.00),(0.00,0.00)),
                                        ((0.25,-0.25),(0.06,-0.25),(-0.10,-0.25),(-0.20,0.00),(-0.20,0.00),(-0.10,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00)),
                                        ((-0.25,-0.50),(-0.13,-0.50),(-0.35,-0.50),(-0.20,-0.25),(-0.10,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00)),
                                        ((-0.25,-0.75),(-0.31,-0.75),(-0.35,-0.75),(-0.10,-0.50),(-0.10,-0.25),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00)),
                                        ((-0.50,-1.00),(-0.50,-1.00),(-0.40,-1.00),(-0.20,-0.75),(-0.10,-0.50),(0.00,-0.25),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00)),
                                        ((-0.50,-1.00),(-0.50,-1.00),(-0.50,-1.00),(-0.25,-0.75),(0.00,-0.50),(0.00,-0.25),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00),(0.00,0.00))]
                      
        # CALCULATIONS
        # Get current emotional values and generic calcs.
        current_emotion = memory.getData("Emotion/Current")
        print "current_emotion (module): ", current_emotion
        valence = current_emotion[0][0]
        arousal = current_emotion[0][1]
        emotion_name = current_emotion[3][0]
        # Valence and arousal are normalised between -1 and 1, with an axis intersection at (0, 0). Convert axis intersection
        # to index.
        valence_index = (int(valence * 5) + 5)
        arousal_index = 10 - (int(arousal * 5) + 5)

        # Speech.
        # The pitch and volume modifier values need scaled, final value to be determined. e.g. a value of 4 will divide the parameter by 4 to give a +/- of 25% of the default value
        speech_parameter_scaling_value = 4
        string_to_say = "I am feeling " + emotion_name
        scaled_pitch_modifier = 1 + (speech_parameter_lookup_table[arousal_index][valence_index][0] / speech_parameter_scaling_value)
        # NAO can only increase pitch! So need to check if a pitch reduction required and negate it. Range 1.0 - 4.0.
        if scaled_pitch_modifier < 1.0:
            scaled_pitch_modifier = 1.0
        # NAO volume (gain) range 0.0 - 1.0.
        scaled_volume_modifier = 0.5 + (speech_parameter_lookup_table[arousal_index][valence_index][1] / speech_parameter_scaling_value)
        self.tts.setParameter("pitchShift", scaled_pitch_modifier)
        self.tts.setVolume(scaled_volume_modifier)
        
        # Eyes.        
#......... remaining code omitted .........
Developer: Sandy4321 | Project: nao-emotional-framework | Lines: 103 | Source: standalone_emotional_demo.py

Example 9: __init__

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class NaoBehavior:
    def __init__(self,robotIP):
        # Create proxy to ALBehaviorManager
        self.managerProxy = ALProxy("ALBehaviorManager", robotIP, 9559)
        self.motionProxy  = ALProxy("ALMotion", robotIP, 9559)
        self.animatedSpeechProxy = ALProxy("ALAnimatedSpeech", robotIP, 9559)
        rospy.Subscriber("/nao_behavior/run_behavior", String, self.run_callback)
        rospy.Subscriber("/nao_behavior/stop_behavior", String, self.stop)
        self.wakeup()

        self.auto = ALProxy("ALAutonomousLife", robotIP, 9559)

        self.activeBehavior = 0
        self.behaviorCount =0
        self.nextBehavior = 1
        self.autostate()
    def disableAuto(self):
        self.auto.stopAll()

    def autostate(self):

        self.auto.setState('solitary')


    def run_callback(self,data):
        self.auto.setState('disabled')

        self.say(data.data)
        self.autostate()
        return

        if data.data == 'wakeup':
            self.wakeup()
            return
        if data.data == 'rest':
            self.rest()
            return
        if data.data == "help":
            self.getBehaviors()
            #  self.Action(data.data)
            return

        self.launchBehavior(data.data)
    def say(self,data):
        # set the local configuration
        configuration = {"bodyLanguageMode": "contextual"}
        # say the text with the local configuration
        self.animatedSpeechProxy.say(data, configuration)


    def launchBehavior(self, behaviorName):
        ''' Launch and stop a behavior, if possible. '''
        self.behaviorCount = self.behaviorCount +1
        behaviorNumber = self.behaviorCount

        while True:
            if self.activeBehavior ==0 and self.nextBehavior == behaviorNumber:
                self.activeBehavior =1
                # Check that the behavior exists.
                if (self.managerProxy.isBehaviorInstalled(behaviorName)):
                    # Check that it is not already running.
                    if (not self.managerProxy.isBehaviorRunning(behaviorName)):
                        # Launch behavior. This is a blocking call, use post if you do not
                        # want to wait for the behavior to finish.
                        rospy.loginfo("Running Behavior")
                        self.managerProxy.post.runBehavior(behaviorName)

                        while( self.managerProxy.isBehaviorRunning(behaviorName) == True):
                            time.sleep(0.2)

                    else:
                        rospy.loginfo( "Behavior is already running.")

                else:
                    rospy.loginfo( "Behavior not found.")

                self.nextBehavior = behaviorNumber +1

                self.activeBehavior =0
                return
            elif self.activeBehavior ==1:
                print( str(behaviorNumber) +'queud')




    def stop(self,behaviorName):
            # Stop the behavior.
            if (self.managerProxy.isBehaviorRunning(behaviorName.data)):
                self.managerProxy.stopBehavior(behaviorName.data)
                rospy.loginfo("Behavior stopped.")

                time.sleep(1.0)
            else:
                rospy.loginfo("Behavior is already stopped.")

    def wakeup(self):
        self.motionProxy.wakeUp()
    def rest(self):
        self.motionProxy.rest()
#......... remaining code omitted .........
Developer: vainmouse | Project: Nao | Lines: 103 | Source: behaviors.py

Example 10: toRAD

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class Monkey:

    CALIBRATION_LIMIT = 50
    GAIN = 400

    PART = {
        "LShoulder": ["LShoulderPitch", "LShoulderRoll"],
        "RShoulder": ["RShoulderPitch", "RShoulderRoll"],
        "LElbow": ["LElbowYaw", "LElbowRoll"],
        "RElbow": ["RElbowYaw", "RElbowRoll"],
        "Head": ["HeadYaw", "HeadPitch"],
    }
    LIMITS = {
        "Head": [[-2.0, 2.0], [-0.67, 0.51]],
        "LShoulder": [[-2.0, 2.0], [-0.31, 1.32]],
        "RShoulder": [[-2.0, 2.0], [-1.32, 0.31]],
        "LElbow": [[-2.0, 2.0], [-1.54, -0.03]],
        "RElbow": [[-2.0, 2.0], [0.03, 1.54]],
    }

    def toRAD(self, angle):
        return angle * almath.TO_RAD

    def __init__(self):
        self.calibration = True
        self.vector = None
        self.rot = None
        self.msg_count = 0
        rospy.init_node("imitate_mykinect", anonymous=True)
        self.myFrame = "imitate_nao"
        # Skeleton Listener
        self.tf = tf.TransformListener()
        self.nao_init()
        self.r = rospy.Rate(20)  # rospy.Rate(1) = 1Hz
        rospy.loginfo("Start tracking for 3s...")
        rospy.sleep(3.0)
        rospy.loginfo("Tracking started!")
        for i in range(1, 4):
            # Get all necessary joints. (Right side = Left side if you stand facing the sensor)
            try:
                self.j_head = "{0}_{1}".format(rospy.get_param("~target_joint", "/head"), i)
                self.j_l_hand = "{0}_{1}".format(rospy.get_param("~target_joint", "/left_hand"), i)
                self.j_r_hand = "{0}_{1}".format(rospy.get_param("~target_joint", "/right_hand"), i)
                self.j_l_shoul = "{0}_{1}".format(rospy.get_param("~target_joint", "/left_shoulder"), i)
                self.j_r_shoul = "{0}_{1}".format(rospy.get_param("~target_joint", "/right_shoulder"), i)
                self.j_l_ehand = "{0}_{1}".format(rospy.get_param("~target_joint", "/left_elbow"), i)
                self.j_r_ehand = "{0}_{1}".format(rospy.get_param("~target_joint", "/right_elbow"), i)
                self.j_torso = "{0}_{1}".format(rospy.get_param("~target_joint", "/torso"), i)
                break
            except:
                pass

    def nao_init(self):
        # NAO CONNECT AND INIT
        # PORT AND IP NAO ROBOT
        ip = rospy.get_param("~ip", "127.0.0.1")
        port = int(rospy.get_param("~port", "35137"))
        # ip = rospy.get_param('~ip', '10.104.16.53')
        # port = int(rospy.get_param('~port', '9559'))

        self.al = ALProxy("ALAutonomousLife", ip, port)
        self.postureProxy = ALProxy("ALRobotPosture", ip, port)
        self.tts = ALProxy("ALTextToSpeech", ip, port)
        self.motionProxy = ALProxy("ALMotion", ip, port)
        self.al.setState("disabled")
        self.postureProxy.goToPosture("StandInit", 0.5)

    def nao_imitate(self):
        while not rospy.is_shutdown():
            trans_hand, rot_hand = self.tf.lookupTransform("/openni_depth_frame", self.j_l_hand, rospy.Duration())
            vec_hand = Vector3(*trans_hand)
            trans_torso, rot = self.tf.lookupTransform("/openni_depth_frame", self.j_torso, rospy.Duration())
            vec_torso = Vector3(*trans_torso)

            trans_handR, rot_handR = self.tf.lookupTransform("/openni_depth_frame", self.j_r_hand, rospy.Duration())
            vec_handR = Vector3(*trans_handR)

            x = vec_torso.x - vec_hand.x
            y = vec_torso.y - vec_hand.y
            z = (vec_torso.z - vec_hand.z) * (-1)
            x = (int)(1000 * x / 2.5)
            y = (int)(-1000 * y / 2.5)
            z = (int)(1000 * (z) / 2.5)

            xr = vec_torso.x - vec_handR.x
            yr = vec_torso.y - vec_handR.y
            zr = (vec_torso.z - vec_handR.z) * (-1)
            xr = (int)(1000 * xr / 2.5)
            yr = (int)(-1000 * yr / 2.5)
            zr = (int)(1000 * (zr) / 2.5)
            # print(x)
            # print(y)
            # print(z)
            # print("-----------------")
            try:
                (w1, w2, w3, w4) = self.nao_imitate_with_kinematic_right_hand(
                    self.get_transform_matrix((xr, yr, zr), (rot_handR[0], rot_handR[1], rot_handR[2]))
                )
                self.motionProxy.setAngles(["RShoulderRoll", "RShoulderPitch"], [w2, w1], 0.1)
                self.motionProxy.setAngles(["RElbowRoll", "RElbowYaw"], [w4, w3], 0.1)
#......... remaining code omitted .........
Developer: PatrykBogd | Project: NaoMultimodalInteraction | Lines: 103 | Source: Teleobecnosc.py

Example 11: __init__

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class action_manager:
    """ A simple module to change the eye LEDs colour to represent emotions.
    """

    def __init__(self):

        self.tts = ALProxy("ALTextToSpeech")
        self.leds = ALProxy("ALLeds")
        self.motion = ALProxy("ALMotion")
        self.posture = ALProxy("ALRobotPosture")

        self.state_activity = " "
        self.activity = " "
        self.do_it_once = True
        self.motion.setStiffnesses("Body", 0.5)

        # Get the ~private namespace parameters from command line or launch file.
        topic = rospy.get_param("~topic", "current_emotion")
        rospy.loginfo("I will subscribe to the topic %s", topic)

        rospy.Subscriber(topic, PointStamped, self.current_emotion_callback)
        rospy.Subscriber("state_activity", String, self.current_state_callback)  # listen for when to stop
        rospy.Subscriber("activity", String, self.activity_callback)
        rospy.Subscriber("stop_learning", Empty, self.stop_request_callback)

        # Disable ALAutonomousLife to better demonstrate emotional actions.
        self.autonomous_life = ALProxy("ALAutonomousLife")
        if self.autonomous_life.getState() != "disabled":
            self.autonomous_life.setState("disabled")
        time.sleep(1.0)
        # self.motion.wakeUp()

        # Wait for messages on topic, go to callback function when new messages arrive.
        rospy.spin()

    # TODO: Run current emotion when a tactile is touch

    # Create a callback function for the subscriber.
    def current_emotion_callback(self, data, *_args):
        """ 
        Expresses the current emotion from the current valence and arousal values in ALMemory.                        
        """

        # Motion
        motion_names = list()
        motion_times = list()
        motion_keys = list()

        # Eyes.
        eye_colour_lookup_table = [
            [
                (0xF82C35),
                (0xF82C35),
                (0xD55528),
                (0xD55528),
                (0xFF622B),
                (0xFF622B),
                (0xFFB047),
                (0xFFB047),
                (0xFFB047),
                (0x56C427),
                (0x56C427),
            ],
            [
                (0xF82C35),
                (0xF82C35),
                (0xD5542A),
                (0xD5542A),
                (0xE96A37),
                (0xFF8232),
                (0xFF8232),
                (0xFEB340),
                (0xFEB340),
                (0x56C427),
                (0x56C427),
            ],
            [
                (0xF62D35),
                (0xF62D35),
                (0xF62D35),
                (0xE96A37),
                (0xE96A37),
                (0xFF984D),
                (0xFF8232),
                (0xFDC147),
                (0xFFB144),
                (0x56C427),
                (0x56C427),
            ],
            [
                (0xF72C32),
                (0xF72C32),
                (0xFF4048),
                (0xFE5761),
                (0xED8659),
                (0xFEB278),
                (0xFECE6A),
                (0xFECE6A),
                (0xFEE566),
                (0x56C427),
#......... remaining code omitted .........
Developer: zhuangfangwang | Project: emotional-manager | Lines: 103 | Source: action_manager.py

Example 12: Tracker

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class Tracker():

    def __init__(self):
	rospy.init_node('control_by_keys', anonymous=True)
	self.myFrame = 'control'
	self.r = rospy.Rate(1)		#rospy.Rate(1) = 1Hz

#Nao init and connect
	self.nao_init()
	rospy.loginfo("\n\nYou can control nao by keys!\n\n")
	
    
    def toRAD(self,angle):
	return angle*almath.TO_RAD

    def nao_init(self):
	#NAO CONNECT AND INIT
	#PORT AND IP NAO ROBOT
	#ip = rospy.get_param('~ip', '127.0.0.1')
	#port = int(rospy.get_param('~port', '45404'))
	ip = rospy.get_param('~ip', '10.104.16.53')
	port = int(rospy.get_param('~port', '9559'))

	self.al = ALProxy("ALAutonomousLife", ip, port)
	self.postureProxy = ALProxy("ALRobotPosture", ip, port)
	self.tts = ALProxy("ALTextToSpeech", ip, port)
	self.motionProxy = ALProxy("ALMotion", ip, port)
	self.al.setState("disabled")
	self.postureProxy.goToPosture("StandInit", 0.5)	

    def getch(self):   # define non-Windows version
        import sys, tty, termios
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch


    def control(self):
	rospy.loginfo("\nPress key W,A,S,D to control nao. Press 'c' to close application!\n\n")
	key_input = 'o'
	tmp_key = 'o'

	while(key_input != 'c'):
		while(True):
			tmp_key = self.getch()
			if (tmp_key == 'a' or tmp_key == 's' or tmp_key == 'd' or tmp_key == 'w' or tmp_key == 'c'):
				key_input = tmp_key
				break
		if(key_input == 'w'):
			self.motionProxy.moveTo(0.1,0.0,0.0)
		if(key_input == 'a'):
			self.motionProxy.moveTo(0.0,0.0,self.toRAD(-90))
		if(key_input == 's'):
			self.motionProxy.moveTo(-0.1,0.0,0.0)
		if(key_input == 'd'):
			self.motionProxy.moveTo(0.0,0.0,self.toRAD(90))
		self.r.sleep()
	rospy.loginfo("\n\nExit Application!\n\n")
Developer: PatrykBogd | Project: NaoMultimodalInteraction | Lines: 65 | Source: control.py

Example 13: main

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
def main():
	# ----------> Command-line argument parsing <----------
	global ROBOT_IP
	parser = OptionParser()
	parser.add_option("--pip",
		help="Parent broker IP. The IP address of your robot",
		dest="pip")
	parser.add_option("--pport",
		help="Parent broker port. The port NAOqi is listening to",
		dest="pport",
		type="int")
	parser.set_defaults(
		pip=ROBOT_IP,
		pport=9559)

	(opts, args_) = parser.parse_args()
	pip   = opts.pip
	pport = opts.pport
	# If an IP is passed on the command line, update the ROBOT_IP global;
	# other modules rely on the ROBOT_IP variable.
	ROBOT_IP = pip

	# ----------> Create the Python broker <----------
	myBroker = ALBroker("myBroker",
		"0.0.0.0",   # listen to anyone
		0,           # find a free port and use it
		pip,         # parent broker IP
		pport)       # parent broker port


	# ----------> Create robot ALProxy modules <----------
	global tts, motion, autonomous
	tts = ALProxy("ALTextToSpeech")
	# English language pack by default
	tts.setLanguage("English")
	motion = ALProxy("ALMotion")
	# turn ALAutonomousLife off
	autonomous = ALProxy("ALAutonomousLife")
	autonomous.setState("disabled") 			

	# ----------> Custom helper classes <----------
	global avoid
	avoid = avoidance(ROBOT_IP, ROBOT_PORT) 	# ultrasonic obstacle-avoidance helper
	
	global video
	video = VideoImage(ROBOT_IP, ROBOT_PORT)	# image-capture helper

	try:
		video.addXtionCamera()
		video.subscribeCamera()
		video.setCamera(0)
		# start obstacle avoidance in a background thread
		avoid.start()
		video.takeRGBDimage(1000)
	except KeyboardInterrupt:
		print
		print "Interrupted by user, shutting down"
		avoid.stop()				# stop obstacle avoidance
		video.close()				# close the video streaming module
		myBroker.shutdown()			# shut down the broker
		sys.exit(0)
Developer: axmtec0eric | Project: Nao-Robot | Lines: 63 | Source: move_image.py

Example 14: CoreRobot

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class CoreRobot(object):

    def __init__(self):
        self.animated_speech_proxy = None
        self.motion_proxy = None
        self.posture_proxy = None
        self.awareness_proxy = None
        self.autonomous_move_proxy = None
        self.autonomous_life_proxy = None

    def connect(self, host, port):
        """Takes connection params and builds list of ALProxies"""
        print 'Core - Connecting to robot on {0}:{1}...'.format(host, port)
        try:
            self.animated_speech_proxy = ALProxy("ALAnimatedSpeech", host, port)
            self.motion_proxy = ALProxy("ALMotion", host, port)
            self.posture_proxy = ALProxy("ALRobotPosture", host, port)
            self.awareness_proxy = ALProxy("ALBasicAwareness", host, port)
            self.autonomous_move_proxy = ALProxy("ALAutonomousMoves", host, port)
            self.autonomous_life_proxy = ALProxy("ALAutonomousLife", host, port)
        except Exception as exception: # pylint: disable=broad-except
            raise Exception('Could not create proxy: {0}'.format(exception))
        self.set_autonomous_life(False)

    def say(self, animated_speech):
        self.animated_speech_proxy.say(animated_speech) 

    def move(self, rotation, distance):
        motion = self.motion_proxy
        motion.setMotionConfig([["ENABLE_FOOT_CONTACT_PROTECTION", True]])
        motion.setMotionConfig([["ENABLE_STIFFNESS_PROTECTION", True]])
        motion.setCollisionProtectionEnabled('Arms', True)
        motion.setExternalCollisionProtectionEnabled('All', True)
        motion.moveTo(0, 0, rotation)
        motion.moveTo(distance, 0, 0)
        
    def open_hand(self, hand):
        self.motion_proxy.openHand(hand)
 
    def close_hand(self, hand):
        self.motion_proxy.closeHand(hand)

    def set_stiffness(self, joint, stiffness):
        print 'Setting {0} to stiffness {1}...'.format(joint, stiffness)
        self.motion_proxy.stiffnessInterpolation(joint, stiffness, 1.0) 
        
    def set_joint_angle(self, joint, angle_degrees, speed=0.1):
        print 'Setting {0} to {1} angle in degrees...'.format(joint, angle_degrees)
        self.motion_proxy.angleInterpolationWithSpeed(joint, math.radians(angle_degrees), speed)
        
    def set_pose(self, pose):
        self.posture_proxy.goToPosture(pose, 0.5) 
      
    def set_autonomous_life(self, set_on):
        self.print_sub_system_update(set_on, 'Autonomous Life')
        target_state = 'solitary' if set_on else 'disabled'
        self.autonomous_life_proxy.setState(target_state)
        
    def get_autonomous_life_state(self):
        return self.autonomous_life_proxy.getState()
  
    def set_autonomous_moves(self, set_on):
        self.print_sub_system_update(set_on, 'Autonomous Moves')
        target_state = 'backToNeutral' if set_on else 'none'
        self.autonomous_move_proxy.setBackgroundStrategy(target_state)
        
    def set_awareness(self, set_on):
        self.print_sub_system_update(set_on, 'Basic Awareness')
        if set_on:
            self.awareness_proxy.startAwareness() 
        else:
            self.awareness_proxy.stopAwareness()

    def set_breathing(self, set_on):
        self.print_sub_system_update(set_on, 'body breathing')
        self.motion_proxy.setBreathEnabled('Body', set_on)
        self.print_sub_system_update(set_on, 'arm breathing')
        self.motion_proxy.setBreathEnabled('Arms', set_on)

    def set_move_arms_enabled(self, left_arm, right_arm):
        self.motion_proxy.setMoveArmsEnabled(left_arm, right_arm)

    @staticmethod
    def print_sub_system_update(set_on, sub_process):
        on_off = ['off', 'on']
        print 'Turning {0} {1}...'.format(on_off[set_on], sub_process)
Developer: lushdog | Project: NaoController | Lines: 88 | Source: core_robot.py

Example 15: Tracker

# Required import: from naoqi import ALProxy [as alias]
# Or: from naoqi.ALProxy import setState [as alias]
class Tracker():

    def __init__(self):
	rospy.init_node('tracker_mykinect', anonymous=True)
	self.myFrame = 'kinect_link'
	self.r = rospy.Rate(20)		#rospy.Rate(1) = 1Hz

#Skeleton Listener
	self.tf = tf.TransformListener()
#Nao init and connect
	self.nao_init()

	rospy.loginfo("Start tracking for 3s...")
        rospy.sleep(3.0)
	rospy.loginfo("Tracking started!")
	self.hoh = 0.86 #Nao height in meters
	for i in range(1, 4):	
#Get all necessary joints. (Right side = Left side if you stand facing the sensor)
		try:
			self.j_head = "{0}_{1}".format(rospy.get_param('~target_joint', '/head'), i)
			self.j_l_hand = "{0}_{1}".format(rospy.get_param('~target_joint', '/left_hand'), i) 
			self.j_r_hand = "{0}_{1}".format(rospy.get_param('~target_joint', '/right_hand'), i)
			self.j_l_shoul = "{0}_{1}".format(rospy.get_param('~target_joint', '/left_shoulder'), i)
			self.j_torso = "{0}_{1}".format(rospy.get_param('~target_joint', '/torso'), i)
			self.j_l_ehand = "{0}_{1}".format(rospy.get_param('~target_joint', '/left_elbow'), i)
			self.j_r_ehand = "{0}_{1}".format(rospy.get_param('~target_joint', '/right_elbow'), i)
			break
		except:
			pass
    
    def toRAD(self,angle):
	return angle*almath.TO_RAD

    def nao_init(self):
	#NAO CONNECT AND INIT
	# PORT AND IP NAO ROBOT
	ip = rospy.get_param('~ip', '10.104.16.50')
	port = int(rospy.get_param('~port', '9559'))

	self.al = ALProxy("ALAutonomousLife", ip, port)
	self.postureProxy = ALProxy("ALRobotPosture", ip, port)
	self.tts = ALProxy("ALTextToSpeech", ip, port)
	self.motionProxy = ALProxy("ALMotion", ip, port)
	self.al.setState("disabled")
	self.postureProxy.goToPosture("StandInit", 0.5)	

    def nao_hello(self):
#Nao hello module
	self.postureProxy.goToPosture("Standing",1.0)
	self.motionProxy.setAngles('RShoulderPitch',self.toRAD(-97.3),1.0)
	self.motionProxy.setAngles('RShoulderRoll',self.toRAD(-39.2),1.0)
	self.motionProxy.setAngles('RElbowRoll',self.toRAD(50.8),1.0)
	self.motionProxy.setAngles('RElbowYaw',self.toRAD(8.5),1.0)
	self.motionProxy.setAngles('RWristYaw',self.toRAD(-13.4),1.0)
	self.motionProxy.openHand('RHand')
	self.tts.say("Hello my friend")
	for i in range(0,2):
		self.motionProxy.angleInterpolation("RShoulderRoll",self.toRAD(-12.2),0.3,True)
		self.motionProxy.angleInterpolation("RShoulderRoll",self.toRAD(39.2),0.3,True)
		self.motionProxy.angleInterpolation("RShoulderRoll",self.toRAD(-50.4),0.3,True)
	self.postureProxy.goToPosture("Standing",1.0)


    def nao_sensei(self):
#Nao hello(sensei) module
	self.postureProxy.goToPosture("Standing",1.0)
	self.motionProxy.setAngles(['RShoulderPitch','LShoulderPitch'],[self.toRAD(52.7),self.toRAD(49.1)],1.0)
	self.motionProxy.setAngles(['RShoulderRoll','LShoulderRoll'],[self.toRAD(13.3),self.toRAD(-8.9)],1.0)
	self.motionProxy.setAngles(['RElbowRoll','LElbowRoll'],[self.toRAD(88.3),self.toRAD(-87.6)],1.0)
	self.motionProxy.setAngles(['RElbowYaw','LElbowYaw'],[self.toRAD(68.1),self.toRAD(-60.9)],1.0)
	self.motionProxy.setAngles(['RWristYaw','LWristYaw'],[self.toRAD(-10.3),self.toRAD(11.0)],1.0)
	self.motionProxy.setAngles(['RHand','LHand'],[self.toRAD(-10.3),self.toRAD(11.0)],1.0)
	self.motionProxy.openHand('RHand')
	self.motionProxy.openHand('LHand')
	#sequence head
	self.tts.say("Domo Arigato")
	#self.motionProxy.angleInterpolation("HeadPitch",0.0,1.0,True)
	self.motionProxy.angleInterpolation("HeadPitch",self.toRAD(26.5),1.0,True)
	self.motionProxy.angleInterpolation("HeadPitch",0.0,1.0,True)
	
	self.postureProxy.goToPosture("Standing",1)
	
    def hello_process(self):
#HELLO module
	self.count = 0
	self.side = 0		#left 0 right 1
	while not rospy.is_shutdown():
# Get all necessary vectors
		trans_head, rot= self.tf.lookupTransform('/openni_depth_frame', self.j_head, rospy.Duration())
		vec_head = Vector3(*trans_head)
		trans_hand, rot = self.tf.lookupTransform('/openni_depth_frame', self.j_l_hand, rospy.Duration())
		vec_hand = Vector3(*trans_hand)
		trans_shoul, rot = self.tf.lookupTransform('/openni_depth_frame', self.j_l_shoul, rospy.Duration())
		vec_shoul = Vector3(*trans_shoul)
		trans_torso, rot = self.tf.lookupTransform('/openni_depth_frame', self.j_torso, rospy.Duration())
		vec_torso = Vector3(*trans_torso)
# IF right hand upper that torso
		if (vec_hand.z > vec_torso.z):
	#left side
			if (not self.side):
#......... remaining code omitted .........
Developer: kamilmowinski | Project: nao_gesture | Lines: 103 | Source: dtrack.py


Note: The naoqi.ALProxy.setState examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Refer to each project's License before distributing or using the code. Do not reproduce without permission.