本文整理汇总了Python中naoqi.ALProxy.setWholeBodyOn方法的典型用法代码示例。如果您正苦于以下问题:Python ALProxy.setWholeBodyOn方法的具体用法?Python ALProxy.setWholeBodyOn怎么用?Python ALProxy.setWholeBodyOn使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类naoqi.ALProxy
的用法示例。
在下文中一共展示了ALProxy.setWholeBodyOn方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: FaceTracker
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
class FaceTracker():
    """Face-tracking helper for a NAO robot.

    Wraps the NAOqi ALFaceTracker and ALMotion proxies. When constructed
    with use_nao=False, no proxies are created and every public method is
    a no-op, so the class can be exercised without a robot connection.
    """

    def __init__(self, use_nao, ip = "10.0.1.3", port = 9559):
        # use_nao: guard flag -- when False, all methods return immediately.
        self.use_nao = use_nao
        if use_nao:
            self.faceProxy = ALProxy("ALFaceTracker", ip, port)
            self.motion = ALProxy("ALMotion", ip, port)

    def start_tracking(self):
        """Stiffen the head, start the face tracker and enable whole-body tracking."""
        if self.use_nao:
            print("StartTracking")
            self.motion.setStiffnesses("Head", 1.0)
            self.faceProxy.startTracker()
            self.faceProxy.setWholeBodyOn(True)

    def stop_tracking(self):
        """Stop the tracker, relax the head and return it to the default pose."""
        if self.use_nao:
            self.faceProxy.stopTracker()
            self.motion.setStiffnesses("Head", 0.0)
            self.to_default_pos()
            print("Tracking stopped")

    def to_default_pos(self):
        """Move the head to a neutral, slightly raised position."""
        if self.use_nao:
            self.motion.setStiffnesses("Head", 0.5)
            self.motion.setAngles("HeadYaw", 0.0, 0.1)
            self.motion.setAngles("HeadPitch", -0.25, 0.1)

    def shake_no(self):
        """Shake the head left-right ("no"), then resume tracking."""
        if self.use_nao:
            names = "HeadYaw"
            currentAngle = self.motion.getAngles("Head", True)[0]
            # Oscillate around the current yaw and finish back on it.
            # (A duplicate, immediately-overwritten `angles` list was removed.)
            angles = [currentAngle + 0.25, currentAngle, currentAngle - 0.25,
                      currentAngle, currentAngle + 0.25, currentAngle]
            # Strictly increasing key times for angleInterpolation. float()
            # avoids Python 2 integer division, which collapsed every step
            # to the same (non-increasing) time value.
            times = [float(i) / len(angles) + 0.2 for i in range(1, len(angles) + 1)]
            self.faceProxy.stopTracker()
            print("no")
            self.motion.setStiffnesses("Head", 1.0)
            self.motion.angleInterpolation(names, angles, times, True)
            self.motion.setStiffnesses("Head", 0.0)
            self.start_tracking()

    def shake_yes(self):
        """Nod the head down and back ("yes"), then resume tracking."""
        if self.use_nao:
            names = "HeadPitch"
            currentAngle = self.motion.getAngles("Head", False)[1]
            angles = [currentAngle + 0.15, currentAngle]
            # float() keeps the first key time positive (0.25 s, 0.5 s);
            # integer division produced an invalid 0.0 first time.
            times = [float(i) / len(angles) * 0.5 for i in range(1, len(angles) + 1)]
            self.faceProxy.stopTracker()
            self.motion.setStiffnesses("Head", 1.0)
            self.motion.angleInterpolation(names, angles, times, True)
            self.motion.setStiffnesses("Head", 0.0)
            self.start_tracking()
示例2: __init__
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
class BallTracker:
    """GTK front-end around the NAOqi red-ball tracker.

    Connects to a NAO through an ALBroker, starts ALRedBallTracker with
    whole-body motion disabled (head-only tracking) and offers search and
    walk-to-ball behaviours.

    NOTE(review): methods follow the GTK signal-handler convention
    (`widget` parameter) but are also called internally with `self`
    passed as `widget`; the argument is never used inside the visible
    code -- confirm against the Glade handler wiring before changing
    any signature.
    """
    def __init__(self, glade_file_path=c.GLADE_FILE_PATH):
        self.glade_file_path = glade_file_path
        # Gtk Builder Init
        self.builder = Gtk.Builder()
        self.builder.add_from_file(self.glade_file_path)
        self.builder.connect_signals(self)
        # Add UI Components
        self.window = self.builder.get_object("ballWindow")
        # Show UI
        self.window.show_all()
        # Broker links this process to the robot at 10.0.2.3:9559.
        self.myBroker = ALBroker("myBroker","0.0.0.0",0,"10.0.2.3",9559)
        self.motion = ALProxy("ALMotion")
        self.tracker = ALProxy("ALRedBallTracker")
        self.vision = ALProxy("ALVideoDevice")
        self.tts = ALProxy("ALTextToSpeech")
        self.currentCamera = 0
        self.setTopCamera(self)
        # Track with the head only; do not engage whole-body motion.
        self.tracker.setWholeBodyOn(False)
        self.tracker.startTracker()
        self.ballPosition = []
        self.targetPosition = []
    ### Destroy GUI
    def destroy(self, widget):
        print "destroyed"
        Gtk.main_quit()
    def __del__(self, widget):
        # NOTE(review): __del__ normally takes no extra argument -- with
        # this signature the garbage collector can never call it cleanly,
        # and stopTracker is being passed `self` as an argument. Verify.
        self.tracker.stopTracker(self)
        pass
    # If Nao has ball returns True
    def hasBall(self, widget):
        """Sample the ball detector twice, 0.5 s apart; True if still seen."""
        self.checkForBall(self)
        time.sleep(0.5)
        if self.checkForBall(self):
            print "I see the Ball"
            return True
        else:
            print "Sorry I cant find the ball"
            return False
    # Nao scans around for the redball
    def searchBall(self, widget):
        """Turn in ~100-degree steps, sweeping the head at each stop, until the ball is found."""
        self.motion.stiffnessInterpolation("Body", 1.0, 0.1)
        self.motion.walkInit()
        # First angle is 0 (look from the current orientation before turning).
        for turnAngle in [0,math.pi/1.8,math.pi/1.8,math.pi/1.8]:
            if turnAngle > 0:
                self.motion.walkTo(0,0,turnAngle)
            if self.hasBall(self):
                self.turnToBall(self)
                return
            # Sweep pitch (29 and 12 degrees) and yaw (-40..+40 degrees).
            for headPitchAngle in [((math.pi*29)/180),((math.pi*12)/180)]:
                self.motion.angleInterpolation("HeadPitch", headPitchAngle,0.3,True)
                for headYawAngle in [-((math.pi*40)/180),-((math.pi*20)/180),0,((math.pi*20)/180),((math.pi*40)/180)]:
                    self.motion.angleInterpolation("HeadYaw",headYawAngle,0.3,True)
                    time.sleep(0.3)
                    if self.hasBall(self):
                        self.turnToBall(self)
                        return
    # Nao walks close to ball
    def walkToBall(self, widget):
        """Walk toward the tracked ball, re-aiming (recursively) whenever the head yaw drifts."""
        ballPosition = self.tracker.getPosition()
        # Re-aim once the head has turned more than 10 degrees off-center.
        headYawTreshold = ((math.pi*10)/180)
        # Target half the ball's forward distance plus a small offset.
        x = ballPosition[0]/2 + 0.05
        self.motion.stiffnessInterpolation("Body", 1.0, 0.1)
        self.motion.walkInit()
        self.turnToBall(self)
        self.motion.post.walkTo(x,0,0)
        # NOTE(review): with the distance check commented out below, this
        # loop only exits via the yaw-drift branch -- otherwise it spins.
        while True:
            headYawAngle = self.motion.getAngles("HeadYaw", False)
            if headYawAngle[0] >= headYawTreshold or headYawAngle[0] <= -headYawTreshold:
                self.motion.stopWalk()
                self.turnToBall(self)
                self.walkToBall(self)
                break
            #dist = self.getDistance()
            # if dist < 0.1:
            # self.motion.stopWalk()
            # self.turnToBall()
            # self.safePosition()
            # time.sleep(1)
            # self.tts.say("You have 5 seconds to move the ball")
            # time.sleep(5)
            # # naoWalkToPosition()
            # self.walkToPosition()
            # self.setTopCamera()
            # break
        return
#.........这里部分代码省略.........
示例3: Robot
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
class Robot(ALModule):
    """NAOqi robot controller module.

    Builds proxies for every subsystem the interaction needs (audio,
    speech recognition, segmentation, TTS, memory, motion, posture,
    face tracking, gaze, camera, LEDs, sound detection) and puts the
    robot into a crouch with head-only face tracking.
    """
    def __init__( self, strName, address = "bobby.local", port = 9559):
        ALModule.__init__( self, strName )
        # Output/recording bookkeeping (used by code not shown here).
        self.outfile = None
        self.outfiles = [None]*(3)
        self.count = 99999999
        self.check = False
        # --- audio ---
        self.audio = ALProxy("ALAudioDevice", address, port)
        # 48 kHz, all four microphone channels enabled.
        self.audio.setClientPreferences(self.getName(), 48000, [1,1,1,1], 0, 0)
        # --- speech recognition ---
        # self.sr = ALProxy("SoundReceiver", address, port)
        self.asr = ALProxy("ALSpeechRecognition", address, port)
        self.asr.setLanguage("English")
        # Synonyms accepted as yes/no answers.
        self.yes_no_vocab = {
            "yes": ["yes", "ya", "sure", "definitely"],
            "no": ["no", "nope", "nah"]
        }
        # TODO: add unknown object names into this somehow
        # also add other names for objects dynamically??????
        # Spoken aliases for each known object label.
        self.object_vocab = {
            "digital_clock": ["digital clock", "blue clock", "black alarm clock"],
            "analog_clock": ["analog clock", "black clock", "black alarm clock"],
            "red_soccer_ball": [u"red soccer ball", "red ball"],
            "basketball": ["basketball", "orange ball"],
            "football": ["football"],
            "yellow_book": ["yellow book"],
            "yellow_flashlight": ["yellow flashlight"],
            "blue_soccer_ball": ["blue soccer ball", "blue ball"],
            "apple": ["apple"],
            "black_mug": ["black mug"],
            "blue_book": ["blue book"],
            "blue_flashlight": [u"blue flashlight"],
            "cardboard_box": ["cardboard box"],
            "pepper": ["pepper", "jalapeno"],
            "green_mug": ["green mug"],
            "polka_dot_box": ["polka dot box", "polka dotted box", "spotted box", "brown and white box"],
            "scissors": ["scissors"]
        }
        # Start out listening for yes/no answers only (no word spotting).
        self.asr.setVocabulary([j for i in self.yes_no_vocab.values() for j in i], False)
        # Custom segmentationation module
        self.segmentation = ALProxy("Segmentation", address, port)
        # --- text to speech ---
        self.tts = ALProxy("ALTextToSpeech", address, port)
        # --- memory ---
        self.mem = ALProxy("ALMemory", address, port)
        # --- robot movement ---
        self.motion = ALProxy("ALMotion", address, port)
        self.pose = ALProxy("ALRobotPosture", address, port)
        self.motion.stiffnessInterpolation("Body", 1.0, 1.0)
        self.pose.goToPosture("Crouch", 0.2)
        # --- face tracking ---
        self.track = ALProxy("ALFaceTracker", address, port)
        # Head-only tracking; do not engage whole-body motion.
        self.track.setWholeBodyOn(False)
        # --- gaze analysis ---
        self.gaze = ALProxy("ALGazeAnalysis", address, port)
        # --- camera ---
        self.cam = ALProxy("ALVideoDevice", address, port)
        # --- leds ---
        self.leds = ALProxy("ALLeds", address, port)
        # 0x00RRGGBB color values for the LED groups.
        self.colors = {
            "pink": 0x00FF00A2,
            "red": 0x00FF0000,
            "orange": 0x00FF7300,
            "yellow": 0x00FFFB00,
            "green": 0x000DFF00,
            "blue": 0x000D00FF,
            "purple": 0x009D00FF
        }
        # --- sound detection ---
        self.sound = ALProxy("ALSoundDetection", address, port)
        self.sound.setParameter("Sensibility", 0.99)
    def __del__(self):
        print "End Robot Class"
    def say(self, text, block = True):
        """
        Uses ALTextToSpeech to vocalize the given string.
        If "block" argument is False, makes call asynchronous.
        """
示例4: NaoWalk
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
class NaoWalk():
    """Walk-to-red-ball behaviour for a NAO cooperating with an NXT via an MCC."""
    # INCOMPLETE AT: walkUpToBall(), include MCC-CALL and delete walkToPosition()
    # Usage description:
    # When the NaoWalk Phase started, call
    # hasBall()
    #
    # to determine if NAO has the Redball in vision.
    # If not, call
    # retrieveBall()
    #
    # to find and turn to the ball
    # If NAO has a ball in vision, call
    # walkUpToBall()
    #
    # NAO will walk up closely to the Redball and call the MCC to
    # request the NXT to move to his next position
    # After the NXT left his initial Position, call
    # walkToPosition()
    #
    # and NAO makes his few last steps.
    #
    # Repeat this progress until NAO has reached
    # the target area.
    def __init__(self):
        # Broker links this process to the robot at 194.95.174.146:9559.
        self.myBroker = ALBroker("myBroker","0.0.0.0",0,"194.95.174.146",9559)
        self.motion = ALProxy("ALMotion")
        self.tracker = ALProxy("ALRedBallTracker")
        self.vision = ALProxy("ALVideoDevice")
        self.tts = ALProxy("ALTextToSpeech")
        self.currentCam = 0
        self.__setTopCamera()
        # Head-only tracking; whole-body motion stays disabled.
        self.tracker.setWholeBodyOn(False)
        self.tracker.startTracker()
        self.ballPosition = []
        self.targetPosition = []
        # ALModule.__init__(self,name)
    def __del__(self):
        self.tracker.stopTracker()
        pass
    # determines whether NAO sees a redball or not
    # returns true is he sees one, false if not
    def hasBall(self):
        """Sample the ball detector twice, 0.5 s apart; True if still seen."""
        self.__checkForBall()
        time.sleep(0.5)
        if self.__checkForBall():
            return True
        else :
            return False
    # NAO scans his environment for the redball
    # after calling, NAO either has a redball in vision or there is none in
    # his range of sight.
    # Maybe include MCC-CALL after failure
    def retrieveBall(self):
        """Turn in ~100-degree steps, sweeping the head at each stop, until the ball is found."""
        self.motion.stiffnessInterpolation("Body", 1.0, 0.1)
        self.motion.walkInit()
        # First angle is 0 (look from the current orientation before turning).
        for turnAngle in [0,math.pi/1.8,math.pi/1.8,math.pi/1.8]:
            if turnAngle > 0:
                self.motion.walkTo(0,0,turnAngle)
            if self.hasBall():
                self.__turnToBall()
                return
            # Sweep pitch (29 and 12 degrees) and yaw (-40..+40 degrees).
            for headPitchAngle in [((math.pi*29)/180),((math.pi*12)/180)]:
                self.motion.angleInterpolation("HeadPitch", headPitchAngle,0.3,True)
                for headYawAngle in [-((math.pi*40)/180),-((math.pi*20)/180),0,((math.pi*20)/180),((math.pi*40)/180)]:
                    self.motion.angleInterpolation("HeadYaw",headYawAngle,0.3,True)
                    time.sleep(0.3)
                    if self.hasBall():
                        self.__turnToBall()
                        return
    # lets the nao walk close up to the ball
    # Redball in vision is mandatory to call this method!
    # !! NEED TO INCLUDE MCC-CALL TO MOVE NXT TO NEXT POSITION IN LINE 85 !!
    def walkUpToBall(self):
        """Walk toward the tracked ball; re-aim on head-yaw drift, stop within 0.65 m."""
        ballPosi = self.tracker.getPosition()
        # Re-aim once the head has turned more than 10 degrees off-center.
        headYawTreshold = ((math.pi*10)/180)
        # Target half the ball's forward distance plus a small offset.
        x = ballPosi[0]/2 + 0.05
        self.motion.stiffnessInterpolation("Body", 1.0, 0.1)
        self.motion.walkInit()
        self.__turnToBall()
        self.motion.post.walkTo(x,0,0)
        while True:
            headYawAngle = self.motion.getAngles("HeadYaw", False)
            if headYawAngle[0] >= headYawTreshold or headYawAngle[0] <= -headYawTreshold:
                # Ball drifted out of line: stop, re-aim, retry recursively.
                self.motion.stopWalk()
                self.__turnToBall()
                self.walkUpToBall()
                break
            dist = self.__getDistance()
            if dist < 0.65:
                # Close enough: stop and go to a safe stance.
                self.motion.stopWalk()
                self.__turnToBall()
                self.__safePosition()
                # # !!
                # # notify the MCC that the NXT should continue moving
示例5: ALProxy
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
# Minimal ALFaceTracker demo: connect to the robot, enable whole-body
# tracking, stand up and start following a face.
# NOTE(review): `cv2` and `time` are imported but unused in the lines
# shown here -- presumably needed by the omitted remainder of the example.
import cv2
import time
from naoqi import ALProxy
# Robot connection parameters.
IP = 'bobby.local'
PORT = 9559
print "Connecting to", IP, "with port", PORT
motion = ALProxy("ALMotion", IP, PORT)
faceTracker = ALProxy("ALFaceTracker", IP, PORT)
posture = ALProxy("ALRobotPosture", IP, PORT)
# change tracking from just head to whole body movement
faceTracker.setWholeBodyOn(True)
# stiffen body
motion.setStiffnesses("Body", 1.0)
# stand up in balanced stance
posture.goToPosture("StandInit", 0.5)
# Then, start tracker.
faceTracker.startTracker()
# Seconds the tracker is meant to run (stop logic is in the omitted part).
wait = 20
print "ALFaceTracker successfully started, now show your face to Nao!"
print "ALFaceTracker will be stopped in", wait, "seconds."
示例6: Nao
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
class Nao():
    """Red-ball search / approach behaviour for a NAO robot."""
    # Usage
    # Nao().searchBall()
    # Nao().hasBall()
    # Nao().walkToBall()
    # Nao().protectionOff() should call before moves
    def __init__(self):
        # Broker links this process to the robot at 10.51.5.167:9559.
        self.myBroker = ALBroker("myBroker","0.0.0.0",0,"10.51.5.167",9559)
        self.motion = ALProxy("ALMotion")
        self.tracker = ALProxy("ALRedBallTracker")
        self.vision = ALProxy("ALVideoDevice")
        self.tts = ALProxy("ALTextToSpeech")
        self.currentCamera = 0
        self.setTopCamera()
        # Head-only tracking; whole-body motion stays disabled.
        self.tracker.setWholeBodyOn(False)
        self.tracker.startTracker()
        self.ballPosition = []
        self.targetPosition = []
    def __del__(self):
        self.tracker.stopTracker()
        pass
    # If Nao has ball returns True
    def hasBall(self):
        """Sample the ball detector twice, 0.5 s apart; True if still seen."""
        self.checkForBall()
        time.sleep(0.5)
        if self.checkForBall():
            return True
        else:
            return False
    # Nao scans around for the redball
    def searchBall(self):
        """Turn in ~100-degree steps, sweeping the head at each stop, until the ball is found."""
        self.motion.stiffnessInterpolation("Body", 1.0, 0.1)
        self.motion.walkInit()
        # First angle is 0 (look from the current orientation before turning).
        for turnAngle in [0,math.pi/1.8,math.pi/1.8,math.pi/1.8]:
            if turnAngle > 0:
                self.motion.walkTo(0,0,turnAngle)
            if self.hasBall():
                self.turnToBall()
                return
            # Sweep pitch (29 and 12 degrees) and yaw (-40..+40 degrees).
            for headPitchAngle in [((math.pi*29)/180),((math.pi*12)/180)]:
                self.motion.angleInterpolation("HeadPitch", headPitchAngle,0.3,True)
                for headYawAngle in [-((math.pi*40)/180),-((math.pi*20)/180),0,((math.pi*20)/180),((math.pi*40)/180)]:
                    self.motion.angleInterpolation("HeadYaw",headYawAngle,0.3,True)
                    time.sleep(0.3)
                    if self.hasBall():
                        self.turnToBall()
                        return
    # Nao walks close to ball
    def walkToBall(self):
        """Walk toward the tracked ball, re-aiming (recursively) whenever the head yaw drifts."""
        ballPosition = self.tracker.getPosition()
        # Re-aim once the head has turned more than 10 degrees off-center.
        headYawTreshold = ((math.pi*10)/180)
        # Target half the ball's forward distance plus a small offset.
        x = ballPosition[0]/2 + 0.05
        self.motion.stiffnessInterpolation("Body", 1.0, 0.1)
        self.motion.walkInit()
        self.turnToBall()
        self.motion.post.walkTo(x,0,0)
        # NOTE(review): with the distance check commented out below, this
        # loop only exits via the yaw-drift branch -- otherwise it spins.
        while True:
            headYawAngle = self.motion.getAngles("HeadYaw", False)
            if headYawAngle[0] >= headYawTreshold or headYawAngle[0] <= -headYawTreshold:
                self.motion.stopWalk()
                self.turnToBall()
                self.walkToBall()
                break
            #dist = self.getDistance()
            # if dist < 0.1:
            # self.motion.stopWalk()
            # self.turnToBall()
            # self.safePosition()
            # time.sleep(1)
            # self.tts.say("You have 5 seconds to move the ball")
            # time.sleep(5)
            # # naoWalkToPosition()
            # self.walkToPosition()
            # self.setTopCamera()
            # break
        return
    # nao turns to ball
    def turnToBall(self):
        """Rotate in place to face the tracked ball; False if the ball is not seen."""
        if not self.hasBall():
            return False
        self.ballPosition = self.tracker.getPosition()
        # Bearing to the ball from its lateral/forward offsets.
        b = self.ballPosition[1]/self.ballPosition[0]
        z = math.atan(b)
        self.motion.stiffnessInterpolation("Body", 1.0, 0.1)
        self.motion.walkInit()
        self.motion.walkTo(0,0,z)
示例7: Robot
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
class Robot(ALModule):
    """NAOqi robot controller module.

    Builds proxies for TTS, memory, motion, posture, face tracking, gaze
    analysis, camera, LEDs and sound detection, and exposes small posture
    and speech helpers.
    """
    def __init__( self, strName, address = "bobby.local", port = 9559):
        ALModule.__init__( self, strName )
        # text to speech
        self.tts = ALProxy("ALTextToSpeech", address, port)
        # memory
        self.mem = ALProxy("ALMemory", address, port)
        # robot movement
        self.motion = ALProxy("ALMotion", address, port)
        self.pose = ALProxy("ALRobotPosture", address, port)
        # face tracking
        self.track = ALProxy("ALFaceTracker", address, port)
        # initialize face-tracking to following with head movement only
        self.track.setWholeBodyOn(False)
        # gaze analysis
        self.gaze = ALProxy("ALGazeAnalysis", address, port)
        # set highest tolerance for determining if people are looking at the robot because those people's IDs are the only ones stored
        self.gaze.setTolerance(1)
        # start writing gaze data to robot memory
        self.gaze.subscribe("_")
        # camera
        self.cam = ALProxy("ALVideoDevice", address, port)
        # leds
        self.leds = ALProxy("ALLeds", address, port)
        # 0x00RRGGBB color values for the LED groups.
        self.colors = {
            "pink": 0x00FF00A2,
            "red": 0x00FF0000,
            "orange": 0x00FF7300,
            "yellow": 0x00FFFB00,
            "green": 0x000DFF00,
            "blue": 0x000D00FF,
            "purple": 0x009D00FF
        }
        # sound detection
        self.sound = ALProxy("ALSoundDetection", address, port)
        # initialize sound sensitivity
        self.sound.setParameter("Sensibility", 0.95)
    def __del__(self):
        print "End Robot Class"
    def say(self, text, block = True):
        """
        Uses ALTextToSpeech to vocalize the given string.
        If "block" argument is False, makes call asynchronous,
        i.e. does not wait until it finishes talking to move on to next part of the code.
        """
        if block:
            self.tts.say(text)
        else:
            self.tts.post.say(text)
    def wake(self, stiffness = 1.0):
        """
        Turns stiffnesses on, goes to Crouch position, and lifts head
        """
        self.motion.stiffnessInterpolation("Body", stiffness, 1.0)
        self.pose.goToPosture("Crouch", 0.2)
        self.turnHead(pitch = math.radians(-10))
    def rest(self):
        """
        Goes to Crouch position and turns robot stiffnesses off
        """
        self.motion.rest()
    def sit(self):
        """
        Goes to Crouch position
        """
        self.pose.goToPosture("Crouch")
    def stand(self):
        """
        Robot stands
        """
        self.pose.goToPosture("Stand", 0.4)
    def turnHead(self, yaw = None, pitch = None, speed = 0.2):
        """
        Turns robot head to the specified yaw and/or pitch in degrees at the given speed.
        Yaw can range from 119.5 deg (left) to -119.5 deg (right) and pitch can range from 38.5 deg (down) to -29.5 deg (up).
        """
示例8: Robot
# 需要导入模块: from naoqi import ALProxy [as 别名]
# 或者: from naoqi.ALProxy import setWholeBodyOn [as 别名]
class Robot(ALModule):
    """NAOqi robot controller module.

    Builds proxies for every subsystem the interaction needs (audio,
    speech recognition, segmentation, TTS, memory, motion, posture,
    face tracking, gaze analysis, camera, LEDs, sound detection) and
    puts the robot into a crouch with head-only face tracking.
    """
    def __init__( self, strName, address = "bobby.local", port = 9559):
        ALModule.__init__( self, strName )
        # Are these used for anything?
        # self.outfile = None
        # self.outfiles = [None]*(3)
        # self.count = 99999999
        # self.check = False
        # --- audio ---
        self.audio = ALProxy("ALAudioDevice", address, port)
        # 48 kHz, all four microphone channels enabled.
        self.audio.setClientPreferences(self.getName(), 48000, [1,1,1,1], 0, 0)
        # --- speech recognition ---
        self.sr = ALProxy("SoundReceiver", address, port)
        self.asr = ALProxy("ALSpeechRecognition", address, port)
        self.asr.setLanguage("English")
        # Synonyms accepted as yes/no answers, keyed by the boolean they mean.
        self.yes_no_vocab = {
            True: ["yes", "ya", "sure", "definitely"],
            False: ["no", "nope", "nah"]
        }
        # TODO: add unknown object names into this somehow
        # also add other names for objects dynamically??????
        # Spoken aliases for each known object label.
        self.object_vocab = {
            "digital_clock": ["digital clock", "blue clock", "black alarm clock"],
            "analog_clock": ["analog clock", "black clock", "black alarm clock"],
            "red_soccer_ball": [u"red soccer ball", "red ball"],
            "basketball": ["basketball", "orange ball"],
            "football": ["football"],
            "yellow_book": ["yellow book"],
            "yellow_flashlight": ["yellow flashlight"],
            "blue_soccer_ball": ["blue soccer ball", "blue ball"],
            "apple": ["apple"],
            "black_mug": ["black mug"],
            "blue_book": ["blue book"],
            "blue_flashlight": [u"blue flashlight"],
            "cardboard_box": ["cardboard box"],
            "pepper": ["pepper", "jalapeno"],
            "green_mug": ["green mug"],
            "polka_dot_box": ["polka dot box", "polka dotted box", "spotted box", "brown and white box"],
            "scissors": ["scissors"]
        }
        # Start out listening for yes/no answers only (no word spotting).
        self.asr.setVocabulary([j for i in self.yes_no_vocab.values() for j in i], False)
        # Custom segmentationation module
        self.segmentation = ALProxy("Segmentation", address, port)
        # --- text to speech ---
        self.tts = ALProxy("ALTextToSpeech", address, port)
        # --- memory ---
        self.mem = ALProxy("ALMemory", address, port)
        # --- robot movement ---
        self.motion = ALProxy("ALMotion", address, port)
        self.pose = ALProxy("ALRobotPosture", address, port)
        self.motion.stiffnessInterpolation("Body", 1.0, 1.0)
        self.pose.goToPosture("Crouch", 0.2)
        # --- face tracking ---
        self.track = ALProxy("ALFaceTracker", address, port)
        # Head-only tracking; do not engage whole-body motion.
        self.track.setWholeBodyOn(False)
        # --- gaze analysis ---
        self.gaze = ALProxy("ALGazeAnalysis", address, port)
        # set highest tolerance for determining if people are looking at the robot because those people's IDs are the only ones stored
        self.gaze.setTolerance(1)
        # start writing gaze data to robot memory
        self.gaze.subscribe("_")
        # --- camera ---
        self.cam = ALProxy("ALVideoDevice", address, port)
        # --- leds ---
        self.leds = ALProxy("ALLeds", address, port)
        # 0x00RRGGBB color values for the LED groups.
        self.colors = {
            "pink": 0x00FF00A2,
            "red": 0x00FF0000,
            "orange": 0x00FF7300,
            "yellow": 0x00FFFB00,
            "green": 0x000DFF00,
            "blue": 0x000D00FF,
            "purple": 0x009D00FF
        }
        # --- sound detection ---
        self.sound = ALProxy("ALSoundDetection", address, port)
        self.sound.setParameter("Sensibility", 0.95)
    def __del__(self):
        print "End Robot Class"