This article collects typical usage examples of the LifoQueue.put method from Python's queue module. If you have been wondering what LifoQueue.put does, how to use it, or what real code that calls it looks like, the curated examples below may help. You can also read further about the class the method belongs to, queue.LifoQueue.
Fifteen code examples of LifoQueue.put are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
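Before the collected examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what LifoQueue.put does: items are retrieved in last-in, first-out order, unlike the FIFO queue.Queue.

from queue import LifoQueue

stack = LifoQueue()
for item in ("first", "second", "third"):
    stack.put(item)            # push onto the stack

while not stack.empty():
    print(stack.get())         # prints "third", "second", "first"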
Example 1: ThreadedNormalWorker
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class ThreadedNormalWorker(object):
    def __init__(self, print_errors=False):
        self.print_errors = print_errors
        self.queue = LifoQueue()

    def get_url_bulk(self):
        normals = Normals.objects(access_success=False)
        for i in normals:
            self.queue.put(item=i)

    def grab_from_queue(self):
        while not self.queue.empty():
            url = self.queue.get()
            normals_finder = NormalsSpider(url=url.url,
                                           print_errors=self.print_errors)
            normals_finder.update_normals_data()
            print(url.url)
            self.queue.task_done()

    def start(self, n_threads):
        self.get_url_bulk()
        for i in range(n_threads):
            # Pass the bound method itself; calling it here would run the worker
            # on the main thread and hand Thread a None target.
            thread = Thread(target=self.grab_from_queue)
            thread.start()
        self.queue.join()
Example 2: match_query
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
def match_query(self, query):
    '''Given a search query, return a tuple containing a regex match and
    trigger object that matches the given query. If no match can be found,
    return a tuple of (None, None).'''
    sink = LifoQueue()
    while not self.triggers.empty():
        trigger = self.triggers.get()
        match = trigger.pattern.match(query)
        if match:
            break
        else:
            sink.put(trigger)
            trigger = None
    while not sink.empty():
        self.triggers.put(sink.get())
    if trigger:
        self.triggers.put(trigger)
        return (match, trigger)
    return (None, None)
Example 3: AbstractMessageSender
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class AbstractMessageSender(metaclass=ABCMeta):
    # delegate this to a different class
    def __init__(self):
        self.message_q = LifoQueue()

    def send_message(self, message, sock):
        self.add_message(message)
        sock.sendall(message.message)

    def add_message(self, message):
        self.message_q.put(message)
Example 4: inorder_walk
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
def inorder_walk(a_root_node: BSTreeNode):
    node_stack = LifoQueue()
    current_item = a_root_node
    while True:
        while current_item:
            node_stack.put(current_item)
            current_item = current_item.left_child
        if node_stack.empty():
            break
        tmp_item = node_stack.get()
        yield tmp_item
        current_item = tmp_item.right_child
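A small usage sketch for the generator above. The BSTreeNode class here is a hypothetical minimal stand-in (the original definition is not part of the snippet) that only provides the left_child/right_child attributes the walk relies on; it assumes inorder_walk and the LifoQueue import are already in scope.

class BSTreeNode:
    def __init__(self, key, left_child=None, right_child=None):
        self.key = key
        self.left_child = left_child
        self.right_child = right_child

# Tree:   2
#        / \
#       1   3
root = BSTreeNode(2, BSTreeNode(1), BSTreeNode(3))
print([node.key for node in inorder_walk(root)])  # -> [1, 2, 3]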
Example 5: get_friends
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
def get_friends(inlist, output_path, subpath):
    ##########
    # task assignment
    ##########
    global lock
    lock = Lock()
    global q
    q = LifoQueue()  # screen_name queue
    global friend_list
    friend_list = dict()
    with open("./twitter_api_keys.pydict", "r") as f:
        keys = eval(f.read())
    # initiate task for crawler()
    for input in inlist:
        friend_list[input] = set()
        q.put({input_type: input, "count": 5000, "cursor": -1})
    for key in keys:
        t = Thread(target=uid_crawler, kwargs={"key": key, "name": keys.index(key), "rest_url": "friends/ids"})
        # t.daemon = True
        # time.sleep(2)
        t.start()
    q.join()
    # # RUN THIS AFTER FINISHED.
    try:
        mkdir("{}/{}".format(output_path, subpath))
    except OSError:
        pass
    print("writing to disk.", end=".", flush=True)
    if subpath == 0:
        for key, vals in list(friend_list.items()):
            print(".", end="", flush=True)
            with BZ2File("{}/{}/{}.bz2".format(output_path, subpath, key), "w") as f:
                f.writelines(map(lambda item: (str(item) + "\n").encode("utf-8"), vals))
    else:
        for key, vals in list(friend_list.items()):
            print(".", end="", flush=True)
            with ZipFile("{}/{}/{}.zip".format(output_path, subpath, str(key)[:3]), "a") as zf:
                zf.writestr(str(key), "\n".join([str(item) for item in vals]).encode("utf-8"), ZIP_LZMA)
    print("Done. Waiting remaining threads to quit", end=".", flush=True)
    while activeCount() > 1:
        print(activeCount(), end=".", flush=True)
        time.sleep(2)
    return friend_list
Example 6: index
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
def index(self, conf):
    session = requests.session()
    urls = LifoQueue()
    allowed_domains = conf['allowed_domains'].split(',')
    start = conf['url']
    ignore = re.compile(conf['ignore'])
    found = set([start])
    urls.put(start)
    while not urls.empty():
        url = urls.get()
        r = session.get(url)
        for link in BeautifulSoup(r.content, 'lxml').find_all('a'):
            link_href = link.get('href')
            if not link_href:
                continue
            if link_href.startswith('/'):
                link_href = urljoin(url, link_href)
            parsed = urlparse(link_href)
            if parsed.hostname not in allowed_domains:
                continue
            if conf['ignore'] and ignore.match(link_href):
                continue
            if link_href not in found:
                found.add(link_href)
                urls.put(link_href)
        file = MemoryFile(r.content)
        file.url = url
        file.mimetype = 'text/html'
        file.size = 0
        file.modified = None
        yield file
Example 7: __init__
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class QueryQueue:
    def __init__(self):
        self.queue = LifoQueue()
        self.comm_sender = CommSender()
        th = threading.Thread(target=self.send_require)
        th.start()

    def put(self, item):
        self.queue.put(item)

    def send_require(self):
        while True:
            time.sleep(1)
            c = ConnInfo.objects.all()[0]
            q = QueryInfo.objects.all()[0]
            r = RoomInfo.objects.all()[0]
            # if is logout or unconnected, only flush queue
            if c.is_log == "False" or c.is_conn == "False":
                while not self.queue.empty():
                    self.queue.get()
                continue
            # else get last item and flush queue
            if not self.queue.empty():
                query = self.queue.get()
                while not self.queue.empty():
                    self.queue.get()
                #
                m = ModeInfo.objects.all()[0]
                s = SensorInfo.objects.all()[0]
                ss = SettingInfo.objects.all()[0]
                if m.mode == 'cold' and ss.target_temp > s.current_temp:
                    query = 'standby'
                elif m.mode == 'hot' and ss.target_temp < s.current_temp:
                    query = 'standby'
                #
                q.query_speed = query
                q.save()
                r = self.comm_sender.send_msg(data={'type': 'require', 'source': r.room_number, 'speed': query})
                # if query is standby, we should change to standby immediately
                if query == 'standby' and r.json()['ack_nak'] == 'ACK':
                    q.current_speed = 'standby'
                    q.query_speed = 'None'
                    q.save()
Example 8: breadth_first_search
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
def breadth_first_search(start, goal):
    visited = [[False for x in range(500)] for y in range(500)]
    dad = [[None for x in range(500)] for y in range(500)]
    queue = Queue()
    visited[start.first()][start.second()] = True
    dad[start.first()][start.second()] = start
    current_node = start
    neighbours_ = neighbours(current_node)
    if not goal.first() - 10 <= current_node.first() <= goal.first() + 10 \
            or not goal.second() - 10 <= current_node.second() <= goal.second() + 10:
        for i in range(len(neighbours_)):
            if visited[neighbours_[i].first()][neighbours_[i].second()] is False:
                queue.put(neighbours_[i])
                visited[neighbours_[i].first()][neighbours_[i].second()] = True
                dad[neighbours_[i].first()][neighbours_[i].second()] = current_node
    while (not goal.first() - 10 <= current_node.first() <= goal.first() + 10
           or not goal.second() - 10 <= current_node.second() <= goal.second() + 10) \
            and not queue.empty():
        current_node = queue.get_nowait()
        neighbours_ = neighbours(current_node)
        for i in range(len(neighbours_)):
            if visited[neighbours_[i].first()][neighbours_[i].second()] is False:
                queue.put(neighbours_[i])
                visited[neighbours_[i].first()][neighbours_[i].second()] = True
                dad[neighbours_[i].first()][neighbours_[i].second()] = current_node
    path = LifoQueue()
    while not current_node.first() == start.first() \
            or not current_node.second() == start.second():
        path.put(current_node)
        current_node = dad[current_node.first()][current_node.second()]
    return path
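A short sketch of how the returned LifoQueue could be read back (it assumes the start/goal node objects and the neighbours helper from the original project, whose first()/second() accessors are shown above): because nodes are pushed from the goal region back toward the start, successive get() calls yield the path in start-to-goal order.

path = breadth_first_search(start, goal)
while not path.empty():
    node = path.get()                     # nodes come back ordered from start toward goal
    print(node.first(), node.second())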
Example 9: benchmark_iterator
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
def benchmark_iterator(components: List[str], max_builds: int) -> Iterator:
    for component, category in showfast_iterator(components=components):
        curr_metric, curr_release = None, None
        queue = LifoQueue(maxsize=max_builds)
        for benchmark in get_benchmarks(component, category):
            if not benchmark['hidden']:
                release = parse_release(benchmark['build'])
                if curr_metric != benchmark['metric']:
                    curr_metric, curr_release = benchmark['metric'], release
                    queue.queue.clear()
                if release != curr_release:
                    curr_release = release
                    queue.queue.clear()
                if queue.full():
                    yield benchmark
                else:
                    queue.put(benchmark)
Example 10: __init__
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class LifoExecutor:
    def __init__(self, executor, max_workers=None):
        self.executor = executor
        self.max_workers = max_workers or executor._max_workers
        self.queue = LifoQueue()
        self.loop = asyncio.get_event_loop()
        self.sem = asyncio.Semaphore(self.max_workers)

    def submit(self, f, *args):
        future = self.loop.create_future()
        self.queue.put((future, f, args))
        self.loop.create_task(self.run_task())
        return future

    async def run_task(self):
        await self.sem.acquire()
        future, f, args = self.queue.get()
        executor_future = self.loop.run_in_executor(self.executor, f, *args)
        executor_future.add_done_callback(lambda f, ff=future: self.done_callback(future, f))

    def done_callback(self, future, executor_future):
        self.sem.release()
        future.set_result(executor_future.result())
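A minimal usage sketch for the class above (not part of the original snippet): the blocking_work function, pool size, and input values are made up for illustration, and it assumes LifoExecutor is defined as shown and is constructed while the event loop is already running.

import asyncio
import time
from concurrent.futures import ThreadPoolExecutor

def blocking_work(n):          # stand-in for a blocking job
    time.sleep(0.1)
    return n * n

async def main():
    lifo = LifoExecutor(ThreadPoolExecutor(max_workers=2))
    # Jobs submitted last are picked up first whenever a worker slot frees up.
    futures = [lifo.submit(blocking_work, i) for i in range(5)]
    print(await asyncio.gather(*futures))   # results come back in submission order

asyncio.run(main())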
Example 11: UnhandledExceptionHandler
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class UnhandledExceptionHandler(Singleton):
    """
    This class implements functionality to catch and log exceptions in a block of code, and also execute a set of
    teardown handlers intended to shut down the application gracefully and do any desired cleanup. It is implemented
    as a singleton because the teardown handlers can have global effects (e.g., stopping the event loop).
    This class is intended to be used as a context manager:
    >>> unhandled_exception_handler = UnhandledExceptionHandler.singleton()
    >>> with unhandled_exception_handler:
    >>>     # code which may throw an exception goes here!
    """
    HANDLED_EXCEPTION_EXIT_CODE = 1
    EXCEPTION_DURING_TEARDOWN_EXIT_CODE = 2

    def __init__(self):
        super().__init__()
        self._handling_lock = Lock()
        self._teardown_callback_stack = LifoQueue()  # we execute callbacks in the reverse order that they were added
        self._logger = log.get_logger(__name__)
        self._handled_exceptions = Queue()
        self._teardown_callback_raised_exception = False
        # Set up a handler to be called when process receives SIGTERM.
        # Note: this will raise if called on a non-main thread, but we should NOT work around that here. (That could
        # prevent the teardown handler from ever being registered!) Calling code should be organized so that this
        # singleton is only ever initialized on the main thread.
        signal.signal(signal.SIGTERM, self._application_teardown_signal_handler)
        signal.signal(signal.SIGINT, self._application_teardown_signal_handler)

    def add_teardown_callback(self, callback, *callback_args, **callback_kwargs):
        """
        Add a callback to be executed in the event of application teardown.
        :param callback: The method callback to execute
        :type callback: callable
        :param callback_args: args to be passed to the callback function
        :type callback_args: list
        :param callback_kwargs: kwargs to be passed to the callback function
        :type callback_kwargs: dict
        """
        self._teardown_callback_stack.put((callback, callback_args, callback_kwargs))

    def _application_teardown_signal_handler(self, sig, frame):
        """
        A signal handler that will trigger application teardown.
        :param sig: Signal number of the received signal
        :type sig: int
        :param frame: The interrupted stack frame
        :type frame: frame
        """
        signal_names = {
            signal.SIGTERM: 'SIGTERM',
            signal.SIGINT: 'SIGINT',
        }
        self._logger.info('{} signal received. Triggering teardown.', signal_names[sig])
        raise AppTeardown

    def __enter__(self):
        """
        Enables this to be used as a context manager. No special handling is needed on enter.
        """
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        """
        Enables this to be used as a context manager. If an exception was raised during the execution block (inside the
        "with" statement) then exc_value will be set to the exception object.
        There are four situations in which we can go through this method:
        1. Exception, on main thread
           - The exception is logged and in some cases (e.g., SystemExit) may be immediately reraised.
           - Teardown callbacks are executed.
           - Example: A KeyboardInterrupt exception raised because user presses ctrl-c / sends SIGINT signal
        2. Exception, not on main thread
           - The exception is logged and in some cases may be passed to the main thread to be reraised.
           - Teardown callbacks are executed.
           - Example: Any unhandled exception that is raised on a SafeThread
        3. Normal exit, on main thread
           - We check to see if there was an exception that we need to reraise on the main thread. In almost all cases
             we will *not* reraise an exception on the main thread since it has already been logged and teardown
             callbacks have already been executed on the thread that raised the exception.
           - Teardown callbacks are *not* executed.
           - Example: A SystemExit exception raised by sys.exit() is passed from a SafeThread to the main thread to
             make Python set the exit code.
        4. Normal exit, not on main thread
           - Do nothing! All is well.
        """
        if exc_value:
            # An exception occurred during execution, so run the teardown callbacks. We use a lock here since multiple
            # threads could raise exceptions at the same time and we only want to execute these once.
            with self._handling_lock:
                if not isinstance(exc_value, (SystemExit, AppTeardown, KeyboardInterrupt)):
                    # It is not very useful to log the SystemExit exception since it is raised by sys.exit(), and thus
                    # application exit is completely expected.
                    self._logger.exception('Unhandled exception handler caught exception.')
# ... (the rest of this example is omitted) ...
Example 12: ConnectionPool
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class ConnectionPool(object):
    """
    Thread-safe connection pool.
    .. versionadded:: 0.5
    The `size` argument specifies how many connections this pool
    manages. Additional keyword arguments are passed unmodified to the
    :py:class:`happybase.Connection` constructor, with the exception of
    the `autoconnect` argument, since maintaining connections is the
    task of the pool.
    :param int size: the maximum number of concurrently open connections
    :param kwargs: keyword arguments passed to
        :py:class:`happybase.Connection`
    """
    def __init__(self, size, **kwargs):
        if not isinstance(size, int):
            raise TypeError("Pool 'size' arg must be an integer")
        if not size > 0:
            raise ValueError("Pool 'size' arg must be greater than zero")
        logger.debug(
            "Initializing connection pool with %d connections", size)
        self._lock = threading.Lock()
        self._queue = LifoQueue(maxsize=size)
        self._thread_connections = threading.local()
        connection_kwargs = kwargs
        connection_kwargs['autoconnect'] = False
        for i in xrange(size):
            connection = Connection(**connection_kwargs)
            self._queue.put(connection)
        # The first connection is made immediately so that trivial
        # mistakes like unresolvable host names are raised immediately.
        # Subsequent connections are connected lazily.
        with self.connection():
            pass

    def _acquire_connection(self, timeout=None):
        """Acquire a connection from the pool."""
        try:
            return self._queue.get(True, timeout)
        except Empty:
            raise NoConnectionsAvailable(
                "No connection available from pool within specified "
                "timeout")

    def _return_connection(self, connection):
        """Return a connection to the pool."""
        self._queue.put(connection)

    @contextlib.contextmanager
    def connection(self, timeout=None):
        """
        Obtain a connection from the pool.
        This method *must* be used as a context manager, i.e. with
        Python's ``with`` block. Example::
            with pool.connection() as connection:
                pass  # do something with the connection
        If `timeout` is specified, this is the number of seconds to wait
        for a connection to become available before
        :py:exc:`NoConnectionsAvailable` is raised. If omitted, this
        method waits forever for a connection to become available.
        :param int timeout: number of seconds to wait (optional)
        :return: active connection from the pool
        :rtype: :py:class:`happybase.Connection`
        """
        connection = getattr(self._thread_connections, 'current', None)
        return_after_use = False
        if connection is None:
            # This is the outermost connection requests for this thread.
            # Obtain a new connection from the pool and keep a reference
            # in a thread local so that nested connection requests from
            # the same thread can return the same connection instance.
            #
            # Note: this code acquires a lock before assigning to the
            # thread local; see
            # http://emptysquare.net/blog/another-thing-about-pythons-
            # threadlocals/
            return_after_use = True
            connection = self._acquire_connection(timeout)
            with self._lock:
                self._thread_connections.current = connection
        try:
            # Open connection, because connections are opened lazily.
            # This is a no-op for connections that are already open.
            connection.open()
# ... (the rest of this example is omitted) ...
Example 13: Similarity
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class Similarity(object):
    """Here the actual similarity computation and lookup happens."""

    def __init__(self):
        data_dir = player_get_data_dir()
        self.db_path = os.path.join(data_dir, "similarity.db")
        self.gaia_db_path = os.path.join(data_dir, "gaia.db")
        self.db_queue = PriorityQueue()
        self._db_wrapper = DatabaseWrapper()
        self._db_wrapper.daemon = True
        self._db_wrapper.set_path(self.db_path)
        self._db_wrapper.set_queue(self.db_queue)
        self._db_wrapper.start()
        self.create_db()
        self.network = LastFMNetwork(api_key=API_KEY)
        self.cache_time = 90
        if GAIA:
            self.gaia_queue = LifoQueue()
            self.gaia_analyser = GaiaAnalysis(
                self.gaia_db_path, self.gaia_queue)
            self.gaia_analyser.daemon = True
            self.gaia_analyser.start()

    def execute_sql(self, sql=None, priority=1, command=None):
        """Put sql command on the queue to be executed."""
        if command is None:
            command = SQLCommand(sql)
        self.db_queue.put((priority, command))

    def get_sql_command(self, sql, priority=1):
        """Build a SQLCommand, put it on the queue and return it."""
        command = SQLCommand(sql)
        self.execute_sql(command=command, priority=priority)
        return command

    def remove_track_by_filename(self, filename):
        if not filename:
            return
        if GAIA:
            self.gaia_queue.put((REMOVE, filename))

    def get_ordered_gaia_tracks_by_request(self, filename, number, request):
        start_time = time()
        tracks = self.gaia_analyser.get_tracks(
            filename, number, request=request)
        print("finding gaia matches took %f s" % (time() - start_time,))
        return tracks

    def get_ordered_gaia_tracks(self, filename, number):
        """Get neighbours for track."""
        start_time = time()
        tracks = self.gaia_analyser.get_tracks(filename, number)
        print("finding gaia matches took %f s" % (time() - start_time,))
        return tracks

    def get_artist(self, artist_name):
        """Get artist information from the database."""
        sql = ("SELECT * FROM artists WHERE name = ?;", (artist_name,))
        command = self.get_sql_command(sql, priority=1)
        for row in command.result_queue.get():
            return row
        sql2 = ("INSERT INTO artists (name) VALUES (?);", (artist_name,))
        command = self.get_sql_command(sql2, priority=0)
        command.result_queue.get()
        command = self.get_sql_command(sql, priority=1)
        for row in command.result_queue.get():
            return row

    def get_track_from_artist_and_title(self, artist_name, title):
        """Get track information from the database."""
        artist_id = self.get_artist(artist_name)[0]
        sql = (
            "SELECT * FROM tracks WHERE artist = ? AND title = ?;",
            (artist_id, title))
        command = self.get_sql_command(sql, priority=3)
        for row in command.result_queue.get():
            return row
        sql2 = (
            "INSERT INTO tracks (artist, title) VALUES (?, ?);",
            (artist_id, title))
        command = self.get_sql_command(sql2, priority=2)
        command.result_queue.get()
        command = self.get_sql_command(sql, priority=3)
        for row in command.result_queue.get():
            return row

    def get_similar_tracks(self, track_id):
        """Get similar tracks from the database.
        Sorted by descending match score.
        """
        sql = (
            "SELECT track_2_track.match, artists.name, tracks.title"
            " FROM track_2_track INNER JOIN tracks ON"
            " track_2_track.track2 = tracks.id INNER JOIN artists ON"
            " artists.id = tracks.artist WHERE track_2_track.track1 = ? UNION "
            "SELECT track_2_track.match, artists.name, tracks.title"
            " FROM track_2_track INNER JOIN tracks ON"
# ... (the rest of this example is omitted) ...
Example 14: Layer
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class Layer(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        rec = QApplication.desktop().screenGeometry()
        self.w = rec.width()
        self.h = rec.height()
        self.kv = config.get('Settings', 'keys').split(' ')
        self.ki = 0
        self.keys = self.kv[self.ki]
        self.rect = QRectF(0, 0, self.w, self.h)
        self.shortcuts = []
        self.rects = LifoQueue()
        self.query = ''
        self.resize(self.w, self.h)
        self.setStyleSheet("background:rgba(0,0,0,%s)" % config.getfloat('Settings', 'background_opacity'))
        self.setAttribute(Qt.WA_TranslucentBackground)
        self.setWindowFlags(Qt.FramelessWindowHint)
        view = QGraphicsView(self)
        scene = QGraphicsScene()
        scene.setSceneRect(0, 0, self.w, self.h)
        view.setScene(scene)
        view.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        view.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setCentralWidget(view)
        self.scene = scene
        self.drawLines()
        self.esc = QShortcut(QKeySequence('Esc'), self)
        self.esc.activated.connect(lambda: move(*config.get('Settings', 'mouse_home_coords').split(' ')))
        self.back = QShortcut(QKeySequence('Backspace'), self)
        self.back.activated.connect(self.goBack)

    def goBack(self):
        if not self.query:
            return
        self.query = self.query[:-1]
        self.setArea(self.rects.get(), True)

    def setPointer(self, rect):
        move(rect.center().x()+self.pos().x()-10, rect.center().y()+self.pos().y()-10)
        # self.scene.addEllipse(rect.center().x()+self.pos().x()-2, rect.center().y()+self.pos().y()-2, 4, 4, QPen(QColor('blue')), QBrush(QColor('blue')))

    def clickHere(self, rect):
        self.showMinimized()
        time.sleep(0.3)
        click(rect.center().x()+self.pos().x()-10, rect.center().y()+self.pos().y()-10)
        time.sleep(0.3)
        move(*config.get('Settings', 'mouse_home_coords').split(' '))
        sys.exit()

    def getArea(self, k, direction):
        rect = self.rect
        i = self.keys.index(k)
        c = len(self.keys)
        y = rect.topLeft().y()
        x = rect.topLeft().x()
        if direction == 'hor':
            new_rect = QRectF(rect.width()/c*i+x, y, rect.width()/c, rect.height())
        else:
            new_rect = QRectF(x, rect.height()/c*i+y, rect.width(), rect.height()/c)
        return new_rect

    def selectArea(self, hint, rect):
        self.query += hint
        self.setArea(rect)

    def setArea(self, new_rect, back=False):
        if not new_rect:
            return
        if not back:
            self.rects.put(self.rect)
        self.rect = new_rect
        self.drawLines()

    def drawLines(self):
        for sh in self.shortcuts:
            sh.activated.disconnect()
            sh.setKey('')
            sh.setEnabled(False)
        self.shortcuts = []
        self.scene.clear()
        rect = self.rect
        c = len(self.keys)
        direction = 'hor' if rect.width() > rect.height() else 'vert'
        if (direction == 'hor' and rect.width()/c < 60) or (direction == 'vert' and rect.height()/c < 60):
            if c > 2:
                self.ki += 1
        else:
            if c < len(self.kv[0]):
                self.ki -= 1
        self.keys = self.kv[self.ki]
        c = len(self.keys)
        color = QColor(config.get('Settings', 'lines_color'))
        fontColor = QColor(config.get('Settings', 'text_color'))
        pen = QPen(color)
        if rect.width() > 100:
            fsize = rect.width()/c/2
        elif rect.width() > 50:
# ... (the rest of this example is omitted) ...
Example 15: ROSProxy
# Required module import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import put [as alias]
class ROSProxy(object):
    """ RL-Glue agent that performs the actions it receives from rlpy
    """
    def __init__(self, subscriptions, publications):
        # Create queues for communication with the rlpy world
        self.observations_queue = LifoQueue(1)  # ROS updates constantly, keep only the last observation
        self.actions_queue = Queue()
        # Create the list of subscriptions so that the states can be
        # constructed from observations
        self.subscriptions = []
        self.last_state = []
        for subscription in subscriptions:
            self.last_state.append(0.0)
            sub = {}
            sub['index'] = len(self.subscriptions)  # Index in the state vector
            sub['subscriber'] = rospy.topics.Subscriber(
                subscription['path'],
                subscription['type'],
                self.subscription_callback,
                sub
            )
            sub['f'] = subscription.get('f', lambda x: x)
            self.subscriptions.append(sub)
        # Create the list of publications so that actions can be mapped to
        # publications
        self.publications = []
        self.actions = []
        for publication in publications:
            pub = {}
            pub['type'] = publication['type']
            pub['publisher'] = rospy.topics.Publisher(
                publication['path'],
                publication['type'],
                queue_size=10
            )
            # Add an action descriptor so that actions can be taken
            for value in publication['values']:
                self.actions.append((pub, value))
            self.publications.append(pub)

    def run(self):
        # Let ROS spin
        rospy.spin()

    def subscription_callback(self, data, sub):
        """ Called whenever something happens in the ROS world. This method
        updates the observation and publishes all the actions to be
        published
        """
        # Update the state. The last element of the state is the reward
        index = sub['index']
        f = sub['f']
        self.last_state[index] = float(f(data.data))
        self.observations_queue.put((
            tuple(self.last_state[:-1]),
            self.last_state[-1],
            False
        ))
        # Send the next action
        action = self.actions[self.actions_queue.get()]
        pub = action[0]
        value = action[1]
        pub['publisher'].publish(value)

    def numberOfActions(self):
        """ Number of possible actions, built based on the publications of this
        agent.
        """
        return len(self.actions) - 1  # The last action resets the world

    def observation(self):
        """ Wait for an observation to be available, and return a (state,
        reward, finished) tuple
        """
        return self.observations_queue.get()

    def setAction(self, action):
        """ Inform the agent that it has to take the given action
        """
        self.actions_queue.put(action)