This article collects typical usage examples of rq.Connection in Python. If you are wondering what exactly rq.Connection does, how to call it, or want to see it used in real code, the curated examples below should help. You can also explore further usage examples from the rq module in which Connection is defined.
The following shows 15 code examples of rq.Connection, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
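All of the examples share one basic pattern: Connection is a context manager that pushes a Redis connection onto RQ's connection stack, so Queue and Worker objects created inside the with block pick up that connection implicitly. Below is a minimal, self-contained sketch of that pattern; it assumes a Redis server reachable on localhost, and the queue name and job are purely illustrative.

import redis
from rq import Connection, Queue, Worker

redis_conn = redis.Redis()  # assumes a local Redis server
with Connection(redis_conn):
    # Queue() and Worker() created inside the block use redis_conn
    # implicitly via RQ's connection stack.
    q = Queue("default")
    q.enqueue(print, "hello from rq")  # enqueue any importable callable
    Worker([q]).work(burst=True)       # drain pending jobs, then exit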
Example 1: get_status
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def get_status(task_id):
    with Connection(redis.from_url(current_app.config["REDIS_URL"])):
        q = Queue()
        task = q.fetch_job(task_id)
    if task:
        response_object = {
            "status": "success",
            "data": {
                "task_id": task.get_id(),
                "task_status": task.get_status(),
                "task_result": task.result,
            },
        }
    else:
        response_object = {"status": "error"}
    return jsonify(response_object)
Example 2: start_rq_worker
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def start_rq_worker(queue_name: str, burst: bool = False) -> None:
    """Start an RQ worker for the given queue."""
    try:
        with Connection(connection=redis.Redis(db=13)):
            q = Queue(name=queue_name)
            logger.info(f"Starting {'burst ' if burst else ''}"
                        f"RQ worker for {queue_name}")
            if burst:
                Worker(q).work(burst=True)
            else:
                # This works around workers timing out and dying when the user
                # closes their laptop; it should prevent that for up to a week.
                wk_in_secs = 60 * 60 * 24 * 7
                Worker(q, default_result_ttl=wk_in_secs, default_worker_ttl=wk_in_secs).work()
    except Exception as e:
        logger.exception("Worker in pid {} failed with exception {}".format(os.getpid(), e))
        raise
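A hypothetical way to invoke the helper above, for instance from a CLI entry point, is to run it in a child process so the caller is not blocked while jobs are drained. The queue name here is illustrative and not part of the original project.

# Hypothetical usage of start_rq_worker: run a burst worker for the
# "default" queue in a separate process.
from multiprocessing import Process

if __name__ == "__main__":
    p = Process(target=start_rq_worker, args=("default",), kwargs={"burst": True})
    p.start()
    p.join()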
Example 3: _calc_rq_free
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def _calc_rq_free() -> Dict[str, Any]:
    """Use the RQ API to return the total number of workers and the
    count of workers currently idle (the same data reported by `rq info`)."""
    conn = default_redis_conn()
    with rq.Connection(connection=conn):
        workers: List[rq.Worker] = [w for w in rq.Worker.all()]
        idle_workers = [w for w in workers if w.get_state() == 'idle']
        resp = {
            'workersTotal': len(workers),
            'workersIdle': len(idle_workers),
            'workersUnknown': len([w for w in workers if w.get_state() == '?'])
        }
        queues = 'default', 'build', 'publish'
        resp.update({f'queue{q.capitalize()}Size':
                     len(rq.Queue(f'gigantum-{q}-queue', connection=conn)) for q in queues})
        return resp
Example 4: request_poller
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def request_poller(queue_1, queue_2, nb_to_do):
    requests = pollout_requests(queue_2, nb_to_do)
    try:
        with Connection(Redis()) as conn:
            q = Queue(queue_1, connection=conn)
    except:
        mod.display("ASYNC_HTTP",
                    "ERROR",
                    "Could not establish connection with Redis, check if you have redis_host, \
                    redis_port and maybe redis_password in /config/config.ini")
    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(run(requests))
    x = loop.run_until_complete(future)
    loop.close()
    for y in x:
        if y is not None:
            try:
                # args must be a tuple or list, hence the trailing comma
                q.enqueue(module_worker_response, args=(y,), result_ttl=0)
            except:
                mod.display("ASYNC_HTTP",
                            "ERROR",
                            "Could not enqueue job to Redis in func request_poller")
Example 5: grade_project
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def grade_project(project_id):
    project = Project.query.filter_by(id=project_id).first_or_404()
    with Connection(redis.from_url(current_app.config['REDIS_URL'])):
        q = Queue()
        task = q.enqueue(
            create_task,
            project.url,
            current_app.config["OPENFAAS_URL"]
        )
    response_object = {
        'status': 'success',
        'data': {
            'task_id': task.get_id()
        }
    }
    return jsonify(response_object), 202
Example 6: run_worker
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def run_worker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example 7: runworker
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def runworker(app):
    REDIS_HOST = app.config['REDIS_HOST']
    REDIS_PORT = app.config['REDIS_PORT']
    REDIS_DB = app.config['REDIS_DB']
    QUEUES = app.config['QUEUES']
    redis_conn = Connection(Redis(REDIS_HOST,
                                  REDIS_PORT,
                                  REDIS_DB))
    with redis_conn:
        w = Worker(QUEUES)
        w.work()
Example 8: get_queue
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def get_queue() -> Queue:
    """Return the current queue

    Returns:
        Queue: The current RQ work queue
    """
    if "queue" not in g:
        with Connection():
            g.queue = Queue(connection=get_redis())
    return g.queue
Example 9: run_task
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def run_task():
    task_type = request.form["type"]
    with Connection(redis.from_url(current_app.config["REDIS_URL"])):
        q = Queue()
        task = q.enqueue(create_task, task_type)
    response_object = {
        "status": "success",
        "data": {
            "task_id": task.get_id()
        }
    }
    return jsonify(response_object), 202
Example 10: run_worker
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def run_worker():
    redis_url = app.config["REDIS_URL"]
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config["QUEUES"])
        worker.work()
Example 11: runworker
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def runworker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example 12: start
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def start(redis_connection, queues):
    with Connection(redis_connection):
        w = Worker(queues)
        w.work()
Example 13: rq_worker
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def rq_worker():
    """Start an rq worker in the context of dallinger."""
    setup_experiment(log)
    with Connection(redis_conn):
        # right now we only care about the low queue, which is used for bots
        worker = Worker("low")
        worker.work()
Example 14: main
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def main():
    import gevent.monkey
    gevent.monkey.patch_all()

    from gevent.queue import LifoQueue

    # These imports are inside the __main__ block
    # to make sure that we only import from rq_gevent_worker
    # (which has the side effect of applying gevent monkey patches)
    # in the worker process. This way other processes can import the
    # redis connection without that side effect.
    import os
    from redis import BlockingConnectionPool, StrictRedis
    from rq import Queue, Connection
    from dallinger.heroku.rq_gevent_worker import GeventWorker as Worker
    from dallinger.config import initialize_experiment_package

    initialize_experiment_package(os.getcwd())

    import logging
    logging.basicConfig(format="%(asctime)s %(message)s", level=logging.DEBUG)

    redis_url = os.getenv("REDIS_URL", "redis://localhost:6379")
    # Specify queue class for improved performance with gevent.
    # see http://carsonip.me/posts/10x-faster-python-gevent-redis-connection-pool/
    redis_pool = BlockingConnectionPool.from_url(redis_url, queue_class=LifoQueue)
    redis_conn = StrictRedis(connection_pool=redis_pool)

    with Connection(redis_conn):
        worker = Worker(list(map(Queue, listen)))
        worker.work()
Example 15: worker
# Required module: import rq [as alias]
# Or: from rq import Connection [as alias]
def worker():
    logging.info('this is worker')
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
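The last two examples rely on module-level names (listen, conn) that are defined elsewhere in their respective projects, and example 13 similarly assumes a pre-existing redis_conn. A purely illustrative setup for example 15 might look like the sketch below; the host, port, and queue names are assumptions, not values from the original code.

# Hypothetical module-level setup for the worker() example above.
import logging
import redis
from rq import Connection, Queue, Worker

listen = ["high", "default", "low"]              # queues to watch, in priority order
conn = redis.Redis(host="localhost", port=6379)  # shared Redis connection
logging.basicConfig(level=logging.INFO)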