本文整理汇总了Python中furious.batcher.MessageProcessor.start方法的典型用法代码示例。如果您正苦于以下问题:Python MessageProcessor.start方法的具体用法?Python MessageProcessor.start怎么用?Python MessageProcessor.start使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类furious.batcher.MessageProcessor
的用法示例。
在下文中一共展示了MessageProcessor.start方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get
# 需要导入模块: from furious.batcher import MessageProcessor [as 别名]
# 或者: from furious.batcher.MessageProcessor import start [as 别名]
def get(self):
    """Handle a GET request: enqueue ``count`` color Messages and a processor.

    Reads (color, value, count) from the request via ``self.get_params()``,
    inserts ``count`` pull-queue Messages tagged "color", then inserts a
    MessageProcessor task to consume them in batches. Writes a JSON
    success/failure payload to the response.
    """
    from furious import context
    from furious.batcher import Message
    from furious.batcher import MessageProcessor

    import calendar

    try:
        color, value, count = self.get_params()
    except (KeyError, AssertionError):
        # Missing or invalid query parameters: report failure and bail out.
        response = {
            "success": False,
            "message": "Invalid parameters."
        }
        self.response.write(json.dumps(response))
        return

    payload = {
        "color": color,
        "value": value,
        # BUG FIX: time.mktime() interprets a timetuple as *local* time, so
        # pairing it with utcnow() skewed the timestamp by the UTC offset.
        # calendar.timegm() treats the tuple as UTC, giving a true epoch time.
        "timestamp": calendar.timegm(datetime.datetime.utcnow().timetuple())
    }

    tag = "color"

    # create a context to insert multiple Messages
    with context.new() as ctx:
        # loop through the count adding a task to the context per increment
        for _ in xrange(count):
            # insert the message with the payload
            ctx.add(Message(task_args={"payload": payload, "tag": tag}))

    # insert a processor to fetch the messages in batches
    # this should always be inserted. the logic will keep it from inserting
    # too many processors
    processor = MessageProcessor(
        target=process_messages, args=(tag,), tag=tag,
        task_args={"countdown": 0})
    processor.start()

    response = {
        "success": True,
        "message": "Task inserted successfully with %s" % (payload,)
    }

    self.response.write(json.dumps(response))
示例2: process_messages
# 需要导入模块: from furious.batcher import MessageProcessor [as 别名]
# 或者: from furious.batcher.MessageProcessor import start [as 别名]
def process_messages(tag, retries=0):
    """Process the messages pulled from a queue based off the tag passed in.

    Will insert another processor if any work was processed or the retry
    count is under the max retry count. Will update an aggregated stats
    object in memcache with the data in the payload of the messages
    processed.

    :param tag: :class: `str` Tag to query the queue on
    :param retries: :class: `int` Number of retries the job has processed
    """
    from furious.batcher import bump_batch
    from furious.batcher import MESSAGE_DEFAULT_QUEUE
    from furious.batcher import MessageIterator
    from furious.batcher import MessageProcessor

    from google.appengine.api import memcache

    # since we don't have a flag for checking complete we'll re-insert a
    # processor task with a retry count to catch any work that may still be
    # filtering in. If we've hit our max retry count we just bail out and
    # consider the job complete.
    if retries > 5:
        logging.info("Process messages hit max retry and is exiting")
        return

    # create a message iterator for the tag in batches of 500
    message_iterator = MessageIterator(tag, MESSAGE_DEFAULT_QUEUE, 500)

    client = memcache.Client()

    # get the stats object from cache; gets() records a CAS token so that
    # cas() below can update it safely
    stats = client.gets(tag)

    # json decode it if it exists otherwise get the default state.
    stats = json.loads(stats) if stats else get_default_stats()

    work_processed = False

    # loop through the messages pulled from the queue.
    for message in message_iterator:
        work_processed = True

        value = int(message.get("value", 0))
        color = message.get("color").lower()

        # update the total stats with the value pulled
        set_stats(stats["totals"], value)

        # update the specific color stats via the value pulled
        set_stats(stats["colors"][color], value)

    # insert the stats back into cache
    json_stats = json.dumps(stats)

    # try and do an add first to see if it's new. We can't trust get due to
    # a race condition.
    if not client.add(tag, json_stats):
        # if we couldn't add then do a compare and set to safely
        # update the stats
        if not client.cas(tag, json_stats):
            raise Exception("Transaction Collision.")

    # bump the process batch id
    bump_batch(tag)

    if work_processed:
        # reset the retries as we've processed work
        retries = 0
    else:
        # no work was processed so increment the retries
        retries += 1

    # insert another processor for the same tag.
    # BUG FIX: the original hard-coded "colors" here, so a processor started
    # for any other tag would never re-insert its own continuation; use the
    # tag this invocation was given instead.
    processor = MessageProcessor(
        target=process_messages, args=(tag,),
        kwargs={'retries': retries}, tag=tag)
    processor.start()