This article collects typical usage examples of the redis.call method in Python. If you have been wondering what redis.call does in Python, how to use it, or what real code that uses it looks like, the curated examples below may help. You can also explore further usage examples of the redis module.
The following shows 15 code examples of the redis.call method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: __init__
# Required import: import redis
def __init__(self, *args, **kwargs):
"""Initialize atomic integer instance.
:param initial_value: Optional. The initial value of this
variable in case it doesn't exist in Redis yet.
"""
super().__init__(*args, **kwargs)
self._lua_add = ManagerState.LuaFunction(self, "oldval + arg", short=True)
self._lua_mul = ManagerState.LuaFunction(self, "oldval * arg", short=True)
self._lua_floordiv = ManagerState.LuaFunction(
self, "floor(oldval / arg)", short=True
)
self._lua_cas = ManagerState.LuaFunction(
self,
"""
local oldval = tonumber(redis.call('EXISTS', KEYS[1]) and redis.call('GET', KEYS[1]) or ARGV[3])
if oldval == tonumber(ARGV[1]) then
redis.call('SET', KEYS[1], tonumber(ARGV[2]))
return 1
else
return 0
end
""",
)
self.initial_value = kwargs.get("initial_value", 0)
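The compare-and-set script above can also be exercised on its own. A minimal sketch, assuming a local redis-py client; the key name and values are illustrative, not from the original project:

import redis

r = redis.Redis()
cas_script = """
local oldval = tonumber(redis.call('EXISTS', KEYS[1]) and redis.call('GET', KEYS[1]) or ARGV[3])
if oldval == tonumber(ARGV[1]) then
    redis.call('SET', KEYS[1], tonumber(ARGV[2]))
    return 1
else
    return 0
end
"""
# Set "counter" to 5 only if it currently equals 0 (ARGV[3] is the default used when the key is missing).
result = r.eval(cas_script, 1, "counter", 0, 5, 0)
print(result)  # 1 if the swap happened, 0 otherwise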
Example 2: gather_stage
# Required import: import redis
def gather_stage(harvester, job):
'''Calls the harvester's gather_stage, returning harvest object ids, with
some error handling.
This is split off from gather_callback so that tests can call it without
dealing with queue stuff.
'''
job.gather_started = datetime.datetime.utcnow()
try:
harvest_object_ids = harvester.gather_stage(job)
except (Exception, KeyboardInterrupt):
harvest_objects = model.Session.query(HarvestObject).filter_by(
harvest_job_id=job.id
)
for harvest_object in harvest_objects:
model.Session.delete(harvest_object)
model.Session.commit()
raise
finally:
job.gather_finished = datetime.datetime.utcnow()
job.save()
return harvest_object_ids
Example 3: test_execute_pipeline_script
# Required import: import redis
def test_execute_pipeline_script(self, can_replicate_commands):
if not self.scripts.can_replicate_commands:
assert False, 'test suite needs Redis 3.2 or higher'
self.scripts._can_replicate_commands = can_replicate_commands
self.conn.script_flush()
s = self.conn.register_script("redis.call('set', 'x', 'y')")
# Uncached execution
p = self.conn.pipeline()
s(client=p)
self.scripts.execute_pipeline(p)
assert self.conn.get('x') == 'y'
self.conn.delete('x')
# Cached execution
p = self.conn.pipeline()
s(client=p)
self.scripts.execute_pipeline(p)
assert self.conn.get('x') == 'y'
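For comparison, a minimal standalone sketch of the same flow using only redis-py, with pipeline.execute() standing in for the project's execute_pipeline helper (assumes a local Redis server):

import redis

r = redis.Redis()
s = r.register_script("redis.call('set', 'x', 'y')")
p = r.pipeline()
s(client=p)    # queue the script call on the pipeline
p.execute()    # redis-py loads the script if needed, then runs EVALSHA
print(r.get('x'))  # b'y'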
Example 4: consume
# Required import: import redis
def consume(self):
"""Consume value gotten from queue.
Raise UserWarning if consume() called before get()
"""
if self._item is None:
raise UserWarning("Must call get() before consume()")
self.LOCKS.pop(self._h_k)
rv = raw_client().evalsha(
self._SHAS['lq_consume'],
len(self.SCRIPTS['lq_consume']['keys']),
self._h_k, self._path, self._q_lookup, self._client_id)
assert rv == 1
self._h_k = None
self._item = None
Example 5: _upload_batch_continuing
# Required import: import redis
def _upload_batch_continuing(self, queue):
ret = {
"count": 0
}
res = self._upload_batch(queue) # Remember this call will raise exception upon server error
ret["count"] = res["count"]
if "error" in res:
ret["error"] = res["error"]
# Continue to upload if more pending batches present
while not "error" in res and (res["count"] == self.options.get("max_batch_size", 100) or res.get("continue", False)):
res = self._upload_batch(queue) # Remember this call will raise exception upon server error
ret["count"] += res["count"]
if "error" in res:
ret["error"] = res["error"]
return ret
Example 6: upload_pending
# Required import: import redis
def upload_pending(self):
ret = {
"total": 0,
}
try:
res = self._upload_batch_continuing(self.PENDING_QUEUE) # Remember this call will raise exception upon server error
ret["total"] += res["count"]
if "error" in res:
ret.setdefault("errors", []).append(res["error"])
except RequestException as rex:
ret.setdefault("errors", []).append(str(rex))
# Retry queue logic is moved to '_upload_batch' method
return ret
Example 7: script_load
# Required import: import redis
def script_load(script):
sha = [None] #A
def call(conn, keys=[], args=[], force_eval=False): #B
if not force_eval:
if not sha[0]: #C
sha[0] = conn.execute_command( #D
"SCRIPT", "LOAD", script, parse="LOAD") #D
try:
return conn.execute_command( #E
"EVALSHA", sha[0], len(keys), *(keys+args)) #E
except redis.exceptions.ResponseError as msg:
if not msg.args[0].startswith("NOSCRIPT"): #F
raise #F
return conn.execute_command( #G
"EVAL", script, len(keys), *(keys+args)) #G
return call #H
# <end id="script-load"/>
#A Store the cached SHA1 hash of the result of SCRIPT LOAD in a list so we can change it later from within the call() function
#B When calling the "loaded script", you must provide the connection, the set of keys that the script will manipulate, and any other arguments to the function
#C We will only try loading the script if we don't already have a cached SHA1 hash
#D Load the script if we don't already have the SHA1 hash cached
#E Execute the command from the cached SHA1
#F If the error was unrelated to a missing script, re-raise the exception
#G If we received a script-related error, or if we need to force-execute the script, directly execute the script, which will automatically cache the script on the server (with the same SHA1 that we've already cached) when done
#H Return the function that automatically loads and executes scripts when called
#END
Author: fuqi365; Project: https---github.com-josiahcarlson-redis-in-action; Lines of code: 32; Source file: ch11_listing_source.py
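The loader returned by script_load() can then be used like an ordinary function. A minimal usage sketch, assuming a local redis-py connection; the key name and the one-line script are illustrative:

import redis

conn = redis.Redis()
# Wrap a small script once; the SHA1 checksum is cached inside the returned closure.
incr_by = script_load("return redis.call('incrby', KEYS[1], ARGV[1])")
print(incr_by(conn, keys=['demo:counter'], args=[2]))  # first call loads the script, then runs it
print(incr_by(conn, keys=['demo:counter'], args=[2]))  # later calls reuse the cached SHA1 via EVALSHA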
Example 8: queue_purge
# Required import: import redis
def queue_purge(self, queue=None):
'''
Purge the consumer's queue.
The ``queue`` parameter exists only for compatibility and is
ignored.
'''
# Use a script to make the operation atomic
lua_code = b'''
local routing_key = KEYS[1]
local message_key = ARGV[1]
local count = 0
while true do
local s = redis.call("lpop", routing_key)
if s == false then
break
end
local value = cjson.decode(s)
local id = value[message_key]
local persistance_key = routing_key .. ":" .. id
redis.call("del", persistance_key)
count = count + 1
end
return count
'''
script = self.redis.register_script(lua_code)
return script(keys=[self.routing_key], args=[self.message_key])
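A hedged end-to-end sketch of the purge above, using a plain redis-py client in place of self.redis; the queue name, message key, and payloads are illustrative:

import json
import redis

r = redis.Redis()
routing_key = "demo-queue"
message_key = "delivery_tag"
# Enqueue two messages in the layout the script expects: a JSON body on the
# list plus a per-message persistence key "<routing_key>:<id>".
for tag in ("1", "2"):
    r.rpush(routing_key, json.dumps({message_key: tag, "body": "hello"}))
    r.set("{}:{}".format(routing_key, tag), "payload")
# The same Lua script defined in queue_purge() above.
lua_code = '''
local routing_key = KEYS[1]
local message_key = ARGV[1]
local count = 0
while true do
    local s = redis.call("lpop", routing_key)
    if s == false then
        break
    end
    local value = cjson.decode(s)
    local id = value[message_key]
    local persistance_key = routing_key .. ":" .. id
    redis.call("del", persistance_key)
    count = count + 1
end
return count
'''
purge = r.register_script(lua_code)
print(purge(keys=[routing_key], args=[message_key]))  # 2; both persistence keys are also deleted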
Example 9: _upload_batch
# Required import: import redis
def _upload_batch(self, queue):
ret = {
"count": 0
}
source_queue = re.sub(self.WORK_QUEUE_REGEX, "", queue) # Remove suffix if already a work queue
work_queue = self.WORK_QUEUE.format(source_queue)
# Pop next batch into work queue
batch = self._dequeue_batch(source_queue, work_queue, self.options.get("max_batch_size", 100))
if not batch:
if log.isEnabledFor(logging.DEBUG):
log.debug("No batch found to upload from queue '{:}'".format(queue))
return ret
# Upload batch
payload = self._prepare_payload_for(batch)
ok, msg = self._upload(payload) # Remember this call will raise exception upon server error
if ok:
log.info("Uploaded batch with {:} entries from queue '{:}'".format(len(batch), queue))
ret["count"] = len(batch)
# Batch uploaded equals work completed
self.client.pipeline() \
.delete(work_queue) \
.bgsave() \
.execute()
else:
log.warning("Temporarily unable to upload batch with {:} entries from queue '{:}': {:}".format(len(batch), queue, msg))
ret["error"] = msg
return ret
Example 10: upload_failing
# Required import: import redis
def upload_failing(self):
ret = {
"total": 0,
}
queues = self.list_queues(pattern="fail_*") # This will also include work queues if present
if queues:
log.warning("Found {:} fail queue(s)".format(len(queues)))
try:
for queue in queues:
res = self._upload_batch_continuing(queue) # Remember this call will raise exception upon server error
ret["total"] += res["count"]
# Stop upon first error
if "error" in res:
ret.setdefault("errors", []).append(res["error"])
break
except RequestException as rex:
ret.setdefault("errors", []).append(str(rex))
log.warning("Still unable to upload failed batch(es): {:}".format(rex))
return ret
Example 11: script_load
# Required import: import redis
def script_load(script):
    # Store the SHA1 checksum returned by SCRIPT LOAD in a list so that it can
    # be modified later from inside the call() function.
    sha = [None]
    # To invoke the loaded script, the caller passes the Redis connection, the
    # keys the script operates on, and any additional arguments.
    def call(conn, keys=[], args=[], force_eval=False):
        if not force_eval:
            # Only try to load the script if its SHA1 checksum has not been cached yet.
            if not sha[0]:
                # The SHA1 checksum is not cached, so load the given script.
                sha[0] = conn.execute_command(
                    "SCRIPT", "LOAD", script, parse="LOAD")
            try:
                # Execute the command using the cached SHA1 checksum.
                return conn.execute_command(
                    "EVALSHA", sha[0], len(keys), *(keys+args))
            except redis.exceptions.ResponseError as msg:
                # If the error is unrelated to a missing script, re-raise the exception.
                if not msg.args[0].startswith("NOSCRIPT"):
                    raise
        # When a missing-script error is received, or when the script must be
        # force-executed, run it directly with EVAL. After executing the script,
        # EVAL caches it on the server under exactly the same SHA1 checksum that
        # EVALSHA uses.
        return conn.execute_command(
            "EVAL", script, len(keys), *(keys+args))
    # Return a function that automatically loads and executes the script when called.
    return call
# <end id="script-load"/>
Example 12: __call__
# Required import: import redis
def __call__(self, record_id):
# Support DAL shortcut query: table(record_id)
q = self.id # This will call the __getattr__ below
# returning a MockQuery
# Instructs MockQuery, to behave as db(table.id == record_id)
q.op = 'eq'
q.value = record_id
row = q.select()
return row[0] if row else Storage()
Example 13: __init__
# Required import: import redis
def __init__(self, ring_size): # type: (int) -> None
self.metrics_counter_getter = None # type: Optional[Callable[[six.text_type], Counter]]
# These should not be overridden by subclasses. The standard or Sentinel base class determines the ring size
# and passes it in, and then we create a randomized cycle-iterator (which can be infinitely next-ed) of
# connection indexes to use for choosing a connection when posting to request queues (response queues use a
# consistent hashing algorithm).
self._ring_size = ring_size
self._connection_index_generator = itertools.cycle(random.sample(range(self._ring_size), k=self._ring_size))
# It doesn't matter which connection we use for this. The underlying socket connection isn't even used (or
# established, for that matter). But constructing a Script with the `redis` library requires passing it a
# "default" connection that will be used if we ever call that script without a connection (we won't).
self.send_message_to_queue = SendMessageToQueueCommand(self._get_connection(0))
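A small standalone sketch of the connection-index generator described in the comments above: a randomized ordering of all ring slots, cycled indefinitely (the ring size here is illustrative):

import itertools
import random

ring_size = 4
# Shuffle the slot indexes once, then cycle through that order forever.
connection_index_generator = itertools.cycle(random.sample(range(ring_size), k=ring_size))
print([next(connection_index_generator) for _ in range(8)])  # e.g. [2, 0, 3, 1, 2, 0, 3, 1]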
Example 14: deleteBanchoSessions
# Required import: import redis
def deleteBanchoSessions(self):
"""
Remove all `peppy:sessions:*` redis keys.
Called at bancho startup to delete old cached sessions.
:return:
"""
try:
# TODO: Make function or some redis meme
glob.redis.eval("return redis.call('del', unpack(redis.call('keys', ARGV[1])))", 0, "peppy:sessions:*")
except redis.RedisError:
pass
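A hedged alternative sketch (not from the original project): the same cleanup done with SCAN instead of KEYS, which avoids blocking the server on large keyspaces and simply does nothing when no keys match:

import redis

r = redis.Redis()
# Iterate matching keys in batches and delete them one by one.
for key in r.scan_iter(match="peppy:sessions:*", count=500):
    r.delete(key)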
Example 15: _lookup_clublogAPI
# Required import: import redis
def _lookup_clublogAPI(self, callsign=None, timestamp=None, url="https://secure.clublog.org/dxcc", apikey=None):
""" Set up the Lookup object for Clublog Online API
"""
params = {"year" : timestamp.strftime("%Y"),
"month" : timestamp.strftime("%m"),
"day" : timestamp.strftime("%d"),
"hour" : timestamp.strftime("%H"),
"minute" : timestamp.strftime("%M"),
"api" : apikey,
"full" : "1",
"call" : callsign
}
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
if sys.version_info.major == 3:
encodeurl = url + "?" + urllib.parse.urlencode(params)
else:
encodeurl = url + "?" + urllib.urlencode(params)
response = requests.get(encodeurl, timeout=5)
if not self._check_html_response(response):
raise LookupError
jsonLookup = response.json()
lookup = {}
for item in jsonLookup:
if item == "Name": lookup[const.COUNTRY] = jsonLookup["Name"]
elif item == "DXCC": lookup[const.ADIF] = int(jsonLookup["DXCC"])
elif item == "Lon": lookup[const.LONGITUDE] = float(jsonLookup["Lon"])*(-1)
elif item == "Lat": lookup[const.LATITUDE] = float(jsonLookup["Lat"])
elif item == "CQZ": lookup[const.CQZ] = int(jsonLookup["CQZ"])
elif item == "Continent": lookup[const.CONTINENT] = jsonLookup["Continent"]
if lookup[const.ADIF] == 0:
raise KeyError
else:
return lookup