本文整理汇总了Python中redis.StrictRedis.hset方法的典型用法代码示例。如果您正苦于以下问题:Python StrictRedis.hset方法的具体用法?Python StrictRedis.hset怎么用?Python StrictRedis.hset使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类redis.StrictRedis
的用法示例。
在下文中一共展示了StrictRedis.hset方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: sync_get
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
def sync_get(self, hash_id, identity, *args, **kwargs):
    """
    Get data from the Redis hash cache, falling back to the sync function.

    :param hash_id: Unique Hash key for the data
    :param identity: Unique Integer for the data
    :param args: Positional args forwarded to the sync function. (Default: None)
    :param kwargs: Keyword args forwarded to the sync function.
    :return: The cached or freshly-fetched data, or None when it is None or "".
    """
    redis = StrictRedis(connection_pool=self.redis_pool)
    hash_key = key_generator(self.hash_key, hash_id)
    key = key_generator(self.key, identity)
    try:
        if redis.hexists(hash_key, key):
            data = self.get_func(redis.hget(hash_key, key))
        else:
            data = self.sync_func(identity, *args, **kwargs)
            redis.hset(hash_key, key, self.set_func(data))
        # BUGFIX: was "or", which made the guard always true and returned
        # None/"" instead of normalizing them; "and" implements the intent.
        if data is not None and data != "":
            return data
        return None
    except RedisError as re:
        self.log.error("[REDIS] %s", str(re))
        # BUGFIX: forward *args/**kwargs instead of passing the args tuple
        # as a single positional argument to the sync function.
        data = self.sync_func(identity, *args, **kwargs)
        return data
    finally:
        # Drop the client wrapper; the underlying connection pool is shared.
        del redis
示例2: __init__
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class WordList:
    """Frequency-ranked word cache backed by Redis, mirrored to ~/.words.txt.

    A sorted set (CACHE_KEYS) scores each word by lookup count and a hash
    (CACHE_STORE) holds the word payloads.
    NOTE(review): this is Python 2 code (print statements) and uses the
    legacy redis-py argument order for zincrby/zadd -- confirm the installed
    redis-py version matches before reuse.
    """
    def __init__(self):
        self.conn = Redis()
        # Maximum number of words kept in the Redis cache.
        self.CACHE_SIZE = 50
        # Sorted set: word -> score (lookup frequency).
        self.CACHE_KEYS = "words-keys"
        # Hash: word -> stored value.
        self.CACHE_STORE = "words-store"
        self.WORD_FILE = os.path.join(os.path.expanduser("~"), '.words.txt')
    def _reorganize(self):
        # Evict the lowest-scored entries so a subsequent add stays within
        # CACHE_SIZE (pops pop_n + 1 members when the set is full).
        pop_n = self.conn.zcard(self.CACHE_KEYS) - self.CACHE_SIZE
        if pop_n >= 0:
            to_pop = self.conn.zrange(self.CACHE_KEYS, 0, pop_n)
            #print pop_n, to_pop
            self.conn.zremrangebyrank(self.CACHE_KEYS, 0, pop_n)
            for k in to_pop:
                self.conn.hdel(self.CACHE_STORE, k)
    def _add_word(self, key, value):
        # Known word: bump its frequency. New word: make room, store the
        # payload and seed its score at 1.
        result = self.conn.hget(self.CACHE_STORE, key)
        if result:
            self.conn.zincrby(self.CACHE_KEYS, key, 1.0)
        else:
            self._reorganize()
            self.conn.hset(self.CACHE_STORE, key, value)
            self.conn.zadd(self.CACHE_KEYS, 1, key)
    def _get_words(self):
        # Return (word, score) pairs, highest score first; None when Redis
        # is unreachable.
        try:
            words = self.conn.zrevrange(self.CACHE_KEYS, 0, -1, True)
            #hashs = self.conn.hgetall(self.CACHE_STORE)
            #print words
            #print hashs
            return words
        except redis.exceptions.ConnectionError:
            return None
    def dump_console(self):
        # Print the mirrored word file, if it exists.
        if os.path.isfile(self.WORD_FILE):
            with open(self.WORD_FILE, 'r') as f:
                print f.read()
    def write_file(self):
        # Rewrite the word file as "index. word<TAB> count" lines; silently
        # skip when Redis was unreachable.
        words = self._get_words()
        if words is None:
            return
        content = '\n'.join(["%d. %s\t %d"%(i, x[0], int(x[1])) for i, x in enumerate(words)])
        with open(self.WORD_FILE, 'w+') as f:
            f.write(content)
            f.write('\n')
    def add_word(self, key):
        # Public entry point: cache the word and refresh the file.
        # Redis connection failures are deliberately swallowed (best effort).
        try:
            self._add_word(key,key)
            self.write_file()
        except redis.exceptions.ConnectionError:
            return
示例3: RedisDB
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class RedisDB(object):
    """Thin wrapper around a process-wide Redis connection pool, used to
    queue ticket-query notification jobs and to look up station data.

    NOTE(review): Python 2 code (print statement) using the legacy redis-py
    zadd(name, score, value) argument order -- verify the client version.
    """
    def __init__(self):
        # Lazily create one pool shared by every RedisDB instance.
        if not hasattr(RedisDB, 'pool'):
            RedisDB.createPool()
        self.r = StrictRedis(connection_pool = RedisDB.pool)
    @staticmethod
    def createPool():
        # Local Redis, default port, db 0.
        RedisDB.pool = redis.ConnectionPool(
            host = '127.0.0.1',
            port = 6379,
            db = 0
        )
    def saveToRedis(self, receiver, to_station_ab, to_station_name, from_station_ab, from_station_name, querydate, purpose_code, noticetime, publishtime):
        '''Store one ticket-watch request into Redis, scored by a fresh uid.

        The request is added to the global queue and, depending on
        noticetime, to one of the per-time-slot queues.
        '''
        uid = self.r.incr('uid')
        tickets_info = {'uid':uid, 'receiver':receiver, 'to_station_ab':to_station_ab, 'to_station_name':to_station_name, 'from_station_ab':from_station_ab,'from_station_name':from_station_name, 'querydate':querydate, 'purpose_code':purpose_code, 'noticetime':noticetime, 'publishtime': publishtime}
        self.r.zadd('email_que_set_all', uid, str(tickets_info))
        if noticetime == '9am':
            self.r.zadd('email_que_set_9am', uid, str(tickets_info))
        elif noticetime == '11am':
            self.r.zadd('email_que_set_11am', uid, str(tickets_info))
        elif noticetime == '3pm':
            self.r.zadd('email_que_set_3pm', uid, str(tickets_info))
        elif noticetime == '5pm':
            self.r.zadd('email_que_set_5pm', uid, str(tickets_info))
        # Force an RDB snapshot so the request survives a restart.
        self.r.save()
    def getStation(self, set, name):
        # Look up a station entry in the given hash.
        # NOTE(review): parameter "set" shadows the builtin.
        return self.r.hget(set, name)
    def zrevrange(self, set, begin, end):
        return self.r.zrevrange(set, begin, end)
    def zremrangebyscore(self, queue, uid):
        # The uid doubles as the score, so this removes exactly that entry.
        return self.r.zremrangebyscore(queue, uid, uid)
    def station_validate(self, form, field):
        '''
        WTForms-style validator for the tostation/fromstation form fields.
        '''
        # NOTE(review): self.r is a StrictRedis client and has no getStation
        # method -- this raises AttributeError as written; it presumably
        # should call self.getStation(<station hash>, field.data). Confirm
        # against the station hash populated by saveJSONToSet.
        if not self.r.getStation(field.data):
            raise ValidationError(u'木有这个站')
    def saveJSONToSet(self, setName, json):
        # Populate a hash from a JSON dict once; no-op if the key exists.
        # NOTE(review): parameter "json" shadows the json module.
        if not self.r.exists(setName):
            for i, name in enumerate(json):
                self.r.hset(setName, name, json[name])
                print 'insert'+name
            self.r.save()
        else:
            pass
示例4: RedisDict
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class RedisDict(MutableMapping):
    """A dictionary-like view over a single redis hash.

    The backing key on the server is ``namespace:collection_name``.
    Because redis stores flat byte strings, every key and every value is
    pickled on the way in and unpickled on the way out.
    """

    def __init__(self, namespace, collection_name='redis_dict_data',
                 connection=None):
        """Create the mapping.

        :param namespace: namespace to use
        :param collection_name: name of the hash map stored in redis
            (default: redis_dict_data)
        :param connection: an existing ``redis.StrictRedis`` instance;
            when ``None`` (default) a client with default options is built
        """
        self.connection = Redis() if connection is None else connection
        self._self_key = ':'.join((namespace, collection_name))

    def __getitem__(self, key):
        raw = self.connection.hget(self._self_key, pickle.dumps(key))
        if raw is None:
            raise KeyError
        return pickle.loads(bytes(raw))

    def __setitem__(self, key, item):
        field, payload = pickle.dumps(key), pickle.dumps(item)
        self.connection.hset(self._self_key, field, payload)

    def __delitem__(self, key):
        removed = self.connection.hdel(self._self_key, pickle.dumps(key))
        if not removed:
            raise KeyError

    def __len__(self):
        return self.connection.hlen(self._self_key)

    def __iter__(self):
        # Hash fields are pickled keys; unpickle each one as we yield it.
        for field in self.connection.hkeys(self._self_key):
            yield pickle.loads(bytes(field))

    def clear(self):
        # Dropping the hash key removes every entry in one round trip.
        self.connection.delete(self._self_key)

    def __str__(self):
        return str(dict(self.items()))
示例5: parse_url
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
def parse_url(url):
    """Fetch url['url'], extract title/h1/img into url['result'], persist the
    task dict in the user's Redis hash, then ping the notify endpoint.

    :param url: task dict with at least 'url', 'user_id' and 'task_id' keys;
        mutated in place ('status', 'result').
    """
    try:
        url['status'] = 'PARSED'
        resp = requests.get(url['url'])
        # BUGFIX: use the public Response.content attribute instead of the
        # private Response._content.
        soup = BeautifulSoup(resp.content, 'html.parser')
        url['result'] = 'title: %s, h1: %s, img: %s' % (str(soup.title.text), str(soup.h1.text), str(soup.img['src']))
    except Exception:
        # Narrowed from BaseException so KeyboardInterrupt/SystemExit are not
        # swallowed; any network/parse failure marks the task FAILED.
        url['status'] = 'FAILED'
    redis_db = StrictRedis(db=USER_DATA_REDIS_DB)
    redis_db.hset(CLIENT_PERFIX + url['user_id'], url['task_id'], json.dumps(url))
    requests.post('http://localhost:5566/notify', data={'user_id': url['user_id']})
    del redis_db
    return
示例6: post_updates
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
def post_updates(self, updates, log_index):
    '''
    Apply a batch of CRUD token changes to the local Redis cache and,
    when the batch is non-empty, record the new log index.
    '''
    cache = StrictRedis(db=config.tokens_cache_redis_db)
    self.log.info('post_updates(): posting updates to local storage')
    # TODO: could re-add the Redis "Pipelines" feature to combine Redis requests for better performance when available
    for user, token, date, action in updates:
        if action == 'add':
            cache.hset('general', token, user)  # future method - user-by-token -- really just existence of a token
            cache.hset('users', user, token)  # future-method - token-by-user: allow lookup of previous token on token changes
            cache.set(token, user)  # Current method
            self.log.info('post_updates(): added token for user: ' + user)
        elif action == 'delete':
            cache.hdel('general', token)  # future method - disables the ability to authenticate
            cache.hdel('users', user)  # future method - removes history of token
            cache.delete(token)
            self.log.info('post_updates(): deleted token for user: ' + user)
        elif action == 'update':
            prev_token = cache.hget('users', user)
            cache.hdel('general', prev_token)  # future method - disables the ability to authenticate with previous token
            cache.hset('general', token, user)  # future method - set the new token for the user
            cache.hset('users', user, token)  # future method - set the user as possessing the new token
            cache.set(token, user)
            self.log.info('post_updates(): updated token for user: ' + user)
        else:
            self.log.critical('post_updates(): unexpected change type: ' + action)
    # Don't advance the index when there was nothing to apply (and also
    # don't advance it when there are errors).
    if updates:
        cache.set('log_index', log_index)
示例7: RedisManager
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class RedisManager(NoSqlManager):
    """Beaker NamespaceManager backend storing each cached value in a Redis
    hash with 'data', 'accessed' and 'created' fields, optionally expired."""
    def __init__(self, namespace, url=None, data_dir=None, lock_dir=None, **params):
        # Optional TTL (seconds) applied to every key written by set_value.
        self.expiretime = params.pop('expiretime', None)
        NoSqlManager.__init__(self, namespace, url=url, data_dir=data_dir, lock_dir=lock_dir, **params)
    def open_connection(self, host, port, **params):
        self.db_conn = StrictRedis(host=host, port=int(port), **params)
    def __getitem__(self, key):
        # NOTE(review): hget returns None for a missing key, which makes
        # pickle.loads raise TypeError -- callers are expected to test
        # membership via __contains__ first.
        return pickle.loads(self.db_conn.hget(self._format_key(key), 'data'))
    def __contains__(self, key):
        return self.db_conn.exists(self._format_key(key))
    def set_value(self, key, value, expiretime=None):
        key = self._format_key(key)
        #
        # beaker.container.Value.set_value calls NamespaceManager.set_value
        # however it (until version 1.6.4) never sets expiretime param.
        #
        # Checking "type(value) is tuple" is a compromise
        # because Manager class can be instantiated outside container.py (See: session.py)
        #
        if (expiretime is None) and (type(value) is tuple):
            expiretime = value[1]
        self.db_conn.hset(key, 'data', pickle.dumps(value))
        # NOTE(review): datetime objects are handed to redis-py directly
        # here; the client coerces or rejects them depending on version.
        self.db_conn.hset(key, 'accessed', datetime.now())
        # hsetnx: only record 'created' the first time the key is written.
        self.db_conn.hsetnx(key, 'created', datetime.now())
        if expiretime or self.expiretime:
            self.db_conn.expire(key, expiretime or self.expiretime)
    def __delitem__(self, key):
        self.db_conn.delete(self._format_key(key))
    def _format_key(self, key):
        # Spaces are replaced with a UTF-8 middle dot so keys stay shell-safe.
        return 'beaker:%s:%s' % (self.namespace, key.replace(' ', '\302\267'))
    def do_remove(self):
        # Flushes the whole redis db backing this manager, not just the
        # namespace.
        self.db_conn.flushdb()
    def keys(self):
        return self.db_conn.keys('beaker:%s:*' % self.namespace)
示例8: RedisBackend
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class RedisBackend(BaseBackend):
    """Thumbnail-metadata backend keeping sources and thumbnails in per-name
    redis hashes, all under a configurable key prefix."""

    def __init__(self):
        from redis import StrictRedis
        # All connection settings come from the METADATA config dict.
        settings = conf.METADATA
        self.prefix = settings.get('PREFIX', 'djthumbs') + ":"
        self.redis = StrictRedis(
            host=settings.get('host', 'localhost'),
            port=settings.get('port', 6379),
            password=settings.get('password', None),
            db=settings.get('db', 0),
        )

    def get_source_key(self, name):
        """Redis hash key holding the source entry for *name*."""
        return "%ssources:%s" % (self.prefix, name)

    def get_thumbnail_key(self, name):
        """Redis hash key holding the thumbnails recorded for *name*."""
        return "%sthumbnails:%s" % (self.prefix, name)

    def add_source(self, name):
        """Register *name* as a source and return it."""
        self.redis.hset(self.get_source_key(name), name, name)
        return name

    def get_source(self, name):
        """Stored source entry for *name*, decoded to text."""
        return compat.as_text(self.redis.hget(self.get_source_key(name), name))

    def delete_source(self, name):
        """Remove the source entry for *name*; returns the hdel count."""
        return self.redis.hdel(self.get_source_key(name), name)

    def get_thumbnails(self, name):
        """Every ImageMeta recorded for source *name*."""
        metas = self.redis.hgetall(self.get_thumbnail_key(name))
        thumbs = []
        for size, thumbnail_name in metas.items():
            thumbs.append(ImageMeta(name, thumbnail_name, size))
        return thumbs

    def get_thumbnail(self, source_name, size):
        """ImageMeta for (source_name, size), or None when absent."""
        name = compat.as_text(self.redis.hget(self.get_thumbnail_key(source_name), size))
        if not name:
            return None
        return ImageMeta(source_name, name, size)

    def add_thumbnail(self, source_name, size, name):
        """Record *name* as the *size* thumbnail of *source_name*."""
        self.redis.hset(self.get_thumbnail_key(source_name), size, name)
        return ImageMeta(source_name, name, size)

    def delete_thumbnail(self, source_name, size):
        """Drop the *size* thumbnail entry of *source_name*."""
        self.redis.hdel(self.get_thumbnail_key(source_name), size)
示例9: RedisStorage
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class RedisStorage(BaseSharedStorage):
    """Shared storage keeping exactly one (key, data) pair per filename in a
    redis hash named after the file."""

    def __init__(self, db_num):
        self._redis = StrictRedis(db=db_num)

    def save(self, filename, key, data):
        """Replace any previous entry for *filename* with (key, data)."""
        self._purge_duplicates(filename)
        self._redis.hset(filename, key, data)

    def load(self, filename):
        """Return the (key, data) pair stored for *filename*."""
        fields = self._redis.hkeys(filename)
        first = fields[0]  # Should never be more than one key
        return first, self._redis.hget(filename, first)

    def _purge_duplicates(self, dict_key):
        """Remove identical files from server to be replaced by new files."""
        for field in self._redis.hkeys(dict_key):
            self._redis.hdel(dict_key, field)
示例10: async_set
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
async def async_set(self, hash_id, identity, *args, data=None, **kwargs):
    """
    Write *data* (or the sync function's result) into the hash cache.

    :param hash_id: Unique Hash key for the data
    :param identity: Unique Integer for the data
    :param args: Args for the sync function. (Default: None)
    :param data: Data to be set. By default it will pick data from the sync_function. (Default: None)
    :param kwargs: Args for the sync function.
    :return: 1 on success, 0 when Redis reported an error.
    """
    if data is None:
        data = await self.sync_func(identity, *args, **kwargs)
    connection = StrictRedis(connection_pool=self.redis_pool)
    hkey = key_generator(self.hash_key, hash_id)
    field = key_generator(self.key, identity)
    try:
        connection.hset(hkey, field, self.set_func(data))
        return 1
    except RedisError as exc:
        self.log.error("[REDIS] %s", str(exc))
        return 0
    finally:
        # Release the client wrapper; the pool itself is shared.
        del connection
示例11: SearchIndex
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class SearchIndex(object):
    """Minimal redis-backed word/autocompletion index.

    NOTE(review): Python 2 code (dict.iteritems) using the legacy redis-py
    zadd(name, score, value) argument order -- verify the client version.
    """
    def __init__(self, *args, **kwargs):
        self.redis = StrictRedis(*args, **kwargs)
    def index(self, value, uid=None, key='text', autocompletion=False, **kwargs):
        # Store the raw text under the document's hash, then index it either
        # for prefix autocompletion or for weighted word search.
        if not uid:
            uid = self.redis.incr('indexed')
        self.redis.hset(uid, key, value)
        pipe = self.redis.pipeline()
        if autocompletion:
            # One sorted set per 2+-char prefix ("a:<prefix>"), member
            # "<uid>:<word>". NOTE(review): with an auto-generated uid this
            # is int + str concatenation -- presumably callers pass uid as a
            # string; confirm.
            for i, word in enumerate(_get_words(value, weighted=False)):
                for i, letter in enumerate(word):
                    if len(word) > i + 1:
                        pipe.zadd('a:' + word[:2+i], 0, uid+':'+word)
        else:
            # Weighted word index: sorted set "w:<word>" scored per document.
            for word, value in _get_words(value).iteritems():
                pipe.zadd('w:' + word, value, uid)
        pipe.execute()
    def index_autocomplete(self, value, uid=None, key='text'):
        # Convenience wrapper around index() with autocompletion enabled.
        self.index(value, uid, key, autocompletion=True)
示例12: MispMySQLConnector
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class MispMySQLConnector(object):
    """Mirrors the MISP MySQL database (attributes, events, organisations,
    users, tags) into Redis for fast lookups. Uses SQLAlchemy reflection so
    no table models need to be declared locally."""
    def __init__(self):
        # Reflect the existing MISP schema from the live database.
        Base = automap_base()
        engine = create_engine('mysql://{}:{}@{}/{}'.format(user, password, host, dbname))
        # reflect the tables
        Base.prepare(engine, reflect=True)
        metadata = MetaData()
        metadata.reflect(bind=engine)
        self.connection = engine.connect()
        # Handles to the MISP tables this importer reads.
        self.attributes = Table("attributes", metadata, autoload=True)
        self.events = Table("events", metadata, autoload=True)
        self.organisations = Table("organisations", metadata, autoload=True)
        self.threat_levels = Table("threat_levels", metadata, autoload=True)
        self.users = Table("users", metadata, autoload=True)
        self.tags = Table("tags", metadata, autoload=True)
        self.event_tags = Table("event_tags", metadata, autoload=True)
        # Redis over a unix socket (path from module-level config).
        self.r = StrictRedis(unix_socket_path=redis_socket)
# ####### Full import no respect of ACL ########
def import_table(self, table):
    """Copy every row of *table* into Redis: a set of row ids named after
    the table, plus one hash per row keyed "<singular-table-name>:<id>"."""
    pipe = self.r.pipeline(False)
    singular = table.name[:-1]
    for row in self.connection.execute(select([table])):
        pipe.sadd(table.name, row['id'])
        pipe.hmset('{}:{}'.format(singular, row['id']), dict(row))
    pipe.execute()
def import_all_tables(self):
    """Mirror every MISP table into Redis, then build tag links and
    attribute-value correlation structures.

    NOTE(review): SHA256.new() is called with str values -- this is
    Python 2 era code; under Python 3 pycryptodome requires bytes.
    """
    # Mass import of everything
    self.import_table(self.attributes)
    self.import_table(self.events)
    self.import_table(self.organisations)
    self.import_table(self.threat_levels)
    self.import_table(self.users)
    # Specific import
    # Tags: keep id -> name locally so event_tags rows can be resolved
    # without extra queries.
    p = self.r.pipeline(False)
    tag_ids = {}
    for row in self.connection.execute(select([self.tags])):
        tag_ids[row['id']] = row['name']
        p.sadd('tags', row['name'])
    p.execute()
    p = self.r.pipeline(False)
    # Link events and tags in both directions.
    for row in self.connection.execute(select([self.event_tags])):
        tag_name = tag_ids[row['tag_id']]
        p.sadd('event:{}:tags'.format(row['event_id']), tag_name)
        p.sadd('{}:events'.format(tag_name), row['event_id'])
    p.execute()
    # Create usefull helpers & correlations
    p = self.r.pipeline(False)
    for a in self.connection.execute(select([self.attributes])):
        p.sadd('event_attrs:{}'.format(a['event_id']), a['id'])
        # Hashing the values again avoid very long entries (snort/yara rules)
        hash_value = SHA256.new(a['value1'].strip().lower()).hexdigest()
        p.sadd(hash_value, a['event_id'])
        p.sadd('event_vals:{}'.format(a['event_id']), hash_value)
        p.set('val:{}'.format(hash_value), a['value1'])
        p.sadd('{}:attrs'.format(hash_value), a['id'])
        # value2 (second half of composite attributes) gets the same
        # treatment when present.
        if a['value2'].strip():
            hash_value = SHA256.new(a['value2'].strip().lower()).hexdigest()
            p.sadd(hash_value, a['event_id'])
            p.sadd('event_vals:{}'.format(a['event_id']), hash_value)
            p.set('val:{}'.format(hash_value), a['value2'])
    p.execute()
# ####### Other functions ########
def import_auth(self):
    """Store each user's API authkey -> org_id mapping as plain keys."""
    for account in self.connection.execute(select([self.users])):
        self.r.set(account['authkey'], account['org_id'])
# ####### Helpers ########
def __get_all_event_uuid(self):
    """Return {event_id: uuid} for all events and persist the reverse
    uuid -> id map in the Redis hash "uuid_id"."""
    id_to_uuid = {}
    for event in self.connection.execute(select([self.events])):
        id_to_uuid[event['id']] = event['uuid']
        self.r.hset('uuid_id', event['uuid'], event['id'])
    return id_to_uuid
# ####### Get specific information from the database, Auth NOT preserved.#######
def get_event_digest(self, list_eids=None):
'''
Returns a igest of the events in list_eids. If None: all the events.
List:
[
[id, uuid, info, date of the event, timestamp of the last update],
...
]
'''
#.........这里部分代码省略.........
示例13: JobsDB
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class JobsDB(object):
    """Redis-backed store for multivac jobs, logs, groups, actions and
    workers."""
    # Key prefixes: the full Redis key is "<prefix>:<id>" (see _key usage).
    prefix = { 'job' : 'multivac_job',
               'log' : 'multivac_log',
               'group' : 'multivac_group',
               'action' : 'multivac_action',
               'worker' : 'multivac_worker' }
def __init__(self, redis_host, redis_port):
    """Connect to Redis (responses decoded to str) and prepare the
    per-job log-subscription registry."""
    self.redis = StrictRedis(
        host=redis_host,
        port=redis_port,
        decode_responses=True)
    # job_id -> pubsub subscription for that job's log channel.
    self.subs = {}
    # TODO: add connection test with r.config_get('port')
#######
# Job Methods
#######
def create_job(self, action_name, args=None, initiator=None):
    """
    Create a new job with unique ID and subscribe to log channel
    params:
    - action_name(str): Name of the action this job uses
    - args(str): Optional space-delimited series of arguments to be
    appended to the job command
    - initiator(str): Optional name of the user who initiated this job
    """
    # The action template supplies the command, allowed groups and the
    # confirmation flag; the job record is built on top of it.
    job = self.get_action(action_name)
    if not job:
        return (False, 'No such action')
    #check that user has privilege for this command
    if not self.check_user(initiator, job['allow_groups'].split(',')):
        log.debug('action denied: %s for user %s' % \
                  (action_name, initiator))
        return (False, 'Invalid user command')
    job['id'] = str(uuid4().hex)
    job['args'] = args
    job['created'] = unix_time(datetime.utcnow())
    # Redis stores strings, hence the "True" comparison. Jobs that need
    # confirmation wait in 'pending'; everything else is runnable now.
    if job['confirm_required'] == "True":
        job['status'] = 'pending'
    else:
        job['status'] = 'ready'
    self._subscribe_to_log(job['id'])
    if initiator:
        self.append_job_log(job['id'], 'Job initiated by %s' % initiator)
    self.redis.hmset(self._key('job', job['id']), job)
    return (True, job['id'])
def cancel_job(self, job_id):
    """Cancel and clean up a job, allowed only while it is still pending."""
    job = self.get_job(job_id)
    if job['status'] == 'pending':
        self.cleanup_job(job_id, canceled=True)
        return (True, '')
    return (False, 'Cannot cancel job in %s state' % job['status'])
def update_job(self, job_id, field, value):
    """Set a single field on the stored job hash."""
    job_key = self._key('job', job_id)
    self.redis.hset(job_key, field, value)
    return (True,)
def cleanup_job(self, job_id, canceled=False):
    """
    Tear down the log subscription for *job_id* and mark it finished.
    params:
    - canceled(bool): If True, mark job as canceled instead of completed
    """
    logkey = self._key('log', job_id)
    # send EOF signal to streaming clients
    self.redis.publish(logkey, 'EOF')
    sub = self.subs.pop(job_id, None)
    if sub is not None:
        sub.unsubscribe()
        log.debug('Unsubscribed from log channel: %s' % logkey)
    final_status = 'canceled' if canceled else 'completed'
    self.update_job(job_id, 'status', final_status)
def get_job(self, job_id):
    """
    Return single job dict given a job id
    """
    # hgetall returns {} (not None) for an unknown job id.
    return self.redis.hgetall(self._key('job', job_id))
#.........这里部分代码省略.........
示例14: __init__
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class RespectfulRequester:
    """Rate-limit-aware proxy around the requests library; realm and
    request-count state lives in Redis under the redis_prefix namespace."""
    def __init__(self):
        # Connection settings come from the module-level config dict.
        self.redis = StrictRedis(
            host=config["redis"]["host"],
            port=config["redis"]["port"],
            password=config["redis"]["password"],
            db=config["redis"]["database"])
        # Fail fast with a domain error if Redis is unreachable.
        try:
            self.redis.echo("Testing Connection")
        except ConnectionError:
            raise RequestsRespectfulRedisError("Could not establish a connection to the provided Redis server")
def __getattr__(self, attr):
if attr in ["delete", "get", "head", "options", "patch", "post", "put"]:
return getattr(self, "_requests_proxy_%s" % attr)
else:
raise AttributeError()
@property
def redis_prefix(self):
return "RespectfulRequester"
def request(self, request_func, realms, wait=False):
    """Perform request_func under the rate limits of *realms*.

    :param request_func: callable issuing the actual HTTP request
    :param realms: realm name or sequence of names; each must already be
        registered via register_realm
    :param wait: when True, poll once per second until the rate limit
        allows the request instead of raising
    :raises RequestsRespectfulError: for an unregistered realm
    NOTE(review): `basestring` exists only on Python 2.
    """
    # Accept a single realm name as well as a sequence of them.
    if not isinstance(realms, Sequence) or isinstance(realms, basestring):
        realms = [realms]
    for realm in realms:
        if realm not in self.fetch_registered_realms():
            raise RequestsRespectfulError("Realm '%s' hasn't been registered" % realm)
    if wait:
        # Busy-wait: retry until no realm reports a rate limit.
        while True:
            try:
                return self._perform_request(request_func, realms)
            except RequestsRespectfulRateLimitedError:
                pass
            time.sleep(1)
    else:
        return self._perform_request(request_func, realms)
def fetch_registered_realms(self):
    """All realm names currently registered, decoded to str."""
    members = self.redis.smembers("%s:REALMS" % self.redis_prefix)
    return [member.decode("utf-8") for member in members]
def register_realm(self, realm, max_requests, timespan):
    """Register *realm* with its rate limit, unless it already exists."""
    redis_key = self._realm_redis_key(realm)
    already_known = self.redis.hexists(redis_key, "max_requests")
    if not already_known:
        self.redis.hmset(redis_key, {"max_requests": max_requests, "timespan": timespan})
        self.redis.sadd("%s:REALMS" % self.redis_prefix, realm)
    return True
def update_realm(self, realm, **kwargs):
    """Update max_requests and/or timespan for *realm* (int values only)."""
    redis_key = self._realm_redis_key(realm)
    for field in ("max_requests", "timespan"):
        value = kwargs.get(field)
        # type() == int deliberately excludes bool (unlike isinstance).
        if type(value) == int:
            self.redis.hset(redis_key, field, value)
    return True
def unregister_realm(self, realm):
    """Remove *realm*, its registry entry and all of its request records."""
    self.redis.delete(self._realm_redis_key(realm))
    self.redis.srem("%s:REALMS" % self.redis_prefix, realm)
    for request_key in self.redis.keys("%s:REQUEST:%s:*" % (self.redis_prefix, realm)):
        self.redis.delete(request_key)
    return True
def realm_max_requests(self, realm):
    """Configured request quota for *realm* within its timespan."""
    info = self._fetch_realm_info(realm)
    return int(info[b"max_requests"].decode("utf-8"))
def realm_timespan(self, realm):
    """Length in seconds of the rate-limit window for *realm*."""
    info = self._fetch_realm_info(realm)
    return int(info[b"timespan"].decode("utf-8"))
@classmethod
def configure(cls, **kwargs):
if "redis" in kwargs:
if type(kwargs["redis"]) != dict:
raise RequestsRespectfulConfigError("'redis' key must be a dict")
expected_redis_keys = ["host", "port", "password", "database"]
missing_redis_keys = list()
for expected_redis_key in expected_redis_keys:
if expected_redis_key not in kwargs["redis"]:
missing_redis_keys.append(expected_redis_key)
if len(missing_redis_keys):
raise RequestsRespectfulConfigError("'%s' %s missing from the 'redis' configuration key" % (
", ".join(missing_redis_keys),
"is" if len(missing_redis_keys) == 1 else "are"
))
#.........这里部分代码省略.........
示例15: RedisBackendTest
# 需要导入模块: from redis import StrictRedis [as 别名]
# 或者: from redis.StrictRedis import hset [as 别名]
class RedisBackendTest(TestCase):
    """Integration tests for RedisBackend against a live local Redis."""

    def setUp(self):
        self.backend = RedisBackend()
        # Raw client used to verify and clean up what the backend wrote.
        self.redis = StrictRedis()

    def test_get_source_key(self):
        self.assertEqual(self.backend.get_source_key("a.jpg"), "djthumbs-test:sources:a.jpg")

    def test_get_thumbnail_key(self):
        self.assertEqual(self.backend.get_thumbnail_key("a.jpg"), "djthumbs-test:thumbnails:a.jpg")

    def test_add_delete_source(self):
        source_name = "test-thumbnail.jpg"
        source_key = self.backend.get_source_key(source_name)
        self.backend.add_source(source_name)
        self.assertTrue(self.redis.hexists(source_key, source_name))
        self.backend.delete_source(source_name)
        self.assertFalse(self.redis.hexists(source_key, source_name))

    def test_get_source(self):
        source_name = "test-thumbnail.jpg"
        source_key = self.backend.get_source_key(source_name)
        self.redis.hset(source_key, source_name, source_name)
        self.assertEqual(self.backend.get_source(source_name), source_name)
        # Delete Source
        self.redis.hdel(source_key, source_name)

    def test_add_delete_thumbnail(self):
        source_name = "test-thumbnail.jpg"
        size = "small"
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, size, "test-thumbnail_small.jpg")
        self.assertTrue(self.redis.hexists(thumbnail_key, size))
        self.backend.delete_thumbnail(source_name, size)
        self.assertFalse(self.redis.hexists(thumbnail_key, size))
        # Delete Source
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)

    def test_get_thumbnail(self):
        source_name = "test-thumbnail.jpg"
        self.backend.add_source(source_name)
        self.backend.add_thumbnail(source_name, "small", "test-thumbnail_small.jpg")
        self.assertEqual(
            self.backend.get_thumbnail(source_name, "small"),
            ImageMeta(source_name, "test-thumbnail_small.jpg", "small"),
        )
        self.backend.add_thumbnail(source_name, "large", "test-thumbnail_large.jpg")
        expected = ["test-thumbnail_large.jpg", "test-thumbnail_small.jpg"]
        result = [image_meta.name for image_meta in self.backend.get_thumbnails(source_name)]
        # BUGFIX: list.sort() returns None, so the previous
        # assertEqual(result.sort(), expected.sort()) compared None to None
        # and could never fail. Compare sorted copies instead.
        self.assertEqual(sorted(result), sorted(expected))
        # Delete Source & Thumbnails
        thumbnail_key = self.backend.get_thumbnail_key(source_name)
        self.redis.hdel(self.backend.get_source_key(source_name), source_name)
        self.redis.hdel(thumbnail_key, "small")
        self.redis.hdel(thumbnail_key, "large")