This article collects typical code examples of the StrictRedis.set method from Python's redis.client module. If you are wondering what StrictRedis.set does, how to call it, or how it is used in practice, the curated examples below may help. You can also read more about its containing class, redis.client.StrictRedis.
Eleven code examples of StrictRedis.set are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code samples.
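Before the examples, a minimal sketch of StrictRedis.set itself, assuming a Redis server is reachable on localhost:6379:
from redis.client import StrictRedis

r = StrictRedis(host='localhost', port=6379, db=0)
r.set('greeting', 'hello')             # plain SET
r.set('session:42', 'payload', ex=60)  # SET with a 60-second expiry
r.set('session:42', 'other', nx=True)  # only set if the key does not already exist
print(r.get('greeting'))               # b'hello' (bytes by default)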
Example 1: Record
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
import json
from time import sleep


class Record(object):
    def __init__(self, host='127.0.0.1', port=6379):
        # Pass the constructor arguments through; the original dropped them.
        self.r = StrictRedis(host=host, port=port)

    def run(self):
        while True:
            value = self.r.rpop('alerts')
            if value:
                obj = json.loads(value)
                keyredis = obj['src_ip'] + '_' + str(obj['src_port']) + '_' + obj['dest_ip'] + '_' + str(obj['dest_port'])
                entry = self.r.get(keyredis)
                if entry:
                    restruct = json.loads(entry)
                else:
                    restruct = {}
                if 'http' not in restruct:
                    restruct['http'] = []
                if 'alerts' not in restruct:
                    restruct['alerts'] = []
                if 'files' not in restruct:
                    restruct['files'] = []
                if 'alert' in obj:
                    restruct['alerts'].append(obj['alert']['signature'])
                if 'fileinfo' in obj:
                    restruct['files'].append(obj['fileinfo'])
                if 'http' in obj:
                    restruct['http'].append(obj['http'])
                if len(restruct) > 0:
                    self.r.set(keyredis, json.dumps(restruct))
            else:
                sleep(1)
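A hedged usage sketch for Example 1: the event below mirrors the fields the class reads (src_ip, src_port, dest_ip, dest_port, alert) and is purely illustrative; it assumes a Redis server on the default localhost:6379.
import json
from redis.client import StrictRedis

r = StrictRedis()
event = {
    'src_ip': '10.0.0.1', 'src_port': 4444,
    'dest_ip': '10.0.0.2', 'dest_port': 80,
    'alert': {'signature': 'ET TEST example signature'},
}
r.rpush('alerts', json.dumps(event))
# Record().run() would pop this entry and aggregate it under the key
# '10.0.0.1_4444_10.0.0.2_80'.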
Example 2: redis
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
class redis(object):
    def __init__(self, host='127.0.0.1', port=6379):
        self.r = StrictRedis(host, port)

    def rec(self, k, v):
        self.r.set(k, v)

    def rpush(self, v):
        self.r.rpush('alerts', v)
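Example 2 pairs naturally with Example 1: its rpush feeds the same 'alerts' list that Record.run consumes. A short usage sketch, assuming the wrapper class above is in scope and a Redis server runs on 127.0.0.1:6379 (the key and payload are made up):
store = redis()                       # the wrapper class above, not the redis module
store.rec('last_seen', '2016-01-01')  # StrictRedis.set under the hood
store.rpush('{"alert": "test"}')      # appended to the shared 'alerts' list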
Example 3: RedisDataSource
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
class RedisDataSource(AbstractDataSource):

    _r = None

    def __init__(self, config):
        if self._validateConfig(config):
            self._r = StrictRedis(host=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_HOST],
                                  port=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_PORT],
                                  db=config[REDIS_DATASOURCE_CONFIG][REDIS_DATASOURCE_CONFIG_DB])
            logger.debug("Obtained internal redis handler " + str(self._r))
        else:
            raise BaseException("Error validating config ")

    def update(self, item):
        self.store(item)

    def store(self, item):
        self._r.set(item.getHash(), item.getValue())

    def get(self, item):
        return self._r.get(item.getHash())

    def exists(self, item):
        return self.get(item) is not None

    def all(self):
        result = []
        # Obtain all keys
        keys = self._r.keys()
        # For each key, get the value
        for k in keys:
            value = self._r.get(k)
            result.append(BaseItem({"origin": "redis"}, value))
        # Return the collected items
        return result

    def _validateConfig(self, config):
        validator = MultipleConfigValidator(
            {VALIDATORS_LIST: [ContainsKeyConfigValidator({KEY_VALUE: REDIS_DATASOURCE_CONFIG})]})
        if not validator.validate(config):
            raise BaseException("Config validation error : does not contain " + REDIS_DATASOURCE_CONFIG)
        # Validate the redis datasource config
        validator = MultipleConfigValidator(
            {VALIDATORS_LIST: [ContainsKeysConfigValidator({KEYS_LIST: [REDIS_DATASOURCE_CONFIG_DB,
                                                                        REDIS_DATASOURCE_CONFIG_HOST,
                                                                        REDIS_DATASOURCE_CONFIG_PORT]})]})
        if not validator.validate(config[REDIS_DATASOURCE_CONFIG]):
            raise BaseException("Config validation error : config not complete ")
        return True

    def delete(self, item):
        self._r.delete(item.getHash())
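A sketch of the config shape Example 3 expects. The constant values below are assumptions (they are defined elsewhere in the original project); only the nesting under REDIS_DATASOURCE_CONFIG with host, port and db entries is implied by the code above.
REDIS_DATASOURCE_CONFIG = 'redis'       # assumed value
REDIS_DATASOURCE_CONFIG_HOST = 'host'   # assumed value
REDIS_DATASOURCE_CONFIG_PORT = 'port'   # assumed value
REDIS_DATASOURCE_CONFIG_DB = 'db'       # assumed value

config = {
    REDIS_DATASOURCE_CONFIG: {
        REDIS_DATASOURCE_CONFIG_HOST: '127.0.0.1',
        REDIS_DATASOURCE_CONFIG_PORT: 6379,
        REDIS_DATASOURCE_CONFIG_DB: 0,
    }
}
# datasource = RedisDataSource(config)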
Example 4: test_ensure_timeline_scheduled_script
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
def test_ensure_timeline_scheduled_script(self):
    client = StrictRedis(db=9)
    timeline = 'timeline'
    timestamp = 100.0

    waiting_set_size = functools.partial(client.zcard, 'waiting')
    ready_set_size = functools.partial(client.zcard, 'ready')
    timeline_score_in_waiting_set = functools.partial(client.zscore, 'waiting', timeline)
    timeline_score_in_ready_set = functools.partial(client.zscore, 'ready', timeline)

    keys = ('waiting', 'ready', 'last-processed')

    # The first addition should cause the timeline to be added to the ready set.
    with self.assertChanges(ready_set_size, before=0, after=1), \
            self.assertChanges(timeline_score_in_ready_set, before=None, after=timestamp):
        assert ensure_timeline_scheduled(keys, (timeline, timestamp, 1, 10), client) == 1

    # Adding it again with a timestamp in the future should not change the schedule time.
    with self.assertDoesNotChange(waiting_set_size), \
            self.assertDoesNotChange(ready_set_size), \
            self.assertDoesNotChange(timeline_score_in_ready_set):
        assert ensure_timeline_scheduled(keys, (timeline, timestamp + 50, 1, 10), client) is None

    # Move the timeline from the ready set to the waiting set.
    client.zrem('ready', timeline)
    client.zadd('waiting', timestamp, timeline)
    client.set('last-processed', timestamp)

    increment = 1
    with self.assertDoesNotChange(waiting_set_size), \
            self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp + increment):
        assert ensure_timeline_scheduled(keys, (timeline, timestamp, increment, 10), client) is None

    # Make sure the schedule respects the maximum value.
    with self.assertDoesNotChange(waiting_set_size), \
            self.assertChanges(timeline_score_in_waiting_set, before=timestamp + 1, after=timestamp):
        assert ensure_timeline_scheduled(keys, (timeline, timestamp, increment, 0), client) is None

    # Test to ensure a missing last processed timestamp can be handled
    # correctly (chooses minimum of schedule value and record timestamp.)
    client.zadd('waiting', timestamp, timeline)
    client.delete('last-processed')

    with self.assertDoesNotChange(waiting_set_size), \
            self.assertDoesNotChange(timeline_score_in_waiting_set):
        assert ensure_timeline_scheduled(keys, (timeline, timestamp + 100, increment, 10), client) is None

    with self.assertDoesNotChange(waiting_set_size), \
            self.assertChanges(timeline_score_in_waiting_set, before=timestamp, after=timestamp - 100):
        assert ensure_timeline_scheduled(keys, (timeline, timestamp - 100, increment, 10), client) is None
Example 5: test_truncate_timeline_script
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
def test_truncate_timeline_script(self):
    client = StrictRedis(db=9)
    timeline = 'timeline'

    # Preload some fake records (the contents don't matter.)
    records = list(itertools.islice(self.records, 10))
    for record in records:
        client.zadd(timeline, record.timestamp, record.key)
        client.set(make_record_key(timeline, record.key), 'data')

    with self.assertChanges(lambda: client.zcard(timeline), before=10, after=5):
        truncate_timeline((timeline,), (5,), client)

    # Ensure the early records don't exist.
    for record in records[:5]:
        assert not client.zscore(timeline, record.key)
        assert not client.exists(make_record_key(timeline, record.key))

    # Ensure the later records do exist.
    for record in records[-5:]:
        assert client.zscore(timeline, record.key) == float(record.timestamp)
        assert client.exists(make_record_key(timeline, record.key))
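Note that Examples 4 and 5 call zadd with the legacy redis-py 2.x signature, zadd(name, score, member). In redis-py 3.0 and later the method takes a mapping of members to scores instead, so the same calls would need the following form (a sketch of the 3.x equivalent):
client.zadd('waiting', {timeline: timestamp})
client.zadd(timeline, {record.key: record.timestamp})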
Example 6: RedisCache
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
class RedisCache(CacheBase):
    def __init__(self, config, section):
        from redis.client import StrictRedis
        self.conn = StrictRedis(
            config.get(section, 'redis-server'),
            config.getint(section, 'redis-port'),
            config.getint(section, 'redis-db'),
            decode_responses=True
        )

    def check_password(self, user, password):
        """Check the given user and password.

        Returns None on cache miss, True if password matches, False if not.
        """
        cached = self.conn.get(self.prefix('%s-pass' % user))
        if cached is None:
            return cached
        else:
            return cached == self.hash(password, cached)

    def set_password(self, user, password):
        self.conn.set(self.prefix('%s-pass' % user), self.hash(password, None), ex=self.expire)

    def in_groups(self, user, groups):
        key = self.prefix('%s-groups' % user)
        if not self.conn.exists(key):
            return None
        return not self.conn.smembers(key).isdisjoint(groups)

    def set_groups(self, user, groups):
        key = self.prefix('%s-groups' % user)
        pipe = self.conn.pipeline()
        pipe.sadd(key, *groups).expire(key, self.expire)
        pipe.execute()
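A sketch of the configuration section Example 6 reads, built with the standard library configparser. The section name 'cache' is an assumption, and CacheBase (which supplies prefix(), hash() and the expire attribute) belongs to the original project and is not shown here, so the constructor call is left commented out.
from configparser import ConfigParser

config = ConfigParser()
config.read_string('''
[cache]
redis-server = 127.0.0.1
redis-port = 6379
redis-db = 0
''')
# cache = RedisCache(config, 'cache')
# cache.set_password('alice', 's3cret')   # stored via StrictRedis.set with ex=self.expire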
Example 7: Command
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
class Command(object):
    def __init__(self):
        self.redis = StrictRedis(Config.REDIS['HOST'], Config.REDIS['PORT'], Config.REDIS['DB'])

    def run(self):
        log.debug("Updating mirror database")
        geoip = GeoIP(Config.GEOIP_PATH_V4)
        for status in mirror_statuses(unofficial_mirrors=Config.UNOFFICIAL_MIRRORS):
            name = status['mirror']
            if name == "a.pypi.python.org":
                # don't include 'a' in the list of mirrors - it's no mirror after all
                continue
            time_diff = status['time_diff']
            if not isinstance(time_diff, timedelta):
                continue
            log.debug("  Processing mirror '%s'", name)
            record = geoip.record_by_name(name)
            lat = record['latitude']
            lon = record['longitude']
            log.debug("  Age: %d, Lat: %0.5f, Lon: %0.5f", time_diff.total_seconds(), lat, lon)
            try:
                mirror = Mirror.objects.get(name=name)
            except ObjectNotFound:
                mirror = Mirror(name=name)
            mirror.age = time_diff.total_seconds()
            mirror.lat = lat
            mirror.lon = lon
            mirror.save()
        self.redis.set(Config.KEY_LAST_UPDATE, time.time())
        log.debug("Finished updating mirror database")
Example 8: RedisStore
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
class RedisStore():
    def __init__(self, dispatcher, db_host, db_port, db_num, db_pw):
        self.dispatcher = dispatcher
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.encoder = JSONEncoder()
        self.decoder = JSONDecoder()
        self.class_map = {}
        self.object_map = {}

    def create_object(self, dbo, update_rev=False):
        self.save_object(dbo)
        dbo.on_loaded()

    def save_object(self, dbo, update_rev=False, autosave=False):
        if update_rev:
            dbo.dbo_rev = getattr(dbo, "dbo_rev", 0) + 1
        json_obj = self.build_json(dbo)
        key = dbo.dbo_key
        self.redis.set(key, self.encoder.encode(json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatcher.dispatch("db_log{0}".format("_auto" if autosave else ""), "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo

    def build_json(self, dbo):
        dbo.before_save()
        json_obj = {}
        if dbo.__class__ != dbo.dbo_base_class:
            json_obj["class_name"] = dbo.__module__ + "." + dbo.__class__.__name__
        for field_name in dbo.dbo_fields:
            json_obj[field_name] = getattr(dbo, field_name, None)
        for dbo_col in dbo.dbo_collections:
            coll_list = list()
            for child_dbo in getattr(dbo, dbo_col.field_name):
                if dbo_col.key_type:
                    coll_list.append(child_dbo.dbo_id)
                else:
                    coll_list.append(self.build_json(child_dbo))
            json_obj[dbo_col.field_name] = coll_list
        for dbo_ref in dbo.dbo_refs:
            ref = getattr(dbo, dbo_ref.field_name, None)
            if ref:
                json_obj[dbo_ref.field_name] = ref.dbo_id
        return json_obj

    def cache_object(self, dbo):
        # The original body only read the map entry; caching needs an assignment.
        self.object_map[dbo.dbo_key] = dbo

    def load_cached(self, key):
        return self.object_map.get(key)

    def evict(self, dbo):
        try:
            del self.object_map[dbo.dbo_key]
        except:
            self.dispatcher.dispatch("db_log", "Failed to evict " + dbo.dbo_key + " from db cache")

    def load_by_key(self, key_type, key, base_class=None):
        dbo_key = key_type + ":" + key
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.decoder.decode(json_str)
        dbo = self.load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo

    def load_class(self, json_obj, base_class):
        class_path = json_obj.get("class_name")
        if not class_path:
            return base_class
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        self.class_map[class_path] = clazz
        return clazz

    def load_object(self, dbo_class, key):
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)

    def load_json(self, dbo, json_obj):
        for field_name in dbo.dbo_fields:
            try:
                setattr(dbo, field_name, json_obj[field_name])
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing field " + field_name)
        for dbo_col in dbo.dbo_collections:
            coll = getattr(dbo, dbo_col.field_name, [])
#......... the rest of this example is omitted .........
Example 9: StrictRedis
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
# -*- coding: gbk -*-
'''
Created on 2012-5-26

@author: Sky
'''
from redis.client import StrictRedis

Sr = StrictRedis(host='localhost', port=6379, db=0)
Sr.set('foo', 'bar')
# print(str(Sr.get('foo'), encoding="utf-8") == 'bar')
print(Sr.get('foo'))

Sr.hset("MyHash", "field1", "ль¤У")
print(Sr.hget("MyHash", "field11"))

Sr.rpush("list", "one")
Sr.rpush("list", "two")
print(Sr.llen("list"))
Sr.ltrim("list", 1, 0)
print(Sr.llen("list"))

Sr.hset("MyHash", "Key1", "Value1")
Sr.hset("MyHash", "Key2", "Value2")
for i in Sr.hkeys("MyHash"):
    print(i)

print(Sr.hlen("PlayerHash"))
print(Sr.get("XXX"))
print(type(Sr.smembers("EnemyTemplate:16:LOOT")))
for i in Sr.smembers("EnemyTemplate:16:LOOT"):
    print(i)
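Under Python 3, redis-py returns bytes by default, which is why Sr.get('foo') above prints b'bar'. A short sketch of the same calls with decode_responses=True, which makes the client return str instead:
Sr_text = StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
Sr_text.set('foo', 'bar')
print(Sr_text.get('foo'))   # 'bar'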
Example 10: RedisStore
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
class RedisStore():
    def __init__(self, db_host, db_port, db_num, db_pw):
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.class_map = {}
        self.object_map = {}

    def create_object(self, dbo, update_rev=False):
        self.save_object(dbo, update_rev)
        dbo.on_loaded()

    def save_object(self, dbo, update_rev=False, autosave=False):
        if update_rev:
            rev = getattr(dbo, "dbo_rev", None)
            dbo.dbo_rev = 1 if not rev else rev + 1
        dbo.before_save()
        key = dbo.dbo_key
        self.redis.set(key, self.json_encode(dbo.save_json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatch("db_log{0}".format("_auto" if autosave else ""), "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo

    def load_cached(self, key):
        return self.object_map.get(key)

    def evict_object(self, dbo):
        try:
            del self.object_map[dbo.dbo_key]
        except:
            debug("Failed to evict " + dbo.dbo_key + " from db cache", self)

    @logged
    def load_by_key(self, key_type, key, base_class=None):
        dbo_key = unicode('{0}:{1}'.format(key_type, key))
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.json_decode(json_str)
        dbo = self._load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo

    def object_exists(self, obj_type, obj_id):
        key = unicode('{0}:{1}'.format(obj_type, obj_id))
        return key in self.redis.keys(key)

    def load_object(self, dbo_class, key):
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)

    def update_object(self, dbo, json_obj):
        self.load_json(dbo, json_obj)
        self.save_object(dbo, True)

    def delete_object(self, dbo):
        key = dbo.dbo_key
        self.redis.delete(key)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, key)
        for dbo_col in dbo.dbo_collections:
            if dbo_col.key_type:
                coll = getattr(dbo, dbo_col.field_name, set())
                for child_dbo in coll:
                    self.delete_object(child_dbo)
        debug("object deleted: " + key, self)
        if self.object_map.get(dbo.dbo_key):
            del self.object_map[dbo.dbo_key]
        return True

    def fetch_set_keys(self, set_key):
        return self.redis.smembers(set_key)

    def set_index(self, index_name, key, value):
        return self.redis.hset(index_name, key, value)

    def get_index(self, index_name, key):
        return self.redis.hget(index_name, key)

    def delete_index(self, index_name, key):
        return self.redis.hdel(index_name, key)

    def _load_class(self, json_obj, base_class):
        class_path = json_obj.get("class_name")
        if not class_path:
            return self.cls_registry(base_class)
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        clazz = self.cls_registry(clazz)
        self.class_map[class_path] = clazz
#......... the rest of this example is omitted .........
Example 11: RedisStore
# Required import: from redis.client import StrictRedis [as alias]
# Or: from redis.client.StrictRedis import set [as alias]
class RedisStore():
    def __init__(self, db_host, db_port, db_num, db_pw):
        self.pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw,
                                   decode_responses=True)
        self.redis = StrictRedis(connection_pool=self.pool)
        self.redis.ping()
        self._object_map = WeakValueDictionary()

    def create_object(self, dbo_class, dbo_dict, update_timestamp=True):
        dbo_class = get_dbo_class(getattr(dbo_class, 'dbo_key_type', dbo_class))
        if not dbo_class:
            return
        try:
            dbo_id = dbo_dict['dbo_id']
        except KeyError:
            dbo_id, dbo_dict = dbo_dict, {}
        if dbo_id is None or dbo_id == '':
            warn("create_object called with empty dbo_id")
            return
        dbo_id = str(dbo_id).lower()
        if self.object_exists(dbo_class.dbo_key_type, dbo_id):
            raise ObjectExistsError(dbo_id)
        dbo = dbo_class()
        dbo.dbo_id = dbo_id
        dbo.hydrate(dbo_dict)
        dbo.db_created()
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, dbo.dbo_id)
        self.save_object(dbo, update_timestamp)
        return dbo

    def save_object(self, dbo, update_timestamp=False, autosave=False):
        if update_timestamp:
            dbo.dbo_ts = int(time.time())
        if dbo.dbo_indexes:
            self._update_indexes(dbo)
        self._clear_old_refs(dbo)
        save_root, new_refs = dbo.to_db_value()
        self.redis.set(dbo.dbo_key, json_encode(save_root))
        if new_refs:
            self._set_new_refs(dbo, new_refs)
        debug("db object {} {}saved", dbo.dbo_key, "auto" if autosave else "")
        self._object_map[dbo.dbo_key] = dbo
        return dbo

    def save_raw(self, key, raw):
        self.redis.set(key, json_encode(raw))

    def load_raw(self, key, default=None):
        json = self.redis.get(key)
        if json:
            return json_decode(json)
        return default

    def load_cached(self, dbo_key):
        return self._object_map.get(dbo_key)

    def load_object(self, dbo_key, key_type=None, silent=False):
        if key_type:
            try:
                key_type = key_type.dbo_key_type
            except AttributeError:
                pass
            try:
                dbo_key, dbo_id = ':'.join([key_type, dbo_key]), dbo_key
            except TypeError:
                if not silent:
                    exception("Invalid dbo_key passed to load_object", stack_info=True)
                return
        else:
            key_type, _, dbo_id = dbo_key.partition(':')
        cached_dbo = self._object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            if not silent:
                warn("Failed to find {} in database", dbo_key)
            return
        return self.load_from_json(json_str, key_type, dbo_id)

    def load_from_json(self, json_str, key_type, dbo_id):
        dbo_dict = json_decode(json_str)
        dbo = get_mixed_type(key_type, dbo_dict.get('mixins'))()
        dbo.dbo_id = dbo_id
        self._object_map[dbo.dbo_key] = dbo
        dbo.hydrate(dbo_dict)
        return dbo

    def object_exists(self, obj_type, obj_id):
        return self.redis.exists('{}:{}'.format(obj_type, obj_id))

    def load_object_set(self, dbo_class, set_key=None):
        key_type = dbo_class.dbo_key_type
        if not set_key:
            set_key = dbo_class.dbo_set_key
        results = set()
        keys = deque()
        pipeline = self.redis.pipeline()
        for key in self.fetch_set_keys(set_key):
#......... the rest of this example is omitted .........
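The truncated load_object_set above builds a pipeline and walks the members of a set key. A hedged sketch of that general pattern (the key names are hypothetical), batching one GET per member into a single round trip:
from redis.client import StrictRedis

r = StrictRedis(decode_responses=True)
member_ids = r.smembers('player_set')   # e.g. {'alice', 'bob'}
pipe = r.pipeline()
for member_id in member_ids:
    pipe.get('player:' + member_id)     # queued, not sent yet
values = pipe.execute()                 # results come back in the same order as the GETs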