本文整理汇总了Python中sumo.redis_utils.redis_client函数的典型用法代码示例。如果您正苦于以下问题:Python redis_client函数的具体用法?Python redis_client怎么用?Python redis_client使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了redis_client函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: handle_reindex
def handle_reindex(request):
    """Calculates and kicks off indexing tasks.

    Reads the POST data to decide which models to reindex (or all of
    them when the index is being deleted and recreated), tracks the
    number of outstanding chunks in redis, queues one celery task per
    chunk, and redirects back to the admin page.

    May raise ReindexError if a previous reindex still has outstanding
    chunks (only detectable when redis is running).
    """
    write_index = es_utils.WRITE_INDEX

    # This is truthy if the user wants us to delete and recreate
    # the index first.
    delete_index_first = bool(request.POST.get("delete_index"))

    if delete_index_first:
        # Coming from the delete form, so we reindex all models.
        models_to_index = None
    else:
        # Coming from the reindex form, so we reindex whatever we're
        # told.
        models_to_index = [name.replace("check_", "") for name in request.POST.keys() if name.startswith("check_")]

    # TODO: If this gets fux0rd, then it's possible this could be
    # non-zero and we really want to just ignore it. Need the ability
    # to ignore it.
    try:
        client = redis_client("default")
        val = client.get(OUTSTANDING_INDEX_CHUNKS)
        if val is not None and int(val) > 0:
            raise ReindexError("There are %s outstanding chunks." % val)

        # We don't know how many chunks we're building, but we do want
        # to make sure another reindex request doesn't slide in here
        # and kick off a bunch of chunks.
        #
        # There is a race condition here.
        client.set(OUTSTANDING_INDEX_CHUNKS, 1)
    except RedisError:
        # Best effort: without redis we can't guard against concurrent
        # reindexes, but we still proceed with the reindex itself.
        log.warning("Redis not running. Can not check if there are " "outstanding tasks.")

    batch_id = create_batch_id()

    # Break up all the things we want to index into chunks. This
    # chunkifies by class then by chunk size.
    chunks = []
    for cls, indexable in get_indexable(search_models=models_to_index):
        chunks.extend((cls, chunk) for chunk in chunked(indexable, CHUNK_SIZE))

    if delete_index_first:
        # The previous lines do a lot of work and take some time to
        # execute. So we wait until here to wipe and rebuild the
        # index. That reduces the time that there is no index by a little.
        recreate_index()

    chunks_count = len(chunks)

    # Now that we know the real chunk count, record it so progress can
    # be tracked as tasks complete.
    try:
        client = redis_client("default")
        client.set(OUTSTANDING_INDEX_CHUNKS, chunks_count)
    except RedisError:
        log.warning("Redis not running. Can't denote outstanding tasks.")

    for chunk in chunks:
        index_chunk_task.delay(write_index, batch_id, chunk)

    return HttpResponseRedirect(request.path)
示例2: setUp
def setUp(self, switch_is_active):
    # `switch_is_active` is injected by a mock.patch decorator on the
    # test class; force the karma waffle switch "on" for these tests.
    switch_is_active.return_value = True
    super(KarmaManagerTests, self).setUp()
    try:
        self.mgr = KarmaManager()
        # Start every test from an empty karma redis database.
        redis_client('karma').flushdb()
    except RedisError:
        # No redis backend available -- skip instead of failing.
        raise SkipTest

    self.user1 = user(save=True)
    self.user2 = user(save=True)
    self.user3 = user(save=True)

    today = date.today()

    # Actions are spread across different days so per-period totals
    # (recent vs. older) differ between the three users.
    # user1 actions (3 + 3 + 7):
    TestAction1(user=self.user1, day=today).save()
    TestAction1(user=self.user1, day=today).save()
    TestAction2(user=self.user1, day=today).save()

    # user2 actions (3 + 7 + 7):
    TestAction1(user=self.user2, day=today - timedelta(days=8)).save()
    TestAction2(user=self.user2, day=today - timedelta(days=32)).save()
    TestAction2(user=self.user2, day=today - timedelta(days=360)).save()

    # user3 actions (3 + 3 + 3 + 7):
    TestAction1(user=self.user3, day=today - timedelta(days=10)).save()
    TestAction1(user=self.user3, day=today - timedelta(days=40)).save()
    TestAction1(user=self.user3, day=today - timedelta(days=190)).save()
    TestAction2(user=self.user3, day=today - timedelta(days=3)).save()
示例3: setUp
def setUp(self, switch_is_active):
    # `switch_is_active` is injected by a mock.patch decorator on the
    # test class; force the karma waffle switch "on" for these tests.
    switch_is_active.return_value = True
    super(KarmaAPITests, self).setUp()
    try:
        self.mgr = KarmaManager()
        # Start every test from an empty karma redis database.
        redis_client('karma').flushdb()
    except RedisError:
        # No redis backend available -- skip instead of failing.
        raise SkipTest

    self.user1 = user(save=True)
    self.user2 = user(save=True)
    self.user3 = user(save=True)

    # Give each user a distinct mix of actions, then refresh the
    # cached leaderboard the API reads from.
    TestAction1(user=self.user1).save()
    TestAction2(user=self.user2).save()
    TestAction2(user=self.user2).save()
    TestAction1(user=self.user3).save()
    TestAction1(user=self.user3).save()
    TestAction1(user=self.user3).save()
    self.mgr.update_top()

    # Log in and grant the dashboard permission the API endpoints require.
    self.client.login(username=self.user1.username, password='testpass')
    add_permission(self.user1, models.Title, 'view_dashboard')
示例4: setUp
def setUp(self):
    super(KarmaActionTests, self).setUp()
    self.user = user(save=True)
    try:
        self.mgr = KarmaManager()
        # Start every test from an empty karma redis database.
        redis_client('karma').flushdb()
    except RedisError:
        # No redis backend available -- skip instead of failing.
        raise SkipTest
示例5: find_related_documents
def find_related_documents(doc):
    """
    Returns a QuerySet of related_documents or of the
    parent's related_documents in the case of translations.

    For non-default-locale documents the computed related-document ids
    are cached in redis under 'translated_doc_id:<id>' for two hours,
    with a literal '0' entry acting as a "known empty" sentinel.
    """
    if doc.locale == settings.WIKI_DEFAULT_LANGUAGE:
        return doc.related_documents.order_by('-related_to__in_common')[0:5]

    # Not English, so may need related docs which are
    # stored on the English version.
    try:
        redis = redis_client('default')
    except RedisError as e:
        # Problem with Redis. Log and return the related docs.
        # NOTE(review): the statsd key 'redis.errror' looks misspelled,
        # but other call sites may share the same key -- confirm before
        # renaming it.
        statsd.incr('redis.errror')
        log.error('Redis error: %s' % e)
        return related_translated_documents(doc)

    doc_key = 'translated_doc_id:%s' % doc.id
    related_ids = redis.lrange(doc_key, 0, -1)

    if related_ids == ['0']:
        # Sentinel hit: we previously determined there are no related
        # documents, so skip the expensive recomputation.
        return Document.objects.get_empty_query_set()

    if related_ids:
        return Document.objects.filter(id__in=related_ids)

    # Cache miss: compute the related docs and populate the cache.
    related = related_translated_documents(doc)
    if not related:
        # Add '0' to prevent recalculation on a known empty set.
        redis.lpush(doc_key, 0)
    else:
        for r in related:
            redis.lpush(doc_key, r.id)
    # Cache expires in 2 hours.
    redis.expire(doc_key, 60 * 60 * 2)

    return related
示例6: __init__
def __init__(self, redis=None):
    """Create a manager bound to a karma redis connection.

    redis -- an existing connection to reuse; when falsy, a new
    connection to the 'karma' backend is attempted. On RedisError the
    failure is logged and self.redis is left as the falsy value given.
    """
    connection = redis
    if not connection:
        try:
            connection = redis_client(name='karma')
        except RedisError as e:
            log.error('Redis error: %s' % e)
    self.redis = connection
示例7: setUp
def setUp(self):
    super(TestDocumentLocking, self).setUp()
    try:
        self.redis = redis_client('default')
        # Start every test from an empty redis database.
        self.redis.flushdb()
    except RedisError:
        # No redis backend available -- the locking tests can't run.
        raise SkipTest
示例8: init_karma
def init_karma():
    """Flushes the karma redis backend and populates with fresh data.

    Goes through all questions/answers/votes and save karma actions for them.
    No-op unless the 'karma' waffle switch is active.
    """
    if not waffle.switch_is_active('karma'):
        return

    # Wipe the existing karma data before rebuilding it.
    redis_client('karma').flushdb()

    # Fan the work out to celery in manageable chunks of primary keys.
    question_ids = Question.objects.all().values_list('pk', flat=True)
    for id_chunk in chunked(question_ids, 200):
        _process_question_chunk.apply_async(args=[id_chunk])

    vote_ids = AnswerVote.objects.all().values_list('pk', flat=True)
    for id_chunk in chunked(vote_ids, 1000):
        _process_answer_vote_chunk.apply_async(args=[id_chunk])
示例9: setUp
def setUp(self):
    super(TopUnhelpfulArticlesCronTests, self).setUp()
    # Redis list key the cron job writes unhelpful-article data to.
    self.REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
    try:
        self.redis = redis_client('helpfulvotes')
        # Start every test from an empty redis database.
        self.redis.flushdb()
    except RedisError:
        # No redis backend available -- skip instead of failing.
        raise SkipTest
示例10: test_creator_nums_redis
def test_creator_nums_redis(self, switch_is_active):
    """Test creator_num_* pulled from karma data."""
    try:
        KarmaManager()
        # Start from an empty karma redis database.
        redis_client('karma').flushdb()
    except RedisError:
        # No redis backend available -- skip instead of failing.
        raise SkipTest
    # `switch_is_active` is injected by a mock.patch decorator; turn
    # the karma switch on so the properties read from redis.
    switch_is_active.return_value = True

    answer = Answer.objects.all()[0]

    # Two answers and one solution for the same creator.
    AnswerAction(answer.creator).save()
    AnswerAction(answer.creator).save()
    SolutionAction(answer.creator).save()

    eq_(answer.creator_num_solutions, 1)
    eq_(answer.creator_num_answers, 2)
示例11: _process_answer_vote_chunk
def _process_answer_vote_chunk(data, **kwargs):
"""Save karma data for a chunk of answer votes."""
redis = redis_client(name="karma")
v_qs = AnswerVote.objects.select_related("answer")
for vote in v_qs.filter(pk__in=data):
if vote.helpful:
action_class = AnswerMarkedHelpfulAction
else:
action_class = AnswerMarkedNotHelpfulAction
action_class(vote.answer.creator_id, vote.created).save(async=False, redis=redis)
示例12: get_helpful_graph_async
def get_helpful_graph_async(request):
    """Return Highcharts-ready JSON for the unhelpful-articles graph.

    Reads '::'-delimited rows from the HELPFULVOTES_UNHELPFUL_KEY redis
    list (as written by the unhelpful-articles cron job) and formats
    each into a chart point. On RedisError an empty data set is returned.
    """
    doc_data = []
    REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
    try:
        redis = redis_client('helpfulvotes')
        length = redis.llen(REDIS_KEY)
        output = redis.lrange(REDIS_KEY, 0, length)
    except RedisError as e:
        log.error('Redis error: %s' % e)
        output = []

    def _format_r(strresult):
        # Each row is: id::total::currperc::diffperc::colorsize::slug::title
        # (matching the order the cron job rpushes them in).
        result = strresult.split('::')
        dic = dict(title=result[6].decode('utf-8'),
                   id=result[0],
                   url=reverse('wiki.document_revisions',
                               args=[result[5].decode('utf-8')],
                               locale=settings.WIKI_DEFAULT_LANGUAGE),
                   total=int(float(result[1])),
                   currperc=float(result[2]),
                   diffperc=float(result[3]),
                   colorsize=float(result[4])
                   )

        # Blue #418CC8 = HSB 207/67/78
        # Go from blue to light grey. Grey => smaller number.
        # NOTE(review): the %x conversions get float operands here,
        # which relies on Python 2's implicit coercion -- confirm.
        r, g, b = colorsys.hsv_to_rgb(0.575, 1 - dic['colorsize'], .75)
        color_shade = '#%02x%02x%02x' % (255 * r, 255 * g, 255 * b)

        # Bubble radius grows with the cube root of total votes.
        size = math.pow(dic['total'], 0.33) * 1.5

        return {'x': 100 * dic['currperc'],
                'y': 100 * dic['diffperc'],
                'total': dic['total'],
                'title': dic['title'],
                'url': dic['url'],
                'currperc': '%.2f' % (100 * dic['currperc']),
                'diffperc': '%+.2f' % (100 * dic['diffperc']),
                'colorsize': dic['colorsize'],
                'marker': {'radius': size,
                           'fillColor': color_shade}}

    doc_data = [_format_r(r) for r in output]

    # Format data for Highcharts
    send = {'data': [{
        'name': _('Document'),
        'id': 'doc_data',
        'data': doc_data
    }]}

    return HttpResponse(json.dumps(send),
                        mimetype='application/json')
示例13: setUp
def setUp(self):
    super(HelpfulVotesGraphTests, self).setUp()
    self.user = user(save=True)
    self.client.login(username=self.user.username, password='testpass')
    self.group = group(name='Contributors', save=True)
    # Without this, there were unrelated failures with l10n dashboard
    self.REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
    try:
        self.redis = redis_client('helpfulvotes')
        # Start every test from an empty redis database.
        self.redis.flushdb()
    except RedisError:
        # No redis backend available -- skip instead of failing.
        raise SkipTest
示例14: rows
def rows(self, max=None):
    """Return formatted rows from the unhelpful-articles redis list.

    max -- optional cap on how many entries to fetch; when falsy the
    whole list is returned. On RedisError, logs and returns [].
    """
    key = settings.HELPFULVOTES_UNHELPFUL_KEY
    try:
        client = redis_client('helpfulvotes')
        length = client.llen(key)
        raw_rows = client.lrange(key, 0, max or length)
    except RedisError as e:
        log.error('Redis error: %s' % e)
        raw_rows = []
    return [self._format_row(raw) for raw in raw_rows]
示例15: cache_most_unhelpful_kb_articles
def cache_most_unhelpful_kb_articles():
    """Calculate and save the most unhelpful KB articles in the past month.

    Ranks articles by a Bayesian average of their unhelpful-vote
    percentage, then replaces the HELPFULVOTES_UNHELPFUL_KEY redis list
    with one '::'-delimited row per article.
    """
    REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY

    old_formatted = _get_old_unhelpful()
    final = _get_current_unhelpful(old_formatted)

    if final == {}:
        return

    def _mean(vals):
        """Argument: List of floats"""
        if len(vals) == 0:
            return None
        return sum(vals) / len(vals)

    def _bayes_avg(C, m, R, v):
        # Bayesian Average
        # C = mean vote, v = number of votes,
        # R = mean rating, m = minimum votes to list in topranked
        return (C * m + R * v) / (m + v)

    mean_perc = _mean([float(final[key]['currperc']) for key in final.keys()])
    mean_total = _mean([float(final[key]['total']) for key in final.keys()])

    # TODO: Make this into namedtuples
    # Tuple layout: (doc id, total, currperc, diffperc, bayes avg).
    sorted_final = [(key,
                     final[key]['total'],
                     final[key]['currperc'],
                     final[key]['diffperc'],
                     _bayes_avg(mean_perc, mean_total,
                                final[key]['currperc'],
                                final[key]['total']))
                    for key in final.keys()]
    sorted_final.sort(key=lambda entry: entry[4])  # Sort by Bayesian Avg

    # Replace the cached list atomically enough for this use: delete
    # then rpush the freshly ranked rows.
    redis = redis_client('helpfulvotes')
    redis.delete(REDIS_KEY)
    max_total = max([b[1] for b in sorted_final])
    for entry in sorted_final:
        doc = Document.objects.get(pk=entry[0])
        # NOTE(review): entry[1] / max_total looks like it assumes float
        # totals; if they are ints this is integer division on Python 2
        # -- confirm the type of final[key]['total'].
        redis.rpush(REDIS_KEY, (u'%s::%s::%s::%s::%s::%s::%s' %
                                (entry[0],  # Document ID
                                 entry[1],  # Total Votes
                                 entry[2],  # Current Percentage
                                 entry[3],  # Difference in Percentage
                                 1 - (entry[1] / max_total),  # Graph Color
                                 doc.slug,  # Document slug
                                 doc.title)))  # Document title