本文整理汇总了Python中multidb.get_slave函数的典型用法代码示例。如果您正苦于以下问题:Python get_slave函数的具体用法?Python get_slave怎么用?Python get_slave使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了get_slave函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_get_slave
def test_get_slave(self, mock_get_replica):
    """get_slave() should delegate to get_replica and emit a
    DeprecationWarning exactly once."""
    with warnings.catch_warnings(record=True) as caught:
        # Record every warning so the deprecation notice is captured.
        warnings.simplefilter('always')
        multidb.get_slave()
    assert mock_get_replica.called
    assert len(caught) == 1
    assert issubclass(caught[-1].category, DeprecationWarning)
示例2: update_user_ratings
def update_user_ratings():
    """Update add-on author's ratings."""
    cursor = connections[multidb.get_slave()].cursor()
    # The status list is interpolated directly: the cursor reports data
    # truncation when these values are passed as query parameters.  The
    # query itself is quick (<1 sec for ~6100 rows).
    sql = """ SELECT
                addons_users.user_id as user_id,
                AVG(rating) as avg_rating
              FROM reviews
                INNER JOIN versions
                INNER JOIN addons_users
                INNER JOIN addons
              ON reviews.version_id = versions.id
                AND addons.id = versions.addon_id
                AND addons_users.addon_id = addons.id
              WHERE reviews.reply_to IS NULL
                AND reviews.rating > 0
                AND addons.status IN (%s)
              GROUP BY addons_users.user_id
              """ % (",".join(str(s) for s in VALID_STATUSES))
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    # Fan the result set out to celery in batches of 1000 rows.
    subtasks = [update_user_ratings_task.subtask(args=[batch])
                for batch in chunked(rows, 1000)]
    TaskSet(subtasks).apply_async()
示例3: update_perf
def update_perf():
    """Queue tasks for add-ons whose 'ts' perf average exceeds the baseline."""
    cursor = connections[multidb.get_slave()].cursor()
    # The baseline measurement is the row whose addon_id is NULL.
    cursor.execute(
        "SELECT AVG(average) FROM perf_results WHERE addon_id IS NULL")
    baseline = cursor.fetchone()[0]
    # perf_results contains many rows per (addon, created), so grab each
    # add-on's most recent batch via MAX(created) and average over it.
    # App, os, and test are deliberately ignored by this scheme.
    cursor.execute("""
        SELECT J.addon_id, AVG(average) av FROM perf_results P INNER JOIN
            (SELECT addon_id, MAX(created) c FROM perf_results
             GROUP BY addon_id) J
        ON ((P.addon_id=J.addon_id) AND P.created=J.c)
        WHERE test='ts'
        GROUP BY P.addon_id
        HAVING av > %s""", (baseline,))
    # A bunch of (addon, perf_average) pairs.
    slow_addons = cursor.fetchall()
    with establish_connection() as conn:
        for batch in chunked(slow_addons, 25):
            tasks.update_perf.apply_async(args=[baseline, batch],
                                          connection=conn)
    cursor.close()
示例4: recs
def recs():
    """Compute pairwise add-on similarity recommendations from synced
    collections and persist them in batches."""
    t0 = time.time()
    cursor = connections[multidb.get_slave()].cursor()
    cursor.execute("""
        SELECT addon_id, collection_id
        FROM synced_addons_collections ac
        INNER JOIN addons ON
            (ac.addon_id=addons.id AND inactive=0 AND status=4
             AND addontype_id <> 9 AND current_version IS NOT NULL)
        ORDER BY addon_id, collection_id
        """)
    rows = cursor.fetchall()
    recs_log.info('%.2fs (query) : %s rows' % (time.time() - t0, len(rows)))
    addons = _group_addons(rows)
    recs_log.info('%.2fs (groupby) : %s addons' %
                  ((time.time() - t0), len(addons)))
    if not len(addons):
        return
    # Log our resident set size so memory growth is visible in the logs.
    try:
        ps = subprocess.Popen('%s -p%s -o rss' % (settings.PS_BIN, os.getpid()),
                              shell=True, stdout=subprocess.PIPE)
        recs_log.info('%s bytes' % ' '.join(ps.communicate()[0].split()))
    except Exception:
        log.error('Could not call ps', exc_info=True)
    sim = recommend.similarity  # Local binding is faster in the O(n^2) loop.
    sims, start, timers = {}, [time.time()], {'calc': [], 'sql': []}

    def flush():
        # Persist the accumulated similarities and record calc/sql timings.
        # `start` is a one-element list so the closure can rebind it.
        calc = time.time()
        timers['calc'].append(calc - start[0])
        try:
            _dump_recs(sims)
        except Exception:
            recs_log.error('Error dumping recommendations. SQL issue.',
                           exc_info=True)
        sims.clear()
        timers['sql'].append(time.time() - calc)
        start[0] = time.time()

    for idx, (addon, collections) in enumerate(addons.iteritems(), 1):
        scored = [(other, sim(collections, cs))
                  for other, cs in addons.iteritems()]
        # Sort by similarity and keep the top N (excluding the addon itself).
        ranked = sorted(scored, key=operator.itemgetter(1), reverse=True)
        sims[addon] = [(k, v) for k, v in ranked[:11] if k != addon]
        if idx % 50 == 0:
            flush()
    else:
        # Loop finished normally: write out whatever is still buffered.
        flush()
    avg_len = sum(len(v) for v in addons.itervalues()) / float(len(addons))
    recs_log.info('%s addons: average length: %.2f' % (len(addons), avg_len))
    recs_log.info('Processing time: %.2fs' % sum(timers['calc']))
    recs_log.info('SQL time: %.2fs' % sum(timers['sql']))
示例5: test_pinned_reads
def test_pinned_reads(self):
    """Test PinningMasterSlaveRouter.db_for_read() when pinned and when
    not."""
    pinning_router = PinningMasterSlaveRouter()
    # Unpinned: reads go to the slave.
    eq_(pinning_router.db_for_read(TestModel), get_slave())
    # Pinned: reads are routed to the master.
    pin_this_thread()
    eq_(pinning_router.db_for_read(TestModel), MASTER_DATABASE)
示例6: test_pinned_reads
def test_pinned_reads(self):
    """Test PinningMasterSlaveRouter.db_for_read() when pinned and when
    not."""
    pinning_router = PinningMasterSlaveRouter()
    # Unpinned: reads go to the slave.
    eq_(pinning_router.db_for_read(None), get_slave())
    # Pinned: reads fall back to the default alias.
    pin_this_thread()
    eq_(pinning_router.db_for_read(None), DEFAULT_DB_ALIAS)
示例7: test_db_write_decorator
def test_db_write_decorator(self):
    """Views wrapped in @db_write should see reads pinned to the default
    alias; plain views keep reading from the slave."""
    router = PinningMasterSlaveRouter()

    def read_view(request):
        eq_(router.db_for_read(None), get_slave())
        return HttpResponse()

    @db_write
    def write_view(request):
        eq_(router.db_for_read(None), DEFAULT_DB_ALIAS)
        return HttpResponse()

    eq_(router.db_for_read(None), get_slave())
    write_view(HttpRequest())
    read_view(HttpRequest())
示例8: test_db_write_decorator
def test_db_write_decorator(self):
    """Views wrapped in @db_write should see reads pinned to the master;
    plain views keep reading from the slave."""
    router = PinningMasterSlaveRouter()

    def read_view(request):
        eq_(router.db_for_read(TestModel), get_slave())
        return HttpResponse()

    @db_write
    def write_view(request):
        eq_(router.db_for_read(TestModel), MASTER_DATABASE)
        return HttpResponse()

    eq_(router.db_for_read(TestModel), get_slave())
    write_view(HttpRequest())
    read_view(HttpRequest())
示例9: update_addon_average_daily_users
def update_addon_average_daily_users():
    """Update add-ons ADU totals."""
    raise_if_reindex_in_progress("amo")
    cursor = connections[multidb.get_slave()].cursor()
    # Average the last 7 days of update pings per add-on.
    sql = """SELECT addon_id, AVG(`count`)
             FROM update_counts
             WHERE `date` > DATE_SUB(CURDATE(), INTERVAL 7 DAY)
             GROUP BY addon_id
             ORDER BY addon_id"""
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    # Fan the totals out to celery in batches of 250 rows.
    subtasks = [_update_addon_average_daily_users.subtask(args=[batch])
                for batch in chunked(rows, 250)]
    TaskSet(subtasks).apply_async()
示例10: update_addon_download_totals
def update_addon_download_totals():
    """Update add-on total and average downloads."""
    cursor = connections[multidb.get_slave()].cursor()
    # We need to use SQL for this until
    # http://code.djangoproject.com/ticket/11003 is resolved
    sql = """SELECT
                addon_id, AVG(count), SUM(count)
             FROM download_counts
             USE KEY (`addon_and_count`)
             GROUP BY addon_id
             ORDER BY addon_id"""
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    # Fan the totals out to celery in batches of 250 rows.
    subtasks = [_update_addon_download_totals.subtask(args=[batch])
                for batch in chunked(rows, 250)]
    TaskSet(subtasks).apply_async()
示例11: update_addon_average_daily_users
def update_addon_average_daily_users():
    """Update add-ons ADU totals."""
    cursor = connections[multidb.get_slave()].cursor()
    # We need to use SQL for this until
    # http://code.djangoproject.com/ticket/11003 is resolved
    sql = """SELECT
                addon_id, AVG(`count`)
             FROM update_counts
             USE KEY (`addon_and_count`)
             GROUP BY addon_id
             ORDER BY addon_id"""
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    # Queue a task per 1000-row batch over a single broker connection.
    with establish_connection() as conn:
        for batch in chunked(rows, 1000):
            _update_addon_average_daily_users.apply_async(args=[batch],
                                                          connection=conn)
示例12: update_addon_average_daily_users
def update_addon_average_daily_users():
    """Update add-ons ADU totals."""
    # This cron is skippable under load shedding.
    if settings.IGNORE_NON_CRITICAL_CRONS:
        return
    cursor = connections[multidb.get_slave()].cursor()
    # Average the last 7 days of update pings per add-on.
    sql = """SELECT
                addon_id, AVG(`count`)
             FROM update_counts
             WHERE `date` > DATE_SUB(CURDATE(), INTERVAL 7 DAY)
             GROUP BY addon_id
             ORDER BY addon_id"""
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    # Fan the totals out to celery in batches of 1000 rows.
    subtasks = [_update_addon_average_daily_users.subtask(args=[batch])
                for batch in chunked(rows, 1000)]
    TaskSet(subtasks).apply_async()
示例13: update_addon_average_daily_users
def update_addon_average_daily_users():
    """Update add-ons ADU totals."""
    # Only run where local statistics processing is switched on.
    if not waffle.switch_is_active('local-statistics-processing'):
        return False
    raise_if_reindex_in_progress('amo')
    cursor = connections[multidb.get_slave()].cursor()
    # Average the last 13 days of update pings per add-on.
    sql = """SELECT addon_id, AVG(`count`)
             FROM update_counts
             WHERE `date` > DATE_SUB(CURDATE(), INTERVAL 13 DAY)
             GROUP BY addon_id
             ORDER BY addon_id"""
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    # Fan the totals out to celery in batches of 250 rows.
    subtasks = [_update_addon_average_daily_users.subtask(args=[batch])
                for batch in chunked(rows, 250)]
    group(subtasks).apply_async()
示例14: update_addon_download_totals
def update_addon_download_totals():
    """Update add-on total and average downloads."""
    cursor = connections[multidb.get_slave()].cursor()
    # We need to use SQL for this until
    # http://code.djangoproject.com/ticket/11003 is resolved
    sql = """SELECT addon_id, AVG(count), SUM(count)
             FROM download_counts
             USE KEY (`addon_and_count`)
             JOIN addons ON download_counts.addon_id=addons.id
             WHERE addons.addontype_id != %s AND
                   addons.status != %s
             GROUP BY addon_id
             ORDER BY addon_id"""
    # Skip webapps and deleted add-ons.
    cursor.execute(sql, [amo.ADDON_WEBAPP, amo.STATUS_DELETED])
    rows = cursor.fetchall()
    cursor.close()
    # Fan the totals out to celery in batches of 250 rows.
    subtasks = [_update_addon_download_totals.subtask(args=[batch])
                for batch in chunked(rows, 250)]
    TaskSet(subtasks).apply_async()
示例15: update_addon_download_totals
def update_addon_download_totals():
    """Update add-on total and average downloads."""
    # Only run where local statistics processing is switched on.
    if not waffle.switch_is_active('local-statistics-processing'):
        return False
    cursor = connections[multidb.get_slave()].cursor()
    # We need to use SQL for this until
    # http://code.djangoproject.com/ticket/11003 is resolved
    sql = """SELECT addon_id, AVG(count), SUM(count)
             FROM download_counts
             USE KEY (`addon_and_count`)
             JOIN addons ON download_counts.addon_id=addons.id
             WHERE addons.status != %s
             GROUP BY addon_id
             ORDER BY addon_id"""
    # Deleted add-ons are excluded from the totals.
    cursor.execute(sql, [amo.STATUS_DELETED])
    rows = cursor.fetchall()
    cursor.close()
    # Fan the totals out to celery in batches of 250 rows.
    subtasks = [_update_addon_download_totals.subtask(args=[batch])
                for batch in chunked(rows, 250)]
    group(subtasks).apply_async()