This page collects typical usage examples of the Python statsd.statsd.timing function. If you have been wondering what the timing function does, how to call it, or where it is used in practice, the curated examples below should help.
15 code examples of the timing function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
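
Before the examples, here is a minimal sketch of the call they all share. It assumes the module-level client imported as from statsd import statsd (the convention used by the snippets on this page); timing() records a duration, in milliseconds, under the given stat name. The do_work function is a hypothetical stand-in for the operation being measured:

import time
from statsd import statsd  # module-level client, as in the examples below

tstart = time.time()
do_work()  # hypothetical: the operation being timed
statsd.timing('myapp.do_work', (time.time() - tstart) * 1000)  # milliseconds
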
Example 1: xpi_build_from_model
def xpi_build_from_model(rev_pk, mod_codes={}, att_codes={}, hashtag=None, tqueued=None):
    """ Get object and build xpi
    """
    if not hashtag:
        log.critical("No hashtag provided")
        return
    tstart = time.time()
    if tqueued:
        tinqueue = (tstart - tqueued) * 1000
        statsd.timing('xpi.build.queued', tinqueue)
        log.info('[xpi:%s] Addon job picked from queue (%dms)' % (hashtag, tinqueue))
    revision = PackageRevision.objects.get(pk=rev_pk)
    log.debug('[xpi:%s] Building %s' % (hashtag, revision))
    # prepare changed modules and attachments
    modules = []
    attachments = []
    for mod in revision.modules.all():
        if str(mod.pk) in mod_codes:
            mod.code = mod_codes[str(mod.pk)]
            modules.append(mod)
    for att in revision.attachments.all():
        if str(att.pk) in att_codes:
            att.code = att_codes[str(att.pk)]
            attachments.append(att)
    revision.build_xpi(
        modules=modules,
        attachments=attachments,
        hashtag=hashtag,
        tstart=tstart)
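
The queue-latency pattern above recurs in Examples 2, 3 and 10: the caller stamps time.time() when it enqueues the job, and the worker subtracts that stamp on pickup. Stripped to its essentials, with hypothetical task and metric names, it looks like this:

# producer: stamp the enqueue time and pass it along with the job
some_task.delay(rev_pk, hashtag=hashtag, tqueued=time.time())  # hypothetical task

# consumer: report how long the job sat in the queue, in milliseconds
def some_task(rev_pk, hashtag=None, tqueued=None):
    tstart = time.time()
    if tqueued:
        statsd.timing('myapp.queued', (tstart - tqueued) * 1000)
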
Example 2: get_download
def get_download(r, hashtag, filename):
    """
    Download XPI (it has to be ready)
    """
    if not validator.is_valid('alphanum', hashtag):
        log.warning('[security] Wrong hashtag provided')
        return HttpResponseForbidden("{'error': 'Wrong hashtag'}")
    path = os.path.join(settings.XPI_TARGETDIR, '%s.xpi' % hashtag)
    log.info('[xpi:%s] Downloading Addon from %s' % (filename, path))
    tend = time.time()
    tkey = xpi_utils.get_queued_cache_key(hashtag, r)
    tqueued = cache.get(tkey)
    if tqueued:
        ttotal = (tend - tqueued) * 1000
        statsd.timing('xpi.build.total', ttotal)
        total = '%dms' % ttotal
    else:
        total = 'n/a'
    log.info('[xpi:%s] Downloading Add-on (%s)' % (hashtag, total))
    response = serve(r, path, '/', show_indexes=False)
    response['Content-Disposition'] = ('attachment; '
                                       'filename="%s.xpi"' % filename)
    return response
Example 3: get_zip
def get_zip(request, hashtag, filename):
    """
    Download zip (it has to be ready)
    """
    if not validator.is_valid("alphanum", hashtag):
        log.warning("[security] Wrong hashtag provided")
        return HttpResponseForbidden("{'error': 'Wrong hashtag'}")
    path = os.path.join(settings.XPI_TARGETDIR, "%s.zip" % hashtag)
    log.info("[zip:%s] Downloading Addon from %s" % (filename, path))
    tend = time.time()
    tkey = _get_zip_cache_key(request, hashtag)
    tqueued = cache.get(tkey)
    if tqueued:
        ttotal = (tend - tqueued) * 1000
        statsd.timing("zip.total", ttotal)
        total = "%dms" % ttotal
    else:
        total = "n/a"
    log.info("[zip:%s] Downloading Add-on (%s)" % (hashtag, total))
    response = serve(request, path, "/", show_indexes=False)
    response["Content-Disposition"] = ("attachment; "
                                       'filename="%s.zip"' % filename)
    return response
Example 4: _rebuild_kb_chunk
def _rebuild_kb_chunk(data):
    """Re-render a chunk of documents.

    Note: Don't use host components when making redirects to wiki pages; those
    redirects won't be auto-pruned when they're 404s.
    """
    log.info('Rebuilding %s documents.' % len(data))

    pin_this_thread()  # Stick to master.

    messages = []
    start = time.time()
    for pk in data:
        message = None
        try:
            document = Document.objects.get(pk=pk)

            # If we know a redirect link to be broken (i.e. if it looks like a
            # link to a document but the document isn't there), log an error:
            url = document.redirect_url()
            if (url and points_to_document_view(url) and
                    not document.redirect_document()):
                log.warn('Invalid redirect document: %d' % pk)

            html = document.parse_and_calculate_links()
            if document.html != html:
                # We are calling update() here so we only update the html
                # column instead of all of them. This bypasses post_save
                # signal handlers like the one that triggers reindexing.
                # See bug 797038 and bug 797352.
                Document.objects.filter(pk=pk).update(html=html)
                statsd.incr('wiki.rebuild_chunk.change')
            else:
                statsd.incr('wiki.rebuild_chunk.nochange')
        except Document.DoesNotExist:
            message = 'Missing document: %d' % pk
        except Revision.DoesNotExist:
            message = 'Missing revision for document: %d' % pk
        except ValidationError as e:
            message = 'ValidationError for %d: %s' % (pk, e.messages[0])
        except SlugCollision:
            message = 'SlugCollision: %d' % pk
        except TitleCollision:
            message = 'TitleCollision: %d' % pk

        if message:
            log.debug(message)
            messages.append(message)
    d = time.time() - start
    statsd.timing('wiki.rebuild_chunk', int(round(d * 1000)))

    if messages:
        subject = ('[%s] Exceptions raised in _rebuild_kb_chunk()' %
                   settings.PLATFORM_NAME)
        mail_admins(subject=subject, message='\n'.join(messages))
    if not transaction.get_connection().in_atomic_block:
        transaction.commit()

    unpin_this_thread()  # Not all tasks need to use the master.
Example 5: handle_submission
def handle_submission(self, frame, on_complete):
    """ Handles a submission popped off the dead letter queue.

    Pushes a failure response to XQueue to notify students of the issue.
    """
    submission = frame["submission"]
    submission_id = submission['xqueue_header']['submission_id']
    log.info("Pulled submission #%d off of dead letter queue", submission_id)
    statsd.incr('bux_grader_framework.submissions.dead_lettered')

    # Note time spent in grader
    elapsed_time = int((time.time() - frame["received_time"]) * 1000.0)
    statsd.timing('bux_grader_framework.total_time_spent', elapsed_time)
    log.info("Submission #%d evaluated in %0.3fms",
             submission_id, elapsed_time)

    # Check evaluator for extra context to add to fail message.
    hints = ''
    if 'fail_hints' in dir(self.evaluator):
        hints = self.evaluator.fail_hints()

    # Post response to XQueue.
    message = FAIL_RESPONSE.substitute(reason=hints)
    result, success = safe_multi_call(self.xqueue.push_failure,
                                      args=(message, submission),
                                      max_attempts=5,
                                      delay=5)

    # Notifies queue to ack / nack message.
    on_complete(success)
Example 6: submit
def submit(self, tags):
    for k in list(self.data.keys()):
        statsd.timing(self.prefix + "." + k, self.data.pop(k),
                      tags=tags, sample_rate=sample_rate)

    if settings.DEBUG:
        assert not self.starts, ('Timer(s) %r were started but never '
                                 'stopped' % self.starts)
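
Unlike the other examples, this one passes tags= and sample_rate=, which the plain statsd client does not accept; the call shape matches the DataDog-style (dogstatsd) client instead. A minimal sketch under that assumption, with a hypothetical metric name:

from datadog import statsd  # DogStatsd instance; supports tags and sampling

# value is in milliseconds; tags and sample_rate are DogStatsd extensions
statsd.timing('myapp.render', 12.5, tags=['env:prod'], sample_rate=0.5)
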
Example 7: _do_search
def _do_search(self):
    if not self._results_cache:
        qs = self._build_query()
        es = elasticutils.get_es()
        hits = es.search(qs, settings.ES_INDEX, self.type._meta.app_label)
        self._results_cache = results = SearchResults(self.type, hits)
        statsd.timing('search', results.took)
        log.debug('[%s] %s' % (results.took, qs))
    return self._results_cache
Example 8: raw
def raw(self):
    qs = self._build_query()
    es = elasticutils.get_es()
    try:
        hits = es.search(qs, settings.ES_INDEX, self.type._meta.db_table)
    except Exception:
        log.error(qs)
        raise
    statsd.timing('search', hits['took'])
    log.debug('[%s] %s' % (hits['took'], qs))
    return hits
Example 9: raw
def raw(self):
    qs = self._build_query()
    es = elasticutils.get_es()
    try:
        with statsd.timer('search.es.timer') as timer:
            hits = es.search(qs, self.index, self.type._meta.db_table)
    except Exception:
        log.error(qs)
        raise
    statsd.timing('search.es.took', hits['took'])
    log.debug('[%s] [%s] %s' % (hits['took'], timer.ms, qs))
    return hits
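
Example 9 uses both forms of the API: statsd.timer as a context manager measures wall-clock time around the search call, while statsd.timing records the Elasticsearch-reported 'took' value, so the two can be compared in the log line. A minimal sketch of the context-manager form, assuming the same client and a hypothetical operation:

with statsd.timer('myapp.expensive') as timer:  # sends elapsed ms on exit
    expensive_call()  # hypothetical
log.debug('took %sms' % timer.ms)  # the elapsed time stays on the timer object
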
Example 10: zip_source
def zip_source(pk, hashtag, tqueued=None, **kw):
    if not hashtag:
        log.critical("[zip] No hashtag provided")
        return
    tstart = time.time()
    if tqueued:
        tinqueue = (tstart - tqueued) * 1000
        statsd.timing('zip.queued', tinqueue)
        log.info('[zip:%s] Addon job picked from queue (%dms)' % (hashtag, tinqueue))
    log.debug("[zip:%s] Compressing" % pk)
    PackageRevision.objects.get(pk=pk).zip_source(hashtag=hashtag, tstart=tstart)
    log.debug("[zip:%s] Compressed" % pk)
Example 11: on_postrun
def on_postrun(self, sender, **kw):
    # sender is the task object; the task_id is in kw.
    pending = self.redis.hincrby(self.pending, sender.name, -1)
    # Clamp pending at 0. Tasks could be coming in before we started
    # tracking.
    if pending < 0:
        self.redis.hset(self.pending, sender.name, 0)
    self.redis.hincrby(self.run, sender.name, 1)
    start = self.redis.hget(self.timer, kw['task_id'])
    if start:
        t = (time.time() - float(start)) * 1000
        statsd.timing('tasks.%s' % sender.name, int(t))
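
on_postrun only works if something recorded the start time under kw['task_id'] earlier. The source of that value is not shown here; a hypothetical prerun counterpart, assuming it mirrors the Redis hashes used above, could look like this:

def on_prerun(self, sender, **kw):
    # Hypothetical counterpart: stamp the start time so on_postrun above
    # can compute elapsed milliseconds for this task_id.
    self.redis.hincrby(self.pending, sender.name, 1)
    self.redis.hset(self.timer, kw['task_id'], time.time())
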
Example 12: migrate_helpfulvotes
def migrate_helpfulvotes(start_id, end_id):
    """Transfer helpfulvotes from old to new version."""
    if not waffle.switch_is_active('migrate-helpfulvotes'):
        raise  # Celery emails the failed IDs so we know which to rerun.

    start = time.time()

    pin_this_thread()  # Pin to master

    transaction.enter_transaction_management()
    transaction.managed(True)
    try:
        cursor = connection.cursor()
        cursor.execute("""INSERT INTO `wiki_helpfulvote`
                (revision_id, helpful, created,
                 creator_id, anonymous_id, user_agent)
            SELECT COALESCE(
                    (SELECT id FROM `wiki_revision`
                     WHERE `document_id` = wiki_helpfulvoteold.document_id
                     AND `is_approved`=1 AND
                     (`reviewed` <= wiki_helpfulvoteold.created
                      OR `reviewed` IS NULL)
                     ORDER BY CASE WHEN `reviewed`
                     IS NULL THEN 1 ELSE 0 END,
                     `wiki_revision`.`created` DESC LIMIT 1),
                    (SELECT id FROM `wiki_revision`
                     WHERE `document_id` = wiki_helpfulvoteold.document_id
                     AND (`reviewed` <= wiki_helpfulvoteold.created
                      OR `reviewed` IS NULL)
                     ORDER BY CASE WHEN `reviewed`
                     IS NULL THEN 1 ELSE 0 END,
                     `wiki_revision`.`created` DESC LIMIT 1),
                    (SELECT id FROM `wiki_revision`
                     WHERE `document_id` = wiki_helpfulvoteold.document_id
                     ORDER BY `created` ASC LIMIT 1)),
                helpful, created, creator_id, anonymous_id, user_agent
            FROM `wiki_helpfulvoteold` WHERE id >= %s AND id < %s""",
            [start_id, end_id])
        transaction.commit()
    except:
        transaction.rollback()
        raise

    transaction.leave_transaction_management()

    unpin_this_thread()

    d = time.time() - start
    statsd.timing('wiki.migrate_helpfulvotes', int(round(d * 1000)))
Example 13: _rebuild_kb_chunk
def _rebuild_kb_chunk(data, **kwargs):
    """Re-render a chunk of documents.

    Note: Don't use host components when making redirects to wiki pages; those
    redirects won't be auto-pruned when they're 404s.
    """
    log.info('Rebuilding %s documents.' % len(data))

    pin_this_thread()  # Stick to master.

    messages = []
    start = time.time()
    for pk in data:
        message = None
        try:
            document = Document.objects.get(pk=pk)

            # If we know a redirect link to be broken (i.e. if it looks like a
            # link to a document but the document isn't there), delete it:
            url = document.redirect_url()
            if (url and points_to_document_view(url) and
                    not document.redirect_document()):
                document.delete()
            else:
                document.html = document.current_revision.content_parsed
                document.save()
        except Document.DoesNotExist:
            message = 'Missing document: %d' % pk
        except ValidationError as e:
            message = 'ValidationError for %d: %s' % (pk, e.messages[0])
        except SlugCollision:
            message = 'SlugCollision: %d' % pk
        except TitleCollision:
            message = 'TitleCollision: %d' % pk

        if message:
            log.debug(message)
            messages.append(message)
    d = time.time() - start
    statsd.timing('wiki.rebuild_chunk', int(round(d * 1000)))

    if messages:
        subject = ('[%s] Exceptions raised in _rebuild_kb_chunk()' %
                   settings.PLATFORM_NAME)
        mail_admins(subject=subject, message='\n'.join(messages))
    transaction.commit_unless_managed()

    unpin_this_thread()  # Not all tasks need to use the master.
Example 14: raw
def raw(self):
    """
    Builds query and passes to ElasticSearch, then returns the raw format
    returned.
    """
    qs = self._build_query()
    es = get_es()
    try:
        hits = es.search(qs, settings.ES_INDEX, self.type._meta.db_table)
    except Exception:
        log.error(qs)
        raise
    if statsd:
        statsd.timing("search", hits["took"])
    log.debug("[%s] %s" % (hits["took"], qs))
    return hits
Example 15: check_machine
def check_machine(jenkins, client, machine, error_counts):
    try:
        server = client.servers.get(machine.external_id)
    except:
        print "Unable to get server detail, will retry"
        traceback.print_exc()
        return

    if server.status == 'ACTIVE':
        ip = utils.get_public_ip(server)
        if not ip and 'os-floating-ips' in utils.get_extensions(client):
            utils.add_public_ip(server)
            ip = utils.get_public_ip(server)
        if not ip:
            raise Exception("Unable to find public ip of server")

        machine.ip = ip
        print "Machine %s is running, testing ssh" % machine.id
        if utils.ssh_connect(ip, 'jenkins'):
            if statsd:
                dt = int((time.time() - machine.state_time) * 1000)
                key = 'devstack.launch.%s' % machine.base_image.provider.name
                statsd.timing(key, dt)
                statsd.incr(key)
            print "Adding machine %s to Jenkins" % machine.id
            create_jenkins_node(jenkins, machine)
            print "Machine %s is ready" % machine.id
            machine.state = vmdatabase.READY
            return
    elif not server.status.startswith('BUILD'):
        count = error_counts.get(machine.id, 0)
        count += 1
        error_counts[machine.id] = count
        print "Machine %s is in error %s (%s/5)" % (machine.id,
                                                    server.status,
                                                    count)
        if count >= 5:
            if statsd:
                statsd.incr('devstack.error.%s' %
                            machine.base_image.provider.name)
            raise Exception("Too many errors querying machine %s" % machine.id)
    else:
        if time.time() - machine.state_time >= ABANDON_TIMEOUT:
            if statsd:
                statsd.incr('devstack.timeout.%s' %
                            machine.base_image.provider.name)
            raise Exception("Waited too long for machine %s" % machine.id)