Python statsd.gauge Function Code Examples

This article collects typical usage examples of the statsd.statsd.gauge function in Python. If you are wondering how to call gauge, what its arguments look like, or how it is used in real projects, the curated examples below should help.


A total of 15 gauge code examples are shown below, drawn from open-source projects and listed by popularity.
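Before the project-specific examples, here is a minimal sketch of the two gauge() call styles that appear below. It is not taken from any of the listed repositories; the metric names and values are invented, and which client library the `from statsd import statsd` import in each snippet resolves to depends on the project (the older dogstatsd-python package, for instance, also exposed a module-level `statsd` instance).

# Minimal sketch, not from the repositories below; names and values are made up.

# jsocol statsd client (PyPI package "statsd"): gauge() accepts delta=True.
from statsd import StatsClient

client = StatsClient('localhost', 8125)
client.gauge('example.users.active', 42)             # set an absolute gauge value
client.gauge('example.counts.total', 1, delta=True)  # adjust the current value instead of replacing it

# Datadog dogstatsd client (PyPI package "datadog"): gauge() accepts tags=[...].
from datadog import statsd as dogstatsd

dogstatsd.gauge('example.queue.length', 17, tags=['queue:default'])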

Example 1: rssatomfeed_post_save

def rssatomfeed_post_save(instance, **kwargs):

    if not kwargs.get('created', False):
        return

    statsd.gauge('feeds.counts.total', 1, delta=True)
    statsd.gauge('feeds.counts.rssatom', 1, delta=True)
Developer: 1flow, Project: 1flow, Lines of code: 7, Source file: rssatom.py

Example 2: process_exit_surveys

def process_exit_surveys():
    """Exit survey handling.

    * Collect new exit survey results.
    * Save results to our metrics table.
    * Add new emails collected to the exit survey.
    """

    _process_exit_survey_results()

    # Get the email addresses from two days ago and add them to the survey
    # campaign (skip this on stage).
    if settings.STAGE:
        # Only run this on prod, it doesn't need to be running multiple times
        # from different places.
        return

    startdate = date.today() - timedelta(days=2)
    enddate = date.today() - timedelta(days=1)

    for survey in SURVEYS.keys():
        if 'email_collection_survey_id' not in SURVEYS[survey]:
            # Some surveys don't have email collection on the site
            # (the askers survey, for example).
            continue

        emails = get_email_addresses(survey, startdate, enddate)
        for email in emails:
            add_email_to_campaign(survey, email)

        statsd.gauge('survey.{0}'.format(survey), len(emails))
Developer: ChromiumEx, Project: kitsune, Lines of code: 31, Source file: cron.py

Example 3: report_stats

def report_stats(host, topology, toporoot, topic):
    state = urllib2.urlopen(
        "http://{}/api/status?toporoot={}&topic={}".format(
            host, toporoot, topic
        )
    ).read()

    data = json.loads(state)

    amount = 0
    for looplord in data:
        if looplord['amount'] is not None:
            statsd.gauge(
                'razor.kafkamon.topology.partition',
                looplord['amount'],
                tags=[
                    "topic:{}".format(topic),
                    "topology:{}".format(topology),
                    "partition:{}".format(looplord['partition'])
                ]
            )
            amount += looplord['amount']

    print "Got {} for {}".format(amount, topology)

    statsd.gauge(
        'razor.kafkamon.total_delta',
        amount, tags=[
            "topic:{}".format(topic),
            "topology:{}".format(topology)
        ]
    )
Developer: evertrue, Project: capillary, Lines of code: 32, Source file: stats-to-datadog.py

Example 4: _deliver_submission

    def _deliver_submission(self, submission):
        payload = {'xqueue_body': submission.xqueue_body,
                   'xqueue_files': submission.s3_urls}

        submission.grader_id = self.worker_url
        submission.push_time = timezone.now()
        start = time.time()
        (grading_success, grader_reply) = _http_post(self.worker_url, json.dumps(payload), settings.GRADING_TIMEOUT)
        statsd.histogram('xqueue.consumer.consumer_callback.grading_time', time.time() - start,
                         tags=['queue:{0}'.format(self.queue_name)])

        job_count = get_queue_length(self.queue_name)
        statsd.gauge('xqueue.consumer.consumer_callback.queue_length', job_count,
                     tags=['queue:{0}'.format(self.queue_name)])

        submission.return_time = timezone.now()

        # TODO: For the time being, a submission in a push interface gets one chance at grading,
        #       with no requeuing logic
        if grading_success:
            submission.grader_reply = grader_reply
            submission.lms_ack = post_grade_to_lms(submission.xqueue_header, grader_reply)
        else:
            log.error("Submission {} to grader {} failure: Reply: {}, ".format(submission.id, self.worker_url, grader_reply))
            submission.num_failures += 1
            submission.lms_ack = post_failure_to_lms(submission.xqueue_header)

        # NOTE: retiring pushed submissions after one shot regardless of grading_success
        submission.retired = True

        submission.save()
Developer: EduPepperPDTesting, Project: xqueue, Lines of code: 31, Source file: consumer.py

Example 5: emit

    def emit(self, stat_name, tags, value):
        # Convert the dictionary of tags into a list of "key:value" strings.
        string_tags = map(lambda (k, v): (self.dd_tag_string.format(key=k, value=v)), tags.iteritems())
        statsd.gauge(self.dd_metric_string.format(
            scope=self.scope,
            stat=stat_name
        ), value, tags=string_tags)
Developer: digideskio, Project: razor, Lines of code: 7, Source file: stats.py

Example 6: measure_queue_lag

def measure_queue_lag(queued_time):
    """A task that measures the time it was sitting in the queue.

    It saves the data to graphite via statsd.
    """
    lag = datetime.now() - queued_time
    lag = (lag.days * 3600 * 24) + lag.seconds
    statsd.gauge('rabbitmq.lag', max(lag, 0))
Developer: 1234-, Project: kitsune, Lines of code: 8, Source file: tasks.py

Example 7: post_create_task

    def post_create_task(self):
        """ Method meant to be run from a celery task. """

        if not self.slug:
            self.slug = slugify(self.name)
            self.save()

            statsd.gauge('tags.counts.total', 1, delta=True)
Developer: EliotBerriot, Project: 1flow, Lines of code: 8, Source file: tag.py

Example 8: synchronize_statsd_websites_gauges

def synchronize_statsd_websites_gauges(full=False):

    with benchmark('synchronize statsd gauges for WebSite.*'):

        statsd.gauge('websites.counts.total', WebSite._get_collection().count())

        if full:
            duplicates = WebSite.objects(duplicate_of__ne=None).no_cache()
            statsd.gauge('websites.counts.duplicates', duplicates.count())
Developer: EliotBerriot, Project: 1flow, Lines of code: 9, Source file: stats.py

Example 9: synchronize_statsd_authors_gauges

def synchronize_statsd_authors_gauges(full=False):

    with benchmark('synchronize statsd gauges for Author.*'):

        statsd.gauge('authors.counts.total', Author._get_collection().count())

        if full:
            duplicates = Author.objects(duplicate_of__ne=None).no_cache()
            statsd.gauge('authors.counts.duplicates', duplicates.count())
Developer: EliotBerriot, Project: 1flow, Lines of code: 9, Source file: stats.py

Example 10: synchronize_mongodb_statsd_tags_gauges

def synchronize_mongodb_statsd_tags_gauges(full=False):
    """ synchronize all tag-related gauges on our statsd server. """

    with benchmark('synchronize statsd gauges for Tag.*'):

        statsd.gauge('mongo.tags.counts.total', Tag._get_collection().count())

        if full:
            duplicates = Tag.objects(duplicate_of__ne=None).no_cache()
            statsd.gauge('mongo.tags.counts.duplicates', duplicates.count())
Developer: 1flow, Project: 1flow, Lines of code: 10, Source file: stats.py

Example 11: register_duplicate

    def register_duplicate(self, duplicate, force=False):
        """ TODO. """

        # be sure this helper method is called
        # on a document that has the attribute.
        assert hasattr(duplicate, 'duplicate_of')

        _cls_name_ = self.__class__.__name__
        _cls_name_lower_ = _cls_name_.lower()
        # TODO: get this from a class attribute?
        # I'm not sure for MongoEngine models.
        lower_plural = _cls_name_lower_ + u's'

        if duplicate.duplicate_of:
            if duplicate.duplicate_of != self:
                # NOTE: for Article, this situation can't happen IRL
                # (demonstrated with Willian 20130718).
                #
                # Any "second" duplicate *will* resolve to the master via the
                # redirect chain. It will *never* resolve to an intermediate
                # URL in the chain.
                #
                # For other objects it should happen too, because the
                # `get_or_create()` methods should return the `.duplicate_of`
                # attribute if it is not None.

                LOGGER.warning(u'%s %s is already a duplicate of '
                               u'another instance, not %s. Aborting.',
                               _cls_name_, duplicate, duplicate.duplicate_of)
                return

        LOGGER.info(u'Registering %s %s as duplicate of %s…',
                    _cls_name_, duplicate, self)

        # Register the duplication immediately, for other
        # background operations to use ourselves as value.
        duplicate.duplicate_of = self
        duplicate.save()

        statsd.gauge('mongo.%s.counts.duplicates' % lower_plural, 1, delta=True)

        try:
            # Having tasks not as methods because of Celery bugs forces
            # us to do strange things. We have to "guess" and lookup the
            # task name in the current module. OK, not *that* big deal.
            self.nonrel_globals[
                _cls_name_lower_ + '_replace_duplicate_everywhere_task'].delay(
                self.id, duplicate.id)

        except KeyError:
            LOGGER.warning(u'Object %s has no `replace_duplicate_everywhere()` '
                           u'method, or the method has not been registered as '
                           u'a task with `register_task_method()`.', self)
Developer: 1flow, Project: 1flow, Lines of code: 53, Source file: common.py

Example 12: g729_metrics

    def g729_metrics(self):
        if (self.g729):
            g729_count = yield self.api('g729_count')
            g729_count = int(g729_count)
            statsd.gauge('freeswitch.g729.total', g729_count)
            g729_counts = yield self.api('g729_used')
            g729_enc, g729_dec = [int(e) for e in g729_counts.split(":")]
            statsd.gauge('freeswitch.g729.used.encoder', g729_enc)
            statsd.gauge('freeswitch.g729.used.decoder', g729_dec)
            if (g729_enc > g729_dec):
                statsd.gauge('freeswitch.g729.utilization', g729_enc / g729_count)
            else:
                statsd.gauge('freeswitch.g729.utilization', g729_dec / g729_count)
Developer: areski, Project: FreeSwitch-DataDog-Metrics, Lines of code: 13, Source file: fsmetrics.py

Example 13: post_create_task

    def post_create_task(self):
        """ Method meant to be run from a celery task. """

        if not self.slug:
            if self.name is None:
                proto, host_and_port, remaining = WebSite.split_url(self.url)
                self.name = host_and_port.replace(u'_', u' ').title()

            self.slug = slugify(self.name)

            self.save()

            statsd.gauge('mongo.websites.counts.total', 1, delta=True)
Developer: 1flow, Project: 1flow, Lines of code: 13, Source file: website.py

Example 14: mark_tweet_deleted

def mark_tweet_deleted(tweet_id):

    try:
        tweet = Tweet.objects.get(tweet_id=tweet_id)

    except:
        LOGGER.warning(u'Unknown tweet to delete: %s', tweet_id)

    else:
        tweet.is_deleted = True
        tweet.save()

        statsd.gauge('tweets.counts.deleted', 1, delta=True)
        LOGGER.info(u'Tweet %s marked as deleted.', tweet)
Developer: 1flow, Project: 1flow, Lines of code: 14, Source file: tweet.py

Example 15: _execute

def _execute():

    statsd.connect('localhost', 8125)

    result = _netfilter()

    for chain, entries in result.iteritems():

        for number, item in entries.iteritems():

            key, bytes = _generate_key(chain, item)
            if key is None or key == '':
                continue

            _println('[info]: send gauge=[', key, '], value=[', str(bytes), ']')
            # statsd.histogram(key, bytes)
            statsd.gauge(key, bytes)
Developer: mass10, Project: datadog.note, Lines of code: 17, Source file: test.py


Note: The statsd.statsd.gauge examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their authors; copyright remains with the original authors, and any redistribution or use should follow each project's license. Please do not republish this article without permission.