

Python config.global_settings Function Code Examples

This article collects typical usage examples of the Python function newrelic.core.config.global_settings. If you have been wondering what global_settings does, how to call it, or where to find real-world examples of it, the hand-picked code samples below should help.


The following presents 15 code examples of the global_settings function, ordered by popularity by default.
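All of the examples share the same basic pattern: import global_settings from newrelic.core.config, call it to obtain the shared agent settings object, and read nested configuration attributes from it. Below is a minimal sketch of that pattern; the attribute names are ones used by the examples in this article, while the printed values are purely illustrative.

from newrelic.core.config import global_settings

# Obtain the agent-wide settings object shared across the process.
settings = global_settings()

# Read a few of the nested attributes the examples below rely on.
print(settings.host, settings.port)
print(settings.ssl)
print(settings.debug.log_explain_plan_queries)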

Example 1: do_interpreter

    def do_interpreter(self):
        """
        When enabled in the configuration file, will start up an embedded
        interactive Python interpreter. Invoke 'exit()' or 'quit()' to
        escape the interpreter session."""

        enabled = False

        _settings = global_settings()

        if not _settings.console.allow_interpreter_cmd:
            print >> self.stdout, 'Sorry, the embedded Python ' \
                    'interpreter is disabled.'
            return

        locals = {}

        locals['stdin'] = self.stdin
        locals['stdout'] = self.stdout

        console = EmbeddedConsole(locals)

        console.stdin = self.stdin
        console.stdout = self.stdout

        acquire_console(self)

        try:
            console.interact()
        except SystemExit:
            pass
        finally:
            release_console()
Developer: Mause, Project: table_select_web, Lines: 33, Source: console.py

Example 2: collector_url

def collector_url(server=None):
    """Returns the URL for talking to the data collector. When no server
    'host:port' is specified then the main data collector host and port is
    taken from the agent configuration. When a server is explicitly passed
    it would be the secondary data collector which subsequents requests
    in an agent session should be sent to.

    """

    settings = global_settings()

    url = '%s://%s/agent_listener/invoke_raw_method'

    scheme = settings.ssl and 'https' or 'http'

    if not server or settings.port:
        # When pulling port from agent configuration it should only be
        # set when testing against a local data collector. For staging
        # and production should not be set and would default to port 80
        # or 443 based on scheme name in URL and we don't explicitly
        # add the ports.

        if settings.port:
            server = '%s:%d' % (settings.host, settings.port)
        else:
            server = '%s' % settings.host

    return url % (scheme, server)
Developer: GbalsaC, Project: bitnamiP, Lines: 28, Source: data_collector.py
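As a quick illustration of how collector_url() composes its result, the standalone sketch below mirrors the same string formatting with hypothetical values; in the agent the scheme, host and port come from global_settings() rather than being hard-coded.

url = '%s://%s/agent_listener/invoke_raw_method'

scheme = 'https'                          # settings.ssl assumed true
server = '%s:%d' % ('localhost', 8081)    # settings.port is only set against a local collector

print(url % (scheme, server))             # https://localhost:8081/agent_listener/invoke_raw_method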

Example 3: proxy_server

def proxy_server():
    """Returns the dictionary of proxy server settings to be supplied to
    the 'requests' library when making requests.

    """

    settings = global_settings()

    # Require that both proxy host and proxy port are set to work.

    if not settings.proxy_host or not settings.proxy_port:
        return

    # The agent configuration only provides means to set one proxy so we
    # assume that it will be set correctly depending on whether SSL
    # connection requested or not.

    scheme = settings.ssl and 'https' or 'http'
    proxy = '%s:%d' % (settings.proxy_host, settings.proxy_port)

    # Encode the proxy user name and password into the proxy server value,
    # as the requests library will strip them out of there and use them.

    if settings.proxy_user is not None and settings.proxy_pass is not None:
        proxy = 'http://%s:%s@%s' % (settings.proxy_user,
                settings.proxy_pass, proxy)

    return { scheme: proxy }
Developer: Jasckom, Project: nutrientdata, Lines: 28, Source: data_collector.py
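The dictionary returned by proxy_server() is meant to be passed straight to the requests library. A minimal sketch of that hand-off is shown below; the collector URL and proxy address are hypothetical placeholders, not values from the example.

import requests

# Hypothetical proxy settings; in the agent these come from global_settings().
proxies = {'https': 'http://proxy-user:proxy-pass@proxy.example.com:3128'}

# requests picks the entry whose key matches the scheme of the target URL.
response = requests.post(
        'https://collector.example.com/agent_listener/invoke_raw_method',
        proxies=proxies, data='[]')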

Example 4: time_metrics

    def time_metrics(self, stats, root, parent):
        settings = global_settings()

        if 'database.instrumentation.r1' in settings.feature_flag:
            return self.time_metrics_r1(stats, root, parent)

        return self.time_metrics_r2(stats, root, parent)
Developer: Dragoon013, Project: newrelic-python-kata, Lines: 7, Source: database_node.py

Example 5: profile_data

    def profile_data(self):
        """Returns the profile data once the thread profiling session has
        finished otherwise returns None. The data structure returned is
        in a form suitable for sending back to the data collector.

        """

        # Profiling session not finished.

        if self._profiler_thread.isAlive() and not self._xray_txns:
            return None

        call_data = {}
        thread_count = 0

        # We prune the number of nodes sent if we are over the specified
        # limit. This is just to avoid having the response be too large
        # and get rejected by the data collector.

        settings = global_settings()

        self._prune_call_trees(settings.agent_limits.thread_profiler_nodes)

        for thread_category, bucket in self._call_buckets.items():
            if bucket:
                call_data[thread_category] = bucket.values()
                thread_count += len(bucket)

        # If no profile data was captured, return None instead of sending
        # an encoded empty data structure.

        if thread_count == 0:
            return None

        # Construct the actual final data for sending. The actual call
        # data is turned into JSON, compressed and then base64 encoded at
        # this point to cut its size.

        json_data = simplejson.dumps(call_data, ensure_ascii=True,
                encoding='Latin-1', default=lambda o: o.jsonable(),
                namedtuple_as_object=False)
        encoded_data = base64.standard_b64encode(zlib.compress(json_data))

        if self._xray_txns:
            xray_obj = self._xray_txns.values()[0]
            xray_id = xray_obj.xray_id
        else:
            xray_id = None

        profile = [[self.profile_id, self._start_time*1000,
                self._stop_time*1000, self._sample_count, encoded_data,
                thread_count, 0, xray_id]]

        # If xray session is running send partial call tree and clear the
        # data-structures.
        if self._xray_txns:
            self._reset_call_buckets()

        return profile
Developer: dmathewwws, Project: twitter-sentiment-analysis-python, Lines: 59, Source: thread_profiler.py
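The encoding step above (JSON, then zlib, then base64) can be reproduced and reversed in isolation. The sketch below is a minimal stand-in that uses the standard json module instead of simplejson and assumes Python 3 bytes/str handling; the call tree contents are hypothetical.

import base64
import json
import zlib

call_data = {'REQUEST': [[['module', 'function', 42], 7]]}   # hypothetical call tree

json_data = json.dumps(call_data, ensure_ascii=True)
encoded_data = base64.standard_b64encode(zlib.compress(json_data.encode('latin-1')))

# The receiving side simply reverses the three steps.
decoded = json.loads(zlib.decompress(base64.standard_b64decode(encoded_data)))
assert decoded == call_data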

Example 6: __init__

    def __init__(self, maximum=4):
        self.connections = []
        self.maximum = maximum

        settings = global_settings()

        if settings.debug.log_explain_plan_queries:
            _logger.debug('Creating SQL connections cache %r.', self)
Developer: GbalsaC, Project: bitnamiP, Lines: 8, Source: database_utils.py

Example 7: _profiler_loop

    def _profiler_loop(self):
        """Infinite loop that wakes up periodically to collect stack traces,
        merge it into call tree if necessaray, finally update the state of all
        the active profile sessions.

        """

        settings = global_settings()

        overhead_threshold = settings.agent_limits.xray_profile_overhead

        while True:

            # If the x-ray profilers are not suspended and at least one x-ray
            # session is active, collect_stack_traces() will add the stack
            # traces to the transaction object.

            start = time.time()

            include_xrays = (not self._xray_suspended) and any(six.itervalues(self.application_xrays))

            for category, stack in collect_stack_traces(self.profile_agent_code, include_xrays):

                # Merge the stack_trace to the call tree only for
                # full_profile_session. X-ray profiles will be merged at
                # the time of exiting the transaction.

                if self.full_profile_session:
                    self.full_profile_session.update_call_tree(category, stack)

            self.update_profile_sessions()

            # Stop the profiler thread if there are no profile sessions.

            if (self.full_profile_session is None) and (not any(six.itervalues(self.application_xrays))):
                self._profiler_thread_running = False
                return

            # Adjust the sample period dynamically based on the overhead of
            # doing thread profiling if this is an X-Ray session.

            if not self._xray_suspended:
                overhead = time.time() - start

                with self._lock:
                    aggregation_time = self._aggregation_time
                    self._aggregation_time = 0.0

                overhead += aggregation_time

                delay = overhead / self.sample_period_s / overhead_threshold
                delay = min((max(1.0, delay) * self.sample_period_s), 5.0)

                self._profiler_shutdown.wait(delay)

            else:
                self._profiler_shutdown.wait(self.sample_period_s)
Developer: edmorley, Project: newrelic-python-agent, Lines: 57, Source: profile_sessions.py
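The adaptive delay at the end of the loop is easier to follow with concrete numbers. The sketch below plugs in hypothetical values; in the agent, sample_period_s belongs to the session and the overhead threshold comes from the agent_limits settings.

sample_period_s = 0.1        # hypothetical 100 ms sampling period
overhead_threshold = 0.05    # hypothetical 5% allowed profiling overhead

overhead = 0.02              # seconds spent collecting and merging this cycle

delay = overhead / sample_period_s / overhead_threshold    # 0.02 / 0.1 / 0.05 = 4.0
delay = min((max(1.0, delay) * sample_period_s), 5.0)      # min(4.0 * 0.1, 5.0) = 0.4

print(delay)   # 0.4, so the profiler backs off to keep its overhead near the threshold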

Example 8: cleanup

    def cleanup(self):
        settings = global_settings()

        if settings.debug.log_explain_plan_queries:
            _logger.debug('Cleaning up SQL connections cache %r.', self)

        for key, connection in self.connections:
            connection.cleanup()

        self.connections = []
Developer: GbalsaC, Project: bitnamiP, Lines: 10, Source: database_utils.py

Example 9: active_threads

    def active_threads(self):
        """Returns an iterator over all current stack frames for all
        active threads in the process. The result for each is a tuple
        consisting of the thread identifier, a categorisation of the
        type of thread, and the stack frame. Note that we actually treat
        any greenlets as threads as well. In that case the thread ID is
        the id() of the greenlet.

        This is in this class for convenience as it needs to access the
        currently active transactions to categorise transaction threads
        as belonging to web transactions or background tasks.

        """

        # First yield up those for real Python threads.

        for thread_id, frame in sys._current_frames().items():
            transaction = self._cache.get(thread_id)
            if transaction is not None:
                if transaction.background_task:
                    yield transaction, thread_id, 'BACKGROUND', frame
                else:
                    yield transaction, thread_id, 'REQUEST', frame
            else:
                # Note that there may not always be a thread object.
                # This is because the thread could have been created
                # directly against the thread module rather than via the
                # high level threading module. Categorise anything we
                # can't obtain a name for as being 'OTHER'.

                thread = threading._active.get(thread_id)
                if thread is not None and thread.getName().startswith('NR-'):
                    yield None, thread_id, 'AGENT', frame
                else:
                    yield None, thread_id, 'OTHER', frame

        # Now yield up those corresponding to greenlets. Right now only
        # doing this for greenlets in which any active transactions are
        # running. We don't have a way of knowing what non transaction
        # threads are running.

        debug = global_settings().debug

        if debug.enable_coroutine_profiling:
            for thread_id, transaction in self._cache.items():
                if transaction._greenlet is not None:
                    gr = transaction._greenlet()
                    if gr and gr.gr_frame is not None:
                        if transaction.background_task:
                            yield (transaction, thread_id,
                                    'BACKGROUND', gr.gr_frame)
                        else:
                            yield (transaction, thread_id,
                                    'REQUEST', gr.gr_frame)
Developer: dmathewwws, Project: twitter-sentiment-analysis-python, Lines: 54, Source: transaction_cache.py
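The real-thread branch is built around sys._current_frames(), which maps thread identifiers to their current top stack frames. The standalone sketch below exercises just that part, without the transaction cache or the greenlet handling; note that threading._active is the same private CPython mapping the example relies on.

import sys
import threading

for thread_id, frame in sys._current_frames().items():
    thread = threading._active.get(thread_id)
    name = thread.getName() if thread is not None else None
    category = 'AGENT' if name and name.startswith('NR-') else 'OTHER'
    print(thread_id, category, frame.f_code.co_name)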

Example 10: create_session

def create_session(license_key, app_name, linked_applications,
        environment, settings):

    _global_settings = global_settings()

    if _global_settings.developer_mode:
        return DeveloperModeSession.create_session(license_key, app_name,
                linked_applications, environment, settings)

    return ApplicationSession.create_session(license_key, app_name,
            linked_applications, environment, settings)
Developer: TheTincho, Project: python-newrelic, Lines: 11, Source: data_collector.py

Example 11: _requests_proxy_scheme_workaround

def _requests_proxy_scheme_workaround(wrapped, instance, args, kwargs):
    def _params(connection, *args, **kwargs):
        return connection

    pool, connection = instance, _params(*args, **kwargs)

    settings = global_settings()

    if pool.proxy and pool.proxy.scheme == 'https':
        if settings.proxy_scheme in (None, 'https'):
            return connection

    return wrapped(*args, **kwargs)
Developer: GbalsaC, Project: bitnamiP, Lines: 13, Source: data_collector.py

Example 12: connection

    def connection(self, database, args, kwargs):
        key = (database.client, args, kwargs)

        connection = None

        settings = global_settings()

        for i, item in enumerate(self.connections):
            if item[0] == key:
                connection = item[1]

                # Move to back of list so we know which is the
                # most recently used all the time.

                item = self.connections.pop(i)
                self.connections.append(item)

                break

        if connection is None:
            # If we are at the maximum number of connections to
            # keep hold of, pop the one which has been used the
            # longest amount of time.

            if len(self.connections) == self.maximum:
                connection = self.connections.pop(0)[1]

                internal_metric('Supportability/DatabaseUtils/Counts/'
                                'drop_database_connection', 1)

                if settings.debug.log_explain_plan_queries:
                    _logger.debug('Drop database connection for %r as '
                            'reached maximum of %r.',
                            connection.database.client, self.maximum)

                connection.cleanup()

            connection = SQLConnection(database,
                    database.connect(*args, **kwargs))

            self.connections.append((key, connection))

            internal_metric('Supportability/DatabaseUtils/Counts/'
                            'create_database_connection', 1)

            if settings.debug.log_explain_plan_queries:
                _logger.debug('Created database connection for %r.',
                        database.client)

        return connection
Developer: GbalsaC, Project: bitnamiP, Lines: 50, Source: database_utils.py
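The cache keeps self.connections ordered from least to most recently used: a hit is popped and re-appended, and evictions always come from the front. The sketch below isolates just that list discipline, with hypothetical string keys standing in for the (client, args, kwargs) tuples.

connections = [('db1', 'conn1'), ('db2', 'conn2'), ('db3', 'conn3')]
maximum = 3

# A hit on 'db2' moves it to the back of the list (most recently used).
for i, item in enumerate(connections):
    if item[0] == 'db2':
        connections.append(connections.pop(i))
        break

# A miss while at capacity evicts the front (least recently used) entry.
if len(connections) == maximum:
    evicted = connections.pop(0)
connections.append(('db4', 'conn4'))

print(evicted)        # ('db1', 'conn1')
print(connections)    # [('db3', ...), ('db2', ...), ('db4', ...)]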

Example 13: cursor

    def cursor(self, args=(), kwargs={}):
        key = (args, frozenset(kwargs.items()))

        cursor = self.cursors.get(key)

        if cursor is None:
            settings = global_settings()

            if settings.debug.log_explain_plan_queries:
                _logger.debug('Created database cursor for %r.',
                        self.database.client)

            cursor = self.connection.cursor(*args, **kwargs)
            self.cursors[key] = cursor

        return cursor
Developer: GbalsaC, Project: bitnamiP, Lines: 16, Source: database_utils.py

Example 14: connection_type

def connection_type(proxies):
    """Returns a string describing the connection type for use in metrics.

    """

    settings = global_settings()

    ssl = settings.ssl

    request_scheme = ssl and 'https' or 'http'

    if proxies is None:
        return 'direct/%s' % request_scheme

    proxy_scheme = proxies['http'].split('://')[0]

    return '%s-proxy/%s' % (proxy_scheme, request_scheme)
Developer: tasnim07, Project: BlackFly, Lines: 17, Source: data_collector.py
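A couple of hypothetical inputs make the metric strings concrete. The sketch below mirrors the same formatting, assuming settings.ssl is true so the request scheme is 'https'; the proxy address is a placeholder.

proxies = {'http': 'http://proxy.example.com:3128'}    # e.g. the proxy_server() result

request_scheme = 'https'                                # settings.ssl assumed true
proxy_scheme = proxies['http'].split('://')[0]          # -> 'http'

print('direct/%s' % request_scheme)                     # direct/https  (no proxy configured)
print('%s-proxy/%s' % (proxy_scheme, request_scheme))   # http-proxy/https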

Example 15: send_request

    def send_request(cls, session, url, method, license_key,
            agent_run_id=None, payload=()):

        assert method in _developer_mode_responses

        # Create fake details for the request being made so that we
        # can use the same audit logging functionality.

        params = {}
        headers = {}

        if not license_key:
            license_key = 'NO LICENSE KEY WAS SET IN AGENT CONFIGURATION'

        params['method'] = method
        params['license_key'] = license_key
        params['protocol_version'] = '14'
        params['marshal_format'] = 'json'

        if agent_run_id:
            params['run_id'] = str(agent_run_id)

        headers['User-Agent'] = USER_AGENT
        headers['Content-Encoding'] = 'identity'

        data = json_encode(payload)

        log_id = _log_request(url, params, headers, data)

        # Now create the fake responses so the agent still runs okay.

        result = _developer_mode_responses[method]

        if method == 'connect':
            settings = global_settings()
            if settings.high_security:
                result = dict(result)
                result['high_security'] = True

        # Even though they are always fake responses, still log them.

        if log_id is not None:
            _log_response(log_id, dict(return_value=result))

        return result
Developer: GbalsaC, Project: bitnamiP, Lines: 45, Source: data_collector.py


Note: The newrelic.core.config.global_settings examples in this article were compiled from open-source projects hosted on GitHub and similar code and documentation platforms. The code snippets were selected from projects contributed by their original authors, who retain the copyright; please follow the corresponding project licenses when redistributing or using them, and do not republish this compilation without permission.