

Python settings.REDIS_PORT attribute code examples

This article collects typical usage examples of the django.conf.settings.REDIS_PORT attribute in Python. If you have been wondering what settings.REDIS_PORT is for, how to use it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore further usage examples of django.conf.settings, the object this attribute is read from.


Nine code examples of the settings.REDIS_PORT attribute are shown below, ordered by popularity by default.
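For context, REDIS_PORT is not a setting that Django ships with: each of these projects declares it in its own settings module and reads it back through django.conf.settings. A minimal sketch of that pattern follows; the concrete values and the helper name make_redis_client are hypothetical, chosen only for illustration.

import redis
from django.conf import settings

# Hypothetical standalone configuration; in a real project these REDIS_* values
# would be defined in the project's settings.py instead.
if not settings.configured:
    settings.configure(
        REDIS_HOST='127.0.0.1',
        REDIS_PORT=6379,
        REDIS_DB=0,
        REDIS_PASSWORD=None,
    )

def make_redis_client():
    # Build a Redis client from the project-level settings, as most examples below do.
    return redis.StrictRedis(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        db=settings.REDIS_DB,
        password=settings.REDIS_PASSWORD,
    )

Calling make_redis_client().ping() would verify the connection, assuming a Redis server is reachable at the configured host and port.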

Example 1: get

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def get(self, request, *args, **kwargs):
        connect = redis.StrictRedis(
            host=settings.REDIS_HOST,
            port=settings.REDIS_PORT,
            db=settings.REDIS_SPACE,
            password=settings.REDIS_PASSWD
        )
        week_list = ['Mon', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun']
        TEMP = connect.hgetall('WORK')
        WORK = []
        for key in week_list:
            WORK.append({
                'time': key,
                '執行次數': TEMP[key.encode('utf-8')].decode('utf-8')  # per-day execution count; hgetall returns bytes
            })
        return Response(
            {'title': '一周內工單執行', 'dataset': WORK}, status.HTTP_200_OK
        ) 
Developer: YoLoveLife, Project: DevOps, Lines: 20, Source: dashboard.py

Example 2: invalidate_view_cache_for_tenant_and_cache_key

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def invalidate_view_cache_for_tenant_and_cache_key(schema_name, cache_key_prefix=None):
    """Invalidate our view cache for a specific tenant and source type.

    If cache_key_prefix is None, all views will be invalidated.
    """
    cache = caches["default"]
    if isinstance(cache, RedisCache):
        cache = Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB)
        all_keys = cache.keys("*")
        all_keys = [key.decode("utf-8") for key in all_keys]
    elif isinstance(cache, LocMemCache):
        all_keys = list(locmem._caches.get(settings.TEST_CACHE_LOCATION).keys())
        all_keys = [key.split(":", 2)[-1] for key in all_keys]
    else:
        msg = "Using an unsupported caching backend!"
        raise KokuCacheError(msg)

    all_keys = all_keys if all_keys is not None else []

    if cache_key_prefix:
        keys_to_invalidate = [key for key in all_keys if (schema_name in key and cache_key_prefix in key)]
    else:
        # Invalidate all cached views for the tenant
        keys_to_invalidate = [key for key in all_keys if schema_name in key]

    for key in keys_to_invalidate:
        cache.delete(key)

    msg = f"Invalidated request cache for\n\ttenant: {schema_name}\n\tcache_key_prefix: {cache_key_prefix}"
    LOG.info(msg) 
Developer: project-koku, Project: koku, Lines: 32, Source: cache.py

Example 3: __init__

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def __init__(self):
        self._pool = redis.ConnectionPool(host=settings.REDIS_HOST, port=settings.REDIS_PORT,
                                          decode_responses=True, db=settings.MAP_CACHE_DB,
                                          password=settings.REDIS_PASSWORD)
        self.client = redis.Redis(connection_pool=self._pool) 
Developer: yandenghong, Project: KortURL, Lines: 7, Source: client.py

Example 4: get_redis_client

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def get_redis_client() -> redis.StrictRedis:
    return redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT,
                             password=settings.REDIS_PASSWORD, db=0) 
Developer: zulip, Project: zulip, Lines: 5, Source: redis_utils.py

Example 5: __init__

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def __init__(self, verbose=False):
        self.r = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0)
        self.verbose = verbose 
Developer: MTG, Project: freesound-datasets, Lines: 5, Source: redis_store.py

Example 6: connect

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def connect(self):  
        cc = lambda *args: protocol.ClientCreator(reactor, *args)

        self.redis_sub = RedisDispatch(settings.REDIS_HOST, settings.REDIS_PORT)
        redis_factory = RedisServiceRegisteringFactory(self)
        reactor.connectTCP(settings.REDIS_HOST, settings.REDIS_PORT, redis_factory)
        yield redis_factory.deferred 
Developer: canvasnetworks, Project: canvas, Lines: 9, Source: channels.py

Example 7: __init__

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def __init__(self):
        if self.__kerberos_has_ticket() is False:
            self.__kerberos_init()
        if api.isdone('finalize') is False:
            api.bootstrap_with_global_options(context='api')
            api.finalize()
        api.Backend.rpcclient.connect()
        self.redis = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB, password=settings.REDIS_PASSWORD) 
Developer: larrabee, Project: freeipa-password-reset, Lines: 10, Source: pwdmanager.py

Example 8: __init__

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def __init__(self, thread_id, name, experiment, component_id, max_results, cache_results):
        threading.Thread.__init__(self)
        self.threadID = thread_id
        self.name = name
        self.experiment = experiment
        self.comp_id = component_id
        self.result = {}
        self.max_results = max_results
        self.cache_results = cache_results
        print "Submitting topology to storm. End component", self.comp_id
        exp = Experiment.objects.get(pk=self.experiment)
        graph = exp.workflow.graph_data
        graph_data = {}
        print graph
        tmp = graph.split(',')
        for elem in tmp:
            first_node = elem.split(":")[0]
            second_node = elem.split(":")[1]
            if second_node in graph_data:
                depend_nodes = graph_data[second_node]
                depend_nodes.add(first_node)
            else:
                graph_data[second_node] = set()
                graph_data[second_node].add(first_node)
        topological_graph = toposort_flatten(graph_data)
        print "Graph after topological sort", topological_graph
        message = {
            'exp_id': self.experiment, 'result': self.comp_id,
            'graph': topological_graph, 'components': defaultdict()}

        for data in topological_graph:
            component_id = int(data)
            comp = Component.objects.get(pk=component_id)
            if comp.operation_type.function_type == 'Create':
                if comp.operation_type.function_arg == 'Table':
                    filename = comp.operation_type.function_subtype_arg
                    input_data = read_csv(filename)
                    message['input'] = {}
                    for elem in list(input_data.columns):
                        message['input'][elem] = list(input_data[elem])
                    message['cols'] = list(input_data.columns)
                    # message['input'] = input_data.to_dict()

            serialized_obj = serializers.serialize('json', [comp.operation_type, ])
            print "Component_id", component_id, " ", comp.operation_type
            message['components'][data] = serialized_obj

        print "Message ", message
        r = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0)
        self.pubsub = r.pubsub(ignore_subscribe_messages=True)
        self.pubsub.subscribe("Exp " + str(self.experiment))
        ret = r.publish('workflow', json.dumps(message))
        print "return", ret 
Developer: CiscoSystems, Project: cognitive, Lines: 55, Source: results_storm.py

Example 9: handle

# Required import: from django.conf import settings [as alias]
# Or: from django.conf.settings import REDIS_PORT [as alias]
def handle(self, sample='10000', host='ip-10-203-46-218.ec2.internal', *args, **options):
        slave_redis = CanvasRedis(host=host, port=settings.REDIS_PORT, db=settings.REDIS_DB_MAIN)
        slave_cache = CanvasRedis(host=host, port=settings.REDIS_PORT, db=settings.REDIS_DB_CACHE)
        
        if sample != "*":
            sample = int(sample)

        def human(size):
            # Multiply size * 3 to roughly account for the difference in RDB vs in-memory size.
            return "%.1f MB" % (size * 3 / 1000000.0)

        for client in (slave_redis, slave_cache):
            dbsize = client.dbsize()
            if sample == "*":
                print "Summarizing total memory usage for db %s" % client.connection.db
                key_names = client.keys("*")
            else:
                groups = collections.defaultdict(lambda: 0)
                sizes = []
                scalar = 1.0 * dbsize / sample
                print "Sampling %s random keys (of %s) from db %s" % (sample, dbsize, client.connection.db)
                pipeline = client.pipeline()
                for i in range(sample):
                    pipeline.randomkey()
                key_names = pipeline.execute()

            chunksize = 10000
            cursor = 0
            key_sizes = []
            while cursor < len(key_names):
                pipeline = client.pipeline()
                for result in key_names[cursor:cursor+chunksize]:
                    pipeline.execute_command("DEBUG", "OBJECT", result)
                debug_chunk = pipeline.execute()
                for i, result in enumerate(debug_chunk):
                    debug_dict = dict([kv.split(':') for kv in ('type:' + result).split()])
                    key = key_names[cursor + i]
                    keysize = int(debug_dict['serializedlength']) + len(key)
                    key_sizes.append(keysize)
                cursor += chunksize

            if sample == "*":
                print human(sum(key_sizes))
                continue

            # TODO: msg_backlogs look big, figure out how to group these (probably show biggest 25 keys too)
            for key, keysize in zip(key_names, key_sizes):
                keygroup = re.sub("(?<=[:\.]).+(?=[:\.])", "#", key)
                groups[keygroup] += keysize

            print "== TOP 10 RESULTS =="
            for k in sorted(groups, key=lambda k: -groups[k])[:10]:
                size = groups[k]
                print k, human(size * scalar)
            avg = 1.0 * sum(key_sizes) / len(key_sizes)
            print "Average key size: %s (%s estimated total)" % (avg, human(avg * dbsize))
            print "" 
Developer: canvasnetworks, Project: canvas, Lines: 59, Source: profile_redis.py


Note: The django.conf.settings.REDIS_PORT attribute examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and distribution and use are subject to each project's license. Do not republish without permission.