

Python LruCache.del_multi Method Code Examples

This article collects typical usage examples of the Python method synapse.util.caches.lrucache.LruCache.del_multi. If you are wondering what exactly LruCache.del_multi does, how to call it, or what real uses of LruCache.del_multi look like, the hand-picked examples below should help. You can also explore further usage examples of the containing class, synapse.util.caches.lrucache.LruCache.


Three code examples of the LruCache.del_multi method are shown below, sorted by popularity by default.
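Before the examples, here is a minimal sketch of the typical calling pattern, inferred from the test code below (the keys and values are purely illustrative; the import paths and constructor arguments follow the Synapse versions shown on this page):

from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.treecache import TreeCache

# A tree-backed LruCache whose compound keys have two components.
cache = LruCache(max_size=10, keylen=2, cache_type=TreeCache)
cache[("animal", "cat")] = "mew"
cache[("animal", "dog")] = "woof"

# del_multi takes a partial key (a prefix of the compound key) and
# removes every entry stored under that prefix.
cache.del_multi(("animal",))
assert cache.get(("animal", "cat")) is None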

Example 1: test_del_multi

# Required import: from synapse.util.caches.lrucache import LruCache
# Or: from synapse.util.caches.lrucache.LruCache import del_multi
    def test_del_multi(self):
        cache = LruCache(4, 2, cache_type=TreeCache)
        cache[("animal", "cat")] = "mew"
        cache[("animal", "dog")] = "woof"
        cache[("vehicles", "car")] = "vroom"
        cache[("vehicles", "train")] = "chuff"

        self.assertEquals(len(cache), 4)

        self.assertEquals(cache.get(("animal", "cat")), "mew")
        self.assertEquals(cache.get(("vehicles", "car")), "vroom")
        cache.del_multi(("animal",))
        self.assertEquals(len(cache), 2)
        self.assertEquals(cache.get(("animal", "cat")), None)
        self.assertEquals(cache.get(("animal", "dog")), None)
        self.assertEquals(cache.get(("vehicles", "car")), "vroom")
        self.assertEquals(cache.get(("vehicles", "train")), "chuff")
Developer: DoubleMalt, Project: synapse, Lines: 19, Source file: test_lrucache.py

Example 2: Cache

# Required import: from synapse.util.caches.lrucache import LruCache
# Or: from synapse.util.caches.lrucache.LruCache import del_multi

#......... part of the code is omitted here .........
        callbacks = [callback] if callback else []
        val = self._pending_deferred_cache.get(key, _CacheSentinel)
        if val is not _CacheSentinel:
            val.callbacks.update(callbacks)
            if update_metrics:
                self.metrics.inc_hits()
            return val.deferred

        val = self.cache.get(key, _CacheSentinel, callbacks=callbacks)
        if val is not _CacheSentinel:
            self.metrics.inc_hits()
            return val

        if update_metrics:
            self.metrics.inc_misses()

        if default is _CacheSentinel:
            raise KeyError()
        else:
            return default

    def set(self, key, value, callback=None):
        callbacks = [callback] if callback else []
        self.check_thread()
        entry = CacheEntry(
            deferred=value,
            callbacks=callbacks,
        )

        existing_entry = self._pending_deferred_cache.pop(key, None)
        if existing_entry:
            existing_entry.invalidate()

        self._pending_deferred_cache[key] = entry

        def shuffle(result):
            existing_entry = self._pending_deferred_cache.pop(key, None)
            if existing_entry is entry:
                self.cache.set(key, result, entry.callbacks)
            else:
                # oops, the _pending_deferred_cache has been updated since
                # we started our query, so we are out of date.
                #
                # Better put back whatever we took out. (We do it this way
                # round, rather than peeking into the _pending_deferred_cache
                # and then removing on a match, to make the common case faster)
                if existing_entry is not None:
                    self._pending_deferred_cache[key] = existing_entry

                # we're not going to put this entry into the cache, so need
                # to make sure that the invalidation callbacks are called.
                # That was probably done when _pending_deferred_cache was
                # updated, but it's possible that `set` was called without
                # `invalidate` being previously called, in which case it may
                # not have been. Either way, let's double-check now.
                entry.invalidate()
            return result

        entry.deferred.addCallback(shuffle)

    def prefill(self, key, value, callback=None):
        callbacks = [callback] if callback else []
        self.cache.set(key, value, callbacks=callbacks)

    def invalidate(self, key):
        self.check_thread()
        self.cache.pop(key, None)

        # if we have a pending lookup for this key, remove it from the
        # _pending_deferred_cache, which will (a) stop it being returned
        # for future queries and (b) stop it being persisted as a proper entry
        # in self.cache.
        entry = self._pending_deferred_cache.pop(key, None)

        # run the invalidation callbacks now, rather than waiting for the
        # deferred to resolve.
        if entry:
            entry.invalidate()

    def invalidate_many(self, key):
        self.check_thread()
        if not isinstance(key, tuple):
            raise TypeError(
                "The cache key must be a tuple not %r" % (type(key),)
            )
        self.cache.del_multi(key)

        # if we have a pending lookup for this key, remove it from the
        # _pending_deferred_cache, as above
        entry_dict = self._pending_deferred_cache.pop(key, None)
        if entry_dict is not None:
            for entry in iterate_tree_cache_entry(entry_dict):
                entry.invalidate()

    def invalidate_all(self):
        self.check_thread()
        self.cache.clear()
        for entry in itervalues(self._pending_deferred_cache):
            entry.invalidate()
        self._pending_deferred_cache.clear()
Developer: DoubleMalt, Project: synapse, Lines: 104, Source file: descriptors.py

Example 3: Cache

# Required import: from synapse.util.caches.lrucache import LruCache
# Or: from synapse.util.caches.lrucache.LruCache import del_multi
class Cache(object):
    __slots__ = (
        "cache",
        "max_entries",
        "name",
        "keylen",
        "sequence",
        "thread",
        "metrics",
    )

    def __init__(self, name, max_entries=1000, keylen=1, tree=False):
        cache_type = TreeCache if tree else dict
        self.cache = LruCache(
            max_size=max_entries, keylen=keylen, cache_type=cache_type
        )

        self.name = name
        self.keylen = keylen
        self.sequence = 0
        self.thread = None
        self.metrics = register_cache(name, self.cache)

    def check_thread(self):
        expected_thread = self.thread
        if expected_thread is None:
            self.thread = threading.current_thread()
        else:
            if expected_thread is not threading.current_thread():
                raise ValueError(
                    "Cache objects can only be accessed from the main thread"
                )

    def get(self, key, default=_CacheSentinel, callback=None):
        val = self.cache.get(key, _CacheSentinel, callback=callback)
        if val is not _CacheSentinel:
            self.metrics.inc_hits()
            return val

        self.metrics.inc_misses()

        if default is _CacheSentinel:
            raise KeyError()
        else:
            return default

    def update(self, sequence, key, value, callback=None):
        self.check_thread()
        if self.sequence == sequence:
            # Only update the cache if the caches sequence number matches the
            # number that the cache had before the SELECT was started (SYN-369)
            self.prefill(key, value, callback=callback)

    def prefill(self, key, value, callback=None):
        self.cache.set(key, value, callback=callback)

    def invalidate(self, key):
        self.check_thread()
        if not isinstance(key, tuple):
            raise TypeError(
                "The cache key must be a tuple not %r" % (type(key),)
            )

        # Increment the sequence number so that any SELECT statements that
        # raced with the INSERT don't update the cache (SYN-369)
        self.sequence += 1
        self.cache.pop(key, None)

    def invalidate_many(self, key):
        self.check_thread()
        if not isinstance(key, tuple):
            raise TypeError(
                "The cache key must be a tuple not %r" % (type(key),)
            )
        self.sequence += 1
        self.cache.del_multi(key)

    def invalidate_all(self):
        self.check_thread()
        self.sequence += 1
        self.cache.clear()
Developer: mebjas, Project: synapse, Lines: 83, Source file: descriptors.py
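Both Cache examples expose del_multi through invalidate_many, which requires a tuple key that acts as a prefix of the full cache key. A hedged usage sketch, assuming a Cache constructed with tree=True as in Example 3 above (the cache name and keys are purely illustrative):

cache = Cache("get_rooms_for_user", keylen=2, tree=True)

cache.prefill(("@alice:example.com", "!room1:example.com"), True)
cache.prefill(("@alice:example.com", "!room2:example.com"), True)

# Drop every cached entry whose key starts with ("@alice:example.com",).
# Internally this bumps the sequence number and calls
# LruCache.del_multi on the tree-backed cache.
cache.invalidate_many(("@alice:example.com",))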


Note: The synapse.util.caches.lrucache.LruCache.del_multi method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from community-contributed open-source projects; copyright of the source code remains with the original authors, and distribution and use should follow the corresponding project's License. Do not reproduce without permission.