Python LruCache.set Method Code Examples

This article collects typical usage examples of the Python method synapse.util.caches.lrucache.LruCache.set. If you are wondering exactly how LruCache.set is used, how to call it, or what it looks like in practice, the hand-picked examples below should help. You can also browse further usage examples of the enclosing class, synapse.util.caches.lrucache.LruCache.


Six code examples of the LruCache.set method are shown below, sorted by popularity by default.
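
Before the examples, here is a minimal usage sketch of LruCache.set. It is an illustration only, not canonical synapse documentation: it assumes a synapse checkout is importable and that LruCache supports the LruCache(max_size) constructor and the set(key, value, callbacks=[...]) keyword argument seen in the tests below (older versions use a singular callback= argument, as in Example 6).

# A minimal sketch, assuming synapse is importable and the
# set(key, value, callbacks=[...]) form from the tests below.
from synapse.util.caches.lrucache import LruCache

cache = LruCache(2)  # LRU cache holding at most two entries

def on_invalidated():
    # Invalidation callbacks fire when the entry is popped, cleared, or evicted.
    print("key1 was invalidated")

cache.set("key1", "value1", callbacks=[on_invalidated])
cache.set("key2", "value2")

print(cache.get("key1"))        # "value1"; the lookup also marks key1 as recently used
cache.set("key3", "value3")     # evicts the least recently used entry, key2
print(cache.get("key2", None))  # None: key2 was evicted

Examples 1 through 4 below exercise exactly these invalidation paths (clear, pop, del_multi, and eviction).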

Example 1: test_clear

# Required import: from synapse.util.caches.lrucache import LruCache [as alias]
# Or: from synapse.util.caches.lrucache.LruCache import set [as alias]
    def test_clear(self):
        m1 = Mock()
        m2 = Mock()
        cache = LruCache(5)

        cache.set("key1", "value", callbacks=[m1])
        cache.set("key2", "value", callbacks=[m2])

        self.assertEquals(m1.call_count, 0)
        self.assertEquals(m2.call_count, 0)

        cache.clear()

        self.assertEquals(m1.call_count, 1)
        self.assertEquals(m2.call_count, 1)
Developer: DoubleMalt, Project: synapse, Lines: 17, Source: test_lrucache.py

Example 2: test_pop

# Required import: from synapse.util.caches.lrucache import LruCache [as alias]
# Or: from synapse.util.caches.lrucache.LruCache import set [as alias]
    def test_pop(self):
        m = Mock()
        cache = LruCache(1)

        cache.set("key", "value", callbacks=[m])
        self.assertFalse(m.called)

        cache.pop("key")
        self.assertEquals(m.call_count, 1)

        cache.set("key", "value")
        self.assertEquals(m.call_count, 1)

        cache.pop("key")
        self.assertEquals(m.call_count, 1)
Developer: DoubleMalt, Project: synapse, Lines: 17, Source: test_lrucache.py

Example 3: test_del_multi

# Required import: from synapse.util.caches.lrucache import LruCache [as alias]
# Or: from synapse.util.caches.lrucache.LruCache import set [as alias]
    def test_del_multi(self):
        m1 = Mock()
        m2 = Mock()
        m3 = Mock()
        m4 = Mock()
        cache = LruCache(4, 2, cache_type=TreeCache)

        cache.set(("a", "1"), "value", callbacks=[m1])
        cache.set(("a", "2"), "value", callbacks=[m2])
        cache.set(("b", "1"), "value", callbacks=[m3])
        cache.set(("b", "2"), "value", callbacks=[m4])

        self.assertEquals(m1.call_count, 0)
        self.assertEquals(m2.call_count, 0)
        self.assertEquals(m3.call_count, 0)
        self.assertEquals(m4.call_count, 0)

        cache.del_multi(("a",))

        self.assertEquals(m1.call_count, 1)
        self.assertEquals(m2.call_count, 1)
        self.assertEquals(m3.call_count, 0)
        self.assertEquals(m4.call_count, 0)
Developer: DoubleMalt, Project: synapse, Lines: 25, Source: test_lrucache.py

Example 4: test_eviction

# Required import: from synapse.util.caches.lrucache import LruCache [as alias]
# Or: from synapse.util.caches.lrucache.LruCache import set [as alias]
    def test_eviction(self):
        m1 = Mock(name="m1")
        m2 = Mock(name="m2")
        m3 = Mock(name="m3")
        cache = LruCache(2)

        cache.set("key1", "value", callbacks=[m1])
        cache.set("key2", "value", callbacks=[m2])

        self.assertEquals(m1.call_count, 0)
        self.assertEquals(m2.call_count, 0)
        self.assertEquals(m3.call_count, 0)

        cache.set("key3", "value", callbacks=[m3])

        self.assertEquals(m1.call_count, 1)
        self.assertEquals(m2.call_count, 0)
        self.assertEquals(m3.call_count, 0)

        cache.set("key3", "value")

        self.assertEquals(m1.call_count, 1)
        self.assertEquals(m2.call_count, 0)
        self.assertEquals(m3.call_count, 0)

        cache.get("key2")

        self.assertEquals(m1.call_count, 1)
        self.assertEquals(m2.call_count, 0)
        self.assertEquals(m3.call_count, 0)

        cache.set("key1", "value", callbacks=[m1])

        self.assertEquals(m1.call_count, 1)
        self.assertEquals(m2.call_count, 0)
        self.assertEquals(m3.call_count, 1)
Developer: DoubleMalt, Project: synapse, Lines: 38, Source: test_lrucache.py

Example 5: Cache

# Required import: from synapse.util.caches.lrucache import LruCache [as alias]
# Or: from synapse.util.caches.lrucache.LruCache import set [as alias]
class Cache(object):
    __slots__ = (
        "cache",
        "max_entries",
        "name",
        "keylen",
        "thread",
        "metrics",
        "_pending_deferred_cache",
    )

    def __init__(self, name, max_entries=1000, keylen=1, tree=False, iterable=False):
        cache_type = TreeCache if tree else dict
        self._pending_deferred_cache = cache_type()

        self.cache = LruCache(
            max_size=max_entries, keylen=keylen, cache_type=cache_type,
            size_callback=(lambda d: len(d)) if iterable else None,
            evicted_callback=self._on_evicted,
        )

        self.name = name
        self.keylen = keylen
        self.thread = None
        self.metrics = register_cache("cache", name, self.cache)

    def _on_evicted(self, evicted_count):
        self.metrics.inc_evictions(evicted_count)

    def check_thread(self):
        expected_thread = self.thread
        if expected_thread is None:
            self.thread = threading.current_thread()
        else:
            if expected_thread is not threading.current_thread():
                raise ValueError(
                    "Cache objects can only be accessed from the main thread"
                )

    def get(self, key, default=_CacheSentinel, callback=None, update_metrics=True):
        """Looks the key up in the caches.

        Args:
            key(tuple)
            default: What is returned if key is not in the caches. If not
                specified then function throws KeyError instead
            callback(fn): Gets called when the entry in the cache is invalidated
            update_metrics (bool): whether to update the cache hit rate metrics

        Returns:
            Either a Deferred or the raw result
        """
        callbacks = [callback] if callback else []
        val = self._pending_deferred_cache.get(key, _CacheSentinel)
        if val is not _CacheSentinel:
            val.callbacks.update(callbacks)
            if update_metrics:
                self.metrics.inc_hits()
            return val.deferred

        val = self.cache.get(key, _CacheSentinel, callbacks=callbacks)
        if val is not _CacheSentinel:
            self.metrics.inc_hits()
            return val

        if update_metrics:
            self.metrics.inc_misses()

        if default is _CacheSentinel:
            raise KeyError()
        else:
            return default

    def set(self, key, value, callback=None):
        callbacks = [callback] if callback else []
        self.check_thread()
        entry = CacheEntry(
            deferred=value,
            callbacks=callbacks,
        )

        existing_entry = self._pending_deferred_cache.pop(key, None)
        if existing_entry:
            existing_entry.invalidate()

        self._pending_deferred_cache[key] = entry

        def shuffle(result):
            existing_entry = self._pending_deferred_cache.pop(key, None)
            if existing_entry is entry:
                self.cache.set(key, result, entry.callbacks)
            else:
                # oops, the _pending_deferred_cache has been updated since
                # we started our query, so we are out of date.
                #
                # Better put back whatever we took out. (We do it this way
                # round, rather than peeking into the _pending_deferred_cache
                # and then removing on a match, to make the common case faster)
                if existing_entry is not None:
                    self._pending_deferred_cache[key] = existing_entry
#......... part of the code omitted here .........
Developer: DoubleMalt, Project: synapse, Lines: 103, Source: descriptors.py

Example 6: Cache

# Required import: from synapse.util.caches.lrucache import LruCache [as alias]
# Or: from synapse.util.caches.lrucache.LruCache import set [as alias]
class Cache(object):
    __slots__ = (
        "cache",
        "max_entries",
        "name",
        "keylen",
        "sequence",
        "thread",
        "metrics",
    )

    def __init__(self, name, max_entries=1000, keylen=1, tree=False):
        cache_type = TreeCache if tree else dict
        self.cache = LruCache(
            max_size=max_entries, keylen=keylen, cache_type=cache_type
        )

        self.name = name
        self.keylen = keylen
        self.sequence = 0
        self.thread = None
        self.metrics = register_cache(name, self.cache)

    def check_thread(self):
        expected_thread = self.thread
        if expected_thread is None:
            self.thread = threading.current_thread()
        else:
            if expected_thread is not threading.current_thread():
                raise ValueError(
                    "Cache objects can only be accessed from the main thread"
                )

    def get(self, key, default=_CacheSentinel, callback=None):
        val = self.cache.get(key, _CacheSentinel, callback=callback)
        if val is not _CacheSentinel:
            self.metrics.inc_hits()
            return val

        self.metrics.inc_misses()

        if default is _CacheSentinel:
            raise KeyError()
        else:
            return default

    def update(self, sequence, key, value, callback=None):
        self.check_thread()
        if self.sequence == sequence:
            # Only update the cache if the caches sequence number matches the
            # number that the cache had before the SELECT was started (SYN-369)
            self.prefill(key, value, callback=callback)

    def prefill(self, key, value, callback=None):
        self.cache.set(key, value, callback=callback)

    def invalidate(self, key):
        self.check_thread()
        if not isinstance(key, tuple):
            raise TypeError(
                "The cache key must be a tuple not %r" % (type(key),)
            )

        # Increment the sequence number so that any SELECT statements that
        # raced with the INSERT don't update the cache (SYN-369)
        self.sequence += 1
        self.cache.pop(key, None)

    def invalidate_many(self, key):
        self.check_thread()
        if not isinstance(key, tuple):
            raise TypeError(
                "The cache key must be a tuple not %r" % (type(key),)
            )
        self.sequence += 1
        self.cache.del_multi(key)

    def invalidate_all(self):
        self.check_thread()
        self.sequence += 1
        self.cache.clear()
Developer: mebjas, Project: synapse, Lines: 83, Source: descriptors.py


Note: The synapse.util.caches.lrucache.LruCache.set method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are taken from open-source projects contributed by many developers; copyright in the source code remains with the original authors, and redistribution and use should follow the corresponding project's License. Please do not reproduce without permission.