当前位置: 首页>>代码示例>>Python>>正文


Python functional.LRUCache类代码示例

本文整理汇总了Python中celery.utils.functional.LRUCache的典型用法代码示例。如果您正苦于以下问题:Python LRUCache类的具体用法?Python LRUCache怎么用?Python LRUCache使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了LRUCache类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: assertSafeIter

    def assertSafeIter(self, method, interval=0.01, size=10000):
        """Assert that iterating the cache via *method* is thread-safe.

        A background thread keeps popping the oldest entry out of the
        cache's underlying data while the main thread iterates; a
        ``RuntimeError`` ("dict changed size during iteration") would
        propagate and fail the test.
        """
        from threading import Thread, Event
        from time import sleep
        x = LRUCache(size)
        x.update(zip(range(size), range(size)))

        class Burglar(Thread):
            # Steals (pops) cache entries until told to stop.

            def __init__(self, cache):
                self.cache = cache
                self.__is_shutdown = Event()
                self.__is_stopped = Event()
                Thread.__init__(self)

            def run(self):
                # BUG FIX: `isSet()` is a deprecated alias of `is_set()`.
                while not self.__is_shutdown.is_set():
                    try:
                        self.cache.data.popitem(last=False)
                    except KeyError:
                        break
                self.__is_stopped.set()

            def stop(self):
                self.__is_shutdown.set()
                # Wait for run() to acknowledge before joining.
                self.__is_stopped.wait()
                self.join(THREAD_TIMEOUT_MAX)

        burglar = Burglar(x)
        burglar.start()
        try:
            for _ in getattr(x, method)():
                sleep(0.0001)
        finally:
            burglar.stop()
开发者ID:343829084,项目名称:celery,代码行数:34,代码来源:test_functional.py

示例2: test_update_expires

    def test_update_expires(self):
        """update() must evict the oldest keys once the limit is exceeded."""
        limit = 100
        cache = LRUCache(limit=limit)
        keys = list(range(limit * 2))
        for key in keys:
            cache.update({key: key})
        # Only the newest `limit` keys survive, in insertion order.
        self.assertListEqual(list(cache.keys()), keys[limit:])
开发者ID:343829084,项目名称:celery,代码行数:8,代码来源:test_functional.py

示例3: test_expires

 def test_expires(self):
     """Setting more than `limit` items keeps only the newest `limit`."""
     limit = 100
     cache = LRUCache(limit=limit)
     keys = list(range(limit * 2))
     for key in keys:
         cache[key] = key
     self.assertListEqual(list(cache.keys()), keys[limit:])
     # The surviving entries are still exposed via items()/values().
     self.assertTrue(cache.items())
     self.assertTrue(cache.values())
开发者ID:343829084,项目名称:celery,代码行数:9,代码来源:test_functional.py

示例4: __init__

 def __init__(self, callback=None,
              max_workers_in_memory=5000, max_tasks_in_memory=10000):
     """Record cluster state with bounded worker/task caches."""
     self.event_callback = callback
     self.max_workers_in_memory = max_workers_in_memory
     self.max_tasks_in_memory = max_tasks_in_memory
     # LRU caches cap memory use on long-running monitors.
     self.workers = LRUCache(limit=max_workers_in_memory)
     self.tasks = LRUCache(limit=max_tasks_in_memory)
     self._taskheap = []
     self._mutex = threading.Lock()
开发者ID:JeniaSkorski,项目名称:celery,代码行数:9,代码来源:state.py

示例5: __init__

 def __init__(self, app, serializer=None, max_cached_results=None, accept=None, **kwargs):
     """Set up the backend: serializer, result cache and accepted content."""
     self.app = app
     conf = self.app.conf
     self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER
     encoder_info = serializer_registry._encoders[self.serializer]
     (self.content_type, self.content_encoding, self.encoder) = encoder_info
     cache_limit = max_cached_results or conf.CELERY_MAX_CACHED_RESULTS
     self._cache = LRUCache(limit=cache_limit)
     if accept is None:
         accept = conf.CELERY_ACCEPT_CONTENT
     self.accept = prepare_accept_content(accept)
开发者ID:jess-sheneberger,项目名称:celery,代码行数:7,代码来源:base.py

示例6: test_least_recently_used

    def test_least_recently_used(self):
        """Reading a key refreshes its recency; eviction follows LRU order."""
        cache = LRUCache(3)

        cache[1], cache[2], cache[3] = 1, 2, 3
        self.assertEqual(list(cache.keys()), [1, 2, 3])

        # Inserting beyond the limit evicts the oldest entries (1 and 2).
        cache[4], cache[5] = 4, 5
        self.assertEqual(list(cache.keys()), [3, 4, 5])

        # Reading key 3 makes it the most recently used...
        cache[3]
        cache[6] = 6
        # ...so key 4 is evicted instead of 3.
        self.assertEqual(list(cache.keys()), [5, 3, 6])

        cache[7] = 7
        self.assertEqual(list(cache.keys()), [3, 6, 7])
开发者ID:343829084,项目名称:celery,代码行数:16,代码来源:test_functional.py

示例7: __init__

 def __init__(
     self,
     callback=None,
     workers=None,
     tasks=None,
     taskheap=None,
     max_workers_in_memory=5000,
     max_tasks_in_memory=10000,
 ):
     """Initialize state, creating bounded LRU containers unless the
     caller supplies pre-built ``workers``/``tasks``/``taskheap``."""
     self.event_callback = callback
     if workers is None:
         workers = LRUCache(max_workers_in_memory)
     if tasks is None:
         tasks = LRUCache(max_tasks_in_memory)
     self.workers = workers
     self.tasks = tasks
     self._taskheap = taskheap if taskheap is not None else []
     self.max_workers_in_memory = max_workers_in_memory
     self.max_tasks_in_memory = max_tasks_in_memory
     self._mutex = threading.Lock()
     self.handlers = {}
     self._seen_types = set()
     # Restore heap invariants for any caller-supplied heap.
     self.rebuild_taskheap()
开发者ID:haridas,项目名称:celery,代码行数:19,代码来源:state.py

示例8: DummyClient

class DummyClient(object):
    """In-memory stand-in for a memcached client, backed by an LRU cache."""

    def __init__(self, *args, **kwargs):
        # Extra args are accepted (and ignored) for client-API parity.
        self.cache = LRUCache(limit=5000)

    def get(self, key, *args, **kwargs):
        return self.cache.get(key)

    def get_multi(self, keys):
        store = self.cache
        # Silently skip keys that are not cached, like memcached does.
        return {key: store[key] for key in keys if key in store}

    def set(self, key, value, *args, **kwargs):
        self.cache[key] = value

    def delete(self, key, *args, **kwargs):
        self.cache.pop(key, None)

    def incr(self, key, delta=1):
        return self.cache.incr(key, delta)
开发者ID:peterbrook,项目名称:celery,代码行数:19,代码来源:cache.py

示例9: __init__

 def __init__(self, app, serializer=None,
              max_cached_results=None, **kwargs):
     """Configure the result backend's serializer and result cache."""
     self.app = app
     conf = self.app.conf
     self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER
     encoder_info = serialization.registry._encoders[self.serializer]
     self.content_type, self.content_encoding, self.encoder = encoder_info
     cache_limit = max_cached_results or conf.CELERY_MAX_CACHED_RESULTS
     self._cache = LRUCache(limit=cache_limit)
开发者ID:Birdbird,项目名称:celery,代码行数:11,代码来源:base.py

示例10: test_is_pickleable

 def test_is_pickleable(self):
     """A pickle round-trip must preserve both the limit and contents."""
     x = LRUCache(limit=10)
     x.update(luke=1, leia=2)
     y = pickle.loads(pickle.dumps(x))
     # BUG FIX: the original compared y.limit with itself, which can
     # never fail; compare the copy's limit against the original's.
     self.assertEqual(y.limit, x.limit)
     self.assertEqual(y, x)
开发者ID:343829084,项目名称:celery,代码行数:6,代码来源:test_functional.py

示例11: State

class State(object):
    """Records clusters state."""
    event_count = 0
    task_count = 0

    def __init__(self, callback=None,
                 max_workers_in_memory=5000, max_tasks_in_memory=10000):
        """Set up bounded caches for workers/tasks and event bookkeeping."""
        self.event_callback = callback
        self.max_workers_in_memory = max_workers_in_memory
        self.max_tasks_in_memory = max_tasks_in_memory
        # LRU caches cap memory use when monitoring busy clusters.
        self.workers = LRUCache(limit=max_workers_in_memory)
        self.tasks = LRUCache(limit=max_tasks_in_memory)
        self._taskheap = []
        self._mutex = threading.Lock()

    def freeze_while(self, fun, *args, **kwargs):
        clear_after = kwargs.pop('clear_after', False)
        with self._mutex:
            try:
                return fun(*args, **kwargs)
            finally:
                if clear_after:
                    self._clear()

    def clear_tasks(self, ready=True):
        with self._mutex:
            return self._clear_tasks(ready)

    def _clear_tasks(self, ready=True):
        if ready:
            in_progress = dict(
                (uuid, task) for uuid, task in self.itertasks()
                if task.state not in states.READY_STATES)
            self.tasks.clear()
            self.tasks.update(in_progress)
        else:
            self.tasks.clear()
        self._taskheap[:] = []

    def _clear(self, ready=True):
        self.workers.clear()
        self._clear_tasks(ready)
        self.event_count = 0
        self.task_count = 0

    def clear(self, ready=True):
        with self._mutex:
            return self._clear(ready)

    def get_or_create_worker(self, hostname, **kwargs):
        """Get or create worker by hostname.

        Returns tuple of ``(worker, was_created)``.
        """
        try:
            worker = self.workers[hostname]
            worker.update(kwargs)
            return worker, False
        except KeyError:
            worker = self.workers[hostname] = Worker(
                hostname=hostname, **kwargs)
            return worker, True

    def get_or_create_task(self, uuid):
        """Get or create task by uuid."""
        try:
            return self.tasks[uuid], True
        except KeyError:
            task = self.tasks[uuid] = Task(uuid=uuid)
            return task, False

    def worker_event(self, type, fields):
        """Process worker event."""
        try:
            hostname = fields['hostname']
        except KeyError:
            pass
        else:
            worker, created = self.get_or_create_worker(hostname)
            handler = getattr(worker, 'on_' + type, None)
            if handler:
                handler(**fields)
            return worker, created

    def task_event(self, type, fields):
        """Process task event."""
        uuid = fields['uuid']
        hostname = fields['hostname']
        worker, _ = self.get_or_create_worker(hostname)
        task, created = self.get_or_create_task(uuid)
        task.worker = worker
        maxtasks = self.max_tasks_in_memory * 2

        taskheap = self._taskheap
        timestamp = fields.get('timestamp') or 0
        clock = 0 if type == 'sent' else fields.get('clock')
        heappush(taskheap, _lamportinfo(clock, timestamp, worker.id, task))
        if len(taskheap) > maxtasks:
            heappop(taskheap)

#.........这里部分代码省略.........
开发者ID:JeniaSkorski,项目名称:celery,代码行数:101,代码来源:state.py

示例12: test_update_larger_than_cache_size

 def test_update_larger_than_cache_size(self):
     """An oversized update() keeps only the newest `limit` entries."""
     cache = LRUCache(2)
     cache.update({i: i for i in range(100)})
     self.assertEqual(list(cache.keys()), [98, 99])
开发者ID:1995rishi,项目名称:flaskmap,代码行数:4,代码来源:test_functional.py

示例13: State

class State(object):
    """Records clusters state."""

    Worker = Worker
    Task = Task
    event_count = 0
    task_count = 0
    heap_multiplier = 4

    def __init__(
        self,
        callback=None,
        workers=None,
        tasks=None,
        taskheap=None,
        max_workers_in_memory=5000,
        max_tasks_in_memory=10000,
        on_node_join=None,
        on_node_leave=None,
    ):
        """Initialize cluster state.

        Bounded LRU containers are created for workers/tasks unless the
        caller passes pre-built ones; a supplied *taskheap* is re-heapified.
        """
        self.event_callback = callback
        if workers is None:
            workers = LRUCache(max_workers_in_memory)
        if tasks is None:
            tasks = LRUCache(max_tasks_in_memory)
        self.workers = workers
        self.tasks = tasks
        self._taskheap = taskheap if taskheap is not None else []
        self.max_workers_in_memory = max_workers_in_memory
        self.max_tasks_in_memory = max_tasks_in_memory
        self.on_node_join = on_node_join
        self.on_node_leave = on_node_leave
        self._mutex = threading.Lock()
        self.handlers = {}
        self._seen_types = set()
        # Restore heap invariants for any caller-supplied heap.
        self.rebuild_taskheap()

    @cached_property
    def _event(self):
        # Lazily build the event dispatcher on first access and memoize it
        # for the lifetime of this State instance.
        return self._create_dispatcher()

    def freeze_while(self, fun, *args, **kwargs):
        """Run *fun* under the state mutex.

        Pass ``clear_after=True`` to wipe the state once *fun*
        returns (or raises).
        """
        clear_after = kwargs.pop("clear_after", False)
        self._mutex.acquire()
        try:
            try:
                return fun(*args, **kwargs)
            finally:
                if clear_after:
                    self._clear()
        finally:
            self._mutex.release()

    def clear_tasks(self, ready=True):
        """Thread-safe variant of :meth:`_clear_tasks`."""
        self._mutex.acquire()
        try:
            return self._clear_tasks(ready)
        finally:
            self._mutex.release()

    def _clear_tasks(self, ready=True):
        """Drop cached tasks; when *ready*, retain tasks still in progress."""
        if ready:
            # Keep anything that has not reached a ready state yet.
            keep = dict(
                (uuid, task) for uuid, task in self.itertasks()
                if task.state not in states.READY_STATES
            )
            self.tasks.clear()
            self.tasks.update(keep)
        else:
            self.tasks.clear()
        # Clear in place so external references to the heap stay valid.
        self._taskheap[:] = []

    def _clear(self, ready=True):
        """Reset counters and drop all cached workers and tasks."""
        self.event_count = 0
        self.task_count = 0
        self.workers.clear()
        self._clear_tasks(ready)

    def clear(self, ready=True):
        """Thread-safe variant of :meth:`_clear`."""
        self._mutex.acquire()
        try:
            return self._clear(ready)
        finally:
            self._mutex.release()

    def get_or_create_worker(self, hostname, **kwargs):
        """Get or create worker by hostname.

        Return tuple of ``(worker, was_created)``.
        """
        workers = self.workers
        try:
            known = workers[hostname]
            if kwargs:
                known.update(kwargs)
            return known, False
        except KeyError:
            # First event from this hostname: register a new worker.
            created = workers[hostname] = self.Worker(hostname, **kwargs)
            return created, True

    def get_or_create_task(self, uuid):
        """Get or create task by uuid."""
        tasks = self.tasks
        try:
            return tasks[uuid], False
        except KeyError:
            task = tasks[uuid] = self.Task(uuid)
            return task, True

    def event(self, event):
        """Dispatch a single event under the state mutex."""
        self._mutex.acquire()
        try:
            return self._event(event)
        finally:
            self._mutex.release()

    def task_event(self, type_, fields):
        """Deprecated, use :meth:`event`."""
        # Reconstruct the full event type ("task-<type_>") and dispatch.
        payload = dict(fields, type="-".join(["task", type_]))
        return self._event(payload)[0]

    def worker_event(self, type_, fields):
#.........这里部分代码省略.........
开发者ID:Rameshkc8,项目名称:celery,代码行数:101,代码来源:state.py

示例14: __init__

 def __init__(self, *args, **kwargs):
     """Back the dummy client with a bounded in-memory LRU cache."""
     # Positional/keyword args are accepted for API parity and ignored.
     self.cache = LRUCache(limit=5000)
开发者ID:Birdbird,项目名称:celery,代码行数:2,代码来源:cache.py

示例15: test_items

 def test_items(self):
     """items() on a populated cache yields its entries."""
     cache = LRUCache()
     cache.update(a=1, b=2, c=3)
     self.assertTrue(list(items(cache)))
开发者ID:343829084,项目名称:celery,代码行数:4,代码来源:test_functional.py


注:本文中的celery.utils.functional.LRUCache类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。