

Python queue.Queue Class Code Examples

This article collects typical usage examples of the Python class six.moves.queue.Queue. If you are unsure what the Queue class does, how to use it, or what real-world Queue code looks like, the curated class examples below may help.


Fifteen code examples of the Queue class are presented below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the site surface better Python code samples. A short standalone usage sketch precedes the first example.
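Before the project-specific examples below, here is a minimal, self-contained sketch of the basic producer/consumer pattern the class supports. It is illustrative only (not taken from any of the projects below) and assumes the `six` package is installed:

from threading import Thread
from six.moves.queue import Queue

q = Queue()

def worker():
    while True:
        item = q.get()
        if item is None:   # sentinel value: time to stop
            q.task_done()
            break
        print('processing %s' % item)
        q.task_done()

t = Thread(target=worker)
t.daemon = True
t.start()

for item in range(3):
    q.put(item)
q.put(None)    # ask the worker to stop
q.join()       # block until every queued item has been marked done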

Example 1: ThreadTestCase

class ThreadTestCase(testtools.TestCase):
    def setUp(self):
        super(ThreadTestCase, self).setUp()
        # Queues collecting the items and the (args, kwargs) seen by _func
        self.got_items = Queue()
        self.got_args_kwargs = Queue()
        self.starting_thread_count = threading.active_count()

    def _func(self, q_item, *args, **kwargs):
        self.got_items.put(q_item)
        self.got_args_kwargs.put((args, kwargs))

        if q_item == 'go boom':
            raise Exception('I went boom!')
        if q_item == 'c boom':
            raise ClientException(
                'Client Boom', http_scheme='http', http_host='192.168.22.1',
                http_port=80, http_path='/booze', http_status=404,
                http_reason='to much', http_response_content='no sir!')

        return 'best result EVAR!'

    def assertQueueContains(self, queue, expected_contents):
        got_contents = []
        try:
            while True:
                got_contents.append(queue.get(timeout=0.1))
        except Empty:
            pass
        if isinstance(expected_contents, set):
            got_contents = set(got_contents)
        self.assertEqual(expected_contents, got_contents)
Author: AlphaStaxLLC, Project: python-swiftclient, Lines: 30, Source: test_multithreading.py
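The drain-until-Empty loop used by assertQueueContains above is a general pattern; here is a standalone sketch (illustrative, not part of python-swiftclient):

from six.moves.queue import Queue, Empty

q = Queue()
for item in ('a', 'b', 'c'):
    q.put(item)

got_contents = []
try:
    while True:
        # a short timeout lets the loop end once the queue stays empty
        got_contents.append(q.get(timeout=0.1))
except Empty:
    pass

print(got_contents)  # ['a', 'b', 'c']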

Example 2: __init__

    def __init__(self, app, disk_image):
        super(ScsiDevice, self).__init__(app, None)
        self.disk_image = disk_image
        self.handlers = {
            ScsiCmds.INQUIRY: self.handle_inquiry,
            ScsiCmds.REQUEST_SENSE: self.handle_request_sense,
            ScsiCmds.TEST_UNIT_READY: self.handle_test_unit_ready,
            ScsiCmds.READ_CAPACITY_10: self.handle_read_capacity_10,
            # ScsiCmds.SEND_DIAGNOSTIC: self.handle_send_diagnostic,
            ScsiCmds.PREVENT_ALLOW_MEDIUM_REMOVAL: self.handle_prevent_allow_medium_removal,
            ScsiCmds.WRITE_10: self.handle_write_10,
            ScsiCmds.READ_10: self.handle_read_10,
            # ScsiCmds.WRITE_6: self.handle_write_6,
            # ScsiCmds.READ_6: self.handle_read_6,
            # ScsiCmds.VERIFY_10: self.handle_verify_10,
            ScsiCmds.MODE_SENSE_6: self.handle_mode_sense_6,
            ScsiCmds.MODE_SENSE_10: self.handle_mode_sense_10,
            ScsiCmds.READ_FORMAT_CAPACITIES: self.handle_read_format_capacities,
            ScsiCmds.SYNCHRONIZE_CACHE: self.handle_synchronize_cache,
        }
        self.tx = Queue()
        self.rx = Queue()
        self.stop_event = Event()
        self.thread = Thread(target=self.handle_data_loop)
        self.thread.daemon = True
        self.thread.start()
        self.is_write_in_progress = False
        self.write_cbw = None
        self.write_base_lba = 0
        self.write_length = 0
        self.write_data = b''
Author: Manouchehri, Project: umap2, Lines: 31, Source: mass_storage.py

Example 3: inner

        def inner(self, *args, **kwargs):
            if self.use_post_event:
                # create ephemeral queue
                q = Queue()

                # create an invocation that calls the decorated function
                class Invocation(object):
                    def __call__(killme):
                        # when the invocation is called, we call the function and stick the result into the queue
                        try:
                            res = func(self, *args, **kwargs)
                        except Exception as e:
                            # if we got an exception, just queue that instead
                            res = e
                        q.put(res)

                # post this invocation to be called on the main thread at the next opportunity
                gdb.post_event(Invocation())

                # now we wait until there's something in the queue, which indicates that the invocation has run and return
                # the result that was pushed onto the queue by the invocation
                res = q.get()

                # if we got an exception back from the posted event, raise it
                if isinstance(res, Exception):
                    raise res

                return res
            else:
                return func(self, *args, **kwargs)
Author: Stenean, Project: voltron, Lines: 30, Source: dbg_gdb.py

Example 4: make_buffer_for_iterator_with_thread

def make_buffer_for_iterator_with_thread(gen, n_workers, buffer_size):
    wait_time = 0.02
    generator_queue = Queue()
    _stop = threading.Event()

    def generator_task():
        while not _stop.is_set():
            try:
                if generator_queue.qsize() < buffer_size:
                    generator_output = next(gen)
                    generator_queue.put(generator_output)
                else:
                    time.sleep(wait_time)
            except (StopIteration, KeyboardInterrupt):
                _stop.set()
                return

    generator_threads = [threading.Thread(target=generator_task) for _ in range(n_workers)]
    for thread in generator_threads:
        thread.start()

    while not _stop.is_set() or not generator_queue.empty():
        if not generator_queue.empty():
            yield generator_queue.get()
        else:
            time.sleep(wait_time)
Author: 4Catalyzer, Project: nolearn_utils, Lines: 26, Source: iterators.py
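A hedged usage sketch of the buffering helper above. The generator and parameters are made up for illustration, n_workers=1 is used because a plain generator is not safe to advance from several threads at once, and it assumes the imports the excerpt relies on (threading, time, and Queue from six.moves.queue) are in place:

import time

def slow_numbers():
    for i in range(10):
        time.sleep(0.01)   # simulate an expensive item
        yield i

# Pre-fetch items in a background thread, keeping at most 5 buffered.
for value in make_buffer_for_iterator_with_thread(slow_numbers(),
                                                  n_workers=1,
                                                  buffer_size=5):
    print(value)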

Example 5: Search

class Search(TracePosterior):
    """
    Trace and Poutine-based implementation of systematic search.

    :param callable model: Probabilistic model defined as a function.
    :param int max_tries: The maximum number of times to try completing a trace from the queue.
    """
    def __init__(self, model, max_tries=1e6):
        """
        Constructor. Default `max_tries` to something sensible - 1e6.

        :param callable model: Probabilistic model defined as a function.
        :param int max_tries: The maximum number of times to try completing a trace from the queue.
        """
        self.model = model
        self.max_tries = int(max_tries)

    def _traces(self, *args, **kwargs):
        """
        The search algorithm is entered here. Running until the queue is empty
        and collecting the marginal histogram amounts to performing exact
        inference.

        :returns: Iterator of traces from the posterior.
        :rtype: Generator[:class:`pyro.Trace`]
        """
        # currently only using the standard library queue
        self.queue = Queue()
        self.queue.put(poutine.Trace())

        p = poutine.trace(
            poutine.queue(self.model, queue=self.queue, max_tries=self.max_tries))
        while not self.queue.empty():
            tr = p.get_trace(*args, **kwargs)
            yield (tr, tr.log_pdf())
Author: Magica-Chen, Project: pyro, Lines: 35, Source: search.py

Example 6: Executor

class Executor(object):
    _INTERRUPT = object()

    def __init__(self, num_workers=1):
        super(Executor, self).__init__()
        self._queue = Queue()
        self._workers = []

        for _ in range(num_workers):
            th = Thread(target=self._work)
            th.start()
            self._workers.append(th)

    def submit(self, task):
        self._queue.put(task)

    def shutdown(self):
        for _ in self._workers:
            self._queue.put(self._INTERRUPT)

    def join(self):
        for worker in self._workers:
            worker.join()

    def _work(self):
        while True:
            task = self._queue.get(block=True)
            if task is self._INTERRUPT:
                break
            try:
                task()
            except BaseException as e:
                logger.exception(e)
Author: tamland, Project: python-actors, Lines: 33, Source: executor.py
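A hypothetical usage sketch of the Executor above (the task is made up; it also assumes the module-level logger the excerpt references is configured):

def greet():
    print('hello from a worker thread')

ex = Executor(num_workers=2)
for _ in range(4):
    ex.submit(greet)

ex.shutdown()   # enqueue one _INTERRUPT sentinel per worker
ex.join()       # wait for the workers to drain the queue and exit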

Example 7: lines

    def lines(self, fuseki_process):
        """
        Provides an iterator generating the encoded string representation
        of each member of this metarelate mapping translation.

        Returns:
            An iterator of string.

        """
        msg = '\tGenerating phenomenon translation {!r}.'
        print(msg.format(self.mapping_name))
        lines = ['\n%s = {\n' % self.mapping_name]
        # Retrieve encodings for the collection of mapping instances.
        # Retrieval is threaded as it is heavily bound by resource resolution
        # over http.
        # Queue for metarelate mapping instances
        mapenc_queue = Queue()
        for mapping in self.mappings:
            mapenc_queue.put(mapping)
        # deque to contain the results of the jobs processed from the queue
        mapencs = deque()
        # run worker threads
        for i in range(MAXTHREADS):
            MappingEncodeWorker(mapenc_queue, mapencs, fuseki_process).start()
        # block progress until the queue is empty
        mapenc_queue.join()
        # end of threaded retrieval process.

        # now sort the payload
        payload = [mapenc.encoding for mapenc in mapencs]
        payload.sort(key=self._key)
        lines.extend(payload)
        lines.append('    }\n')
        return iter(lines)
Author: MahatmaCane, Project: iris, Lines: 34, Source: __init__.py

Example 8: test_acquire_contextmanager

    def test_acquire_contextmanager(self):
        class TestedClass(Monitor):
            def __init__(self, cqueue):
                self.cqueue = cqueue
                Monitor.__init__(self)

            @Monitor.synchronized
            def execute(self):
                self.cqueue.put(1)
                sleep(1)
                self.cqueue.get()

        class TesterThread(Thread):
            def __init__(self, tc):
                self.tc = tc
                Thread.__init__(self)

            def run(self):
                self.tc.execute()

        cq = Queue()
        cq.put(1)
        tc = TestedClass(cq)
        tt = TesterThread(tc)

        with Monitor.acquire(tc):
            tt.start()
            sleep(0.4)
            self.assertEqual(cq.qsize(), 1)
Author: piotrmaslanka, Project: satella, Lines: 29, Source: test_monitor.py

Example 9: test_equipped

    def test_equipped(self):
        """
        Create an equipped worker that uses an internal Counter resource to
        keep track of the job count.
        """
        results = Queue()

        def toolbox_factory():
            return Counter()

        def worker_factory(job_queue):
            return workerpool.EquippedWorker(job_queue, toolbox_factory)

        pool = workerpool.WorkerPool(1, worker_factory=worker_factory)

        # Run 10 jobs
        for i in range(10):
            j = CountJob(results)
            pool.put(j)

        # Get 10 results
        for i in range(10):
            r = results.get()
            # Each result should be an incremented value
            self.assertEqual(r, i)

        pool.shutdown()
Author: davanstrien, Project: workerpool, Lines: 27, Source: test_equipped.py

Example 10: Pool

class Pool(object):
    class Error(Exception):
        pass
    
    def __init__(self, threads, host, port, ssl, user, password):
        self._threads = []
        self._queue = Queue(maxsize=1000)
        count = 0
        while len(self._threads) < threads and count < 3 * threads:
            try:
                count += 1
                w = Downloader(self._queue, host, port, ssl, user, password)
                w.start()
                self._threads.append(w)
            except SOFT_ERRORS as e:
                log.warn('Cannot create downloader thread: %s', e)

        if len(self._threads) != threads:
            log.error('Cannot create enough workers')
            raise Pool.Error('Cannot create enough workers')

    def wait_finish(self):
        self._queue.join()

    def stop(self):
        for t in self._threads:
            t.stop()

    def download(self, **kwargs):
        kwargs['retry'] = 0
        self._queue.put(kwargs)
Author: izderadicka, Project: imap_detach, Lines: 31, Source: pool.py

Example 11: test_monitoring

    def test_monitoring(self):
        class TestedClass(Monitor):
            def __init__(self, cqueue):
                self.cqueue = cqueue
                Monitor.__init__(self)

            @Monitor.synchronized
            def execute(self):
                self.cqueue.put(1)
                sleep(1)
                self.cqueue.get()

        class TesterThread(Thread):
            def __init__(self, tc):
                self.tc = tc
                Thread.__init__(self)

            def run(self):
                self.tc.execute()

        q = Queue()
        tc = TestedClass(q)
        a, b = TesterThread(tc), TesterThread(tc)
        a.start(), b.start()

        while a.is_alive() or b.is_alive():
            sleep(0.1)
            self.assertNotEqual(q.qsize(), 2)
Author: piotrmaslanka, Project: satella, Lines: 28, Source: test_monitor.py

Example 12: parallel_execute_stream

def parallel_execute_stream(objects, func, get_deps):
    if get_deps is None:
        get_deps = _no_deps

    results = Queue()
    state = State(objects)

    while not state.is_done():
        for event in feed_queue(objects, func, get_deps, results, state):
            yield event

        try:
            event = results.get(timeout=0.1)
        except Empty:
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        obj, _, exception = event
        if exception is None:
            log.debug('Finished processing: {}'.format(obj))
            state.finished.add(obj)
        else:
            log.debug('Failed: {}'.format(obj))
            state.failed.add(obj)

        yield event
Author: asiqq23, Project: compose, Lines: 28, Source: parallel.py

Example 13: run

    def run(self):
        args = list(islice(self.reqs, self.requests))
        if self.shuffle:
            random.shuffle(args)
        print("Total requests: %d" % len(args))
        print("Concurrency   : %d" % self.concurrency)

        starttime = time.time()
        q, p = Queue(), Queue()
        for _ in six.moves.range(self.concurrency):
            t = Thread(target=worker, args=(self.host, q, p, self.verbose))
            t.daemon = True
            t.start()
        for a in args:
            q.put(a)
        q.join()

        outputs = []
        for _ in six.moves.range(self.requests):
            outputs.append(p.get())

        elapsed = time.time() - starttime
        print()
        print("Total requests: %d" % len(args))
        print("Concurrency   : %d" % self.concurrency)
        print("Elapsed time  : %.3fs" % elapsed)
        print("Avg time p/req: %.3fs" % (elapsed / len(args)))
        print("Received (per status code or error):")
        for c, n in Counter(outputs).items():
            print("  %s: %d" % (c, n))
Author: dvska, Project: splash, Lines: 30, Source: stress.py
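The runner above pushes work through one queue and collects results through a second one; here is a minimal self-contained sketch of that two-queue pattern (not the splash code itself):

from threading import Thread
from six.moves.queue import Queue

tasks, results = Queue(), Queue()

def worker():
    while True:
        n = tasks.get()
        results.put(n * n)   # stand-in for issuing a request
        tasks.task_done()

for _ in range(4):
    t = Thread(target=worker)
    t.daemon = True
    t.start()

for n in range(10):
    tasks.put(n)
tasks.join()                 # block until every task has been processed

print(sorted(results.get() for _ in range(10)))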

Example 14: __init__

    def __init__(self, jobStore, toilState):
        logger.debug("Initializing service manager")
        self.jobStore = jobStore
        
        self.toilState = toilState

        self.jobGraphsWithServicesBeingStarted = set()

        self._terminate = Event() # This is used to terminate the thread associated
        # with the service manager

        self._jobGraphsWithServicesToStart = Queue() # This is the input queue of
        # jobGraphs that have services that need to be started

        self._jobGraphsWithServicesThatHaveStarted = Queue() # This is the output queue
        # of jobGraphs that have services that are already started

        self._serviceJobGraphsToStart = Queue() # This is the queue of services for the
        # batch system to start

        self.jobsIssuedToServiceManager = 0 # The number of jobs the service manager
        # is scheduling

        # Start a thread that starts the services of jobGraphs in the
        # jobsWithServicesToStart input queue and puts the jobGraphs whose services
        # are running on the jobGraphsWithServicesThatHaveStarted output queue
        self._serviceStarter = Thread(target=self._startServices,
                                     args=(self._jobGraphsWithServicesToStart,
                                           self._jobGraphsWithServicesThatHaveStarted,
                                           self._serviceJobGraphsToStart, self._terminate,
                                           self.jobStore))
Author: brainstorm, Project: toil, Lines: 31, Source: serviceManager.py

Example 15: StoppableThread

class StoppableThread(threading.Thread):
    """This thread can be stopped.

    Note: Thread by default does not return the target function's result,
    which is why I've implemented this workaround with a built-in Queue.
    """
    def __init__(self, **kwargs):
        super(StoppableThread, self).__init__(**kwargs)
        self.__target = kwargs.get('target')
        self.__args = kwargs.get('args')
        if self.__args is None:
            self.__args = ()
        self.__kwargs = kwargs.get('kwargs')
        if self.__kwargs is None:
            self.__kwargs = {}
        self.__result_queue = Queue()
        self.__stopped = threading.Event()

    def stop(self):
        """Stop the thread. It will not terminate code, but set the flag that
        should be handled in executed function.
        """
        self.__stopped.set()

    def is_stopped(self):
        """Check the status of the thread. It only monitors the flag state. If
        task is stopped you have to pay attention to `.is_alive()`.
        """
        return self.__stopped.is_set()

    def run(self):
        """Run the target function, check expected result and propagate
        exceptions.
        """
        try:
            self.__kwargs['_is_stopped'] = self.__stopped.is_set
            try:
                if self.__target:
                    func_result = self.__target(*self.__args, **self.__kwargs)
            finally:
                # Avoid a refcycle if the thread is running a function with
                # an argument that has a member that points to the thread.
                del self.__target, self.__args, self.__kwargs
            if func_result is None:
                func_result = {}
            elif not isinstance(func_result, dict):
                raise TypeError("Task has to return a dict or None.")
        except Exception: # pylint: disable=W0703
            self.__result_queue.put(traceback.format_exc())
        else:
            self.__result_queue.put(func_result)

    def get_result(self):
        """Return results of target function execution.
        """
        self.join()
        try:
            return self.__result_queue.get_nowait()
        except Empty:  # Empty is the exception from six.moves.queue, not an attribute of the Queue class
            return None
Author: ProstoKSI, Project: distributed-queue, Lines: 60, Source: utils.py
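A hedged usage sketch of StoppableThread above (the target function is made up; it accepts the _is_stopped callable that run() injects into the keyword arguments):

import time

def count_until_stopped(limit, _is_stopped=None):
    n = 0
    while n < limit and not _is_stopped():
        n += 1
        time.sleep(0.01)
    return {'counted': n}

t = StoppableThread(target=count_until_stopped, args=(1000,))
t.start()
time.sleep(0.1)
t.stop()                   # ask the target to finish early
print(t.get_result())      # e.g. {'counted': 9}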


Note: The six.moves.queue.Queue class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their original authors; copyright remains with those authors, and any distribution or use should follow the corresponding project's License. Do not reproduce without permission.