

Python Queue.get Method Code Examples

This article collects typical usage examples of the Python eventlet.Queue.get method. If you are wondering what exactly Queue.get does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of eventlet.Queue, the class this method belongs to.


The following shows 9 code examples of the Queue.get method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
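
Before turning to the project excerpts, here is a minimal, self-contained sketch (an illustration written for this article, not taken from any of the projects below) of the two Queue.get patterns that recur in these examples: a blocking get between green threads, and a get with a timeout that raises eventlet.queue.Empty.

import eventlet
from eventlet import Queue
from eventlet.queue import Empty

q = Queue()

def producer():
    for i in range(3):
        q.put(i)
        eventlet.sleep(0)  # yield so the consumer green thread can run

def consumer():
    for _ in range(3):
        item = q.get()  # blocks this green thread until an item is available
        print("got", item)

eventlet.spawn(producer)
eventlet.spawn(consumer).wait()

# get() also accepts a timeout; when it expires, eventlet.queue.Empty is raised.
# This is the pattern used by the Manager and LiteAccounting examples below.
try:
    q.get(timeout=0.1)
except Empty:
    print("queue stayed empty for 0.1 seconds")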

Example 1: GreenletsThread

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
class GreenletsThread(Thread):
    """
    Main thread for the program. If running stand alone this will be running
    as a greenlet instead.
    """
    def __init__(self, server_url, login_params):
        self.running = True
        self.agent = True
        self.cmd_out_queue = []
        self.cmd_in_queue = []
        self.out_queue = Queue()
        self.in_queue = Queue()
        self.server_url = server_url
        self.login_params = login_params
        Thread.__init__(self)

    def apply_position(self, obj_uuid, pos, rot=None):
        cmd = ['pos', obj_uuid, pos, rot]
        self.addCmd(cmd)

    def __getattr__(self, name):
        return ProxyFunction(name, self)

    def apply_scale(self, obj_uuid, scale):
        cmd = ['scale', obj_uuid, scale]
        self.addCmd(cmd)

    def run(self):
        agent = AgentManager(self.in_queue,
                   self.out_queue)
        error = agent.login(self.server_url, self.login_params)
        if error:
            self.out_queue.put(["error", str(error)])
            self.out_queue.put(["agentquit", str(error)])
            while self.out_queue.qsize():
                api.sleep(0.1)
        agent.logger.debug("Quitting")
        self.agent = agent
        self.running = False

    def addCmd(self, cmd):
        self.in_queue.put(cmd)

    def getQueue(self):
        out_queue = []
        while self.out_queue.qsize():
            out_queue.append(self.out_queue.get())
        return out_queue
Developer: b2rex, Project: b2rex, Lines: 50, Source: simrt.py

Example 2: test_putting_to_queue

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
    def test_putting_to_queue(self):
        timer = eventlet.Timeout(0.1)
        try:
            size = 2
            self.pool = IntPool(min_size=0, max_size=size)
            queue = Queue()
            results = []
            def just_put(pool_item, index):
                self.pool.put(pool_item)
                queue.put(index)
            for index in xrange(size + 1):
                pool_item = self.pool.get()
                eventlet.spawn(just_put, pool_item, index)

            for _ in range(size+1):
                x = queue.get()
                results.append(x)
            self.assertEqual(sorted(results), range(size + 1))
        finally:
            timer.cancel()
Developer: Cue, Project: eventlet, Lines: 22, Source: pools_test.py

Example 3: test_exhaustion

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
    def test_exhaustion(self):
        waiter = Queue(0)
        def consumer():
            gotten = None
            try:
                gotten = self.pool.get()
            finally:
                waiter.put(gotten)

        eventlet.spawn(consumer)

        one, two, three, four = (
            self.pool.get(), self.pool.get(), self.pool.get(), self.pool.get())
        self.assertEquals(self.pool.free(), 0)

        # Let the consumer run; nothing will be in the pool, so it will wait
        eventlet.sleep(0)

        # Wake consumer
        self.pool.put(one)

        # wait for the consumer
        self.assertEquals(waiter.get(), one)
Developer: Cue, Project: eventlet, Lines: 25, Source: pools_test.py

Example 4: ECWriter

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
class ECWriter(object):
    """
    Writes an EC chunk
    """
    def __init__(self, chunk, conn):
        self._chunk = chunk
        self._conn = conn
        self.failed = False
        self.bytes_transferred = 0
        self.checksum = hashlib.md5()

    @property
    def chunk(self):
        return self._chunk

    @property
    def conn(self):
        return self._conn

    @classmethod
    def connect(cls, chunk, sysmeta, reqid=None):
        raw_url = chunk["url"]
        parsed = urlparse(raw_url)
        chunk_path = parsed.path.split('/')[-1]
        h = {}
        h["transfer-encoding"] = "chunked"
        h[chunk_headers["content_id"]] = sysmeta['id']
        h[chunk_headers["content_path"]] = sysmeta['content_path']
        h[chunk_headers["content_chunkmethod"]] = sysmeta['chunk_method']
        h[chunk_headers["container_id"]] = sysmeta['container_id']
        h[chunk_headers["chunk_pos"]] = chunk["pos"]
        h[chunk_headers["chunk_id"]] = chunk_path
        h[chunk_headers["content_policy"]] = sysmeta['policy']
        h[chunk_headers["content_version"]] = sysmeta['version']
        if reqid:
            h['X-oio-req-id'] = reqid

        # in the trailer
        # metachunk_size & metachunk_hash
        h["Trailer"] = (chunk_headers["metachunk_size"],
                        chunk_headers["metachunk_hash"])
        with ConnectionTimeout(io.CONNECTION_TIMEOUT):
            conn = io.http_connect(
                parsed.netloc, 'PUT', parsed.path, h)
            conn.chunk = chunk
        return cls(chunk, conn)

    def start(self, pool):
        # we use eventlet Queue to pass data to the send coroutine
        self.queue = Queue(io.PUT_QUEUE_DEPTH)
        # spawn the send coroutine
        pool.spawn(self._send)

    def _send(self):
        # this is the send coroutine loop
        while True:
            # fetch input data from the queue
            d = self.queue.get()
            # use HTTP transfer encoding chunked
            # to write data to RAWX
            if not self.failed:
                # format the chunk
                to_send = "%x\r\n%s\r\n" % (len(d), d)
                try:
                    with ChunkWriteTimeout(io.CHUNK_TIMEOUT):
                        self.conn.send(to_send)
                        self.bytes_transferred += len(d)
                except (Exception, ChunkWriteTimeout) as e:
                    self.failed = True
                    msg = str(e)
                    logger.warn("Failed to write to %s (%s)", self.chunk, msg)
                    self.chunk['error'] = msg

            self.queue.task_done()

    def wait(self):
        # wait until all data in the queue
        # has been processed by the send coroutine
        if self.queue.unfinished_tasks:
            self.queue.join()

    def send(self, data):
        # do not send empty data because
        # this will end the chunked body
        if not data:
            return
        # put the data to send into the queue
        # it will be processed by the send coroutine
        self.queue.put(data)

    def finish(self, metachunk_size, metachunk_hash):
        parts = [
            '0\r\n',
            '%s: %s\r\n' % (chunk_headers['metachunk_size'],
                            metachunk_size),
            '%s: %s\r\n' % (chunk_headers['metachunk_hash'],
                            metachunk_hash),
            '\r\n'
        ]
        to_send = "".join(parts)
#.........part of the code omitted here.........
Developer: fvennetier, Project: oio-sds, Lines: 103, Source: ec.py

Example 5: Manager

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
class Manager(object):
    """Class encapsulating Heroshi URL server state."""

    def __init__(self):
        self.active = False

        self.prefetch_queue = Queue(settings.prefetch['queue_size'])
        self.prefetch_thread = spawn(self.prefetch_worker)
        self.prefetch_thread.link(reraise_errors, greenthread.getcurrent())

        self.given_items = Cache()

        self.postreport_queue = Queue(settings.postreport['queue_size'])
        self.postreport_thread = spawn(self.postreport_worker)
        self.postreport_thread.link(reraise_errors, greenthread.getcurrent())

        self.storage_connections = eventlet.pools.Pool(max_size=settings.storage['max_connections'])
        self.storage_connections.create = StorageConnection

    def close(self):
        self.active = False
        self.prefetch_thread.kill()
        self.postreport_thread.kill()

    def ping_storage(self):
        with self.storage_connections.item() as storage:
            pass

    def get_from_prefetch_queue(self, size):
        result = []
        while len(result) < size:
            sleep()
            try:
                pack = self.prefetch_queue.get(timeout=settings.prefetch['get_timeout'])
            except eventlet.queue.Empty:
                break
            result.extend(pack)
        return result

    def prefetch_worker(self):
        if not self.active:
            sleep(0.01)
        while self.active:
            with self.storage_connections.item() as storage:
                docs = storage.query_new_random(settings.prefetch['single_limit'])
            if len(docs) == 0:
                sleep(10.)
                continue
            else:
                # Note: putting a *list* as a single item into queue
                self.prefetch_queue.put(docs)
        # and respawn again
        self.prefetch_thread = spawn(self.prefetch_worker)

    @log_exceptions
    def _postreport_worker(self):
        docs = []
        while len(docs) < settings.postreport['flush_size']: # inner accumulator loop
            try:
                item = self.postreport_queue.get(timeout=settings.postreport['flush_delay'])
            except eventlet.queue.Empty:
                break

            # Quick dirty duplicate filtering.
            #     Note that this code only finds dups in current "flush pack". `report_result` uses
            # `is_duplicate_report` which finds dups in whole `postreport_queue` but it can't find dups here.
            # Thus two dups searchers.
            #     It is still possible that at most 2 duplicate reports exist: one in `postreport_queue`
            # and one in current "flush pack". This is acceptable, because most of the dups are filtered out.
            for doc in docs:
                if item['url'] == doc['url']:
                    item = None
                    break
            if item is None:
                continue

            if 'result' not in item:
                # It's a link, found on some reported page.
                # Just add it to bulk insert, don't try to update any document here.
                docs.append(item)
                continue

            docs.append(item)

        if not docs:
            return

        with self.storage_connections.item() as storage:
            for doc in docs:
                content = doc.pop('content', None)

                storage.save(doc)

                if content is None:
                    continue
                headers = doc.get('headers') or {}
                content_type = headers.get('content-type', "application/octet-stream")

                storage.save_content(doc, content, content_type)

#.........part of the code omitted here.........
Developer: GunioRobot, Project: heroshi, Lines: 103, Source: manager.py

Example 6: Crawler

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
class Crawler(object):
    """
    A crawler will traverse all the pages of a site and process the content
    in a defined way.

    :param init_urls: the very first urls to start with.
    :param q: the queue that stores all urls to be crawled
    :param urls: a set stores all urls already crawled
    """

    def __init__(self, init_urls, max_workers=200):
        self.init_urls = init_urls
        self.max_workers = max_workers
        self.q = Queue()
        self.urls = set()
        self.s = requests.Session()
        self.root_hosts = set()
        for url in init_urls:
            self.q.put(url)
            self.urls.add(url)
            self.root_hosts.add(get_netloc(url))

    def url_allowed(self, url):
        """Check if given url will be crawled.

        Currently, a url is crawled only if it belongs to the same host as init_urls.
        """
        return get_netloc(url) in self.root_hosts


    def save(self, response):
        """Save data at the given url."""
        raise NotImplementedError(
            "Please implement your own save logic in subclass.")

    def parse(self, response):
        self.save(response)

        new_links = set()

        for url in self.find_links(response):
            if url not in self.urls and self.url_allowed(url):
                new_links.add(url)
                self.urls.add(url)
                self.q.put(url)
        if len(new_links) != 0:
            print("Find %d new urls to crawl" % len(new_links))


    def fetch(self, url):
        """Fetch content of the url from network."""

        response = self.s.get(url)
        print("Getting content from %s, length: %d" % (url,
                                                       len(response.content)))
        return response

    def work(self, i):
        """Define the work process.

        Retrieve a url from queue, fetch the content from it,
        process it and get new urls to crawl.
        Continue the process until all pages are crawled.

        :param i: indicate the worker number
        """
        while True:
            url = self.q.get()
            print("Worker %d: Getting url %s from queue." % (i, url))
            response = self.fetch(url)
            self.parse(response)
            self.q.task_done()

    def run(self):
        """Start the crawling process.

        This is the main entrance for our crawler. It will start several
        workers, crawling in parallel.
        """
        pool = eventlet.GreenPool()
        start = time.time()
        for i in range(self.max_workers):
            pool.spawn(self.work, i)

        self.q.join()
        end = time.time()

        print("Finished crawling, takes %s seconds." % str(end - start))
        print("Have fun hacking!")
Developer: cizixs, Project: mystarwar, Lines: 91, Source: crawler.py

Example 7: Interpreter

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
class Interpreter(object):
    '''
    The class responsible for keeping track of the execution of the
    state machine.
    '''
    def __init__(self):
        self.running = True
        self.configuration = OrderedSet()
        
        self.internalQueue = Queue()
        self.externalQueue = Queue()
        
        self.statesToInvoke = OrderedSet()
        self.historyValue = {}
        self.dm = None
        self.invokeId = None
        self.parentId = None
        self.logger = None
    
    
    def interpret(self, document, invokeId=None):
        '''Initializes the interpreter given an SCXMLDocument instance'''
        
        self.doc = document
        self.invokeId = invokeId
        
        transition = Transition(document.rootState)
        transition.target = document.rootState.initial
        transition.exe = document.rootState.initial.exe
        
        self.executeTransitionContent([transition])
        self.enterStates([transition])
        
    
    
    def mainEventLoop(self):
        while self.running:
            enabledTransitions = None
            stable = False
                
            # now take any newly enabled null transitions and any transitions triggered by internal events
            while self.running and not stable:
                enabledTransitions = self.selectEventlessTransitions()
                if not enabledTransitions:
                    if self.internalQueue.empty(): 
                        stable = True
                    else:
                        internalEvent = self.internalQueue.get() # this call returns immediately if no event is available
                        
                        self.logger.info("internal event found: %s", internalEvent.name)
                        
                        self.dm["__event"] = internalEvent
                        enabledTransitions = self.selectTransitions(internalEvent)

                if enabledTransitions:
                    self.microstep(enabledTransitions)
#                eventlet.greenthread.sleep()
            eventlet.greenthread.sleep()
                
                    
            
            for state in self.statesToInvoke:
                for inv in state.invoke:
                    inv.invoke(inv)
            self.statesToInvoke.clear()
            
            if not self.internalQueue.empty():
                continue
            
            externalEvent = self.externalQueue.get() # this call blocks until an event is available
            
            if externalEvent.name == "cancel.invoke.%s" % self.dm.sessionid:
                continue
            
            self.logger.info("external event found: %s", externalEvent.name)
            
            self.dm["__event"] = externalEvent
            
            for state in self.configuration:
                for inv in state.invoke:
                    if inv.invokeid == externalEvent.invokeid:  # event is the result of an <invoke> in this state
                        self.applyFinalize(inv, externalEvent)
                    if inv.autoforward:
                        inv.send(externalEvent)
            
            enabledTransitions = self.selectTransitions(externalEvent)
            if enabledTransitions:
                self.microstep(enabledTransitions)
            
              
        # if we get here, we have reached a top-level final state or some external entity has set running to False        
        self.exitInterpreter()  
         
    
        
    def exitInterpreter(self):
        statesToExit = sorted(self.configuration, key=exitOrder)
        for s in statesToExit:
            for content in s.onexit:
                self.executeContent(content)
#.........part of the code omitted here.........
Developer: bollwyvl, Project: PySCXML, Lines: 103, Source: interpreter.py

Example 8: LiteAccounting

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
class LiteAccounting(object):

    def __init__(self, app, conf):
        """
        Should be placed after liteauth but before proxy-query

        """
        self.app = app
        self.conf = conf
        self.logger = get_logger(conf, log_route='lite-accounting')
        # interval between accounting data dumps
        self.interval = int(conf.get('liteacc_interval', 60))
        # how long to keep in memcache, there should be: self.interval << self.timeout
        # maybe we need: self.timeout = 100 * self.interval
        self.timeout = int(conf.get('liteacc_cache_time', 30 * 60))
        # url for accounting objects
        # Example: /v1/liteacc/accounting
        self.accounting_url = conf.get('liteacc_url', '').lower().rstrip('/')
        self.queue = Queue()
        # we will get memcache object later, with first request
        self.memcache = None
        # let's spawn the accounting thread
        spawn_n(self.accounting_server)

    def __call__(self, env, start_response):
        if 'swift.cache' in env:
            self.memcache = env['swift.cache']
        context = LiteAccountingContext(self.app, self.logger, self)
        return context.handle_request(env, start_response)

    def accounting_server(self):
        sleep(random.random() * self.interval)  # start with some entropy
        accounts = {}
        start = time.time()
        while True:
            try:
                account_id = None
                try:
                    account_id = self.queue.get(block=False)
                    accounts[account_id] = True
                except Empty:
                    pass
                if (time.time() - start) > self.interval:
                    try:
                        with Timeout(self.interval):
                            self.dump_accounting_data(accounts)
                    except Timeout:
                        pass
                    start = time.time()
                    continue
                if not account_id:
                    sleep(self.interval / 100.0)
            except Exception:
                self.logger.exception('Exception occurred when dumping accounting data')
                sleep(self.interval)
                start = time.time()
                accounts = {}
                continue

    def dump_accounting_data(self, accounts):
        if not self.accounting_url:
            self.logger.warning('No accounting url, dump cannot complete')
            return
        if not self.memcache:
            return
        while len(accounts):
            for acc_id in accounts.keys():
                if not self.add_semaphore(acc_id):
                    # cannot lock the accounting data, will try again
                    continue
                try:
                    totals = self.retrieve_accounting_info(acc_id)
                    if sum(totals.values()) > 0:  # sum(totals.values()) == 0 if all executions failed
                        req = Request.blank('%s/%s' % (self.accounting_url, acc_id))
                        req.method = 'GET'
                        req.environ['swift.cache'] = self.memcache
                        resp = req.get_response(self.app)
                        if is_success(resp.status_int):
                            try:
                                acc_totals = json.loads(resp.body)
                                for key in acc_totals.keys():
                                    acc_totals[key] += totals[key]
                            except Exception:
                                self.logger.warning('Accounting response for GET %s is %s %s'
                                                    % (req.path, resp.status, resp.body))
                                acc_totals = totals
                        else:
                            self.logger.warning('Accounting response for GET %s is %s %s'
                                                % (req.path, resp.status, resp.body))
                            acc_totals = totals
                        req = Request.blank('%s/%s' % (self.accounting_url, acc_id))
                        req.method = 'PUT'
                        req.environ['swift.cache'] = self.memcache
                        req.body = json.dumps(acc_totals)
                        resp = req.get_response(self.app)
                        if not is_success(resp.status_int):
                            self.logger.warning('Accounting response for PUT %s is %s %s'
                                                % (req.path, resp.status, resp.body))
                    del accounts[acc_id]
                finally:
#.........part of the code omitted here.........
Developer: mgeisler, Project: liteauth, Lines: 103, Source: liteaccounting.py

Example 9: Interpreter

# Required import: from eventlet import Queue [as alias]
# Or: from eventlet.Queue import get [as alias]
class Interpreter(object):
    '''
    The class responsible for keeping track of the execution of the
    state machine.
    '''
    def __init__(self):
        self.running = True
        self.exited = False
        self.cancelled = False
        self.configuration = OrderedSet()
        
        self.internalQueue = Queue()
        self.externalQueue = Queue()
        
        self.statesToInvoke = OrderedSet()
        self.historyValue = {}
        self.dm = None
        self.invokeId = None
        self.parentId = None
        self.logger = None
    
    
    def interpret(self, document, invokeId=None):
        '''Initializes the interpreter given an SCXMLDocument instance'''
        
        self.doc = document
        self.invokeId = invokeId
        
        transition = Transition(document.rootState)
        transition.target = document.rootState.initial
        transition.exe = document.rootState.initial.exe
        
        self.executeTransitionContent([transition])
        self.enterStates([transition])
        
    
    
    def mainEventLoop(self):
        while self.running:
            enabledTransitions = None
            stable = False
                
            # now take any newly enabled null transitions and any transitions triggered by internal events
            while self.running and not stable:
                enabledTransitions = self.selectEventlessTransitions()
                if not enabledTransitions:
                    if self.internalQueue.empty(): 
                        stable = True
                    else:
                        internalEvent = self.internalQueue.get() # this call returns immediately if no event is available
                        
                        self.logger.info("internal event found: %s", internalEvent.name)
                        
                        self.dm["__event"] = internalEvent
                        enabledTransitions = self.selectTransitions(internalEvent)

                if enabledTransitions:
                    self.microstep(enabledTransitions)
#                eventlet.greenthread.sleep()
            eventlet.greenthread.sleep()
                
                    
            
            for state in self.statesToInvoke:
                for inv in state.invoke:
                    inv.invoke(inv)
            self.statesToInvoke.clear()
            
            if not self.internalQueue.empty():
                continue
            
            externalEvent = self.externalQueue.get() # this call blocks until an event is available
            
#            if externalEvent.name == "cancel.invoke.%s" % self.dm.sessionid:
#                continue

            # our parent session also might cancel us.  The mechanism for this is platform specific,
            if isCancelEvent(externalEvent):
                self.running = False
                continue
            
            self.logger.info("external event found: %s", externalEvent.name)
            
            self.dm["__event"] = externalEvent
            
            for state in self.configuration:
                for inv in state.invoke:
                    if inv.invokeid == externalEvent.invokeid:  # event is the result of an <invoke> in this state
                        self.applyFinalize(inv, externalEvent)
                    if inv.autoforward:
                        inv.send(externalEvent)
            
            enabledTransitions = self.selectTransitions(externalEvent)
            if enabledTransitions:
                self.microstep(enabledTransitions)
            
              
        # if we get here, we have reached a top-level final state or some external entity has set running to False        
        self.exitInterpreter()  
         
#.........part of the code omitted here.........
Developer: emusal, Project: PySCXML, Lines: 103, Source: interpreter.py


Note: The eventlet.Queue.get examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the corresponding project's license. Do not reproduce without permission.