

Python Manager.get_nowait Method Code Examples

This article collects typical usage examples of the Python multiprocessing.Manager.get_nowait method. If you are wondering how Manager.get_nowait works, how to call it, or what real-world usage looks like, the curated code examples below may help. You can also explore further usage examples of the enclosing multiprocessing.Manager class.


Three code examples of the Manager.get_nowait method are shown below, sorted by popularity by default.
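Before the examples, here is a minimal standalone sketch (not taken from any of the projects below) of the pattern they all build on: a Manager().Queue() proxy is shared across processes, and get_nowait() retrieves items without blocking, raising queue.Empty once the queue is drained.

from multiprocessing import Manager, Process
from queue import Empty  # Queue.Empty on Python 2


def worker(q):
    # Child processes can put items onto the managed queue via its proxy
    q.put("hello from the child")


if __name__ == "__main__":
    shared_queue = Manager().Queue()
    child = Process(target=worker, args=(shared_queue,))
    child.start()
    child.join()

    # Drain without blocking; Empty signals there is nothing left to read
    try:
        while True:
            print(shared_queue.get_nowait())
    except Empty:
        pass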

Example 1: Pool

# Required import: from multiprocessing import Manager [as alias]
# Or: from multiprocessing.Manager import get_nowait [as alias]
class Pool(object):
    """
    The Pool class represents a pool of worker threads. It has methods
    which allows tasks to be offloaded to the worker processes in a
    few different ways
   """
    def __init__(self, num_workers, name="Pool"):
        """
        \param num_workers (integer) number of worker threads to start
        \param name (string) prefix for the worker threads' name
        """
        self.queue = Manager().Queue()
        self.closed = False
        self.workers = []

        for idx in range(num_workers):
            process = PoolWorker(self.queue, name="%s-Worker-%d" % (name, idx))
            process.daemon = True
            try:
                process.start()
            except:
                # If one thread has a problem, undo everything
                self.terminate()
                raise
            else:
                self.workers.append(process)

    def submit(self, work_unit):
        self.queue.put(work_unit)
    
    def close(self):
        """Prevents any more tasks from being submitted to the
        pool. Once all the tasks have been completed the worker
        processes will exit."""
        # No lock here. We assume it's sufficiently atomic...
        self.closed = True

    def terminate(self):
        """Stops the worker processes immediately without completing
        outstanding work. When the pool object is garbage collected
        terminate() will be called immediately."""
        self.close()

        # Drain the job queue; get_nowait() on a managed Queue raises Empty
        # once the queue is exhausted
        from Queue import Empty  # queue.Empty on Python 3
        try:
            while True:
                self.queue.get_nowait()
        except Empty:
            pass

        # Send one sentinel for each worker process: each worker exits when it
        # sees a sentinel, leaving the next sentinel for the next worker
        for process in self.workers:
            self.queue.put(SENTINEL)
Developer: anushreejangid, Project: csm, Lines of code: 57, Source file: process_pool.py
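For orientation, the sketch below shows one way the Pool class above might be driven. PoolWorker and SENTINEL live elsewhere in the original process_pool.py and are not shown on this page, so the versions here are illustrative assumptions rather than the project's actual code.

from multiprocessing import Process

SENTINEL = None  # assumed sentinel meaning "no more work"


class PoolWorker(Process):
    """Assumed worker: pulls items off the shared queue until it sees SENTINEL."""
    def __init__(self, queue, name=None):
        super(PoolWorker, self).__init__(name=name)
        self.queue = queue

    def run(self):
        while True:
            work_unit = self.queue.get()
            if work_unit is SENTINEL:
                break
            print("%s handled %r" % (self.name, work_unit))


if __name__ == "__main__":
    import time

    pool = Pool(num_workers=2)
    for item in ["a", "b", "c", "d"]:
        pool.submit(item)

    time.sleep(1)      # give the workers a moment to drain the queue
    pool.terminate()   # discard leftovers, then send one SENTINEL per worker

Note that terminate() discards anything still queued before sending the sentinels, so in real use it would only be called once the submitted work no longer matters.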

Example 2: MultiProcessFile

# Required import: from multiprocessing import Manager [as alias]
# Or: from multiprocessing.Manager import get_nowait [as alias]
class MultiProcessFile(object):
    """
    helper for testing multiprocessing

    multiprocessing poses a problem for doctests, since the strategy
    of replacing sys.stdout/stderr with file-like objects then
    inspecting the results won't work: the child processes will
    write to the objects, but the data will not be reflected
    in the parent doctest-ing process.

    The solution is to create file-like objects which will interact with
    multiprocessing in a more desirable way.

    All processes can write to this object, but only the creator can read.
    This allows the testing system to see a unified picture of I/O.
    """
    def __init__(self):
        # per advice at:
        #    http://docs.python.org/library/multiprocessing.html#all-platforms
        self.__master = getpid()
        self.__queue = Manager().Queue()
        self.__buffer = StringIO()
        self.softspace = 0

    def buffer(self):
        if getpid() != self.__master:
            return

        from Queue import Empty
        from collections import defaultdict
        cache = defaultdict(str)
        while True:
            try:
                pid, data = self.__queue.get_nowait()
            except Empty:
                break
            if pid == ():
                #show parent output after children
                #this is what users see, usually
                pid = ( 1e100, ) # googol!
            cache[pid] += data
        for pid in sorted(cache):
            #self.__buffer.write( '%s wrote: %r\n' % (pid, cache[pid]) ) #DEBUG
            self.__buffer.write( cache[pid] )

    def write(self, data):
        # note that these pids are in the form of current_process()._identity
        # rather than OS pids
        from multiprocessing import current_process
        pid = current_process()._identity
        self.__queue.put((pid, data))

    def __iter__(self):
        "getattr doesn't work for iter()"
        self.buffer()
        return self.__buffer

    def seek(self, offset, whence=0):
        self.buffer()
        return self.__buffer.seek(offset, whence)

    def getvalue(self):
        self.buffer()
        return self.__buffer.getvalue()

    def __getattr__(self, attr):
        return getattr(self.__buffer, attr)
Developer: ANKIT-KS, Project: fjord, Lines of code: 64, Source file: plugintest.py
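A hypothetical usage sketch (not part of plugintest.py): replace sys.stdout with a MultiProcessFile before forking workers, let the children write, then read the merged output in the parent. This assumes the fork start method, so children inherit the replaced stdout; note that the class above uses the Python 2 Queue and StringIO modules, so on Python 3 those two imports would need to become queue and io.StringIO.

import sys
from multiprocessing import Pool


def shout(word):
    # Runs in a child process; with fork, the child inherits the replaced
    # sys.stdout, so this output lands on the shared managed queue
    print(word.upper())


if __name__ == "__main__":
    captured = MultiProcessFile()
    original_stdout, sys.stdout = sys.stdout, captured
    try:
        pool = Pool(processes=2)
        pool.map(shout, ["alpha", "beta"])
        pool.close()
        pool.join()
        print("parent done")  # parent output sorts after the children's
    finally:
        sys.stdout = original_stdout

    # Only the creating process can read the merged output
    print(captured.getvalue())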

Example 3: __init__

# Required import: from multiprocessing import Manager [as alias]
# Or: from multiprocessing.Manager import get_nowait [as alias]

#......... part of the code is omitted here .........
                verb = relation_util.normalize_relation(verb)

                for a0, a1 in argument_pairs:
                    en0 = relation_util.form_entity(tokenized_sentence, a0, chunk_parse, pos_tags)
                    en1 = relation_util.form_entity(tokenized_sentence, a1, chunk_parse, pos_tags)
                    if not en0 or not en1: continue
                    relations.append(RelationTuple(left_entity=en0, right_entity=en1, relation=verb,
                                                   sentence=sentence, text=text, block_id=block_id,
                                                   payload=payload, ff = ff))
                    logger.info("generated a relation for ")
                    logger.info(block_id)

                for arg_modifier in modifiers:
                    mod_pos = sentence.find(arg_modifier)
                    linked_arg = min([(a, abs(mod_pos - sentence.find(a))) for a in arguments], key=lambda e: e[1])[0]
                    en0 = relation_util.form_entity(tokenized_sentence, linked_arg, chunk_parse, pos_tags)
                    en1 = relation_util.form_entity(tokenized_sentence, arg_modifier, chunk_parse, pos_tags)
                    if not en0 or not en1: continue
                    relations.append(RelationTuple(left_entity=en0, right_entity=en1, relation=verb,
                                                   sentence=sentence, text=text, block_id=block_id,
                                                   payload=payload, ff=ff))
                    logger.info("generated a relation for ")
                    logger.info(block_id)

        return relations

    def form_relations_source(self, source_item):
        if not source_item:
            logger.error("got an empty source item")
            return

        item_entry = ""
        payload = ""
        ff = ""

        for f_name, f_value in source_item:
            if f_name == "payload":
                payload = f_value
            elif f_name == "ff":
                ff = f_value
            else:
                item_entry += f_value

        if item_entry == ' ': return
        try:
            block_id = str(uuid.uuid1())
            relations = self.form_relations(item_entry, block_id, payload, ff)
        except RuntimeError as e:
            logger.error("Error generating relations")
            logger.error(e)
            return

        for relation in relations:
            sink_relation = self.model_class()
            sink_relation.leftEntity = relation.left_entity
            sink_relation.rightEntity = relation.right_entity
            sink_relation.relation = relation.relation
            sink_relation.sentence = relation.sentence
            sink_relation.text = relation.text
            sink_relation.block_id = relation.block_id
            sink_relation.productName = relation.ff
            sink_relation.webLocation = relation.payload

            logger.info("generated a relation")
            logger.info(sink_relation)

            try:
                self.relation_queue.put(sink_relation, timeout=1)
            except Full as e:
                logger.error(e)

    def sink_relations(self):
        while not self.all_sinked:
            try:
                item = self.relation_queue.get_nowait()
                self.relation_sink.sink_item(item)
            except Empty as e:
                pass

    def form_relations_from_source(self):

        if not self.data_source or not self.relation_sink:
            raise RuntimeError("Data source and sink must be set")

        self.data_source.start()
        self.relation_sink.start()

        self.all_sinked = False
        pool = Pool(processes=self.workers)
        t1 = time.time()
        pool.imap(self.form_relations_source, self.data_source)

        sinker = Thread(target=self.sink_relations, name='Sink-Thread')
        sinker.start()

        pool.close()
        pool.join()
        self.all_sinked = True
        t2 = time.time()
        logger.info("process finished in :: %d  seconds" %(t2 - t1))
Developer: subhadeepmaji, Project: ml_algorithms, Lines of code: 104, Source file: RelationExtractor.py
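The core concurrency pattern of Example 3 — worker processes producing into a managed queue while a sink thread drains it with get_nowait() until a done flag is raised — can be isolated into the small sketch below. Names such as produce, sink, and finished are illustrative and are not taken from RelationExtractor.py.

import time
from functools import partial
from multiprocessing import Manager, Pool
from queue import Empty
from threading import Thread


def produce(queue, item):
    # Runs in a worker process; manager queue proxies are picklable, so the
    # queue can be handed to the workers as an argument
    queue.put(item * item)


def sink(queue, is_done):
    # Runs in a thread of the parent process; drain without blocking, and
    # stop only once the producers are done AND the queue is empty
    while True:
        try:
            print("sinking", queue.get_nowait())
        except Empty:
            if is_done():
                break
            time.sleep(0.1)


if __name__ == "__main__":
    results = Manager().Queue()
    finished = False

    sinker = Thread(target=sink, args=(results, lambda: finished), name="Sink-Thread")
    sinker.start()

    with Pool(processes=2) as pool:
        pool.map(partial(produce, results), range(5))

    finished = True
    sinker.join()

Unlike the original sink_relations(), the sink here keeps draining after the flag is set until the queue is empty, which avoids dropping items that arrive just before the producers finish.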


Note: The multiprocessing.Manager.get_nowait examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.