当前位置: 首页>>代码示例>>Python>>正文


Python Manager.qsize方法代码示例

本文整理汇总了Python中multiprocessing.Manager.qsize方法的典型用法代码示例。如果您正苦于以下问题:Python Manager.qsize方法的具体用法?Python Manager.qsize怎么用?Python Manager.qsize使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在multiprocessing.Manager的用法示例。


在下文中一共展示了Manager.qsize方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: launch_expeditions

# 需要导入模块: from multiprocessing import Manager [as 别名]
# 或者: from multiprocessing.Manager import qsize [as 别名]
    def launch_expeditions(self, task_request_list, moon_name_list=None):
        """Distribute task requests to Moon worker processes and collect results.

        :param task_request_list: iterable of task objects; each is stringified
            before being enqueued (plain objects are not process-safe).
        :param moon_name_list: optional list of Moon names to target; when
            falsy, every Moon currently on orbit is used.
        :return: list of responses drained from the receive queue.
        """
        global expedition

        # ---[ 1 ] Resolve which Moons will receive work -----------------
        self.log.show( 'Checking Moon list sent by user' )
        if moon_name_list:
            self.log.show( 'Traveling to ' + str( moon_name_list ) )
            moons = [self.orbit.get_moon(name) for name in moon_name_list]
        else:
            self.log.show( 'Traveling to available Moons on Orbit' )
            moons = self.orbit.values()

        # ---[ 2 ] Manager-backed queues are safe to share with the pool --
        self.log.show( 'Build Thread-safe Queues with no maximum size' )
        recv_queue = Manager().Queue()
        send_queue = Manager().Queue()

        # ---[ 3 ] Publish every task on the outbound queue ---------------
        self.log.show( 'Enqueue tasks on "send_queue" object' )
        for task in task_request_list:
            send_queue.put_nowait(str(task))
        self.log.show( 'send_queue = ' + str(send_queue.qsize()) + '/' + str(len(task_request_list)) + 'tasks' )

        # ---[ 4 ] One pool worker per Moon runs module-level `expedition` -
        self.log.show( 'Starting up Process Pool' )
        pool = Pool(processes=len(moons))
        for moon in moons:
            pool.apply_async(func=expedition, args=(self.name, moon.name, moon.ip, moon.port, send_queue, recv_queue))

        # ---[ 5 ] Wait for every worker, then drain the responses --------
        pool.close()
        pool.join()
        self.log.show( 'recv_queue = ' + str(recv_queue.qsize()) + '/' + str(len(task_request_list)) + 'tasks' )

        results = []
        while not recv_queue.empty():
            results.append(recv_queue.get())

        self.log.show( 'closing queue' )
        self.log.show( 'return results' )
        return results
开发者ID:vyscond,项目名称:cosmos,代码行数:72,代码来源:cosmos.py

示例2: __init__

# 需要导入模块: from multiprocessing import Manager [as 别名]
# 或者: from multiprocessing.Manager import qsize [as 别名]
class ConcurrentPandas:
    """
    Concurrent Pandas is a class for concurrent asynchronous data downloads
    from a variety of sources using either threads, or processes.
    """
    def __init__(self):
        self.output_map = Manager().dict()
        self.input_queue = Manager().Queue()
        self.data_worker = None
        self.worker_args = None
        self.source_name = None

    def consume_keys(self):
        """
        Work through the keys to look up sequentially
        """
        print("\nLooking up " + self.input_queue.qsize().__str__() + " keys from " + self.source_name + "\n")
        self.data_worker(**self.worker_args)

    def consume_keys_asynchronous_processes(self):
        """
        Work through the keys to look up asynchronously using multiple processes
        """
        print("\nLooking up " + self.input_queue.qsize().__str__() + " keys from " + self.source_name + "\n")
        jobs = multiprocessing.cpu_count()*4 if (multiprocessing.cpu_count()*4 < self.input_queue.qsize()) \
            else self.input_queue.qsize()

        pool = multiprocessing.Pool(processes=jobs,  maxtasksperchild=10)
        for x in range(jobs):
            pool.apply(self.data_worker, [], self.worker_args)

        pool.close()
        pool.join()

    def consume_keys_asynchronous_threads(self):
        """
        Work through the keys to look up asynchronously using multiple threads
        """
        print("\nLooking up " + self.input_queue.qsize().__str__() + " keys from " + self.source_name + "\n")
        jobs = multiprocessing.cpu_count()*4 if (multiprocessing.cpu_count()*4 < self.input_queue.qsize()) \
            else self.input_queue.qsize()

        pool = ThreadPool(jobs)

        for x in range(jobs):
            pool.apply(self.data_worker, [], self.worker_args)

        pool.close()
        pool.join()

    def return_map(self):
        """
        Return hashmap consisting of key string -> data frame
        """
        return self.output_map

    def return_input_queue(self):
        """
        Return input Queue
        """
        return self.input_queue

    def insert_keys(self, *args):
        """
        Unpack each key and add to queue
        """
        for key in args:
            self.unpack(key)

    def unpack(self, to_unpack):
        """Recursively push string keys from *to_unpack* onto ``input_queue``.

        Strings go straight onto the queue; containers are walked recursively.
        Raises ``Exception`` for an element that is neither a string nor a
        container. Only the Python 3 branch is visible here -- the Python 2
        handling appears to be elided below this point in the file.
        """

        # Python 3 has no `basestring`. Because `basestring` is assigned in the
        # except-clause below, it is a *local* name in this function, so on
        # Python 3 the isinstance() raises UnboundLocalError -- a subclass of
        # NameError -- which is caught and `basestring` rebound to `str`.
        try:
            isinstance(to_unpack, basestring)
        except NameError:
            basestring = str

        # Base Case: a bare string is a key; enqueue it and stop recursing.
        if isinstance(to_unpack, basestring):
            self.input_queue.put(to_unpack)
            return

        for possible_key in to_unpack:
            if isinstance(possible_key, basestring):
                self.input_queue.put(possible_key)

            elif sys.version_info >= (3, 0):
                if isinstance(possible_key, collections.abc.Container) and not isinstance(possible_key, basestring):
                    self.unpack(possible_key)
                else:
                    raise Exception("A type that is neither a string or a container was passed to unpack. "
                                    "Aborting!")
#.........这里部分代码省略.........
开发者ID:hobson,项目名称:Concurrent-Pandas,代码行数:103,代码来源:concurrentpandas.py

示例3: while

# 需要导入模块: from multiprocessing import Manager [as 别名]
# 或者: from multiprocessing.Manager import qsize [as 别名]
# Sentinel: -1 is neither 0 (pending) nor 1 (ready), so the l[nowplaying + 1]
# look-ahead below stops at the end of the clip list -- presumably; the setup
# code that builds `l` is elided above, confirm there.
l.append(-1)

#######################################
# Playback
#######################################
# Index (into arg/l) of the most recently played clip; -1 = nothing played yet.
nowplaying = -1

# Process handle of the currently running aplay command, or None when idle.
playing = None

# Speak the clips in order while synthesis keeps running in the background.
# NOTE(review): `r` looks like a pool AsyncResult and `queue` a shared queue of
# finished-synthesis indices -- confirm against the elided setup code above.
while ( not r.ready() ) or ( nowplaying != len(arg) ) or ( playing is not None ):
	time.sleep(0.5)
	# Drain completion notices: mark every reported index as synthesized.
	for _ in range(queue.qsize()):
		compiled_index = queue.get()
		l[compiled_index] = 1
	# Start playback if the next clip(s) are ready.
	if nowplaying < len(arg):
		if playing is None:
			if l[nowplaying + 1] == 1:
				# Batch every consecutive ready WAV into one play_wav call.
				listindex = list()
				while l[nowplaying + 1] == 1:
					nowplaying += 1
					listindex.append(nowplaying)
				_print( "DEBUG: しゃべるよ![%s]" % str(listindex) )
				playing = play_wav(listindex)
			elif l[nowplaying + 1] == 0:
				# Next clip is still being synthesized; keep waiting.
				_print( "DEBUG: 音声合成の完了待ちです!" )
开发者ID:WindVoiceVox,项目名称:PySpeak,代码行数:33,代码来源:speak.py

示例4: AmazonSpider

# 需要导入模块: from multiprocessing import Manager [as 别名]
# 或者: from multiprocessing.Manager import qsize [as 别名]
# Entry point (Python 2): scrape the base page once, then fan out one pool
# task per colour variant, collecting results through a shared Manager queue.
if __name__ == '__main__':
    '''
    main ,start here
    '''
    now = time.time()
    amazon = AmazonSpider()
    html = amazon.getHTML(amazon.url)
    # Fixed wait for the page to finish loading -- presumably Selenium-rendered
    # dynamic content; confirm against AmazonSpider.getHTML.
    time.sleep(5)
    amazon.parseBase(html)
    pool = Pool(5)
    # Manager-backed queue so worker processes can report results back.
    q = Manager().Queue()
    for color_id in amazon.color_list:
        pool.apply_async(start, args = (color_id, amazon.size_list, amazon.url, q, ))
    pool.close()
    pool.join()
    # The pool has joined, so no producer remains; the non-blocking get(False)
    # cannot race with qsize() here.
    while q.qsize() != 0:
        print q.get(False)
    end = time.time()
    # Elapsed wall-clock seconds for the whole crawl.
    print (end-now)
    amazon.driver.quit()
    # (Author's earlier sequential Selenium version, kept commented out.)
    # for color_id in amazon.color_list:
    #     xpath = "//li[@id='"+color_id+"']"
    #     amazon.driver.find_element_by_xpath(xpath).click()
    #     time.sleep(2)
    #     for size in amazon.size_list:
    #         if size == 'native_size_name_-1':
    #             continue
    #         if 'U' in amazon.driver.find_element_by_xpath("//option[@id='"+size+"']").get_attribute('class'):
    #             continue
    #         else:
    #             amazon.driver.find_element_by_xpath("//option[@id='"+size+"']").click()
开发者ID:boostsup,项目名称:amazon_crawler,代码行数:33,代码来源:amazon_crawler.py

示例5: range

# 需要导入模块: from multiprocessing import Manager [as 别名]
# 或者: from multiprocessing.Manager import qsize [as 别名]
# Spawn the cracker processes (Python 2 script); each pulls work items off the
# shared work_queue until global_namespace.finished is set.
number_of_processes = 16
for i in range(number_of_processes):
    worker = MD5Cracker(work_queue, global_namespace)
    worker.start()
    workers.append(worker)

# NOTE(review): `hash` shadows the Python builtin of the same name.
print "Target Hash: {}".format(hash)

maxChars = 13
while_count = 1
# Breadth-first over candidate password lengths: seed one root work item per
# width, then poll once a second until the workers report completion.
for baseWidth in range(1, maxChars + 1):

    while global_namespace.finished is False:
        if work_queue.empty():
            # Backticks are Python 2 shorthand for repr().
            print "checking passwords width [" + `baseWidth` + "]"

            # set is width, position, baseString
            work_queue.put({'width': baseWidth, 'position': 0, 'baseString': ""})
            break
        else:

            if while_count % 10 == 0:
                # Reset the shared attempt counter roughly every 10 polls so
                # the reported rate tracks recent throughput.
                global_namespace.count = 0
                while_count = 1
            else:
                # count/while_count approximates passwords per second, since
                # each loop iteration sleeps ~1s.
                print "{:,d} passwords/sec".format(global_namespace.count/while_count)
                while_count += 1

            print "Queue Size: {}".format(work_queue.qsize())
            time.sleep(1)
            continue
开发者ID:Hengjie,项目名称:High-Performance-MD5-Bruteforcer,代码行数:33,代码来源:md5cracker.py

示例6: Queue_server

# 需要导入模块: from multiprocessing import Manager [as 别名]
# 或者: from multiprocessing.Manager import qsize [as 别名]
class Queue_server(object):
    """Manager-backed work queue of WeChat public-account names.

    The queue and the failure list are hosted by a multiprocessing Manager so
    they can be shared safely across worker processes.

    Fixes over the original:
    - Python 2-only ``print`` statements replaced with single-argument
      ``print(...)`` calls, which behave identically on Python 2 and 3.
    - One Manager server process hosts both shared structures instead of a
      separate ``Manager()`` per structure (and per reset).
    """

    def __init__(self, wx_lists=()):
        """Create the shared structures and enqueue *wx_lists*.

        :param wx_lists: tuple of public-account names to pre-load.
        """
        self.__manager = Manager()
        # -1 behaves like "no maximum size" for a Manager queue.
        self.__queue = self.__manager.Queue(-1)
        self.__fail_list = self.__manager.list()
        self.init_wx_lists(wx_lists)

    def init_wx_lists(self, wx_lists=()):
        """Enqueue every public-account name in *wx_lists*."""
        for wx in wx_lists:
            self.put(wx)

    def put(self, value):
        """Add *value* to the shared queue."""
        self.__queue.put(value)

    def get(self):
        """Pop the next item, or return False when the queue is empty.

        NOTE(review): the empty-check/get pair is not atomic across processes;
        a concurrent consumer can still win the race after empty() says False.
        """
        if not self.empty():
            return self.__queue.get()
        return False

    def get_wx_lists_queue(self):
        """Return the underlying shared queue proxy."""
        return self.__queue

    def get_size(self):
        """Return the current number of queued items."""
        return self.__queue.qsize()

    def empty(self):
        """Return True when the queue holds no items."""
        return self.__queue.empty()

    def put_fail_wx(self, wx_data):
        """Record a failed account's data on the shared failure list."""
        self.__fail_list.append(wx_data)

    def print_fail_list(self, flush=None):
        """Re-queue and report every failed account.

        Without *flush* (the default) the failure list is cleared after the
        items are put back on the queue; with a truthy *flush* and no recorded
        failures, report overall success instead.
        """
        if len(self.__fail_list) > 0:
            for fail in self.__fail_list:
                self.put(fail)
                print('the fail wx : {0}' . format(fail))
            if not flush:
                self.__fail_list = self.__manager.list()
        elif flush:
            print('all success')

    # True when items remain queued (e.g. failed accounts were re-queued).
    def is_have_failed(self):
        return not self.empty()


注:本文中的multiprocessing.Manager.qsize方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。