

Python ThreadPool.join_all Method Code Examples

This article collects typical usage examples of the threadpool.ThreadPool.join_all method in Python. If you are wondering what ThreadPool.join_all does, how to call it, or what real-world usage looks like, the selected code examples below should help. You can also explore other uses of threadpool.ThreadPool for further context.


Two code examples of the ThreadPool.join_all method are shown below, ordered by popularity.
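Both examples depend on a pool object that offers queue_task (example 1) or dispatch/map (example 2), plus join_all, which blocks until every queued task has completed. The actual threadpool module used by these projects is not shown here, so the following is only a minimal sketch of the queue_task/join_all pattern from example 1; the callback-carrying dispatch/map variants used in example 2 are omitted, and the class and method names merely mirror the calls made below rather than the real library.

import queue
import threading


class ThreadPool:
    """Minimal sketch of a pool with queue_task() and join_all() (assumed API)."""

    def __init__(self, num_workers):
        self._tasks = queue.Queue()
        self._workers = [threading.Thread(target=self._worker, daemon=True)
                         for _ in range(num_workers)]
        for worker in self._workers:
            worker.start()

    def _worker(self):
        # Run (callable, args) pairs until a None sentinel arrives.
        while True:
            item = self._tasks.get()
            if item is None:
                break
            func, args = item
            func(*args)

    def queue_task(self, func, args=()):
        """Schedule func(*args) on a worker thread."""
        self._tasks.put((func, args))

    def join_all(self):
        """Block until every queued task has run, then stop the workers."""
        for _ in self._workers:
            self._tasks.put(None)   # one sentinel per worker
        for worker in self._workers:
            worker.join()


# Usage mirroring example 1 below: queue tasks, then wait for all of them.
if __name__ == '__main__':
    results = {}

    def task(i):
        results[i] = i * i

    pool = ThreadPool(4)
    for i in range(10):
        pool.queue_task(task, (i,))
    pool.join_all()
    print(results)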

Example 1: render_rap

# Required import: from threadpool import ThreadPool [as alias]
# Or: from threadpool.ThreadPool import join_all [as alias]
    def render_rap(self, msg_id, words):
        # Make the length of words fit the melody
        notes = sum(1 for pitch, beats in self._melody if pitch != REST)
        diff = notes - len(words)
        if diff < 0:
            words = words[:diff]
        else:
            words = words + ['la'] * diff

        delay = 0
        offsets = {}
        word_index = 0
        word_count = len(words)
        word_delays = []
        word_paths = []

        pool = ThreadPool(min(word_count, self._thread_pool))

        for pitch, beats in self._melody:
            duration = beats * self._spb

            if pitch != REST:
                word = words[word_index]
                word_delays.append(delay)
                word_path = '/tmp/%s-%s.wav' % (msg_id, word_index)
                word_paths.append(word_path)
                ssml = '<s><prosody pitch="%sHz" range="x-low">%s</prosody></s>' \
                    % (pitch, word)
                def task(word_id, ssml, word_path):
                    offsets[word_id] = self._swift.tts_file(ssml, word_path)
                pool.queue_task(task, (word_index, ssml, word_path))
                word_index += 1

            delay += duration

            if word_index == word_count:
                # Break here, rather than inside the if statement above, so
                # that delay is updated and equals the total duration of the rap.
                break

        pool.join_all()

        if not word_index:
            # Didn't render any words!
            return

        # Mix the rap and the backing track
        mix_path = '/tmp/%s-mix.wav' % msg_id
        sox_args = [self.sox_path, '-M'] + word_paths \
            + [self._backing_sample, mix_path, 'delay'] \
            + [str(delay + offsets[i]) for i, delay in enumerate(word_delays)] \
            + ['remix',
                ','.join(str(channel) for channel in range(1, word_count + 2)),
                'norm']
        print(' '.join(sox_args))
        subprocess.check_call(sox_args)

        return mix_path
Developer ID: Man-UP, Project: text-to-spit, Lines of code: 60, Source file: tts.py
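The opening lines of render_rap align the lyric with the melody: they count the sung notes (pitch != REST) and then either truncate the word list or pad it with 'la' so the two lengths match. Below is a standalone illustration of just that step, with made-up melody and lyric data; the REST value here is a placeholder, not necessarily the constant used in the project.

# Illustration of the word/melody alignment step (assumed data).
REST = None
melody = [(220, 1), (REST, 1), (247, 1), (262, 2)]   # (pitch in Hz, beats)
words = ['drop', 'the', 'beat', 'now', 'extra']

notes = sum(1 for pitch, beats in melody if pitch != REST)   # 3 sung notes
diff = notes - len(words)                                    # 3 - 5 = -2
words = words[:diff] if diff < 0 else words + ['la'] * diff
print(words)   # ['drop', 'the', 'beat'] -- trimmed to fit the melody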

Example 2: LinkExtractorTPool

# Required import: from threadpool import ThreadPool [as alias]
# Or: from threadpool.ThreadPool import join_all [as alias]

#......... part of the code omitted here .........
            return False 
    
    def feed(self, xhtml):
        self.__tpool.dispatch(self.__parseto_xtree, xhtml, self.__callback_chain)
        
    def l_feed(self, xhtml_l):
        if isinstance(xhtml_l, list):
            self.__tpool.map(self.__parseto_xtree,  self.__callback_chain, xhtml_l)
        else:
            raise Exception("LinkExtractor.l_feed() Error: List() argument was expected")
            
    
    def __callback_chain(self, etree):
        #Put the etree to the etree-queue for getting all the URLs available
        self.__etree_q.put(etree)
        #Find Links to other site and put them in the queue 
        site_links = self.__site_links(etree)
        if site_links: 
            self.__site_links_q.put(site_links)
        #Find Links of media and put them in the queue
        media_links = self.__media_links(etree)
        if media_links:
            self.__media_links_q.put(media_links)
        #Find Links of scripts and put them in the queue
        script_links = self.__scripts_links(etree)
        if script_links:
            self.__scripts_links_q.put(script_links)
        undefined_links = self.__undefined_links(etree)
        if undefined_links:
            self.__undefined_links_q.put(undefined_links)
    
    def all_links(self, etree):
        links = list()
        for link in etree.iterlinks():
            links.append(link)
        return links
            
    def sites_links(self, xhtml): 
        url_l = self._url_href.findall(xhtml['xhtml_s'])
        for i, url in enumerate(url_l):
            if url.find('#') > -1:
                url = ""
            prsd_url = urlparse(url)
            if not prsd_url.netloc:
                url_l[i] = xhtml['base_url'] + url
        return url_l
    
    def media_links(self, xhtml):
        return None #to be Fixed
    
    def scripts_links(self, xhtml):
        return None #to be Fixed
    
    def undefined_links(self, xhtml):
        return None #to be fixed
    
    def __site_links(self, etree):
        return self.__extract_site_urls(etree)
    
    def __media_links(self, etree):
        return None #to be Fixed
    
    def __scripts_links(self, etree):
        return None #to be Fixed
    
    def __undefined_links(self, etree):
        return None #to be Fixed
       
    def __ret_q(self, q):
        """A callable for iterators to return the content of Queues"""
        if q.empty():
            return True 
        else:
            return q.get()
    
    def all_links_iter(self):
        try:
            # Wait up to two seconds for a parsed tree before giving up.
            etree = self.__etree_q.get(timeout=2)
        except Queue.Empty:
            raise StopIteration
        else:
            return etree.iterlinks()
    
    def sites_links_iter(self):
        self.__call_q = self.__site_links_q
        return self
    
    def media_links_iter(self):
        self.__call_q = self.__media_links_q
        return self  
    
    def scripts_links_iter(self):
        self.__call_q = self.__scripts_links_q
        return self
    
    def undefined_links_iter(self):
        self.__call_q = self.__undefined_links_q
        return self
    
    def close(self):
        self.__tpool.join_all()
Developer ID: dpritsos, Project: synergeticprocessing, Lines of code: 104, Source file: linkextractors.py
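For orientation, a hypothetical way of driving this extractor is sketched below. The constructor is part of the omitted code, so the argument shown is an assumption; the dict keys 'xhtml_s' and 'base_url' are the ones read by sites_links above, but the exact shape expected by feed() is likewise an assumption.

# Hypothetical driver for LinkExtractorTPool (constructor arguments assumed).
extractor = LinkExtractorTPool(4)
page = {
    'xhtml_s': '<html><body><a href="/about">About</a></body></html>',
    'base_url': 'http://example.com',
}
extractor.feed(page)    # parsing and link extraction run on the pool
extractor.close()       # join_all(): wait until all queued pages are processed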


Note: The threadpool.ThreadPool.join_all examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers, and copyright of the source code remains with the original authors; for distribution and use, refer to each project's license. Do not reproduce without permission.