

Python Queue.join Method Code Examples

This article collects typical usage examples of the Python method queue.Queue.join. If you have been wondering what Queue.join does, how to call it, or what real code that uses it looks like, the hand-picked examples below should help. You can also explore further usage examples of the containing class, queue.Queue.


The text below presents 15 code examples of Queue.join, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the site recommend better Python code examples.
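
Before the collected examples, here is a minimal, self-contained sketch (written for this article, not taken from any of the projects below) of the pattern that all of the examples rely on: every item retrieved with Queue.get() must be acknowledged with Queue.task_done(), and Queue.join() blocks until that has happened for every item that was put(). The process function, the worker count of 4, and the ten integer work items are arbitrary placeholders.

import threading
from queue import Queue

def process(item):
    # hypothetical unit of work; replace with real processing
    print("processing", item)

def worker(q):
    while True:
        item = q.get()       # blocks until an item is available
        try:
            process(item)
        finally:
            q.task_done()    # every get() must be matched by one task_done()

q = Queue()
for _ in range(4):           # a small fixed-size pool of daemon worker threads
    threading.Thread(target=worker, args=(q,), daemon=True).start()

for item in range(10):       # enqueue the work items
    q.put(item)

q.join()                     # returns only after task_done() has been called for every item put()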

Example 1: fmb

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
def fmb(checked_files):
    '''
    Placeholder function.
    Manages the thread pool (arguably the wrong place for it).
    '''

    # Create the queues and the thread pool.
    global q, res_queue
    q = Queue()
    res_queue = Queue()

    for i in range(settings.thread_poolsize):
        t = threading.Thread(target=worker)
        t.daemon = True  # thread dies when the main (only non-daemon) thread exits
        t.start()

    # Stuff work items onto the queue.
    start = time.perf_counter()

    # Convey the names of the files into the queue.
    for item in checked_files.items():
        q.put(item)

    q.join()  # block until all tasks are done
    res_queue.join()

    checked_files.clear()

    while not res_queue.empty():
        i = res_queue.get()
        checked_files[i[0]] = i[1]
Author: vbronfman, Project: Develop, Lines: 35, Source: fmb_deploy_utils.py

Example 2: main

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
def main():
    # These three parameters are user defined
    client_id = "*****"
    username = "***"
    password = "*****"

    tokenDict = get_tokenDict(client_id, username, password)

    ts = time()
    download_dir = setup_download_dir()
    links = [l for l in get_links(client_id, tokenDict) if l.endswith(".jpg")]
    # Create a queue to communicate with the worker threads
    queue = Queue()
    # Create worker threads

    for x in range(int(argv[1])):
        worker = DownloadWorker(queue)
        # Setting daemon to True will let the main thread exit even though the workers are blocking
        worker.daemon = True
        worker.start()
    # Put the tasks into the queue as a tuple
    for link in links:
        logger.info("Queueing {}".format(link))
        queue.put((download_dir, link))
    # Causes the main thread to wait for the queue to finish processing all the tasks
    queue.join()
    print("Took {}".format(time() - ts))
Author: YaleLeo, Project: multithreadDownload, Lines: 29, Source: multithread.py

Example 3: crawl

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
  def crawl(self, urls, follow_links=False):
    links, seen = set(), set()
    queue = Queue()
    converged = threading.Event()

    def execute():
      while not converged.is_set():
        try:
          url = queue.get(timeout=0.1)
        except Empty:
          continue
        if url not in seen:
          seen.add(url)
          hrefs, rel_hrefs = self.execute(url)
          links.update(hrefs)
          if follow_links:
            for href in rel_hrefs:
              if href not in seen:
                queue.put(href)
        queue.task_done()

    for url in urls:
      queue.put(url)
    for _ in range(self._threads):
      worker = threading.Thread(target=execute)
      worker.daemon = True
      worker.start()
    queue.join()
    converged.set()
    return links
Author: kamilchm, Project: pex, Lines: 32, Source: crawler.py

Example 4: is_alive

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
def is_alive(ip_addr):
    lock = threading.Lock()
    probe_ports = [22, 3389]
    q = Queue()
    status = False
    for port in probe_ports:
        q.put(port)

    class Probe(threading.Thread):

        def __init__(self):
            threading.Thread.__init__(self)

        def run(self):
            try:
                self.port = q.get(block=False)
            except Empty:
                return False
            if tcp_probe(ip_addr, self.port):
                with lock:
                    nonlocal status
                    status = True
                # print("Success to connect to " + ip_addr + " " + str(self.port))
            # else:
                # print("Failed to connect to " + ip_addr + " " + str(self.port))
            q.task_done()

    for x in range(5):
        p = Probe()
        p.daemon = True
        p.start()

    q.join()
    return status
Author: humw, Project: ToolBox, Lines: 36, Source: lib.py

Example 5: __init__

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
class Mission:
    def __init__(self, max_thread):
        self.queue = Queue()
        self.max_thread = max_thread

    def __enter__(self):
        for x in range(self.max_thread):
            thread = Thread(target=self._threader)
            thread.daemon = True
            thread.start()
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        self.queue.join()

    def send_task(self, func, *args):
        self.queue.put((func, args))

    def _threader(self):
        while True:
            try:
                func, args = self.queue.get()
                func(*args)
                self.queue.task_done()
            except queue.Empty:
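                # note: the get() above uses no timeout and blocks until an item
                # is available, so this Empty branch is effectively never reached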
                pass
Author: YogaPan, Project: linux-basic, Lines: 28, Source: mission.py

Example 6: add_playlist

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
    def add_playlist(self, playlist_id):
        def update_track_uri():
            while True:
                try:
                    current_track = q.get(block=False)
                    current_track.url = self.session.get_media_url(current_track.id)
                    q.task_done()
                except Empty:
                    # nothing to do
                    break
            pass

        playlist_guid = UUID(int=int(playlist_id))
        tracks = self.session.get_playlist_tracks(playlist_guid)
        q = Queue()
        for track in tracks:
            self.tracklist.add_track(track)
            q.put(track)

        for i in range(0, self.max_concurrent_url_resolvers):
            t = Thread(target=update_track_uri)
            t.start()

        q.join()
        return {"total_tracks": len(self.tracklist)}
Author: mones88, Project: tdop, Lines: 27, Source: commands.py

Example 7: __init__

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
class Work:
    def __init__(self, threads = 100, offset = 0, timeout = 15.0):
        self.lines = Lines(f=_DOMAINS_FILE)
        self.num_lines = len(self.lines.d)
        self.concurrent = threads
        self.q = Queue(self.concurrent * 2)
        for self.i in range(self.concurrent):
            self.t = Thread(target=self.doWork)
            self.t.daemon = True
            self.t.start()
        try:
            for self.i in range(offset, self.num_lines):
                self.q.put(self.i)
            self.q.join()
        except KeyboardInterrupt:
            sys.exit(1)

    def doWork(self):
        while True:
            n = int(self.q.get())
            _domain = self.lines.find_line(n=n)

            c = Check(url = _domain)
            ip = c.ip
            if ip:
                print(ip, end = ' ')
                print(_domain)
                save(ip=ip, domain=_domain)

            self.q.task_done()
Author: hoytnix, Project: spidey, Lines: 32, Source: ips.py

Example 8: _ThreadQueue

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
class ThreadPool:
    class _ThreadQueue(threading.Thread):
        def __init__(self, pool, *args, **kwargs):
            super(ThreadPool._ThreadQueue, self).__init__(*args, **kwargs)
            self.tasks = pool.tasks
            self.daemon = True
            self.start()

        def run(self):
            while True:
                # Pick a task and run it
                task,args = self.tasks.get(True)
                task(*args)
                self.tasks.task_done()

    def __init__(self, num=10):
        self.tasks = Queue(num)
        for _ in range(num):
            self._ThreadQueue(self)

    def add_task(self, target, args):
        self.tasks.put((target, args))

    def wait_completion(self):
        self.tasks.join()
Author: Nakrez, Project: RePy, Lines: 27, Source: testme.py

Example 9: execute

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
    def execute(self, item_generator, item_processor):
        # Create a queue to communicate with the worker threads
        try:
            queue = Queue()
            threads = []
            # Create the worker threads (self.no_of_threads of them)
            for x in range(self.no_of_threads):
                worker = Neo4jUploadWorker(queue, item_processor)
                # Setting daemon to True will let the main thread exit even though the workers are blocking
                worker.daemon = True
                worker.start()
                threads.append(worker)
            # Put the tasks into the queue as a tuple
            for item in item_generator.generate:
                queue.put(item)
            # Causes the main thread to wait for the queue to finish processing all the tasks
            queue.join()

            # stop workers
            for i in range(self.no_of_threads):
                queue.put(None)
            for t in threads:
                t.join()
        finally:
            print('In the finally')
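            # note: this cleanup also runs on the success path; the extra None
            # sentinels are harmless, and join() on finished threads returns at once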
            item_generator.close()
            for i in range(self.no_of_threads):
                queue.put(None)
            for t in threads:
                t.join()
Author: zentiment, Project: Snomed, Lines: 32, Source: thread_executor.py

Example 10: main

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
def main():
    work_queue = Queue()
    results_queue = Queue()
    throttle = Throttle(10)

    for place in PLACES:
        work_queue.put(place)

    threads = [
        Thread(target=worker, args=(work_queue, results_queue, throttle))
        for _ in range(THREAD_POOL_SIZE)
    ]

    for thread in threads:
        thread.start()

    work_queue.join()
    print()
    while threads:
        threads.pop().join()

    while not results_queue.empty():
        result = results_queue.get()

        if isinstance(result, Exception):
            raise result

        present_result(result)
Author: PacktPublishing, Project: Expert-Python-Programming_Second-Edition, Lines: 30, Source: threads_exceptions_and_throttling.py

Example 11: threadpool

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
def threadpool(action, datas):
    if not datas:
        return

    # Use threads to process several items at the same time
    q = Queue()
    for data in datas:
        q.put(data)

    def worker():
        while True:
            try:
                data = q.get_nowait()
            except Empty:
                return
            else:
                action(data)
                q.task_done()

    # Build our pool
    threads = []
    for n in range(4):
        t = Thread(target=worker, name='worker-'+str(n))
        threads.append(t)
        t.start()

    # Wait for completion
    q.join()

    # Tidy our threads
    for t in threads:
        t.join()
Author: PeterJCLaw, Project: srweb, Lines: 34, Source: shrink-images.py

Example 12: get_book_list

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
 def get_book_list(self,mydb,db):
 # Build the search URL from the parameters below
 # and fill the main thread's task queue
     search_type = [1,2,3,6,7,9,12, 13, 14, 15, 16, 17]  # novel category parameters
     search_page_count = 80  # number of novels per page of search results
     search_rand = str(math.ceil(time.time()))  #'1464074348875'  # time.time(): seconds elapsed since the epoch (1970-01-01)
     coll = mydb.get_collection(db, 'book_info')
     worker_queue = Queue()
     for j in range(1,5,1):
         thread = my_thread(mydb,coll,worker_queue)
         thread.daemon = True
         thread.start()
     for i in search_type:
         url_list = []
         search_page_no = 1  # search results page number
         search_url = self.host + 'search.aspx?q=' + '&cp=' + str(i) + '&sort=9' + '&rn=' + str(
             search_page_count) + '&pn=' + str(search_page_no) + '&rand=' + search_rand
         req = urllib.request.Request(search_url, headers=self.headers)
         page = urllib.request.urlopen(req).read()
         total = json.loads(page.decode())['total']
         print('total:',total)
         if total < 80:
             search_page_range = 1
         else:
             search_page_range = math.ceil(total / search_page_count)
         for search_page_no in range(1, search_page_range, 1):
             if search_page_no == search_page_range:
                 search_page_count = total % 80
             search_url = self.host + '/search.aspx?q=' + '&cp=' + str(i) + '&sort=9' + '&rn=' + str(
                 search_page_count) + '&pn=' + str(search_page_no) + '&rand=' + search_rand
             worker_queue.put(search_url)
     worker_queue.join()
     return url_list
Author: Triswuting, Project: xxsy_novel_spider, Lines: 35, Source: Novel_Spider_Class.py

Example 13: main

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
def main():
    global pbcount
    global domains_count
    start_time = time.time()

    # Use queues for handing out tasks and collecting results
    queue = Queue()
    result_queue = Queue()

    #fr_success = os.path.join(domain_temp, "good.txt")
    #fr_errors  = os.path.join(domain_temp, "error.txt")

    # First, load every URL from the file into the task queue
    domains_count = len(open('source.txt', 'r').readlines())
    with open(source_file) as f:
        for line in f:
            queue.put(line.strip())

    # Then start the required number of threads
    for i in range(theard_count):
        thread = Thread(target=run, args=(queue, result_queue))
        thread.daemon = True
        thread.start()

    # And wait until all tasks are done
    queue.join()
Author: tw1ns, Project: python, Lines: 28, Source: brute.py

Example 14: run

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
    def run(self):
        args = list(islice(self.reqs, self.requests))
        if self.shuffle:
            random.shuffle(args)
        print("Total requests: %d" % len(args))
        print("Concurrency   : %d" % self.concurrency)

        starttime = time.time()
        q, p = Queue(), Queue()
        for _ in range(self.concurrency):
            t = Thread(target=worker, args=(self.host, q, p, self.verbose))
            t.daemon = True
            t.start()
        for a in args:
            q.put(a)
        q.join()

        outputs = []
        for _ in range(self.requests):
            outputs.append(p.get())

        elapsed = time.time() - starttime
        print()
        print("Total requests: %d" % len(args))
        print("Concurrency   : %d" % self.concurrency)
        print("Elapsed time  : %.3fs" % elapsed)
        print("Avg time p/req: %.3fs" % (elapsed/len(args)))
        print("Received (per status code or error):")
        for c, n in Counter(outputs).items():
            print("  %s: %d" % (c, n))
Author: oldtimestj, Project: splash, Lines: 32, Source: stress.py

Example 15: reupload_resources

# Required import: from queue import Queue [as alias]
# Or: from queue.Queue import join [as alias]
def reupload_resources(ckan, to_reupload, resource_id_legacy_url, auth, num_threads):
    # This is not a lot of code, but it accounts for most of the time spent in
    # this script; hence the uploads run in parallel.
    def upload_worker():
        while True:
            task = q.get()
            if task is None:
                break
            reupload_obj, legacy_url = task
            reupload_resource(ckan, reupload_obj, legacy_url, auth)
            q.task_done()

    q = Queue()
    threads = []
    for i in range(num_threads):
        t = Thread(target=upload_worker)
        threads.append(t)
        t.start()

    logger.info("%d objects to be re-uploaded" % (len(to_reupload)))
    for item in to_reupload:
        q.put(item)
    q.join()

    for thread in threads:
        q.put(None)
    for thread in threads:
        thread.join()
Author: muccg, Project: bpa-ingest, Lines: 30, Source: sync.py


Note: The queue.Queue.join examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to its original authors, and distribution and use should follow the License of the corresponding project. Please do not reproduce this article without permission.