

Python Pool.apply Method Code Examples

This article collects typical usage examples of the Pool.apply method from Python's multiprocessing module. If you are unsure what Pool.apply does, how to call it, or what it looks like in real code, the curated examples below should help. You can also browse further usage examples for the enclosing class, multiprocessing.Pool.


The following shows 15 code examples of the Pool.apply method, sorted by popularity by default.
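
Before diving into the examples, here is the method's basic contract: Pool.apply(func, args) blocks the calling process until func has finished running in a worker process and returns func's result, while Pool.apply_async submits the call and returns an AsyncResult immediately. A minimal, self-contained sketch (the worker function square is illustrative, not taken from any example below):

from multiprocessing import Pool

def square(x):
    return x * x

if __name__ == '__main__':
    with Pool(processes=4) as pool:
        # apply() blocks until the worker returns, so repeated calls run serially
        print(pool.apply(square, (3,)))        # 9
        # apply_async() returns an AsyncResult at once; get() waits for the value
        result = pool.apply_async(square, (4,))
        print(result.get(timeout=5))           # 16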

Example 1: runBamHash

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def runBamHash(obj):
    pool = Pool(processes=10)
    for element in obj.data_files:
        pool.apply(runBamHashWorker, (element, ))
        # pool.apply_async(runBamHashWorker, (element, ))
    pool.close()
    pool.join()
Developer ID: bradleycolquitt, Project: umi, Lines: 9, Source: umi_util.py

Example 2: handle

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
    def handle(self, *args, **options):
        replace = options.get('replace')
        for route in args:
            app_label, model_name, field_name = route.rsplit('.')
            model_class = get_model(app_label, model_name)
            field = model_class._meta.get_field(field_name)

            queryset = model_class.objects \
                .exclude(**{'%s__isnull' % field_name: True}) \
                .exclude(**{field_name: ''})
            images = queryset.values_list(field_name, flat=True)

            pool = Pool(
                initializer=init_progressbar,
                initargs=[queryset.count()]
            )
            args = [
                dict(
                    file_name=file_name,
                    variations=field.variations,
                    replace=replace,
                )
                for file_name in images
            ]
            pool.map(render_field_variations, args)
            pool.apply(finish_progressbar)
            pool.close()
            pool.join()
Developer ID: webgoeson, Project: django-stdimage, Lines: 30, Source: rendervariations.py

Example 3: render_in_parallel

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def render_in_parallel(field, images, count, replace):
    pool = Pool(initializer=init_progressbar, initargs=[count])
    args = [
        dict(file_name=file_name, variations=field.variations, replace=replace, storage=field.storage)
        for file_name in images
    ]
    pool.map(render_field_variations, args)
    pool.apply(finish_progressbar)
    pool.close()
    pool.join()
Developer ID: ArrangementMaker, Project: django-stdimage, Lines: 12, Source: rendervariations.py

Example 4: main

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def main():
    # results = requests.get("http://www.xiami.com/artist/1", headers={'User-Agent': user_agent})
    # print results.content

    star_pool = Pool(10)
    for i in range(1630, 10000):
        star_pool.apply(get_star, (i, ))
        # print star_pool.apply(get_star, (i, ))
    star_pool.close()
    star_pool.join()
Developer ID: bspeng922, Project: pyutils, Lines: 12, Source: star_collecter.py

Example 5: run

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def run():
    ensure_index_built()

    activate_queue = Queue(1)
    keybind_process = Process(target=_start_keybind_process, args=(activate_queue,))
    keybind_process.start()

    translate_pool = Pool(processes=1, initializer=_init_translate_process,
                          maxtasksperchild=1)

    stop = []

    def stop_now(sig, *_):
        stop.append(sig)
        activate_queue.close()
        debug('stop due to signal %s' % sig)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        signal.signal(signal.SIGUSR1, signal.SIG_DFL)

    signal.signal(signal.SIGTERM, stop_now)
    signal.signal(signal.SIGINT, stop_now)
    signal.signal(signal.SIGUSR1, stop_now)

    while not stop:
        got = None
        try:
            got = activate_queue.get()
        except:
            if not stop:
                raise
        debug('parent got: %s' % got)

        if not got:
            break

        debug('invoke translate')
        try:
            translate_pool.apply(_run_translate_process)
        except Exception as e:
            debug('failed: %s' % e)

    if stop[0] == signal.SIGUSR1:
        # keybind child signaled an error
        keybind_process.join(10)
        os._exit(7)

    debug('exiting normally')
    keybind_process.terminate()

    # FIXME: this always hangs.  Why?
    # That's why we use _exit instead.
    #translate_pool.terminate()

    os._exit(0)
Developer ID: rohanpm, Project: qingfanyi, Lines: 57, Source: main.py

Example 6: main

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def main():
    page_id = 1
    artist_pool = Pool(pool_size)
    while True:
        print("Current page: %s, processing..." % page_id)
        artists, has_next = get_page_stars(page_id)
        for artist in artists:
            artist_pool.apply(get_star, (artist[1], ))
        if not has_next:
            break
        page_id += 1
    artist_pool.close()
    artist_pool.join()
    print "total page: %s " % page_id
Developer ID: bspeng922, Project: pyutils, Lines: 16, Source: star_col2.py

Example 7: main

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def main():
    usage = """
    Copy data from one MongoDB instance to another.

    Example:
        arctic_copy_data --log "Copying data" --src library@host1 --dest library@host2 symbol1 symbol2
    """
    setup_logging()
    p = argparse.ArgumentParser(usage=usage)
    p.add_argument("--src", required=True, help="Source MongoDB like: [email protected]:port")
    p.add_argument("--dest", required=True, help="Destination MongoDB like: [email protected]:port")
    p.add_argument("--log", required=True, help="Data CR")
    p.add_argument("--force", default=False, action='store_true', help="Force overwrite of existing data for symbol.")
    p.add_argument("--splice", default=False, action='store_true', help="Keep existing data before and after the new data.")
    p.add_argument("--parallel", default=1, type=int, help="Number of imports to run in parallel.")
    p.add_argument("symbols", nargs='+', type=str, help="List of symbol regexes to copy from source to dest.")

    opts = p.parse_args()

    src = get_arctic_lib(opts.src)
    dest = get_arctic_lib(opts.dest)

    logger.info("Copying data from %s -> %s" % (opts.src, opts.dest))

    # Prune the list of symbols from the library according to the list of symbols.
    required_symbols = set()
    for symbol in opts.symbols:
        required_symbols.update(src.list_symbols(regex=symbol))
    required_symbols = sorted(required_symbols)

    logger.info("Copying: {} symbols".format(len(required_symbols)))
    if len(required_symbols) < 1:
        logger.warn("No symbols found that matched those provided.")
        return

    # Function we'll call to do the data copying
    copy_symbol = copy_symbols_helper(src, dest, opts.log, opts.force, opts.splice)

    if opts.parallel > 1:
        logger.info("Starting: {} jobs".format(opts.parallel))
        pool = Pool(processes=opts.parallel)
        # Break the jobs into chunks for multiprocessing
        chunk_size = len(required_symbols) // opts.parallel
        chunk_size = max(chunk_size, 1)
        chunks = [required_symbols[offs:offs + chunk_size] for offs in
                  range(0, len(required_symbols), chunk_size)]
        assert sum(len(x) for x in chunks) == len(required_symbols)
        # map() hands each worker one chunk of symbols; apply() would block
        # on a single call and unpack the chunk list as positional arguments.
        pool.map(copy_symbol, chunks)
    else:
        copy_symbol(required_symbols)
Developer ID: cozmacib, Project: arctic, Lines: 52, Source: arctic_copy_data.py

Example 8: testing

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def testing():
    readBlob = ['12'] * 100
    print(readBlob)
    freeze_support()
    pool = Pool(processes=cpu_count(), maxtasksperchild=1)
    step = 10
    for i in range(0, len(readBlob), step):
        print(i)
        pool.apply(recfromreadblob,
                   args=(readBlob[i:min(len(readBlob), i+step)],
                         len(readBlob)-i))
    pool.close()
    pool.join()
    pool.terminate()
    return
Developer ID: Ikudza, Project: my_test_file, Lines: 17, Source: multiprocessing+tes.py

Example 9: find_dups_dev

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def find_dups_dev(ref_scores, length, max_plog, min_hlog, clusters, processors):
    from multiprocessing import Manager, Pool
    m = Manager()
    q = m.Queue()
    my_dict_o = m.dict()
    p = Pool(processors)
    curr_dir=os.getcwd()
    dup_dict = {}
    duplicate_file = open("duplicate_ids.txt", "w")
    genome_specific_list_of_lists = []
    files = os.listdir(curr_dir)
    files_and_temp_names = []
    for idx, f in enumerate(files):
        files_and_temp_names.append([str(idx), os.path.join(curr_dir, f), ref_scores, length, max_plog, min_hlog, clusters, processors])
    # Multiprocessing here (mp_shell for Ctrl+F)
    """How to test this function???"""
    for process in files_and_temp_names:
        p.apply(_perform_workflow_fdd, args=(q,my_dict_o,process))
    # Get rid of any duplicate values in queue
    unique = set()
    while not q.empty():
        unique.add(q.get())
    """This generates the list of all possible CDSs"""
    with open("dup_refs.txt", "a") as ref_file:
        ref_file.write("ID"+"\n")
        ref_file.write("\n".join(clusters)+"\n")
    ref_file.close()
    try:
        generate_dup_matrix()
        os.system("paste dup_refs.txt dup_values > dup_matrix.txt")
    except:
        print("problem generating duplicate matrix")
    """new way to report duplicates"""
    duplicate_IDs = []
    for line in open("dup_matrix.txt"):
        fields = line.split()
        if fields[0] == "ID":
            pass
        else:
            for field in fields[1:]:
                if float(field)>1:
                    if fields[0] in duplicate_IDs:
                        pass
                    else:
                        duplicate_IDs.append(fields[0])
    duplicate_file.write("\n".join(duplicate_IDs))
    duplicate_file.close()
    return duplicate_IDs
Developer ID: jasonsahl, Project: LS-BSR, Lines: 50, Source: util.py

Example 10: usingMultiprocess

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def usingMultiprocess():
    ''' using multiprocessing module for python concurrent programming '''
    num = 100

    print('************ using original process ***********')
    input_conn, output_conn = Pipe()
    for m in [-1, 1, 2, 3]:
        p = Process(target=obtainQuadraticSumByPipe, args=(input_conn, num, m))
        p.start()
        print(output_conn.recv())

    print('------------- using Pool -------------')
    pool = Pool(processes=4)
    for m in [-1, 1, 2, 3]:
        pool.apply(printQuadraticSum, (num, m))
Developer ID: shuqin, Project: pystudy, Lines: 18, Source: pp_lambda_usage.py

Example 11: render

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def render(field, images, count, replace, do_render):
    pool = Pool(
        initializer=init_progressbar,
        initargs=[count]
    )
    args = [
        dict(
            file_name=file_name,
            do_render=do_render,
            variations=field.variations,
            replace=replace,
            storage=field.storage.deconstruct()[0],
        )
        for file_name in images
    ]
    pool.map(render_field_variations, args)
    pool.apply(finish_progressbar)
    pool.close()
    pool.join()
Developer ID: ClairePhila, Project: django-stdimage, Lines: 21, Source: rendervariations.py

Example 12: multiprocess_pool_sync

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def multiprocess_pool_sync():
    # Note that apply() blocks until each call returns its result, so this
    # list comprehension processes the texts one at a time; the pool size of
    # 10 caps concurrency, but apply() never exploits it.
    pool = Pool(processes=10)
    results = [pool.apply(func=freqsPerText, args=(text,)) for text in texts]
    print("Finished processing texts with Pool")
    print("Pool returned", len(results), "results")
    return results
Developer ID: abunuwas, Project: threading_multiprocessing_experiments, Lines: 12, Source: multiprocessing_experiments.py
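
Since example 12 shows that apply() processes the texts one at a time, a natural companion is the asynchronous variant. The sketch below assumes the same freqsPerText function and texts list as example 12 and actually runs the calls concurrently across the ten workers:

from multiprocessing import Pool

def multiprocess_pool_async():
    # apply_async() submits every task immediately; the workers run them
    # concurrently, and get() collects each result as it becomes ready.
    with Pool(processes=10) as pool:
        pending = [pool.apply_async(freqsPerText, (text,)) for text in texts]
        return [r.get() for r in pending]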

Example 13: MultiprocessingView

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
class MultiprocessingView(Singleton):
    """Provides a parallel view (similar to IPython)"""

    def __init__(self, *args, **kwargs):
        self._args = args
        self._kwargs = kwargs
        if not hasattr(self, 'pool'):
            self.pool = None

    def map(self, *args, **kwargs):
        if self.pool is None:
            self.pool = Pool(*self._args, **self._kwargs)
        return self.pool.map(*args, **kwargs)

    def apply(self, func, *args, **kwargs):
        if self.pool is None:
            self.pool = Pool(*self._args, **self._kwargs)
        return self.pool.apply(func, args=args, **kwargs)

    def apply_async(self, func, *args, **kwargs):
        if self.pool is None:
            self.pool = Pool(*self._args, **self._kwargs)
        return self.pool.apply_async(func, args=args, **kwargs)

    def imap(self, func, *args, **kwargs):
        if self.pool is None:
            self.pool = Pool(*self._args, **self._kwargs)
        return self.pool.imap(func, *args, **kwargs)

    def __len__(self):
        if len(self._args) > 0:
            return self._args[0]
        elif "processes" in self._kwargs:
            return self._kwargs["processes"]
        else:
            return cpu_count()

    def shutdown(self):
        if self.pool is not None:
            logger.debug('Terminating multiprocessing pool')
            try:
                self.pool.terminate()
            except Exception as e:
                logger.debug('Could not terminate multiprocessing pool.')
                raise e
            else:
                self.pool = None
        else:
            logger.debug('No multiprocessing pool to shut down.')

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.shutdown()
Developer ID: KristianJensen, Project: cameo, Lines: 57, Source: parallel.py

Example 14: runTest

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
    def runTest(self):
        pool = Pool(initializer=initfunc)
        results = resultset.load(self.filename)
        self.settings.update(results.meta())
        self.settings.load_test(informational=True)
        plotters.init_matplotlib("-", False, True)
        for p in self.settings.PLOTS.keys():
            plot = pool.apply(plot_one, (self.settings, p, results))
            if not plot.verify() and p not in PLOTS_MAY_FAIL:
                raise self.failureException(
                    "Verification of plot '%s' failed" % p)
Developer ID: tohojo, Project: flent, Lines: 13, Source: test_plotters.py

Example 15: run_all_tests

# Required import: from multiprocessing import Pool [as alias]
# Or: from multiprocessing.Pool import apply [as alias]
def run_all_tests():
    to_run = []
    for solution in find_solutions():
        test = find_test_for_solution(solution)
        if test:
            to_run.append((solution, test))

    pool = Pool(maxtasksperchild=1)
    failed_solutions = []
    for solution, test in sorted(to_run):
        print("----------------------------------------------------------------------")
        print("----------------------------------------------------------------------")
        print("----------------------------------------------------------------------")
        print("\n\t{1}\n\tRunning tests for {0}\n".format(solution, test))
        if not pool.apply(run_one_testsuite_in_isolated_environment, args=(solution, test)):
            failed_solutions.append(solution)

    return failed_solutions
Developer ID: 0x1001, Project: Python-Course, Lines: 20, Source: run_all_tests.py


Note: The multiprocessing.Pool.apply examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.