

Python Server.close Method Code Examples

This article collects typical usage examples of the Python method calibre.utils.ipc.server.Server.close. If you have been wondering what Server.close does, how to call it, or what real-world uses look like, the curated examples here should help. You can also explore further usage examples for the containing class, calibre.utils.ipc.server.Server.


The sections below present 9 code examples of the Server.close method, sorted by popularity by default.
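
All nine examples share the same lifecycle: construct a Server, queue one or more ParallelJob instances with add_job, poll job.update() (or consume server.changed_jobs_queue) until the jobs finish, and release the worker pool with server.close(). The minimal sketch below distills that pattern. It is a sketch under assumptions, not code from any of the projects quoted here: it assumes a working calibre environment, and the module path 'my_plugin.jobs' and function name 'do_work' are hypothetical placeholders.

import time

from calibre.utils.ipc.job import ParallelJob
from calibre.utils.ipc.server import Server

def run_one_job():
    # A pool with a single worker process; close() must be called when
    # done, or the worker process is leaked.
    server = Server(pool_size=1)
    try:
        # An 'arbitrary' job imports the named module in a worker
        # process and calls the named function with the given payload.
        # 'my_plugin.jobs' and 'do_work' are hypothetical placeholders.
        job = ParallelJob('arbitrary', 'Example job', done=None,
                          args=['my_plugin.jobs', 'do_work', ('payload',)])
        server.add_job(job)
        while not job.is_finished:
            time.sleep(0.1)
            job.update()  # pull status and notifications from the worker
        job.update()      # final update, to collect the result
    finally:
        server.close()    # always shut the worker pool down
    return job.result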

Example 1: get_metadata

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
def get_metadata(stream, cpath=None):
    if not podofo:
        raise Unavailable(podofo_err)
    pt = PersistentTemporaryFile('_podofo.pdf')
    pt.write(stream.read())
    pt.close()
    server = Server(pool_size=1)
    job = ParallelJob('read_pdf_metadata', 'Read pdf metadata',
        lambda x,y:x,  args=[pt.name, cpath])
    server.add_job(job)
    while not job.is_finished:
        time.sleep(0.1)
        job.update()

    job.update()
    server.close()
    if job.result is None:
        raise ValueError('Failed to read metadata: ' + job.details)
    title, authors, creator, tags, ok = job.result
    if not ok:
        print('Failed to extract cover:')
        print(job.details)
    if title == '_':
        title = getattr(stream, 'name', _('Unknown'))
        title = os.path.splitext(title)[0]

    mi = MetaInformation(title, authors)
    if creator:
        mi.book_producer = creator
    if tags:
        mi.tags = tags
    if os.path.exists(pt.name): os.remove(pt.name)
    if ok:
        mi.cover = cpath
    return mi
Author: Eksmo, Project: calibre, Lines: 37, Source: __init__.py

Example 2: run

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
    def run(self):
        job = ParallelJob('move_library',
                'Move library from %s to %s'%(self.from_, self.to),
                lambda x,y:x,
                args=[self.from_, self.to])
        server = Server(pool_size=1)
        server.add_job(job)

        while not job.is_finished:
            time.sleep(0.2)
            job.update(consume_notifications=False)
            while True:
                try:
                    title = job.notifications.get_nowait()[0]
                    self.count += 1
                    self.result_queue.put((float(self.count)/self.total, title))
                except Empty:
                    break

        job.update()
        server.close()
        if not job.result:
            self.failed = True
            self.details = job.details

        if os.path.exists(job.log_path):
            os.remove(job.log_path)
Author: BobPyron, Project: calibre, Lines: 29, Source: move.py

Example 3: set_metadata

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
def set_metadata(stream, mi):
    if not podofo:
        raise Unavailable(podofo_err)
    with TemporaryFile('_podofo_read.pdf') as inputf, \
            TemporaryFile('_podofo_write.pdf') as outputf:
        server = Server(pool_size=1)
        with open(inputf, 'wb') as f:
            shutil.copyfileobj(stream, f)
        job = ParallelJob('write_pdf_metadata', 'Write pdf metadata',
            lambda x,y:x,  args=[inputf, outputf, mi.title, mi.authors,
                mi.book_producer, mi.tags])
        server.add_job(job)
        while not job.is_finished:
            time.sleep(0.1)
            job.update()

        job.update()
        server.close()
        if job.failed:
            prints(job.details)
        elif job.result:
            with open(outputf, 'rb') as f:
                f.seek(0, 2)
                if f.tell() > 100:
                    f.seek(0)
                    stream.seek(0)
                    stream.truncate()
                    shutil.copyfileobj(f, stream)
                    stream.flush()
    stream.seek(0)
Author: Eksmo, Project: calibre, Lines: 32, Source: __init__.py

Example 4: do_store_locations

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
def do_store_locations(books_to_scan, options, notification=lambda x,y:x):
    '''
    Master job, to launch child jobs to modify each ePub
    '''
    debug_print("start")
    server = Server()
    
    debug_print("options=%s" % (options))
    # Queue all the jobs
#     args = ['calibre_plugins.sonyutilities.jobs', 'do_sonyutilities_all',
    args = ['calibre_plugins.sonyutilities.jobs', 'do_store_bookmarks',
            (books_to_scan, options)]
#    debug_print("args=%s" % (args))
    debug_print("len(books_to_scan)=%d" % (len(books_to_scan)))
    job = ParallelJob('arbitrary', "Store locations", done=None, args=args)
    server.add_job(job)

    # This server is an arbitrary_n job, so there is a notifier available.
    # Set the % complete to a small number to avoid the 'unavailable' indicator
    notification(0.01, 'Reading device database')

    # dequeue the job results as they arrive, saving the results
    total = 1
    count = 0
    stored_locations = dict()
    while True:
        job = server.changed_jobs_queue.get()
        # A job can 'change' when it is not finished, for example if it
        # produces a notification. Ignore these.
        job.update()
        if not job.is_finished:
            debug_print("Job not finished")
            continue
#        debug_print("Job finished")
        # A job really finished. Get the information.
        stored_locations = job.result
#        book_id = job._book_id
#        stored_locations[book_id] = stored_location
        count += 1
        notification(float(count)/total, 'Storing locations')
        # Add this job's output to the current log
        #debug_print("Stored_location=", stored_locations)
        number_bookmarks = len(stored_locations) if stored_locations else 0
        debug_print("Stored_location count=%d" % number_bookmarks)
        debug_print(job.details)
        if count >= total:
            # All done!
            break

    server.close()
    debug_print("finished")
    # return the map as the job result
    return stored_locations, options
Author: auspex, Project: sonyutilities, Lines: 56, Source: jobs.py

Example 5: run

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
    def run(self):
        jobs, ids = set(), set()
        for t in self.tasks:
            for b in t:
                ids.add(b[0])
        progress = Progress(self.result_queue, self.tdir)
        server = Server() if self.spare_server is None else self.spare_server
        try:
            for i, task in enumerate(self.tasks):
                job = ParallelJob('read_metadata',
                    'Read metadata (%d of %d)'%(i, len(self.tasks)),
                    lambda x,y:x,  args=[task, self.tdir])
                jobs.add(job)
                server.add_job(job)

            while not self.canceled:
                time.sleep(0.2)
                running = False
                for job in jobs:
                    while True:
                        try:
                            id = job.notifications.get_nowait()[-1]
                            if id in ids:
                                progress(id)
                                ids.remove(id)
                        except Empty:
                            break
                    job.update(consume_notifications=False)
                    if not job.is_finished:
                        running = True

                if not running:
                    break
        finally:
            server.close()
        time.sleep(1)

        if self.canceled:
            return

        for id in ids:
            progress(id)

        for job in jobs:
            if job.failed:
                prints(job.details)
            if os.path.exists(job.log_path):
                try:
                    os.remove(job.log_path)
                except:
                    pass
Author: 089git, Project: calibre, Lines: 53, Source: worker.py

Example 6: process_pages

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
def process_pages(pages, opts, update, tdir):
    '''
    Render all identified comic pages.
    '''
    progress = Progress(len(pages), update)
    server = Server()
    jobs = []
    # Split the page list into per-worker task chunks.
    tasks = server.split(pages)
    for task in tasks:
        jobs.append(ParallelJob('render_pages', '', progress,
                                args=[task, tdir, opts]))
        server.add_job(jobs[-1])
    while True:
        time.sleep(1)
        running = False
        for job in jobs:
            while True:
                try:
                    x = job.notifications.get_nowait()
                    progress(*x)
                except Empty:
                    break
            job.update()
            if not job.is_finished:
                running = True
        if not running:
            break
    server.close()
    ans, failures = [], []

    for job in jobs:
        if job.failed or job.result is None:
            raise Exception(_('Failed to process comic: \n\n%s')%
                    job.log_file.read())
        pages, failures_ = job.result
        ans += pages
        failures += failures_
    return ans, failures
Author: Coi-l, Project: calibre, Lines: 41, Source: input.py

Example 7: do_download_worker

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
def do_download_worker(book_list,
                       options,
                       cpus,
                       merge=False,
                       notification=lambda x,y:x):
    '''
    Master job, to launch child jobs to extract ISBN for a set of books
    This is run as a worker job in the background to keep the UI more
    responsive and get around the memory leak issues as it will launch
    a child job for each book as a worker process
    '''
    server = Server(pool_size=cpus)

    logger.info(options['version'])
    total = 0
    alreadybad = []
    # Queue all the jobs
    logger.info("Adding jobs for URLs:")
    for book in book_list:
        logger.info("%s"%book['url'])
        if book['good']:
            total += 1
            args = ['calibre_plugins.fanficfare_plugin.jobs',
                    'do_download_for_worker',
                    (book,options,merge)]
            job = ParallelJob('arbitrary_n',
                              "url:(%s) id:(%s)"%(book['url'],book['calibre_id']),
                              done=None,
                              args=args)
            job._book = book
            server.add_job(job)
        else:
            # was already bad before the subprocess ever started.
            alreadybad.append(book)
    
    # This server is an arbitrary_n job, so there is a notifier available.
    # Set the % complete to a small number to avoid the 'unavailable' indicator
    notification(0.01, _('Downloading FanFiction Stories'))

    # dequeue the job results as they arrive, saving the results
    count = 0
    while True:
        job = server.changed_jobs_queue.get()
        # A job can 'change' when it is not finished, for example if it
        # produces a notification. Ignore these.
        job.update()
        if not job.is_finished:
            continue
        # A job really finished. Get the information.
        book_list.remove(job._book)
        book_list.append(job.result)
        book_id = job._book['calibre_id']
        count = count + 1
        notification(float(count)/total, '%d of %d stories finished downloading'%(count,total))
        # Add this job's output to the current log
        logger.info('Logfile for book ID %s (%s)'%(book_id, job._book['title']))
        logger.info(job.details)

        if count >= total:
            ## ordering first by good vs bad, then by listorder.
            good_list = filter(lambda x : x['good'], book_list)
            bad_list = filter(lambda x : not x['good'], book_list)
            good_list = sorted(good_list,key=lambda x : x['listorder'])
            bad_list = sorted(bad_list,key=lambda x : x['listorder'])
            
            logger.info("\n"+_("Download Results:")+"\n%s\n"%("\n".join([ "%(url)s %(comment)s" % book for book in good_list+bad_list])))
            
            logger.info("\n"+_("Successful:")+"\n%s\n"%("\n".join([book['url'] for book in good_list])))
            logger.info("\n"+_("Unsuccessful:")+"\n%s\n"%("\n".join([book['url'] for book in bad_list])))
            break

    server.close()
    
    # return the book list as the job result
    return book_list
Author: Zweibach, Project: FanFicFare, Lines: 77, Source: jobs.py

Example 8: _run

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
    def _run(self, tdir):
        from calibre.library.save_to_disk import config
        server = Server() if self.spare_server is None else self.spare_server
        ids = set(self.ids)
        tasks = server.split(list(ids))
        jobs = set()
        c = config()
        recs = {}
        for pref in c.preferences:
            recs[pref.name] = getattr(self.opts, pref.name)

        plugboards = self.db.prefs.get('plugboards', {})
        template_functions = self.db.prefs.get('user_template_functions', [])

        for i, task in enumerate(tasks):
            tids = [x[-1] for x in task]
            data = self.collect_data(tids, tdir)
            dpath = os.path.join(tdir, '%d.json'%i)
            with open(dpath, 'wb') as f:
                f.write(json.dumps(data, ensure_ascii=False).encode('utf-8'))

            job = ParallelJob('save_book',
                    'Save books (%d of %d)'%(i, len(tasks)),
                    lambda x,y:x,
                    args=[tids, dpath, plugboards, template_functions, self.path, recs])
            jobs.add(job)
            server.add_job(job)


        while not self.canceled:
            time.sleep(0.2)
            running = False
            for job in jobs:
                self.get_notifications(job, ids)
                if not job.is_finished:
                    running = True

            if not running:
                break

        for job in jobs:
            if not job.result:
                continue
            for id_, title, ok, tb in job.result:
                if id_ in ids:
                    self.result_queue.put((id_, title, ok, tb))
                    ids.remove(id_)

        server.close()
        time.sleep(1)

        if self.canceled:
            return

        for job in jobs:
            if job.failed:
                prints(job.details)
                self.error = job.details
            if os.path.exists(job.log_path):
                try:
                    os.remove(job.log_path)
                except:
                    pass
Author: 089git, Project: calibre, Lines: 65, Source: worker.py

Example 9: do_count_statistics

# Required import: from calibre.utils.ipc.server import Server [as alias]
# Or: from calibre.utils.ipc.server.Server import close [as alias]
def do_count_statistics(books_to_scan, pages_algorithm, use_goodreads, nltk_pickle, cpus, notification=lambda x, y: x):
    """
    Master job, to launch child jobs to count pages in this list of books
    """
    server = Server(pool_size=cpus)

    # Queue all the jobs
    for book_id, title, book_path, goodreads_id, statistics_to_run in books_to_scan:
        args = [
            "calibre_plugins.count_pages.jobs",
            "do_statistics_for_book",
            (book_path, pages_algorithm, goodreads_id, use_goodreads, statistics_to_run, nltk_pickle),
        ]
        job = ParallelJob("arbitrary", str(book_id), done=None, args=args)
        job._book_id = book_id
        job._title = title
        job._pages_algorithm = pages_algorithm
        job._goodreads_id = goodreads_id
        job._use_goodreads = use_goodreads
        job._statistics_to_run = statistics_to_run
        server.add_job(job)

    # This server is an arbitrary_n job, so there is a notifier available.
    # Set the % complete to a small number to avoid the 'unavailable' indicator
    notification(0.01, "Counting Statistics")

    # dequeue the job results as they arrive, saving the results
    total = len(books_to_scan)
    count = 0
    book_stats_map = dict()
    while True:
        job = server.changed_jobs_queue.get()
        # A job can 'change' when it is not finished, for example if it
        # produces a notification. Ignore these.
        job.update()
        if not job.is_finished:
            continue
        # A job really finished. Get the information.
        results = job.result
        book_id = job._book_id
        book_stats_map[book_id] = results
        count = count + 1
        notification(float(count) / total, "Counting Statistics")

        # Add this job's output to the current log
        print("-------------------------------")
        print("Logfile for book ID %d (%s)" % (book_id, job._title))

        for stat in job._statistics_to_run:
            if stat == cfg.STATISTIC_PAGE_COUNT:
                if job._use_goodreads:
                    if job._goodreads_id is not None:
                        if stat in results and results[stat]:
                            print("\tGoodreads edition has %d pages" % results[stat])
                        else:
                            print("\tFAILED TO GET PAGE COUNT FROM GOODREADS")
                else:
                    if stat in results and results[stat]:
                        print("\tFound %d pages" % results[stat])
            elif stat == cfg.STATISTIC_WORD_COUNT:
                if stat in results and results[stat]:
                    print("\tFound %d words" % results[stat])
            elif stat == cfg.STATISTIC_FLESCH_READING:
                if stat in results and results[stat]:
                    print("\tComputed %.1f Flesch Reading" % results[stat])
            elif stat == cfg.STATISTIC_FLESCH_GRADE:
                if stat in results and results[stat]:
                    print("\tComputed %.1f Flesch-Kincaid Grade" % results[stat])
            elif stat == cfg.STATISTIC_GUNNING_FOG:
                if stat in results and results[stat]:
                    print("\tComputed %.1f Gunning Fog Index" % results[stat])

        print(job.details)

        if count >= total:
            # All done!
            break

    server.close()
    # return the map as the job result
    return book_stats_map
Author: john-peterson, Project: count_pages, Lines: 83, Source: jobs.py


Note: The calibre.utils.ipc.server.Server.close examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, and copyright remains with the original authors. Refer to each project's license before distributing or using the code; do not reproduce without permission.