

Python download_manager.DownloadManager Class Code Examples

This article collects typical usage examples of the Python class mozregression.download_manager.DownloadManager. If you are wondering what the DownloadManager class is for, how to use it, or what real-world DownloadManager code looks like, the curated class examples below should help.


Three code examples of the DownloadManager class are shown below, sorted by popularity by default.
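Before looking at how gecko-dev wraps DownloadManager, here is a minimal standalone sketch of the API exactly as it is exercised in the examples below. The destination directory and URL are placeholders, and the PersistLimit import path is assumed from mozregression's module layout; treat this as a sketch, not a verified recipe.

import os

from mozregression.download_manager import DownloadManager
from mozregression.persist_limit import PersistLimit  # assumed import path

# Keep at most ~1 GiB of downloads on disk, but always retain the 4 newest files.
persist_limit = PersistLimit(1024 * 1024 * 1024, 4)

dest_dir = "/tmp/my-downloads"  # placeholder directory
manager = DownloadManager(dest_dir, persist_limit=persist_limit)

url = "https://example.org/builds/firefox.tar.bz2"  # placeholder URL
try:
    dl = manager.download(url, os.path.basename(url))
    if dl:         # the examples below guard on this, presumably because
        dl.wait()  # download() can return None (e.g. file already present)
    print("saved to", os.path.join(dest_dir, os.path.basename(url)))
finally:
    manager.cancel()  # cancel any background downloads still in flight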

Example 1: __init__

    def __init__(self, cache_dir, log=None):
        # TODO: instead of storing N artifact packages, store M megabytes.
        CacheManager.__init__(self, cache_dir, 'fetch', MAX_CACHED_ARTIFACTS, cache_callback=self.delete_file, log=log)
        self._cache_dir = cache_dir
        size_limit = 1024 * 1024 * 1024 # 1Gb in bytes.
        file_limit = 4 # But always keep at least 4 old artifacts around.
        persist_limit = PersistLimit(size_limit, file_limit)
        self._download_manager = DownloadManager(self._cache_dir, persist_limit=persist_limit)
Author: LongyunZhang | Project: gecko-dev | Lines: 8 | Source: artifacts.py
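The TODO in Example 1 notes that the cache should eventually be capped by megabytes rather than by a fixed artifact count. Using only the PersistLimit constructor already shown above, a hypothetical helper expressing the budget in megabytes could look like the following (the helper name and defaults are illustrative, not part of gecko-dev or mozregression):

from mozregression.persist_limit import PersistLimit  # assumed import path


def make_persist_limit(max_megabytes=1024, min_files=4):
    """Hypothetical helper: build a PersistLimit from a megabyte budget.

    PersistLimit takes a size limit in bytes plus a minimum number of files
    to keep, exactly as ArtifactCache.__init__ uses it above.
    """
    return PersistLimit(max_megabytes * 1024 * 1024, min_files)


# Equivalent to the 1 GiB / 4-file configuration used in Example 1.
persist_limit = make_persist_limit()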

Example 2: ArtifactCache

class ArtifactCache(CacheManager):
    '''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''

    def __init__(self, cache_dir, log=None):
        # TODO: instead of storing N artifact packages, store M megabytes.
        CacheManager.__init__(self, cache_dir, 'fetch', MAX_CACHED_ARTIFACTS, cache_callback=self.delete_file, log=log)
        self._cache_dir = cache_dir
        size_limit = 1024 * 1024 * 1024 # 1Gb in bytes.
        file_limit = 4 # But always keep at least 4 old artifacts around.
        persist_limit = PersistLimit(size_limit, file_limit)
        self._download_manager = DownloadManager(self._cache_dir, persist_limit=persist_limit)

    def delete_file(self, key, value):
        try:
            os.remove(value)
            self.log(logging.INFO, 'artifact',
                {'filename': value},
                'Purged artifact {filename}')
        except (OSError, IOError):
            pass

        try:
            os.remove(value + PROCESSED_SUFFIX)
            self.log(logging.INFO, 'artifact',
                {'filename': value + PROCESSED_SUFFIX},
                'Purged processed artifact {filename}')
        except (OSError, IOError):
            pass

    @cachedmethod(operator.attrgetter('_cache'))
    def fetch(self, url, force=False):
        # We download to a temporary name like HASH[:16]-basename to
        # differentiate among URLs with the same basenames.  We used to then
        # extract the build ID from the downloaded artifact and use it to make a
        # human readable unique name, but extracting build IDs is time consuming
        # (especially on Mac OS X, where we must mount a large DMG file).
        hash = hashlib.sha256(url).hexdigest()[:16]
        fname = hash + '-' + os.path.basename(url)
        self.log(logging.INFO, 'artifact',
            {'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
            'Downloading to temporary location {path}')
        try:
            dl = self._download_manager.download(url, fname)
            if dl:
                dl.wait()
            self.log(logging.INFO, 'artifact',
                {'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
                'Downloaded artifact to {path}')
            return os.path.abspath(mozpath.join(self._cache_dir, fname))
        finally:
            # Cancel any background downloads in progress.
            self._download_manager.cancel()

    def print_last_item(self, args, sorted_kwargs, result):
        url, = args
        self.log(logging.INFO, 'artifact',
            {'url': url},
            'Last installed binaries from url {url}')
        self.log(logging.INFO, 'artifact',
            {'filename': result},
            'Last installed binaries from local file {filename}')
        self.log(logging.INFO, 'artifact',
            {'filename': result + PROCESSED_SUFFIX},
            'Last installed binaries from local processed file {filename}')
Author: LongyunZhang | Project: gecko-dev | Lines: 64 | Source: artifacts.py
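Assuming the surrounding mozbuild environment (CacheManager, mozpath, MAX_CACHED_ARTIFACTS, PROCESSED_SUFFIX, and so on) is importable, using this class boils down to constructing it with a cache directory and calling fetch() with an artifact URL; thanks to the @cachedmethod decorator, repeated calls for the same URL reuse the earlier result. A rough usage sketch with placeholder values:

# Sketch only: assumes ArtifactCache and its dependencies are importable,
# i.e. this runs inside the mozbuild/mach environment the example comes from.
cache = ArtifactCache("/tmp/artifact-cache")  # placeholder cache_dir

url = "https://example.org/artifacts/target.tar.bz2"  # placeholder artifact URL
path = cache.fetch(url)  # downloads into cache_dir and returns the absolute path
print("artifact stored at", path)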

Example 3: ArtifactCache

class ArtifactCache(CacheManager):
    """Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk."""

    def __init__(self, cache_dir, log=None, skip_cache=False):
        # TODO: instead of storing N artifact packages, store M megabytes.
        CacheManager.__init__(
            self,
            cache_dir,
            "fetch",
            MAX_CACHED_ARTIFACTS,
            cache_callback=self.delete_file,
            log=log,
            skip_cache=skip_cache,
        )
        self._cache_dir = cache_dir
        size_limit = 1024 * 1024 * 1024  # 1Gb in bytes.
        file_limit = 4  # But always keep at least 4 old artifacts around.
        persist_limit = PersistLimit(size_limit, file_limit)
        self._download_manager = DownloadManager(self._cache_dir, persist_limit=persist_limit)
        self._last_dl_update = -1

    def delete_file(self, key, value):
        try:
            os.remove(value)
            self.log(logging.INFO, "artifact", {"filename": value}, "Purged artifact {filename}")
        except (OSError, IOError):
            pass

        try:
            os.remove(value + PROCESSED_SUFFIX)
            self.log(
                logging.INFO, "artifact", {"filename": value + PROCESSED_SUFFIX}, "Purged processed artifact {filename}"
            )
        except (OSError, IOError):
            pass

    @cachedmethod(operator.attrgetter("_cache"))
    def fetch(self, url, force=False):
        # We download to a temporary name like HASH[:16]-basename to
        # differentiate among URLs with the same basenames.  We used to then
        # extract the build ID from the downloaded artifact and use it to make a
        # human readable unique name, but extracting build IDs is time consuming
        # (especially on Mac OS X, where we must mount a large DMG file).
        hash = hashlib.sha256(url).hexdigest()[:16]
        fname = hash + "-" + os.path.basename(url)

        path = os.path.abspath(mozpath.join(self._cache_dir, fname))
        if self._skip_cache and os.path.exists(path):
            self.log(
                logging.DEBUG, "artifact", {"path": path}, "Skipping cache: removing cached downloaded artifact {path}"
            )
            os.remove(path)

        self.log(logging.INFO, "artifact", {"path": path}, "Downloading to temporary location {path}")
        try:
            dl = self._download_manager.download(url, fname)

            def download_progress(dl, bytes_so_far, total_size):
                percent = (float(bytes_so_far) / total_size) * 100
                now = int(percent / 5)
                if now == self._last_dl_update:
                    return
                self._last_dl_update = now
                self.log(
                    logging.INFO,
                    "artifact",
                    {"bytes_so_far": bytes_so_far, "total_size": total_size, "percent": percent},
                    "Downloading... {percent:02.1f} %",
                )

            if dl:
                dl.set_progress(download_progress)
                dl.wait()
            self.log(
                logging.INFO,
                "artifact",
                {"path": os.path.abspath(mozpath.join(self._cache_dir, fname))},
                "Downloaded artifact to {path}",
            )
            return os.path.abspath(mozpath.join(self._cache_dir, fname))
        finally:
            # Cancel any background downloads in progress.
            self._download_manager.cancel()

    def print_last_item(self, args, sorted_kwargs, result):
        url, = args
        self.log(logging.INFO, "artifact", {"url": url}, "Last installed binaries from url {url}")
        self.log(logging.INFO, "artifact", {"filename": result}, "Last installed binaries from local file {filename}")
        self.log(
            logging.INFO,
            "artifact",
            {"filename": result + PROCESSED_SUFFIX},
            "Last installed binaries from local processed file {filename}",
        )
Author: carriercomm | Project: gecko-dev | Lines: 94 | Source: artifacts.py
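Compared with Example 2, this version adds a skip_cache flag (which removes an already-downloaded artifact before re-fetching it) and a progress callback that only emits a log line when the download crosses into the next 5% bucket, via int(percent / 5). A tiny standalone illustration of that throttling logic, using made-up byte counts:

# Illustrative only: mirrors the bucketing in download_progress() from Example 3.
last_update = -1
total_size = 1000  # pretend the download is 1000 bytes

for bytes_so_far in (10, 30, 60, 120, 125, 490, 500, 995):
    percent = (float(bytes_so_far) / total_size) * 100
    bucket = int(percent / 5)   # one bucket per 5% step
    if bucket == last_update:
        continue                # still in the same 5% bucket: stay quiet
    last_update = bucket
    print("Downloading... {:.1f} %".format(percent))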


Note: The mozregression.download_manager.DownloadManager class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors. Please consult each project's license before distributing or using the code, and do not reproduce this article without permission.