

Python threaded.HTTPThreadedDownloader Class Code Examples

This article collects typical usage examples of the Python class nectar.downloaders.threaded.HTTPThreadedDownloader. If you have been wondering what exactly HTTPThreadedDownloader does, how to use it, or what working usage looks like, the curated class examples here may help.


The sections below present 13 code examples of the HTTPThreadedDownloader class, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the site recommend better Python code examples.
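
Before the examples, here is a minimal, hypothetical usage sketch of the pattern they all share: build a nectar.config.DownloaderConfig, pass it to HTTPThreadedDownloader together with an event listener, and hand the downloader a list of nectar.request.DownloadRequest objects. The feed URL and destination path are placeholders, and the import paths for the listener and request classes are inferred from how the projects below reference them, so treat this as a sketch rather than canonical API documentation.

from nectar.config import DownloaderConfig
from nectar.downloaders.threaded import HTTPThreadedDownloader
from nectar.listener import AggregatingEventListener
from nectar.request import DownloadRequest

# Configure the thread pool: five concurrent downloads, SSL validation enabled.
config = DownloaderConfig(max_concurrent=5, ssl_validation=True)

# AggregatingEventListener simply collects per-file reports; several examples
# below instead pass `self` so their own class receives the download callbacks.
listener = AggregatingEventListener()
downloader = HTTPThreadedDownloader(config, listener)

# One DownloadRequest per file: (source URL, local destination path or file object).
requests = [DownloadRequest('https://example.com/repo/PULP_MANIFEST',
                            '/tmp/PULP_MANIFEST')]
downloader.download(requests)

# Inspect the aggregated results after download() returns.
for report in listener.failed_reports:
    print(report.url, report.error_msg)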

Example 1: __init__

    def __init__(self, sync_conduit, config):
        """
        Initialize an ISOSyncRun.

        :param sync_conduit: the sync conduit to use for this sync run.
        :type  sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
        :param config:       plugin configuration
        :type  config:       pulp.plugins.config.PluginCallConfiguration
        """
        self.sync_conduit = sync_conduit
        self.config = config
        self._remove_missing_units = config.get(
            importer_constants.KEY_UNITS_REMOVE_MISSING,
            default=constants.CONFIG_UNITS_REMOVE_MISSING_DEFAULT)
        self._validate_downloads = config.get(importer_constants.KEY_VALIDATE,
                                              default=constants.CONFIG_VALIDATE_DEFAULT)
        self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
        # The _repo_url must end in a trailing slash, because we will use urljoin to determine
        # the path to PULP_MANIFEST later
        if self._repo_url[-1] != '/':
            self._repo_url = self._repo_url + '/'

        # Cast our config parameters to the correct types and use them to build a Downloader
        max_speed = config.get(importer_constants.KEY_MAX_SPEED)
        if max_speed is not None:
            max_speed = float(max_speed)
        max_downloads = config.get(importer_constants.KEY_MAX_DOWNLOADS)
        if max_downloads is not None:
            max_downloads = int(max_downloads)
        else:
            max_downloads = constants.CONFIG_MAX_DOWNLOADS_DEFAULT
        ssl_validation = config.get_boolean(importer_constants.KEY_SSL_VALIDATION)
        ssl_validation = ssl_validation if ssl_validation is not None else \
            constants.CONFIG_VALIDATE_DEFAULT
        downloader_config = {
            'max_speed': max_speed,
            'max_concurrent': max_downloads,
            'ssl_client_cert': config.get(importer_constants.KEY_SSL_CLIENT_CERT),
            'ssl_client_key': config.get(importer_constants.KEY_SSL_CLIENT_KEY),
            'ssl_ca_cert': config.get(importer_constants.KEY_SSL_CA_CERT),
            'ssl_validation': ssl_validation,
            'proxy_url': config.get(importer_constants.KEY_PROXY_HOST),
            'proxy_port': config.get(importer_constants.KEY_PROXY_PORT),
            'proxy_username': config.get(importer_constants.KEY_PROXY_USER),
            'proxy_password': config.get(importer_constants.KEY_PROXY_PASS),
            'basic_auth_username': config.get(importer_constants.KEY_BASIC_AUTH_USER),
            'basic_auth_password': config.get(importer_constants.KEY_BASIC_AUTH_PASS),
            'working_dir': common_utils.get_working_directory()}
        downloader_config = DownloaderConfig(**downloader_config)

        # We will pass self as the event_listener, so that we can receive the callbacks in this
        # class
        if self._repo_url.lower().startswith('file'):
            self.downloader = LocalFileDownloader(downloader_config, self)
        else:
            self.downloader = HTTPThreadedDownloader(downloader_config, self)
        self.progress_report = SyncProgressReport(sync_conduit)

        self.repo_units = []
Contributor: ATIX-AG, project: pulp_rpm, lines: 60, source: sync.py

Example 2: __init__

    def __init__(self, name, download_config, registry_url, working_dir):
        """
        Initialize the V2Repository.

        :param name:            name of a docker repository
        :type  name:            basestring
        :param download_config: download configuration object
        :type  download_config: nectar.config.DownloaderConfig
        :param registry_url:    URL for the docker registry
        :type  registry_url:    basestring
        :param working_dir:     full path to the directory where files should
                                be saved
        :type  working_dir:     basestring
        """

        # Docker's registry aligns non-namespaced images to the library namespace.
        # if we have a docker registry image, and no namespace, add the library
        # namespace to the image name.

        if '/' not in name and re.search(r'registry[-,\w]*.docker.io', registry_url, re.IGNORECASE):
            self.name = "library/" + name
        else:
            self.name = name

        self.download_config = download_config
        self.registry_url = registry_url
        self.downloader = HTTPThreadedDownloader(self.download_config, AggregatingEventListener())
        self.working_dir = working_dir
        self.token = None
Contributor: bowlofeggs, project: pulp_docker, lines: 29, source: registry.py

Example 3: initialize

    def initialize(self):
        """
        Set up the nectar downloader

        Originally based on the ISO sync setup
        """
        config = self.get_config()
        self._validate_downloads = config.get(importer_constants.KEY_VALIDATE, default=True)
        self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
        # The _repo_url must end in a trailing slash, because we will use
        # urljoin to determine the path later
        if self._repo_url[-1] != '/':
            self._repo_url = self._repo_url + '/'

        downloader_config = importer_config_to_nectar_config(config.flatten())

        # We will pass self as the event_listener, so that we can receive the
        # callbacks in this class
        if self._repo_url.lower().startswith('file'):
            self.downloader = LocalFileDownloader(downloader_config, self)
        else:
            self.downloader = HTTPThreadedDownloader(downloader_config, self)
Contributor: jeremycline, project: pulp, lines: 22, source: publish_step.py

Example 4: __init__

    def __init__(self, name, download_config, registry_url, working_dir):
        """
        Initialize the V2Repository.

        :param name:            name of a docker repository
        :type  name:            basestring
        :param download_config: download configuration object
        :type  download_config: nectar.config.DownloaderConfig
        :param registry_url:    URL for the docker registry
        :type  registry_url:    basestring
        :param working_dir:     full path to the directory where files should
                                be saved
        :type  working_dir:     basestring
        """
        self.name = name
        self.download_config = download_config
        self.registry_url = registry_url
        self.downloader = HTTPThreadedDownloader(self.download_config, AggregatingEventListener())
        self.working_dir = working_dir
Contributor: shubham90, project: pulp_docker, lines: 19, source: registry.py

Example 5: __init__

    def __init__(self, step_type, step_description, lazy_status_conduit, download_requests):
        """
        Initializes a Step that downloads all the download requests provided.

        :param lazy_status_conduit: Conduit used to update the task status.
        :type  lazy_status_conduit: LazyStatusConduit
        :param download_requests:   List of download requests to process.
        :type  download_requests:   list of nectar.request.DownloadRequest
        """
        super(LazyUnitDownloadStep, self).__init__(
            step_type=step_type,
            status_conduit=lazy_status_conduit,
        )
        self.description = step_description
        self.download_requests = download_requests
        self.download_config = {
            MAX_CONCURRENT: int(pulp_conf.get('lazy', 'download_concurrency')),
            HEADERS: {PULP_STREAM_REQUEST_HEADER: 'true'},
            SSL_VALIDATION: True
        }
        self.downloader = HTTPThreadedDownloader(
            DownloaderConfig(**self.download_config),
            self
        )
Contributor: maxamillion, project: pulp, lines: 24, source: content.py

Example 6: ISOSyncRun

class ISOSyncRun(listener.DownloadEventListener):
    """
    This class maintains state for a single repository sync (do not reuse it). We need to keep
    the state so that we can cancel a sync that is in progress. It subclasses DownloadEventListener
    so it can pass itself to the downloader library and receive the callbacks when downloads are
    complete.
    """

    def __init__(self, sync_conduit, config):
        """
        Initialize an ISOSyncRun.

        :param sync_conduit: the sync conduit to use for this sync run.
        :type  sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
        :param config:       plugin configuration
        :type  config:       pulp.plugins.config.PluginCallConfiguration
        """
        self.sync_conduit = sync_conduit
        self._remove_missing_units = config.get(
            importer_constants.KEY_UNITS_REMOVE_MISSING,
            default=constants.CONFIG_UNITS_REMOVE_MISSING_DEFAULT)
        self._validate_downloads = config.get(importer_constants.KEY_VALIDATE,
                                              default=constants.CONFIG_VALIDATE_DEFAULT)
        self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
        # The _repo_url must end in a trailing slash, because we will use urljoin to determine
        # the path to PULP_MANIFEST later
        if self._repo_url[-1] != '/':
            self._repo_url = self._repo_url + '/'

        # Cast our config parameters to the correct types and use them to build a Downloader
        max_speed = config.get(importer_constants.KEY_MAX_SPEED)
        if max_speed is not None:
            max_speed = float(max_speed)
        max_downloads = config.get(importer_constants.KEY_MAX_DOWNLOADS)
        if max_downloads is not None:
            max_downloads = int(max_downloads)
        else:
            max_downloads = constants.CONFIG_MAX_DOWNLOADS_DEFAULT
        ssl_validation = config.get_boolean(importer_constants.KEY_SSL_VALIDATION)
        ssl_validation = ssl_validation if ssl_validation is not None else \
            constants.CONFIG_VALIDATE_DEFAULT
        downloader_config = {
            'max_speed': max_speed,
            'max_concurrent': max_downloads,
            'ssl_client_cert': config.get(importer_constants.KEY_SSL_CLIENT_CERT),
            'ssl_client_key': config.get(importer_constants.KEY_SSL_CLIENT_KEY),
            'ssl_ca_cert': config.get(importer_constants.KEY_SSL_CA_CERT),
            'ssl_validation': ssl_validation,
            'proxy_url': config.get(importer_constants.KEY_PROXY_HOST),
            'proxy_port': config.get(importer_constants.KEY_PROXY_PORT),
            'proxy_username': config.get(importer_constants.KEY_PROXY_USER),
            'proxy_password': config.get(importer_constants.KEY_PROXY_PASS)}
        downloader_config = DownloaderConfig(**downloader_config)

        # We will pass self as the event_listener, so that we can receive the callbacks in this
        # class
        if self._repo_url.lower().startswith('file'):
            self.downloader = LocalFileDownloader(downloader_config, self)
        else:
            self.downloader = HTTPThreadedDownloader(downloader_config, self)
        self.progress_report = SyncProgressReport(sync_conduit)

    def cancel_sync(self):
        """
        This method will cancel a sync that is in progress.
        """
        # We used to support sync cancellation, but the current downloader implementation does
        # not support it and so for now we will just pass
        self.progress_report.state = self.progress_report.STATE_CANCELLED
        self.downloader.cancel()

    def download_failed(self, report):
        """
        This is the callback that we will get from the downloader library when any individual
        download fails.
        """
        # If we have a download failure during the manifest phase, we should set the report to
        # failed for that phase.
        msg = _('Failed to download %(url)s: %(error_msg)s.')
        msg = msg % {'url': report.url, 'error_msg': report.error_msg}
        logger.error(msg)
        if self.progress_report.state == self.progress_report.STATE_MANIFEST_IN_PROGRESS:
            self.progress_report.state = self.progress_report.STATE_MANIFEST_FAILED
            self.progress_report.error_message = report.error_report
        elif self.progress_report.state == self.progress_report.STATE_ISOS_IN_PROGRESS:
            iso = report.data
            self.progress_report.add_failed_iso(iso, report.error_report)
        self.progress_report.update_progress()

    def download_progress(self, report):
        """
        We will get notified from time to time about some bytes we've downloaded. We can update
        our progress report with this information so the client can see the progress.

        :param report: The report of the file we are downloading
        :type  report: nectar.report.DownloadReport
        """
#......... the rest of the code is omitted here .........
Contributor: AndreaGiardini, project: pulp_rpm, lines: 101, source: sync.py

Example 7: Repository

class Repository(object):
    IMAGES_PATH = '/v1/repositories/%s/images'
    TAGS_PATH = '/v1/repositories/%s/tags'
    ANCESTRY_PATH = '/v1/images/%s/ancestry'

    DOCKER_TOKEN_HEADER = 'x-docker-token'
    DOCKER_ENDPOINT_HEADER = 'x-docker-endpoints'

    def __init__(self, name, download_config, registry_url, working_dir):
        """
        :param name:            name of a docker repository
        :type  name:            basestring
        :param download_config: download configuration object
        :type  download_config: nectar.config.DownloaderConfig
        :param registry_url:    URL for the docker registry
        :type  registry_url:    basestring
        :param working_dir:     full path to the directory where files should
                                be saved
        :type  working_dir:     basestring
        """
        self.name = name
        self.download_config = download_config
        self.registry_url = registry_url
        self.listener = AggregatingEventListener()
        self.downloader = HTTPThreadedDownloader(self.download_config, self.listener)
        self.working_dir = working_dir
        self.token = None
        self.endpoint = None

    def _get_single_path(self, path):
        """
        Retrieve a single path within the upstream registry, and return its
        body after deserializing it as json

        :param path:    a full http path to retrieve that will be urljoin'd to the
                        upstream registry url.
        :type  path:    basestring

        :return:    whatever gets deserialized out of the response body's json
        """
        # if talking to docker hub, we'll get an endpoint specified, and then we'll have to get
        # tags from that endpoint instead of talking to the original feed URL.
        if self.endpoint:
            # we assume the same scheme that the registry URL used
            registry_url_parts = urlparse.urlsplit(self.registry_url)
            parts = urlparse.SplitResult(scheme=registry_url_parts.scheme, netloc=self.endpoint,
                                         path=path, query=None, fragment=None)
            url = urlparse.urlunsplit(parts)
        else:
            url = urlparse.urljoin(self.registry_url, path)
        request = DownloadRequest(url, StringIO())
        if path.endswith('/images'):
            # this is required by the docker index and indicates that it should
            # return an auth token
            if request.headers is None:
                request.headers = {}
            request.headers[self.DOCKER_TOKEN_HEADER] = 'true'
        # endpoints require auth
        if self.endpoint:
            self.add_auth_header(request)
        report = self.downloader.download_one(request)

        if report.state == report.DOWNLOAD_FAILED:
            raise IOError(report.error_msg)

        self._parse_response_headers(report.headers)
        return json.loads(report.destination.getvalue())

    def _parse_response_headers(self, headers):
        """
        Some responses can include header information that we need later. This
        grabs those values and stores them for later use.

        :param headers: dictionary-like object where keys are HTTP header names
                        and values are their values.
        :type  headers: dict
        """
        # this is used for authorization on an endpoint
        if self.DOCKER_TOKEN_HEADER in headers:
            self.token = headers[self.DOCKER_TOKEN_HEADER]
        # this tells us what host to use when accessing image files
        if self.DOCKER_ENDPOINT_HEADER in headers:
            self.endpoint = headers[self.DOCKER_ENDPOINT_HEADER]

    def get_image_ids(self):
        """
        Get a list of all images in the upstream repository. This is
        conceptually a little ambiguous, as there can be images in a repo that
        are neither tagged nor in the ancestry for a tagged image.

        :return:    list of image IDs in the repo
        :rtype:     list

        :raises pulp_exceptions.PulpCodedException: if fetching the IDs fails
        """
        path = self.IMAGES_PATH % self.name

        _logger.debug('retrieving image ids from remote registry')
        try:
            raw_data = self._get_single_path(path)
#......... the rest of the code is omitted here .........
Contributor: jeremycline, project: pulp_docker, lines: 101, source: registry.py

Example 8: DownloadStep

class DownloadStep(PluginStep, listener.DownloadEventListener):

    def __init__(self, step_type, downloads=None, repo=None, conduit=None, config=None,
                 working_dir=None, plugin_type=None, description=''):
        """
        Set the default parent and step_type for the Download step

        :param step_type: The id of the step this processes
        :type  step_type: str
        :param downloads: A list of DownloadRequests
        :type  downloads: list of nectar.request.DownloadRequest
        :param repo: The repo to be published
        :type  repo: pulp.plugins.model.Repository
        :param conduit: The conduit for the repo
        :type  conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
        :param config: The publish configuration
        :type  config: PluginCallConfiguration
        :param working_dir: The temp directory this step should use for processing
        :type  working_dir: str
        :param plugin_type: The type of the plugin
        :type  plugin_type: str
        :param description: The text description that will be displayed to users
        :type  description: basestring
        """

        super(DownloadStep, self).__init__(step_type, repo=repo, conduit=conduit,
                                           config=config, working_dir=working_dir,
                                           plugin_type=plugin_type)
        if downloads is not None:
            self._downloads = downloads
        else:
            self._downloads = []
        self.step_type = step_type
        self.repo = repo
        self.conduit = conduit
        self.config = config
        self.working_dir = working_dir
        self.plugin_type = plugin_type
        self.description = description

    def initialize(self):
        """
        Set up the nectar downloader

        Originally based on the ISO sync setup
        """
        config = self.get_config()
        self._validate_downloads = config.get(importer_constants.KEY_VALIDATE, default=True)
        self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
        # The _repo_url must end in a trailing slash, because we will use
        # urljoin to determine the path later
        if self._repo_url[-1] != '/':
            self._repo_url = self._repo_url + '/'

        downloader_config = importer_config_to_nectar_config(config.flatten())

        # We will pass self as the event_listener, so that we can receive the
        # callbacks in this class
        if self._repo_url.lower().startswith('file'):
            self.downloader = LocalFileDownloader(downloader_config, self)
        else:
            self.downloader = HTTPThreadedDownloader(downloader_config, self)

    @property
    def downloads(self):
        """
        This lets the class be instantiated with "downloads" as a generator that
        gets lazily evaluated. This is helpful, because at the time of
        instantiation, it is probably not known what downloads will be
        required.

        :return:    list of download requests (nectar.request.DownloadRequest)
        :rtype:     list
        """
        if not isinstance(self._downloads, list):
            self._downloads = list(self._downloads)
        return self._downloads

    def get_total(self):
        """
        Get total number of items to download

        :returns: number of DownloadRequests
        :rtype: int
        """
        return len(self.downloads)

    def _process_block(self):
        """
        the main "do stuff" method. In this case, just kick off all the
        downloads.
        """
        self.downloader.download(self.downloads)

    # from listener.DownloadEventListener
    def download_succeeded(self, report):
        """
        This is the callback that we will get from the downloader library when any individual
        download succeeds. Bump the successes counter and report progress.

#......... the rest of the code is omitted here .........
Contributor: jeremycline, project: pulp, lines: 101, source: publish_step.py

Example 9: LazyUnitDownloadStep

class LazyUnitDownloadStep(Step, DownloadEventListener):
    """
    A Step that downloads all the given requests. The downloader is configured
    to download from the Pulp Streamer components.

    :ivar download_requests: The download requests the step will process.
    :type download_requests: list of nectar.request.DownloadRequest
    :ivar download_config:   The keyword args used to initialize the Nectar
                             downloader configuration.
    :type download_config:   dict
    :ivar downloader:        The Nectar downloader used to fetch the requests.
    :type downloader:        nectar.downloaders.threaded.HTTPThreadedDownloader
    """

    def __init__(self, step_type, step_description, lazy_status_conduit, download_requests):
        """
        Initializes a Step that downloads all the download requests provided.

        :param lazy_status_conduit: Conduit used to update the task status.
        :type  lazy_status_conduit: LazyStatusConduit
        :param download_requests:   List of download requests to process.
        :type  download_requests:   list of nectar.request.DownloadRequest
        """
        super(LazyUnitDownloadStep, self).__init__(
            step_type=step_type,
            status_conduit=lazy_status_conduit,
        )
        self.description = step_description
        self.download_requests = download_requests
        self.download_config = {
            MAX_CONCURRENT: int(pulp_conf.get('lazy', 'download_concurrency')),
            HEADERS: {PULP_STREAM_REQUEST_HEADER: 'true'},
            SSL_VALIDATION: True
        }
        self.downloader = HTTPThreadedDownloader(
            DownloaderConfig(**self.download_config),
            self
        )

    def _process_block(self, item=None):
        """
        This block is called by the `process` loop. This is overridden because
        success and failure is determined during the EventListener callbacks,
        which will handle updating the progress. Since `item` is not used, this
        does not make use of `process_main` and simply calls the downloader.

        Inherited from Step.

        :param item: Unused.
        :type  item: None
        """
        self.downloader.download(self.download_requests)

    def get_total(self):
        """
        The total number of download requests so progress reporting occurs at
        the file level.

        Inherited from Step.

        :return: The number of download requests this step will process.
        :rtype:  int
        """
        return len(self.download_requests)

    def download_started(self, report):
        """
        Checks the filesystem for the file that we are about to download,
        and if it exists, raise an exception which will cause Nectar to
        skip the download.

        Inherited from DownloadEventListener.

        :param report: the report associated with the download request.
        :type  report: nectar.report.DownloadReport

        :raises SkipLocation: if the file is already downloaded and matches
                              the checksum stored in the catalog.
        """
        _logger.debug(_('Starting download of {url}.').format(url=report.url))

        # Remove the deferred entry now that the download has started.
        query_set = DeferredDownload.objects.filter(
            unit_id=report.data[UNIT_ID],
            unit_type_id=report.data[TYPE_ID]
        )
        query_set.delete()

        try:
            # If the file exists and the checksum is valid, don't download it
            path_entry = report.data[UNIT_FILES][report.destination]
            catalog_entry = path_entry[CATALOG_ENTRY]
            self.validate_file(
                catalog_entry.path,
                catalog_entry.checksum_algorithm,
                catalog_entry.checksum
            )
            path_entry[PATH_DOWNLOADED] = True
            self.progress_successes += 1
            self.report_progress()
#......... the rest of the code is omitted here .........
Contributor: maxamillion, project: pulp, lines: 101, source: content.py

Example 10: V2Repository

class V2Repository(object):
    """
    This class represents a Docker v2 repository.
    """
    API_VERSION_CHECK_PATH = '/v2/'
    LAYER_PATH = '/v2/{name}/blobs/{digest}'
    MANIFEST_PATH = '/v2/{name}/manifests/{reference}'
    TAGS_PATH = '/v2/{name}/tags/list'

    def __init__(self, name, download_config, registry_url, working_dir):
        """
        Initialize the V2Repository.

        :param name:            name of a docker repository
        :type  name:            basestring
        :param download_config: download configuration object
        :type  download_config: nectar.config.DownloaderConfig
        :param registry_url:    URL for the docker registry
        :type  registry_url:    basestring
        :param working_dir:     full path to the directory where files should
                                be saved
        :type  working_dir:     basestring
        """
        self.name = name
        self.download_config = download_config
        self.registry_url = registry_url
        self.downloader = HTTPThreadedDownloader(self.download_config, AggregatingEventListener())
        self.working_dir = working_dir

    def api_version_check(self):
        """
        Make a call to the registry URL's /v2/ API call to determine if the registry supports API
        v2.

        :return: True if the v2 API is found, else False
        :rtype:  bool
        """
        _logger.debug('Determining if the registry URL can do v2 of the Docker API.')

        try:
            headers, body = self._get_path(self.API_VERSION_CHECK_PATH)
        except IOError:
            return False

        try:
            version = headers['Docker-Distribution-API-Version']
            if version != "registry/2.0":
                return False
            _logger.debug(_('The docker registry is using API version: %(v)s') % {'v': version})
        except KeyError:
            # If the Docker-Distribution-API-Version header isn't present, we will assume that this
            # is a valid Docker 2.0 API server so that simple file-based webservers can serve as our
            # remote feed.
            pass

        return True

    def create_blob_download_request(self, digest):
        """
        Return a DownloadRequest instance for the given blob digest.
        It is desirable to download the blob files with a separate
        downloader (for progress tracking, etc), so we just create the download
        requests here and let them get processed elsewhere.

        :param digest:          digest of the docker blob you wish to download
        :type  digest:          basestring

        :return:    a download request instance
        :rtype:     nectar.request.DownloadRequest
        """
        path = self.LAYER_PATH.format(name=self.name, digest=digest)
        url = urlparse.urljoin(self.registry_url, path)
        req = DownloadRequest(url, os.path.join(self.working_dir, digest))
        return req

    def get_manifest(self, reference):
        """
        Get the manifest and its digest for the given reference.

        :param reference: The reference (tag or digest) of the Manifest you wish to retrieve.
        :type  reference: basestring
        :return:          A 2-tuple of the digest and the manifest, both basestrings
        :rtype:           tuple
        """
        path = self.MANIFEST_PATH.format(name=self.name, reference=reference)
        headers, manifest = self._get_path(path)

        digest_header = 'docker-content-digest'
        if digest_header in headers:
            expected_digest = headers[digest_header]
            # The digest is formatted as algorithm:sum, so let's ask our hasher to use the same
            # algorithm as we received in the headers.
            digest = models.Manifest.calculate_digest(manifest, expected_digest.split(':')[0])
            if digest != expected_digest:
                msg = _('The Manifest digest does not match the expected value. The remote '
                        'feed announced a digest of {e}, but the downloaded digest was {d}.')
                msg = msg.format(e=expected_digest, d=digest)
                raise IOError(msg)
        else:
            digest = models.Manifest.digest(manifest)
#......... the rest of the code is omitted here .........
Contributor: shubham90, project: pulp_docker, lines: 101, source: registry.py

Example 11: ISOSyncRun

class ISOSyncRun(listener.DownloadEventListener):
    """
    This class maintains state for a single repository sync (do not reuse it). We need to keep
    the state so that we can cancel a sync that is in progress. It subclasses DownloadEventListener
    so it can pass itself to the downloader library and receive the callbacks when downloads are
    complete.
    """

    def __init__(self, sync_conduit, config):
        """
        Initialize an ISOSyncRun.

        :param sync_conduit: the sync conduit to use for this sync run.
        :type  sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
        :param config:       plugin configuration
        :type  config:       pulp.plugins.config.PluginCallConfiguration
        """
        self.sync_conduit = sync_conduit
        self.config = config
        self._remove_missing_units = config.get(
            importer_constants.KEY_UNITS_REMOVE_MISSING,
            default=constants.CONFIG_UNITS_REMOVE_MISSING_DEFAULT)
        self._validate_downloads = config.get(importer_constants.KEY_VALIDATE,
                                              default=constants.CONFIG_VALIDATE_DEFAULT)
        self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
        # The _repo_url must end in a trailing slash, because we will use urljoin to determine
        # the path to PULP_MANIFEST later
        if self._repo_url[-1] != '/':
            self._repo_url = self._repo_url + '/'

        # Cast our config parameters to the correct types and use them to build a Downloader
        max_speed = config.get(importer_constants.KEY_MAX_SPEED)
        if max_speed is not None:
            max_speed = float(max_speed)
        max_downloads = config.get(importer_constants.KEY_MAX_DOWNLOADS)
        if max_downloads is not None:
            max_downloads = int(max_downloads)
        else:
            max_downloads = constants.CONFIG_MAX_DOWNLOADS_DEFAULT
        ssl_validation = config.get_boolean(importer_constants.KEY_SSL_VALIDATION)
        ssl_validation = ssl_validation if ssl_validation is not None else \
            constants.CONFIG_VALIDATE_DEFAULT
        downloader_config = {
            'max_speed': max_speed,
            'max_concurrent': max_downloads,
            'ssl_client_cert': config.get(importer_constants.KEY_SSL_CLIENT_CERT),
            'ssl_client_key': config.get(importer_constants.KEY_SSL_CLIENT_KEY),
            'ssl_ca_cert': config.get(importer_constants.KEY_SSL_CA_CERT),
            'ssl_validation': ssl_validation,
            'proxy_url': config.get(importer_constants.KEY_PROXY_HOST),
            'proxy_port': config.get(importer_constants.KEY_PROXY_PORT),
            'proxy_username': config.get(importer_constants.KEY_PROXY_USER),
            'proxy_password': config.get(importer_constants.KEY_PROXY_PASS),
            'basic_auth_username': config.get(importer_constants.KEY_BASIC_AUTH_USER),
            'basic_auth_password': config.get(importer_constants.KEY_BASIC_AUTH_PASS),
            'working_dir': common_utils.get_working_directory()}
        downloader_config = DownloaderConfig(**downloader_config)

        # We will pass self as the event_listener, so that we can receive the callbacks in this
        # class
        if self._repo_url.lower().startswith('file'):
            self.downloader = LocalFileDownloader(downloader_config, self)
        else:
            self.downloader = HTTPThreadedDownloader(downloader_config, self)
        self.progress_report = SyncProgressReport(sync_conduit)

        self.repo_units = []

    @property
    def download_deferred(self):
        """
        Test the download policy to determine if downloading is deferred.

        :return: True if deferred.
        :rtype: bool
        """
        policy = self.config.get(
            importer_constants.DOWNLOAD_POLICY,
            importer_constants.DOWNLOAD_IMMEDIATE)
        return policy != importer_constants.DOWNLOAD_IMMEDIATE

    def download_failed(self, report):
        """
        This is the callback that we will get from the downloader library when any individual
        download fails.
        """
        # If we have a download failure during the manifest phase, we should set the report to
        # failed for that phase.
        msg = _('Failed to download %(url)s: %(error_msg)s.')
        msg = msg % {'url': report.url, 'error_msg': report.error_msg}
        _logger.error(msg)
        if self.progress_report.state == self.progress_report.STATE_MANIFEST_IN_PROGRESS:
            self.progress_report.state = self.progress_report.STATE_MANIFEST_FAILED
            self.progress_report.error_message = report.error_report
        elif self.progress_report.state == self.progress_report.STATE_ISOS_IN_PROGRESS:
            iso = report.data
            self.progress_report.add_failed_iso(iso, report.error_report)
        self.progress_report.update_progress()

#......... the rest of the code is omitted here .........
Contributor: ATIX-AG, project: pulp_rpm, lines: 101, source: sync.py

Example 12: ISOSyncRun

class ISOSyncRun(listener.DownloadEventListener):
    """
    This class maintains state for a single repository sync (do not reuse it). We need to keep the state so
    that we can cancel a sync that is in progress. It subclasses DownloadEventListener so it can pass itself
    to the downloader library and receive the callbacks when downloads are complete.
    """
    def __init__(self, sync_conduit, config):
        self.sync_conduit = sync_conduit
        self._remove_missing_units = config.get(importer_constants.KEY_UNITS_REMOVE_MISSING,
                                                default=constants.CONFIG_UNITS_REMOVE_MISSING_DEFAULT)
        self._validate_downloads = config.get(importer_constants.KEY_VALIDATE,
                                              default=constants.CONFIG_VALIDATE_DEFAULT)
        self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
        # The _repo_url must end in a trailing slash, because we will use urljoin to determine the path to
        # PULP_MANIFEST later
        if self._repo_url[-1] != '/':
            self._repo_url = self._repo_url + '/'

        # Cast our config parameters to the correct types and use them to build a Downloader
        max_speed = config.get(importer_constants.KEY_MAX_SPEED)
        if max_speed is not None:
            max_speed = float(max_speed)
        max_downloads = config.get(importer_constants.KEY_MAX_DOWNLOADS)
        if max_downloads is not None:
            max_downloads = int(max_downloads)
        else:
            max_downloads = constants.CONFIG_MAX_DOWNLOADS_DEFAULT
        ssl_validation = config.get_boolean(importer_constants.KEY_SSL_VALIDATION)
        ssl_validation = ssl_validation if ssl_validation is not None else constants.CONFIG_VALIDATE_DEFAULT
        downloader_config = {
            'max_speed': max_speed,
            'max_concurrent': max_downloads,
            'ssl_client_cert': config.get(importer_constants.KEY_SSL_CLIENT_CERT),
            'ssl_client_key': config.get(importer_constants.KEY_SSL_CLIENT_KEY),
            'ssl_ca_cert': config.get(importer_constants.KEY_SSL_CA_CERT),
            'ssl_validation': ssl_validation,
            'proxy_url': config.get(importer_constants.KEY_PROXY_HOST),
            'proxy_port': config.get(importer_constants.KEY_PROXY_PORT),
            'proxy_username': config.get(importer_constants.KEY_PROXY_USER),
            'proxy_password': config.get(importer_constants.KEY_PROXY_PASS)}
        downloader_config = DownloaderConfig(**downloader_config)

        # We will pass self as the event_listener, so that we can receive the callbacks in this class
        if self._repo_url.lower().startswith('file'):
            self.downloader = LocalFileDownloader(downloader_config, self)
        else:
            self.downloader = HTTPThreadedDownloader(downloader_config, self)
        self.progress_report = SyncProgressReport(sync_conduit)

    def cancel_sync(self):
        """
        This method will cancel a sync that is in progress.
        """
        # We used to support sync cancellation, but the current downloader implementation does not support it
        # and so for now we will just pass
        self.progress_report.state = self.progress_report.STATE_CANCELLED
        self.downloader.cancel()

    def download_failed(self, report):
        """
        This is the callback that we will get from the downloader library when any individual
        download fails.
        """
        # If we have a download failure during the manifest phase, we should set the report to
        # failed for that phase.
        if self.progress_report.state == self.progress_report.STATE_MANIFEST_IN_PROGRESS:
            self.progress_report.state = self.progress_report.STATE_MANIFEST_FAILED
            self.progress_report.error_message = report.error_report
        elif self.progress_report.state == self.progress_report.STATE_ISOS_IN_PROGRESS:
            iso = report.data
            self.progress_report.add_failed_iso(iso, report.error_report)
        self.progress_report.update_progress()

    def download_progress(self, report):
        """
        We will get notified from time to time about some bytes we've downloaded. We can update our progress
        report with this information so the client can see the progress.

        :param report: The report of the file we are downloading
        :type  report: nectar.report.DownloadReport
        """
        if self.progress_report.state == self.progress_report.STATE_ISOS_IN_PROGRESS:
            iso = report.data
            additional_bytes_downloaded = report.bytes_downloaded - iso.bytes_downloaded
            self.progress_report.finished_bytes += additional_bytes_downloaded
            iso.bytes_downloaded = report.bytes_downloaded
            self.progress_report.update_progress()

    def download_succeeded(self, report):
        """
        This is the callback that we will get from the downloader library when it succeeds in downloading a
        file. This method will check to see if we are in the ISO downloading stage, and if we are, it will add
        the new ISO to the database.

        :param report: The report of the file we downloaded
        :type  report: nectar.report.DownloadReport
        """
        # If we are in the isos stage, then this must be one of our ISOs.
        if self.progress_report.state == self.progress_report.STATE_ISOS_IN_PROGRESS:
            # This will update our bytes downloaded
#......... the rest of the code is omitted here .........
Contributor: bechtoldt, project: pulp_rpm, lines: 101, source: sync.py

Example 13: V2Repository

class V2Repository(object):
    """
    This class represents a Docker v2 repository.
    """
    API_VERSION_CHECK_PATH = '/v2/'
    LAYER_PATH = '/v2/{name}/blobs/{digest}'
    MANIFEST_PATH = '/v2/{name}/manifests/{reference}'
    TAGS_PATH = '/v2/{name}/tags/list'

    def __init__(self, name, download_config, registry_url, working_dir):
        """
        Initialize the V2Repository.

        :param name:            name of a docker repository
        :type  name:            basestring
        :param download_config: download configuration object
        :type  download_config: nectar.config.DownloaderConfig
        :param registry_url:    URL for the docker registry
        :type  registry_url:    basestring
        :param working_dir:     full path to the directory where files should
                                be saved
        :type  working_dir:     basestring
        """

        # Docker's registry aligns non-namespaced images to the library namespace.
        # if we have a docker registry image, and no namespace, add the library
        # namespace to the image name.

        if '/' not in name and re.search(r'registry[-,\w]*.docker.io', registry_url, re.IGNORECASE):
            self.name = "library/" + name
        else:
            self.name = name

        self.download_config = download_config
        self.registry_url = registry_url

        # Use basic auth information only for retrieving tokens from auth server.
        self.token_downloader = HTTPThreadedDownloader(self.download_config,
                                                       AggregatingEventListener())
        self.download_config.basic_auth_username = None
        self.download_config.basic_auth_password = None
        self.downloader = HTTPThreadedDownloader(self.download_config, AggregatingEventListener())
        self.working_dir = working_dir
        self.token = None

    def api_version_check(self):
        """
        Make a call to the registry URL's /v2/ API call to determine if the registry supports API
        v2.

        :return: True if the v2 API is found, else False
        :rtype:  bool
        """
        _logger.debug('Determining if the registry URL can do v2 of the Docker API.')

        try:
            headers, body = self._get_path(self.API_VERSION_CHECK_PATH)
        except IOError:
            return False

        try:
            version = headers['Docker-Distribution-API-Version']
            if version != "registry/2.0":
                return False
            _logger.debug(_('The docker registry is using API version: %(v)s') % {'v': version})
        except KeyError:
            # If the Docker-Distribution-API-Version header isn't present, we will assume that this
            # is a valid Docker 2.0 API server so that simple file-based webservers can serve as our
            # remote feed.
            pass

        return True

    def create_blob_download_request(self, digest):
        """
        Return a DownloadRequest instance for the given blob digest.
        It is desirable to download the blob files with a separate
        downloader (for progress tracking, etc), so we just create the download
        requests here and let them get processed elsewhere.

        :param digest:          digest of the docker blob you wish to download
        :type  digest:          basestring

        :return:    a download request instance
        :rtype:     nectar.request.DownloadRequest
        """
        path = self.LAYER_PATH.format(name=self.name, digest=digest)
        url = urlparse.urljoin(self.registry_url, path)
        req = DownloadRequest(url, os.path.join(self.working_dir, digest))
        return req

    def get_manifest(self, reference):
        """
        Get the manifest and its digest for the given reference.

        :param reference: The reference (tag or digest) of the Manifest you wish to retrieve.
        :type  reference: basestring
        :return:          A 2-tuple of the digest and the manifest, both basestrings
        :rtype:           tuple
        """
#......... the rest of the code is omitted here .........
Contributor: daviddavis, project: pulp_docker, lines: 101, source: registry.py


Note: the nectar.downloaders.threaded.HTTPThreadedDownloader class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright in the source code remains with the original authors. Please consult each project's license before distributing or using the code; do not reproduce this article without permission.