

Python storage.Blob Code Examples

This article collects and summarizes typical code examples of Python's google.cloud.storage.Blob. If you have been wondering how storage.Blob is used in practice, or looking for concrete storage.Blob examples, the selected snippets below may help. They also illustrate, more broadly, how the google.cloud.storage package is used.


The following presents 15 code examples of storage.Blob, sorted by popularity by default.
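
Before the collected examples, here is a minimal sketch of the basic Blob workflow: create a client, reference a bucket and an object, upload, check existence, and download. The bucket and object names below are hypothetical placeholders, not taken from any example on this page.

from google.cloud import storage

client = storage.Client()
bucket = client.bucket("my-example-bucket")            # hypothetical bucket name
blob = storage.Blob("path/to/example.txt", bucket)     # equivalent to bucket.blob(...)

blob.upload_from_string("hello, GCS")                  # write the object
print(blob.exists(client))                             # True once uploaded
text = blob.download_as_bytes().decode("utf-8")        # download_as_string() in older releases
print(text)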

Example 1: _get_blob_name

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def _get_blob_name(blob):
        """
        Gets blob name (last part of the path).
        :param blob: instance of :class:`google.cloud.storage.Blob`.
        :return: name string.
        """
        if isinstance(blob, Blob):
            return os.path.basename(blob.name)
        # Python 2 `unicode` is gone in Python 3; a plain str is the only text type.
        assert isinstance(blob, str)
        if blob.endswith("/"):
            blob = blob[:-1]
        return os.path.basename(blob) 
Developer: src-d, Project: jgscm, Lines: 14, Source: __init__.py

Example 2: _read_file

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def _read_file(self, blob, format):
        """Reads a non-notebook file.

        blob: instance of :class:`google.cloud.storage.Blob`.
        format:
          If "text", the contents will be decoded as UTF-8.
          If "base64", the raw bytes contents will be encoded as base64.
          If not specified, try to decode as UTF-8, and fall back to base64
        """
        bcontent = blob.download_as_string()

        if format is None or format == "text":
            # Try to interpret as unicode if format is unknown or if unicode
            # was explicitly requested.
            try:
                return bcontent.decode("utf8"), "text"
            except UnicodeError:
                if format == "text":
                    raise web.HTTPError(
                        400, "%s is not UTF-8 encoded" %
                             self._get_blob_path(blob),
                        reason="bad format",
                    )
        return base64.encodebytes(bcontent).decode("ascii"), "base64" 
Developer: src-d, Project: jgscm, Lines: 26, Source: __init__.py

Example 3: _save_directory

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def _save_directory(self, path, model):
        """Creates a directory in GCS."""
        exists, obj = self._fetch(path)
        if exists:
            if isinstance(obj, Blob):
                raise web.HTTPError(400, u"Not a directory: %s" % path)
            else:
                self.log.debug("Directory %r already exists", path)
                return
        bucket_name, bucket_path = self._parse_path(path)
        if bucket_path == "":
            self.client.create_bucket(bucket_name)
        else:
            bucket = self._get_bucket(bucket_name, throw=True)
            bucket.blob(bucket_path).upload_from_string(
                b"", content_type="application/x-directory") 
Developer: src-d, Project: jgscm, Lines: 18, Source: __init__.py

Example 4: maybe_upload_file

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def maybe_upload_file(local_path):
    '''Upload a file to remote cloud storage
    if the path starts with gs:// or s3://
    '''
    if local_path.startswith(('s3://', 'gs://')):
        prefix = local_path.split(':')[0]
        remote_bucket_path = local_path[len("s3://"):]  # 's3://' and 'gs://' have the same length
        bp = remote_bucket_path.split("/")
        bucket = bp[0]
        path = remote_bucket_path[1 + len(bucket):]

        # s3://example/file becomes s3:/example/file in Linux
        local_path = prefix + ':/' + remote_bucket_path
        if prefix == 's3':
            import boto3
            s3 = boto3.client('s3', endpoint_url=os.environ.get('S3_ENDPOINT'))
            s3.upload_file(local_path, bucket, path)

        elif prefix == 'gs':
            from google.cloud import storage
            client = storage.Client()

            Hbucket = storage.Bucket(client, bucket)
            blob = storage.Blob(path, Hbucket)
            blob.upload_from_filename(local_path) 
Developer: lanpa, Project: tensorboardX, Lines: 27, Source: embedding.py

Example 5: copy_to

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def copy_to(self, source_path):
    if os.path.getsize(source_path) == 0:
      message = (
          'Local source file {0:s} is empty.  Not uploading to GCS'.format(
              source_path))
      log.error(message)
      raise TurbiniaException(message)

    bucket = self.client.get_bucket(self.bucket)
    destination_path = os.path.join(
        self.base_output_dir, self.unique_dir, os.path.basename(source_path))
    log.info(
        'Writing {0:s} to GCS path {1:s}'.format(source_path, destination_path))
    try:
      blob = storage.Blob(destination_path, bucket, chunk_size=self.CHUNK_SIZE)
      blob.upload_from_filename(source_path, client=self.client)
    except exceptions.GoogleCloudError as exception:
      message = 'File upload to GCS failed: {0!s}'.format(exception)
      log.error(message)
      raise TurbiniaException(message)
    return os.path.join('gs://', self.bucket, destination_path) 
Developer: google, Project: turbinia, Lines: 23, Source: output_manager.py
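
Example 5 above sets chunk_size on the Blob so that large files are uploaded in resumable chunks instead of a single request. A minimal standalone sketch of the same idea follows; the bucket, object, and local file names are hypothetical, and chunk_size must be a multiple of 256 KiB.

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket("my-example-bucket")        # hypothetical bucket name

# 1 MiB chunks (4 x 256 KiB); the library rejects chunk sizes that are not
# multiples of 256 KiB.
blob = storage.Blob("evidence/disk.img", bucket, chunk_size=4 * 256 * 1024)
blob.upload_from_filename("/tmp/disk.img", client=client)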

Example 6: setup_teardown

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def setup_teardown():
    # Create the product set csv file locally and upload it to GCS
    # This is so that there is a unique product set ID for all python version
    # tests.
    client = storage.Client(project=PROJECT_ID)
    bucket = client.get_bucket(PROJECT_ID)
    blob = storage.Blob("vision/{}.csv".format(FILENAME), bucket)
    blob.upload_from_string(
        '"gs://cloud-samples-data/vision/product_search/shoes_1.jpg",' +
        '"{}",'.format(IMAGE_URI_1) +
        '"{}",'.format(PRODUCT_SET_ID) +
        '"{}",'.format(PRODUCT_ID_1) +
        '"apparel",,"style=womens","0.1,0.1,0.9,0.1,0.9,0.9,0.1,0.9"')

    yield

    delete_product(PROJECT_ID, LOCATION, PRODUCT_ID_1)
    delete_product_set(PROJECT_ID, LOCATION, PRODUCT_SET_ID)
    # Delete the created file
    blob.delete(client) 
Developer: GoogleCloudPlatform, Project: python-docs-samples, Lines: 22, Source: import_product_sets_test.py

Example 7: delete_blob

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def delete_blob(to_delete):
    # [START delete_blob]
    from google.cloud.exceptions import NotFound

    client = storage.Client()
    bucket = client.get_bucket("my-bucket")
    blobs = list(bucket.list_blobs())
    assert len(blobs) > 0
    # [<Blob: my-bucket, my-file.txt>]
    bucket.delete_blob("my-file.txt")
    try:
        bucket.delete_blob("doesnt-exist")
    except NotFound:
        pass
    # [END delete_blob]

    blob = None
    # [START delete_blobs]
    bucket.delete_blobs([blob], on_error=lambda blob: None)
    # [END delete_blobs]

    to_delete.append(bucket) 
Developer: googleapis, Project: python-storage, Lines: 24, Source: snippets.py

Example 8: verify_gcs_path

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def verify_gcs_path(path):
  """Verifies that a GCS path exists.

  Args:
    path: A string that represents the target path.
  Returns:
    A boolean of the verification status.
  """
  storage_client = storage.Client()
  path_info = gcsutil.GcsFileName.from_path(path)
  try:
    bucket = storage_client.get_bucket(path_info.bucket)
  except exceptions.NotFound:
    return False
  return storage.Blob(bucket=bucket,
                      name=path_info.blob).exists(storage_client) 
Developer: GoogleCloudPlatform, Project: healthcare-deid, Lines: 18, Source: server.py

Example 9: write_n_line_file_to_gcs

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def write_n_line_file_to_gcs(project, temp_location, n):
    """
    Write an n-line file to the temp_location in Google Cloud Storage.
    Args:
        project: A string containing the GCP project-id.
        temp_location: A string specifying a GCS location to write to.
        n: An integer specifying the number of lines to write to a file.
    """
    # Prepare to write gcs file 'temp_num_records.txt' in the temp_location.
    bucket_name, path = temp_location.replace('gs://', '').split('/', 1)

    gcs_client = gcs.Client(project=project)
    temp_bucket = gcs_client.get_bucket(bucket_name)
    temp_blob = gcs.Blob(path + '/temp_num_records%s.txt' % uuid4(),
                         temp_bucket)

    # Write num_records newlines to a file_string. These will be our initial
    # PCollection elements.
    # This method was chosen because it proved more performant than beam.Create
    # for a large initial PCollection, and to take advantage of distributed
    # reads from GCS.
    file_string = '\n' * int(n)
    temp_blob.upload_from_string(file_string)
    return temp_blob 
Developer: GoogleCloudPlatform, Project: professional-services, Lines: 26, Source: PrettyDataGenerator.py

Example 10: url

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def url(self, name):
        """
        Return the public URL or a signed URL for the Blob.
        This DOES NOT check for existence of the Blob - that would make the
        code too slow for many use cases.
        """
        name = self._normalize_name(clean_name(name))
        blob = self.bucket.blob(name)

        if not self.custom_endpoint and self.default_acl == 'publicRead':
            return blob.public_url
        elif self.default_acl == 'publicRead':
            return '{storage_base_url}/{quoted_name}'.format(
                storage_base_url=self.custom_endpoint,
                quoted_name=_quote(name, safe=b"/~"),
            )
        elif not self.custom_endpoint:
            return blob.generate_signed_url(self.expiration)
        else:
            return blob.generate_signed_url(
                expiration=self.expiration,
                api_access_endpoint=self.custom_endpoint,
            ) 
Developer: jschneier, Project: django-storages, Lines: 25, Source: gcloud.py
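
For non-public objects, the url() method above falls back to Blob.generate_signed_url. A minimal sketch of calling it directly is shown below; the bucket and object names are hypothetical, and signing requires credentials that can sign (for example, a service account key).

from datetime import timedelta
from google.cloud import storage

client = storage.Client()
bucket = client.bucket("my-example-bucket")            # hypothetical bucket name
blob = bucket.blob("reports/summary.pdf")              # hypothetical object name

# A v4 signed URL that stays valid for one hour.
url = blob.generate_signed_url(expiration=timedelta(hours=1), version="v4")
print(url)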

Example 11: get_blob

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def get_blob(self, blob_name):
    """Gets google.cloud.storage.blob.Blob object by blob name."""
    return self.bucket.get_blob(blob_name) 
Developer: StephanZheng, Project: neural-fingerprinting, Lines: 5, Source: cloud_client.py

Example 12: new_blob

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def new_blob(self, blob_name):
    """Creates new storage blob with provided name."""
    return storage.Blob(blob_name, self.bucket) 
Developer: StephanZheng, Project: neural-fingerprinting, Lines: 5, Source: cloud_client.py

Example 13: _compose_vcf_data_files

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def _compose_vcf_data_files(project, vcf_data_files_folder):
  # type: (str, str) -> storage.Blob
  """Composes multiple VCF data files to one VCF data file.

  Args:
    project: The project name.
    vcf_data_files_folder: The folder that contains all VCF data files.
  """
  bucket_name, blob_prefix = gcsio.parse_gcs_path(vcf_data_files_folder)
  multi_process_composer = MultiProcessComposer(project, bucket_name,
                                                blob_prefix)
  return multi_process_composer.get_composed_blob() 
Developer: googlegenomics, Project: gcp-variant-transforms, Lines: 14, Source: vcf_file_composer.py

Example 14: _create_blob

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def _create_blob(client, file_path):
  # type: (storage.Client, str) -> storage.Blob
  bucket_name, blob_name = gcsio.parse_gcs_path(file_path)
  file_blob = client.get_bucket(bucket_name).blob(blob_name)
  file_blob.content_type = 'text/plain'
  return file_blob 
Developer: googlegenomics, Project: gcp-variant-transforms, Lines: 8, Source: vcf_file_composer.py

Example 15: get_composed_blob

# Required module import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Blob [as alias]
def get_composed_blob(self):
    # type: () -> storage.Blob
    """Returns the final blob that all blobs composed to."""
    return self._compose_blobs_to_one(self._blob_prefix) 
Developer: googlegenomics, Project: gcp-variant-transforms, Lines: 6, Source: vcf_file_composer.py


Note: the google.cloud.storage.Blob examples in this article were collected from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their original authors, and copyright remains with them; follow each project's license when redistributing or using the code. Do not reproduce this article without permission.