This article collects typical usage examples of `storage.Client` from Python's `google.cloud.storage` package. If you have been wondering what exactly `storage.Client` is for, how to use it, or where to find working examples, the curated code samples below may help. You can also explore the `google.cloud.storage` module, where the class is defined, for further details.
The following shows 15 code examples of `storage.Client`, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python examples.
Example 1: _compose_files

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def _compose_files(project, bucket_name, blob_names, composite_name):
    # type: (str, str, List[str], str) -> None
    """Composes multiple files (up to 32 objects) in GCS to one.

    Args:
        project: The project name.
        bucket_name: The name of the bucket where the `components` and the new
            composite are saved.
        blob_names: A list of blob object names.
        composite_name: Name of the new composite.
    """
    bucket = storage.Client(project).get_bucket(bucket_name)
    output_file_blob = bucket.blob(composite_name)
    output_file_blob.content_type = 'text/plain'
    blobs = [bucket.get_blob(blob_name) for blob_name in blob_names]
    output_file_blob.compose(blobs)
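A minimal usage sketch; the project, bucket, and blob names below are hypothetical placeholders:

# Hypothetical names; GCS compose accepts at most 32 source objects per call.
_compose_files(
    project='my-project',
    bucket_name='my-bucket',
    blob_names=['output/shard-0', 'output/shard-1', 'output/shard-2'],
    composite_name='output/merged.txt')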
Example 2: __init__

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def __init__(self, project, bucket_name, blob_prefix):
    # type: (str, str, str) -> None
    """Initializes a `MultiProcessComposer`.

    This class composes all blobs that start with `blob_prefix` to one.

    Args:
        project: The project name.
        bucket_name: The name of the bucket where the blob components and the
            new composite are saved.
        blob_prefix: The prefix used to filter blobs. Only the blobs with this
            prefix will be composed.
    """
    self._project = project
    self._bucket_name = bucket_name
    self._blob_prefix = blob_prefix
    self._bucket = storage.Client(project).get_bucket(bucket_name)
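A hedged construction sketch, assuming `MultiProcessComposer` is the enclosing class and using placeholder names:

# Requires credentials with access to the bucket at construction time.
composer = MultiProcessComposer(
    project='my-project',
    bucket_name='my-bucket',
    blob_prefix='output/shard-')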
Example 3: from_config

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
# Note: takes cls as its first argument, so it is presumably decorated as a
# @classmethod in the original class.
def from_config(cls, config, creds=None):
    """Returns an initialized CloudStorageAPI object.

    Args:
        config: common.ProjectConfig, the project configuration.
        creds: auth.CloudCredentials, the credentials to use for client
            authentication.

    Returns:
        An authenticated CloudStorageAPI instance.
    """
    if creds is None:
        creds = auth.CloudCredentials(config, cls.SCOPES)
    client = storage.Client(
        project=config.project, credentials=creds.get_credentials(cls.SCOPES))
    return cls(config, client)
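For context, a hypothetical call site; how `config` and `my_creds` are built depends on the surrounding project and is assumed here:

# Sketch only: `config` is a common.ProjectConfig, `my_creds` an auth.CloudCredentials.
gcs_api = CloudStorageAPI.from_config(config)                   # default credentials
gcs_api2 = CloudStorageAPI.from_config(config, creds=my_creds)  # explicit credentials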
Example 4: download_from_bucket

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def download_from_bucket(file_path):
    """Downloads a file from a Google Cloud Storage bucket."""
    path_segments = file_path[5:].split("/")  # strip the leading "gs://"
    storage_client = storage.Client()
    bucket_name = path_segments[0]
    bucket = storage_client.get_bucket(bucket_name)
    source = "/".join(path_segments[1:])
    blob = bucket.blob(source)
    # Flatten the object path into a single /tmp filename.
    destination = "/tmp/" + source.replace("/", "%2f")
    blob.download_to_filename(destination)
    print(f"{file_path} downloaded to {destination}.")
    return destination
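A hypothetical invocation; the bucket and object are made up, and application default credentials must be configured:

# Nested slashes in the object name are flattened into the local filename.
local_copy = download_from_bucket('gs://my-bucket/data/train.csv')
# -> '/tmp/data%2ftrain.csv'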
Example 5: _upload_to_gcs

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def _upload_to_gcs(self, gcs_project_id, target_bucket_name, bucket_folder, filename):
    '''Upload a CSV file to GCS.

    Args:
        gcs_project_id (str): project name
        target_bucket_name (str): name of GCS bucket
        bucket_folder (str): name of GCS folder
        filename (str): filepath to upload

    Returns:
        Nothing. Side effect is that the data is uploaded to GCS.
    '''
    storage_client = storage.Client(gcs_project_id)
    bucket = storage_client.get_bucket(target_bucket_name)
    # Note: os.sep assumes a POSIX host here; GCS object names always use "/".
    path = bucket_folder + os.sep + filename
    logging.info("Loading to GCS: %s", path)
    blob = bucket.blob(path)  # name in GCS
    blob.upload_from_filename(filename)
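A sketch of how this helper might be driven; `loader` stands in for whatever class defines `_upload_to_gcs`, and all names are placeholders:

# Uploads to gs://my-bucket/exports/report.csv (on a POSIX host).
loader._upload_to_gcs(
    gcs_project_id='my-project',
    target_bucket_name='my-bucket',
    bucket_folder='exports',
    filename='report.csv')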
Example 6: cost_usage_source_is_reachable

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def cost_usage_source_is_reachable(self, credential_name, data_source):
    """
    Verify that the GCP bucket exists and is reachable.

    Args:
        credential_name (object): not used; only present for interface compatibility
        data_source (dict): dict containing name of GCP storage bucket
    """
    storage_client = storage.Client()
    bucket = data_source["bucket"]
    try:
        bucket_info = storage_client.lookup_bucket(bucket)
        if not bucket_info:
            # If the lookup does not return anything, the bucket does not exist.
            key = "billing_source.bucket"
            message = f"The provided GCP bucket {bucket} does not exist"
            raise serializers.ValidationError(error_obj(key, message))
    except GoogleCloudError as e:
        key = "billing_source.bucket"
        raise serializers.ValidationError(error_obj(key, e.message))
    return True
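A hedged example of exercising this check; `GCPProvider` is the class that defines it (it also appears in Example 7), and the bucket name is a placeholder:

# Returns True, or raises serializers.ValidationError if the bucket is missing.
GCPProvider().cost_usage_source_is_reachable(None, {"bucket": "my-billing-bucket"})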
Example 7: __init__

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def __init__(self, task, customer_name, billing_source, **kwargs):
    """
    Constructor.

    Args:
        task (Object): bound celery object
        customer_name (str): Name of the customer
        billing_source (dict): dict containing name of GCP storage bucket
    """
    super().__init__(task, **kwargs)
    self.bucket_name = billing_source["bucket"]
    self.report_prefix = billing_source.get("report_prefix", "")
    self.customer_name = customer_name.replace(" ", "_")
    self._provider_uuid = kwargs.get("provider_uuid")
    try:
        GCPProvider().cost_usage_source_is_reachable(None, billing_source)
        self._storage_client = storage.Client()
        self._bucket_info = self._storage_client.lookup_bucket(self.bucket_name)
    except ValidationError as ex:
        msg = f"GCP bucket {self.bucket_name} for customer {customer_name} is not reachable. Error: {str(ex)}"
        LOG.error(log_json(self.request_id, msg, self.context))
        raise GCPReportDownloaderError(str(ex))
Example 8: upload_to_gcs

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def upload_to_gcs(filenames, gcs_bucket_path):
    """Uploads wave files to GCS, at the provided path."""
    path_parts = gcs_bucket_path[5:].split('/', 1)  # strip the leading "gs://"
    bucket_name = path_parts[0]
    if len(path_parts) == 1:
        key_prefix = ''
    elif path_parts[1].endswith('/'):
        key_prefix = path_parts[1]
    else:
        key_prefix = path_parts[1] + '/'

    client = storage.Client(project=os.environ["PROJECT_NAME"])
    bucket = client.get_bucket(bucket_name)

    def _upload_files(filenames):
        """Upload a list of files into a specific subdirectory."""
        for i, filename in enumerate(filenames):
            blob = bucket.blob(key_prefix + os.path.basename(filename))
            blob.upload_from_filename(filename)
            if not i % 5:
                tf.logging.info('Finished uploading file: %s' % filename)

    _upload_files(filenames)
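For illustration, with a hypothetical bucket path; note that `PROJECT_NAME` must be set in the environment:

# Files land under gs://my-bucket/waves/<basename>; the trailing '/' is added
# automatically when missing.
upload_to_gcs(['a.wav', 'b.wav'], 'gs://my-bucket/waves')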
Example 9: _upload_to_gcp_storage

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def _upload_to_gcp_storage(exec_dir):
    """Upload all files to GCP storage under the exec_dir folder.

    Args:
        exec_dir (str): The execution directory.
    """
    _bucket = storage.Client().bucket('resl-garage-benchmarks')
    exec_name = os.path.basename(exec_dir)
    for folder_name in os.listdir(exec_dir):
        folder_path = os.path.join(exec_dir, folder_name)
        if not os.path.isfile(folder_path):
            remote_folder = os.path.join(exec_name, folder_name)
            for file_name in os.listdir(folder_path):
                file_path = os.path.join(folder_path, file_name)
                if os.path.isfile(file_path):
                    blob = _bucket.blob(os.path.join(remote_folder, file_name))
                    blob.upload_from_filename(file_path)
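A minimal call sketch, assuming a local results directory laid out as <exec_dir>/<folder>/<files>; the path is hypothetical, while the bucket name is hard-coded in the function:

# Uploads every file one level deep to 'resl-garage-benchmarks' under 'run_2020_01_01/...'.
_upload_to_gcp_storage('/tmp/benchmarks/run_2020_01_01')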
Example 10: init_gcs

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def init_gcs():
    is_user_secrets_token_set = "KAGGLE_USER_SECRETS_TOKEN" in os.environ
    from google.cloud import storage
    if not is_user_secrets_token_set:
        return storage

    from kaggle_gcp import get_integrations
    if not get_integrations().has_gcs():
        return storage

    from kaggle_secrets import GcpTarget
    from kaggle_gcp import KaggleKernelCredentials
    monkeypatch_client(
        storage.Client,
        KaggleKernelCredentials(target=GcpTarget.GCS))
    return storage
Example 11: maybe_upload_file

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def maybe_upload_file(local_path):
    '''Upload a file to remote cloud storage
    if the path starts with gs:// or s3://.
    '''
    if local_path.startswith(('s3://', 'gs://')):
        prefix = local_path.split(':')[0]
        remote_bucket_path = local_path[len("s3://"):]  # same length as "gs://"
        bp = remote_bucket_path.split("/")
        bucket = bp[0]
        path = remote_bucket_path[1 + len(bucket):]
        # s3://example/file becomes s3:/example/file in Linux
        local_path = prefix + ':/' + remote_bucket_path

        if prefix == 's3':
            import boto3
            s3 = boto3.client('s3', endpoint_url=os.environ.get('S3_ENDPOINT'))
            s3.upload_file(local_path, bucket, path)
        elif prefix == 'gs':
            from google.cloud import storage
            client = storage.Client()
            Hbucket = storage.Bucket(client, bucket)
            blob = storage.Blob(path, Hbucket)
            blob.upload_from_filename(local_path)
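A hedged usage sketch with placeholder paths. Note the quirk documented in the code: for remote schemes the file is expected to exist locally at the scheme-mangled path (e.g. gs:/my-bucket/...):

maybe_upload_file('gs://my-bucket/models/ckpt.bin')  # uploads local 'gs:/my-bucket/models/ckpt.bin'
maybe_upload_file('s3://my-bucket/models/ckpt.bin')  # needs boto3; honors S3_ENDPOINT
maybe_upload_file('/home/user/models/ckpt.bin')      # no scheme prefix: nothing happens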
Example 12: __init__

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def __init__(self, fn):
    self.fn = fn
    if fn.startswith('gs://'):
        from google.cloud import storage
        self.s3client = None
        self.gclient = storage.Client()
        # Writes to a local temp file first, presumably uploaded to GCS when
        # the writer is closed elsewhere in the class.
        self.storage_dir = TemporaryDirectory()
        self.writer = tf.python_io.TFRecordWriter(
            os.path.join(self.storage_dir.name, 'temp.tfrecord'))
        self.bucket_name, self.file_name = self.fn.split(
            'gs://', 1)[1].split('/', 1)
    else:
        self.s3client = None
        self.gclient = None
        self.bucket_name = None
        self.file_name = None
        self.storage_dir = None
        self.writer = tf.python_io.TFRecordWriter(fn)
Example 13: gs_download_file

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def gs_download_file(path):
    if path is None:
        return None
    parsed_path = urlparse(path)
    bucket_name = parsed_path.netloc
    file_path = parsed_path.path[1:]  # drop the leading "/"
    try:
        gs_client = storage.Client()
        bucket = gs_client.get_bucket(bucket_name)
    except exceptions.DefaultCredentialsError:
        logger.info('Switching to anonymous google storage client')
        gs_client = storage.Client.create_anonymous_client()
        bucket = gs_client.bucket(bucket_name, user_project=None)
    blob = bucket.blob(file_path)
    tmp_path = os.path.join('/tmp', file_path.split(os.sep)[-1])
    blob.download_to_filename(tmp_path)
    return tmp_path
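A hypothetical call; the function falls back to an anonymous client when default credentials are absent, which only works for publicly readable buckets:

# Downloads gs://my-bucket/models/model.onnx to /tmp/model.onnx and returns that path.
tmp = gs_download_file('gs://my-bucket/models/model.onnx')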
Example 14: get_local_file

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def get_local_file(source_path):
    parsed_path = urlparse(source_path)
    if parsed_path.scheme == "gs":
        bucket_name = parsed_path.netloc
        file_path = parsed_path.path[1:]
        file_name = os.path.split(parsed_path.path)[1]
        try:
            gs_client = storage.Client()
            bucket = gs_client.get_bucket(bucket_name)
        except exceptions.DefaultCredentialsError:
            # If credentials fail, try to connect as an anonymous user.
            gs_client = storage.Client.create_anonymous_client()
            bucket = gs_client.bucket(bucket_name, user_project=None)
        blob = bucket.blob(file_path)
        blob.download_to_filename(file_name)
    elif parsed_path.scheme == "":
        # In case of a local path, just pass through the input argument.
        if os.path.isfile(source_path):
            file_name = source_path
        else:
            print("file " + source_path + " is not accessible")
            file_name = ""
    return file_name
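A usage sketch with placeholder paths:

local = get_local_file('gs://my-bucket/config.yaml')  # downloads to ./config.yaml
same = get_local_file('config.yaml')                  # returned as-is if the file exists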
Example 15: upload_file

# Required import: from google.cloud import storage [as alias]
# Or: from google.cloud.storage import Client [as alias]
def upload_file(source_file, target_folder):
    parsed_path = urlparse(target_folder)
    if parsed_path.scheme == "gs":
        bucket_name = parsed_path.netloc
        folder_path = parsed_path.path[1:]
        try:
            gs_client = storage.Client()
            bucket = gs_client.get_bucket(bucket_name)
            blob = bucket.blob(folder_path + "/" + source_file)
            blob.upload_from_filename(source_file)
        except Exception as er:
            print(er)
            return False
    elif parsed_path.scheme == "":
        if target_folder != ".":
            copy(source_file, target_folder)
    return True
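And a matching upload sketch, again with placeholder paths (`copy` in the local branch is presumably `shutil.copy`):

upload_file('results.json', 'gs://my-bucket/outputs')  # -> gs://my-bucket/outputs/results.json
upload_file('results.json', '/data/outputs')           # local copy into /data/outputs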