本文整理汇总了Python中google.cloud.storage.Bucket方法的典型用法代码示例。如果您正苦于以下问题:Python storage.Bucket方法的具体用法?Python storage.Bucket怎么用?Python storage.Bucket使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类google.cloud.storage
的用法示例。
在下文中一共展示了storage.Bucket方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: maybe_upload_file
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def maybe_upload_file(local_path):
    '''Upload a file to remote cloud storage when the path is addressed
    with an ``s3://`` or ``gs://`` scheme; otherwise do nothing.
    '''
    if not local_path.startswith(('s3://', 'gs://')):
        return
    scheme = local_path.split(':')[0]
    # Both schemes are the same length, so one slice handles either prefix.
    bucket_and_key = local_path[len("s3://"):]
    bucket_name, _, object_key = bucket_and_key.partition("/")
    # On Linux the double slash collapses, so the local copy of the file
    # lives at e.g. "s3:/example/file" rather than "s3://example/file".
    local_path = scheme + ':/' + bucket_and_key
    if scheme == 's3':
        import boto3
        s3 = boto3.client('s3', endpoint_url=os.environ.get('S3_ENDPOINT'))
        s3.upload_file(local_path, bucket_name, object_key)
    elif scheme == 'gs':
        from google.cloud import storage
        client = storage.Client()
        blob = storage.Blob(object_key, storage.Bucket(client, bucket_name))
        blob.upload_from_filename(local_path)
示例2: get_blob_update_time
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def get_blob_update_time(self, bucket_name, object_name):
    """
    Return the last-updated timestamp of an object in Google Cloud Storage.

    :param bucket_name: The Google Cloud Storage bucket where the object is.
    :type bucket_name: str
    :param object_name: The name of the blob whose update time is read.
    :type object_name: str
    """
    blob = self.get_conn().bucket(bucket_name).get_blob(blob_name=object_name)
    if blob is None:
        raise ValueError("Object ({}) not found in Bucket ({})".format(
            object_name, bucket_name))
    return blob.updated
示例3: delete_bucket
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def delete_bucket(self, bucket_name: str, force: bool = False):
    """
    Delete a bucket object from the Google Cloud Storage.

    :param bucket_name: name of the bucket which will be deleted
    :type bucket_name: str
    :param force: when False a non-empty bucket is not deleted;
        pass force=True to delete the bucket together with its contents
    :type: bool
    """
    conn = self.get_conn()
    self.log.info("Deleting %s bucket", bucket_name)
    try:
        conn.bucket(bucket_name).delete(force=force)
    except NotFound:
        self.log.info("Bucket %s not exists", bucket_name)
    else:
        self.log.info("Bucket %s has been deleted", bucket_name)
示例4: storage
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def storage(request):
    # Randomize the bucket name so parallel test runs never collide.
    name = "test_bucket_" + get_random_string(6, string.ascii_lowercase)
    gcs_storage = DjangoGCloudStorage(
        project=request.config.getoption("--gcs-project-name"),
        bucket=name,
        credentials_file_path=request.config.getoption("--gcs-credentials-file")
    )
    # The bucket must exist before the storage backend can be used.
    Bucket(gcs_storage.client, name).create(
        location=request.config.getoption("--gcs-bucket-location")
    )
    yield gcs_storage
    # Teardown: empty the bucket, then remove it.
    gcs_storage.bucket.delete_blobs(gcs_storage.bucket.list_blobs())
    gcs_storage.bucket.delete(force=True)
示例5: tearDown
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def tearDown(self):
    def _still_in_use(bad_request):
        # A delete can fail while the resource is still referenced elsewhere.
        return any(
            err["reason"] == "resourceInUse" for err in bad_request._errors
        )

    retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use)
    retry_conflict = RetryErrors(
        (Conflict, TooManyRequests, InternalServerError, ServiceUnavailable)
    )
    # Dispose of every resource registered for cleanup, retrying the
    # transient failure modes appropriate to each resource type.
    for resource in self.to_delete:
        if isinstance(resource, storage.Bucket):
            retry_conflict(resource.delete)(force=True)
        elif isinstance(resource, (Dataset, bigquery.DatasetReference)):
            retry_in_use(Config.CLIENT.delete_dataset)(resource, delete_contents=True)
        elif isinstance(resource, (Table, bigquery.TableReference)):
            retry_in_use(Config.CLIENT.delete_table)(resource)
        else:
            resource.delete()
示例6: __init__
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def __init__(self, bucket, project="", region="us", noop=False):
    """Setup the GCS storage backend with the bucket we will use and
    optional region."""
    # An empty project string means "use the client's default project".
    self.client = storage.Client() if project == "" else storage.Client(project)
    self.noop = noop
    self.bucket = storage.Bucket(self.client, bucket)
    # These properties only take effect when the bucket is created below.
    self.bucket.location = region
    self.bucket.storage_class = "STANDARD"
    # Create the bucket if it doesn't exist
    if not self.bucket.exists():
        if noop:
            logger.info("No-Op: Create bucket: %s" % bucket)
        else:
            self.bucket.create()
示例7: get_files
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def get_files(client: storage.Client,
              bucket: storage.Bucket) -> List[dict]:
    """Retrieves all files in a given GCS bucket
    Args:
        client: Object representing Python GCS client
        bucket: google.cloud.storage.Bucket holding bucket name
    Returns:
        List of dicts [{name: String holding file name,
                        type: String representing type of file, 'audio/flac'.
                       }]
    """
    # Resolve the bucket through the client rather than shadowing the param.
    gcs_bucket = client.get_bucket(bucket)
    files = []
    for blob in gcs_bucket.list_blobs():
        files.append({'name': blob.name,
                      'type': blob.content_type})
    return files
示例8: _print_config_details
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def _print_config_details(cloud_logger, config):
    """Print out the pertinent project/bucket details
    Args:
        cloud_logger: A GCP logging client instance
        config: A Configuration object with all of the config values needed for the script to run
    """
    details = (
        'Source Project: {}'.format(config.source_project),
        'Source Bucket: {}'.format(config.bucket_name),
        'Source Service Account: {}'.format(
            config.source_project_credentials.service_account_email),  # pylint: disable=no-member
        'Target Project: {}'.format(config.target_project),
        'Target Bucket: {}'.format(config.target_bucket_name),
        'Target Service Account: {}'.format(
            config.target_project_credentials.service_account_email),  # pylint: disable=no-member
    )
    for line in details:
        _print_and_log(cloud_logger, line)
示例9: create_bucket
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def create_bucket(self, bucket_name):
    """Create a GCS bucket named *bucket_name*, logging any failure.

    :param bucket_name: name of the bucket to create
    """
    try:
        bucket = self.storage_client.create_bucket(bucket_name)
        print('Bucket {} created.'.format(bucket.name))
    except Exception as err:
        # BUG FIX: traceback.print_exc() returns None, so the original
        # string concatenation raised TypeError inside this handler.
        # format_exc() returns the traceback as a string.
        tb = traceback.format_exc()
        logging.info(
            "Unable to create Bucket: " + str(err) + "\n Traceback: " + tb)
        append_result(str({"error": "Unable to create Bucket",
                           "error_message": str(err) + "\n Traceback: " + tb}))
        traceback.print_exc(file=sys.stdout)
示例10: add_bucket_labels
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def add_bucket_labels(self, bucket_name, tags):
    """Merge *tags* into the labels of an existing GCS bucket and patch it.

    :param bucket_name: name of the bucket to label
    :param tags: dict of label key/value pairs to merge into existing labels
    """
    try:
        bucket = self.storage_client.get_bucket(bucket_name)
        labels = bucket.labels
        labels.update(tags)
        bucket.labels = labels
        bucket.patch()
        print('Updated labels on {}.'.format(bucket_name))
    except Exception as err:
        # BUG FIX: traceback.print_exc() returns None, so the original
        # string concatenation raised TypeError inside this handler.
        # Also fixed the copy-pasted "Unable to create Bucket" message.
        tb = traceback.format_exc()
        logging.info(
            "Unable to add labels to Bucket: " + str(err) + "\n Traceback: " + tb)
        append_result(str({"error": "Unable to add labels to Bucket",
                           "error_message": str(err) + "\n Traceback: " + tb}))
        traceback.print_exc(file=sys.stdout)
示例11: remove_bucket
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def remove_bucket(self, bucket_name):
    """Empty and delete a GCS bucket, reporting the outcome.

    :param bucket_name: name of the bucket to remove
    """
    try:
        # Empty the bucket first — presumably because delete(force=True)
        # alone cannot handle large buckets; confirm against GCPActions.
        GCPActions().bucket_cleanup(bucket_name)
        storage_resource = storage.Bucket(self.storage_client, bucket_name)
        storage_resource.delete(force=True)
        print('Bucket {} removed.'.format(bucket_name))
    except Exception as err:
        # BUG FIX: traceback.print_exc() returns None, so the original
        # string concatenation raised TypeError inside this handler.
        tb = traceback.format_exc()
        logging.info(
            "Unable to remove Bucket: " + str(err) + "\n Traceback: " + tb)
        append_result(str({"error": "Unable to remove Bucket",
                           "error_message": str(err) + "\n Traceback: " + tb}))
        traceback.print_exc(file=sys.stdout)
示例12: __init__
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def __init__(self, path):
    """Prepare a buffered writer for a ``gs://`` object at *path*."""
    if not GCS_ENABLED:
        raise ImportError("`google-cloud-storage` must be installed in order to use "
                          "the 'gs://' protocol")
    self.path = path
    # Writes accumulate locally until flushed to the blob.
    self.buffer = io.BytesIO()
    bucket_name, filepath = self.bucket_and_path()
    self.blob = storage.Blob(
        filepath, storage.Bucket(storage.Client(), bucket_name))
示例13: test_create_bucket_with_resource
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def test_create_bucket_with_resource(self, mock_service, mock_bucket):
    # Purpose: verify that create_bucket() applies each key of an explicit
    # ``resource`` payload onto the bucket via _patch_property and then
    # creates the bucket with the requested project and location.
    test_bucket = 'test_bucket'
    test_project = 'test-project'
    test_location = 'EU'
    test_labels = {'env': 'prod'}
    test_storage_class = 'MULTI_REGIONAL'
    test_versioning_enabled = {"enabled": True}
    # Stub the client so bucket().create() succeeds and returns nothing.
    mock_service.return_value.bucket.return_value.create.return_value = None
    mock_bucket.return_value.storage_class = test_storage_class
    mock_bucket.return_value.labels = test_labels
    mock_bucket.return_value.versioning_enabled = True
    # NOTE(review): mock_service() here returns the same mock as
    # mock_service.return_value, so sample_bucket is the bucket mock the
    # hook will operate on; its .id is what create_bucket should return.
    sample_bucket = mock_service().bucket(bucket_name=test_bucket)
    # sample_bucket = storage.Bucket(client=mock_service, name=test_bucket)
    # Assert for resource other than None.
    response = self.gcs_hook.create_bucket(
        bucket_name=test_bucket,
        resource={"versioning": test_versioning_enabled},
        storage_class=test_storage_class,
        location=test_location,
        labels=test_labels,
        project_id=test_project
    )
    self.assertEqual(response, sample_bucket.id)
    # The "versioning" entry of ``resource`` must be patched onto the bucket.
    mock_service.return_value.bucket.return_value._patch_property.assert_called_once_with(
        name='versioning', value=test_versioning_enabled
    )
    # Creation must be requested exactly once with project and location.
    mock_service.return_value.bucket.return_value.create.assert_called_once_with(
        project=test_project, location=test_location
    )
示例14: _prepare_sync_plan
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def _prepare_sync_plan(
    source_bucket: storage.Bucket,
    destination_bucket: storage.Bucket,
    source_object: Optional[str],
    destination_object: Optional[str],
    recursive: bool,
) -> Tuple[Set[storage.Blob], Set[storage.Blob], Set[storage.Blob]]:
    """Compute the blob sets to copy, delete and rewrite for a bucket sync."""
    # Number of leading characters holding the parent path, stripped from
    # blob names so the two sides can be compared by relative name.
    src_prefix_len = len(source_object) if source_object else 0
    dst_prefix_len = len(destination_object) if destination_object else 0
    # A delimiter limits the listing to one level when not recursive.
    delimiter = None if recursive else "/"
    # Index each side by relative name for O(1) lookup.
    src_index = {
        blob.name[src_prefix_len:]: blob
        for blob in source_bucket.list_blobs(prefix=source_object,
                                             delimiter=delimiter)
    }
    dst_index = {
        blob.name[dst_prefix_len:]: blob
        for blob in destination_bucket.list_blobs(prefix=destination_object,
                                                  delimiter=delimiter)
    }
    src_names = set(src_index)
    dst_names = set(dst_index)
    # Present only at the source -> copy; only at the destination -> delete.
    to_copy_blobs = {src_index[n] for n in src_names - dst_names}  # type: Set[storage.Blob]
    to_delete_blobs = {dst_index[n] for n in dst_names - src_names}  # type: Set[storage.Blob]
    # Present on both sides: rewrite only when the crc32c checksums differ.
    to_rewrite_blobs = {
        src_index[n]
        for n in src_names & dst_names
        if src_index[n].crc32c != dst_index[n].crc32c
    }  # type: Set[storage.Blob]
    return to_copy_blobs, to_delete_blobs, to_rewrite_blobs
示例15: get_bucket
# 需要导入模块: from google.cloud import storage [as 别名]
# 或者: from google.cloud.storage import Bucket [as 别名]
def get_bucket(self):
    """Get the bucket defined by 'bucket_name' from the storage_client.

    Throws a ValueError when bucket_name is not set. If the bucket does not
    exist in GCS (or is not accessible), a new versioned bucket with a
    NEARLINE-archiving lifecycle rule is created and then re-fetched.
    """
    # Return the cached bucket when we already resolved it once.
    if self._bucket:
        return self._bucket
    if not self.bucket_name:
        raise ValueError("The 'bucket_name' needs to be set.")
    try:
        self._bucket = self.storage_client.get_bucket(self.bucket_name)
    except (exceptions.NotFound, exceptions.Forbidden):
        bucket = storage.Bucket(self.storage_client, name=self.bucket_name)
        bucket.versioning_enabled = True
        # Move superseded object versions to NEARLINE after 30 days.
        bucket.lifecycle_rules = [{
            'action': {'type': 'SetStorageClass', 'storageClass': 'NEARLINE'},
            'condition': {
                'numNewerVersions': 1,
                'matchesStorageClass': ['REGIONAL', 'STANDARD'],
                'age': 30
            }
        }]
        # FIX: the original wrapped this call in "except Conflict: raise",
        # a no-op handler that re-raised exactly what would propagate
        # anyway; removed. A Conflict here still propagates to the caller.
        bucket.create(location='europe-west4')
        self._bucket = self.storage_client.get_bucket(self.bucket_name)
    return self._bucket