本文整理汇总了Python中boto.s3.bucket.Bucket类的典型用法代码示例。如果您正苦于以下问题:Python Bucket类的具体用法?Python Bucket怎么用?Python Bucket使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Bucket类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_file
def get_file(self):
    """Fetch 'file10m.dat' from Bucket01 into a uniquely-named temp file,
    then delete the local copy (a throughput/smoke test pattern)."""
    bucket = Bucket(self.conn, 'Bucket01')
    key = bucket.get_key('file10m.dat')
    tmp_path = '%s.tmp' % str(uuid.uuid4())
    key.get_contents_to_filename(tmp_path)
    # remove the file from the local fs
    os.remove(tmp_path)
示例2: load
def load(context, url, callback):
    """Load *url* via the HTTP loader or from the configured S3 bucket.

    Invokes *callback* with the object's bytes, or with None when the
    bucket fails validation or the key does not exist.
    """
    enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False)
    # startswith('http') instead of 'http' in url: the substring test also
    # matched S3 keys that merely *contain* "http" somewhere in the path.
    if enable_http_loader and url.startswith('http'):
        return http_loader.load(context, url, callback)

    url = urllib2.unquote(url)

    if context.config.S3_LOADER_BUCKET:
        bucket = context.config.S3_LOADER_BUCKET
    else:
        bucket, url = _get_bucket(url)

    if not _validate_bucket(context, bucket):
        return callback(None)

    bucket_loader = Bucket(
        connection=thumbor_aws.connection.get_connection(context),
        name=bucket
    )

    file_key = bucket_loader.get_key(url)
    if not file_key:
        return callback(None)

    return callback(file_key.read())
示例3: Compost
class Compost(object):
    """Back up the files of a local directory into an S3 bucket."""

    def __init__(self, directory, bucket):
        # directory: local path to back up; bucket: S3 bucket name.
        self.directory = directory
        self.bucket = Bucket(connection=boto.connect_s3(), name=bucket)

    def turn(self):
        """
        'Turn' the compost, i.e. make a backup of all files in the local directory.
        """
        for filename, full_path in self._local_files():
            logger.debug('backing up {}'.format(filename))
            key = self.bucket.new_key(filename)
            key.set_contents_from_filename(full_path)

    def list(self):
        """Return a list of known backed up files."""
        return [k.name for k in self.bucket.get_all_keys()]

    def read(self, filename):
        """
        Return the contents of named file, or the empty string if the files does not exist.
        """
        # get_key returns None for a missing key; the previous code called
        # get_contents_as_string() on it directly and raised AttributeError
        # instead of returning '' as documented.
        key = self.bucket.get_key(filename)
        if key is None:
            return ''
        return key.get_contents_as_string()

    def _local_files(self):
        # Yield (entry name, full path) pairs for every entry in the directory.
        for f in os.listdir(self.directory):
            yield f, os.path.join(self.directory, f)
示例4: rmup
def rmup(args):
    """Cancel one multipart upload for a bucket, or all of them with -a."""
    parser = option_parser("rmup URL [UPLOAD]")
    parser.add_option("-a", "--all", dest="all", action="store_true",
        default=False, help="Cancel all uploads for the specified bucket")
    options, args = parser.parse_args(args)

    if options.all:
        if not args:
            parser.error("Specify bucket URL")
    else:
        if len(args) != 2:
            parser.error("Specify bucket URL and UPLOAD")
        upload = args[1]

    uri = parse_uri(args[0])
    if uri.bucket is None:
        raise Exception("URL must contain a bucket: %s" % args[0])
    if uri.key is not None:
        raise Exception("URL cannot contain a key: %s" % args[0])

    config = get_config(options)
    conn = get_connection(config, uri)

    # Boto has no direct cancel-by-id call, so walk the bucket's
    # in-progress multipart uploads and cancel the matching ones.
    bucket = Bucket(connection=conn, name=uri.bucket)
    for mpu in bucket.list_multipart_uploads():
        if options.all or mpu.id == upload:
            info("Removing upload %s" % mpu.id)
            mpu.cancel_upload()
示例5: load
def load(context, url, callback):
    """Load *url* via the HTTP loader or from an S3 bucket.

    Always invokes *callback*: with the object's bytes on success, or with
    None when the bucket fails validation or the key cannot be fetched.
    """
    enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False)
    if enable_http_loader and url.startswith('http'):
        return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)

    url = urllib2.unquote(url)

    bucket = context.config.get('S3_LOADER_BUCKET', default=None)
    if not bucket:
        bucket, url = _get_bucket(url)

    if not _validate_bucket(context, bucket):
        # Previously no callback was made on this path, leaving the caller
        # waiting forever; signal the failure explicitly.
        return callback(None)

    bucket_loader = Bucket(
        connection=thumbor_aws.connection.get_connection(context),
        name=bucket
    )

    file_key = None
    try:
        file_key = bucket_loader.get_key(url)
    except Exception as e:  # 'except Exception, e' is Python-2-only syntax
        logger.warn("ERROR retrieving image from S3 {0}: {1}".format(url, str(e)))

    if not file_key:
        # Same fix as above: report the miss instead of silently returning.
        return callback(None)

    callback(file_key.read())
示例6: test_basic_anon
def test_basic_anon(self):
    """Anonymous listing fails on a private bucket and succeeds after
    the bucket is switched to public-read."""
    authed_conn = S3Connection()

    # create a fresh, uniquely-named, empty bucket
    bucket_name = 'test-%d' % int(time.time())
    owned_bucket = authed_conn.create_bucket(bucket_name)

    # anonymous listing must be rejected while the bucket is private
    anon_conn = S3Connection(anon=True)
    anon_bucket = Bucket(anon_conn, bucket_name)
    try:
        next(iter(anon_bucket.list()))
        self.fail("anon bucket list should fail")
    except S3ResponseError:
        pass

    # grant anonymous read access, wait for the ACL to propagate, retry
    owned_bucket.set_acl('public-read')
    time.sleep(10)  # 5 seconds turned out not to be enough
    try:
        next(iter(anon_bucket.list()))
        self.fail("not expecting contents")
    except S3ResponseError as e:
        self.fail("We should have public-read access, but received "
                  "an error: %s" % e)
    except StopIteration:
        pass

    # cleanup
    authed_conn.delete_bucket(owned_bucket)
示例7: test_basic_anon
def test_basic_anon(self):
    """Anonymous reads succeed only once the bucket is public-read.

    Uses the builtin next() instead of the Python-2-only .next() iterator
    method, matching the sibling version of this test and keeping the
    example runnable under Python 3.
    """
    auth_con = S3Connection()
    # create a new, empty bucket
    bucket_name = 'test-%d' % int(time.time())
    auth_bucket = auth_con.create_bucket(bucket_name)

    # try read the bucket anonymously
    anon_con = S3Connection(anon=True)
    anon_bucket = Bucket(anon_con, bucket_name)
    try:
        next(iter(anon_bucket.list()))
        self.fail("anon bucket list should fail")
    except S3ResponseError:
        pass

    # give bucket anon user access and anon read again
    auth_bucket.set_acl('public-read')
    try:
        next(iter(anon_bucket.list()))
        self.fail("not expecting contents")
    except S3ResponseError:
        self.fail("we should have public-read access.")
    except StopIteration:
        pass

    # cleanup
    auth_con.delete_bucket(auth_bucket)
示例8: test_restore_header_with_ongoing_restore
def test_restore_header_with_ongoing_restore(self):
    """An in-progress Glacier restore sets ongoing_restore and leaves
    expiry_date unset."""
    self.set_http_response(
        status_code=200,
        header=[('x-amz-restore', 'ongoing-request="true"')])
    bucket = Bucket(self.service_connection, 'mybucket')
    key = bucket.get_key('myglacierkey')
    self.assertTrue(key.ongoing_restore)
    self.assertIsNone(key.expiry_date)
示例9: test_500_retry
def test_500_retry(self, sleep_mock):
    """A persistent HTTP 500 must surface as BotoServerError after retries."""
    self.set_http_response(status_code=500)
    bucket = Bucket(self.service_connection, 'mybucket')
    key = bucket.new_key('test_failure')
    payload = StringIO('This will attempt to retry.')
    with self.assertRaises(BotoServerError):
        key.send_file(payload)
示例10: empty_bucket
def empty_bucket(bucket_name):
    """Destructive helper: delete every key in *bucket_name*."""
    # Imports kept local so the module does not require boto at import time.
    import boto
    from boto.s3.bucket import Bucket
    target = Bucket(connection=boto.connect_s3(), name=bucket_name)
    for key in target.get_all_keys():
        key.delete()
示例11: ls
def ls(bucket_name, prefix='', pattern=None):
    """Return key names in *bucket_name* under *prefix*; when *pattern*
    is given, keep only names matching that regex (re.search)."""
    bucket = Bucket(boto.connect_s3(), bucket_name)
    names = [key.key for key in bucket.list(prefix=prefix)]
    if pattern:
        matcher = re.compile(pattern)
        names = [name for name in names if matcher.search(name)]
    return names
示例12: test_restore_completed
def test_restore_completed(self):
    """A finished Glacier restore clears ongoing_restore and exposes the
    expiry date parsed from the x-amz-restore header."""
    self.set_http_response(
        status_code=200,
        header=[('x-amz-restore',
                 'ongoing-request="false", '
                 'expiry-date="Fri, 21 Dec 2012 00:00:00 GMT"')])
    bucket = Bucket(self.service_connection, 'mybucket')
    key = bucket.get_key('myglacierkey')
    self.assertFalse(key.ongoing_restore)
    self.assertEqual(key.expiry_date, 'Fri, 21 Dec 2012 00:00:00 GMT')
示例13: __init__
def __init__(self):
    """Set up bucket handles and ensure the backing bucket exists.

    Relies on module-level connections s3_conn / s3_public_conn and the
    class attribute BUCKET_NAME (defined outside this view).
    """
    super(S3StaticFileStorage, self).__init__()
    # Two handles to the same bucket name over different connections:
    # presumably one authenticated and one for public access — confirm
    # against the connection setup elsewhere in the module.
    self._bucket = Bucket(connection=s3_conn, name=self.BUCKET_NAME)
    self._bucket_public = Bucket(connection=s3_public_conn,
                                 name=self.BUCKET_NAME)
    # Create the bucket (publicly readable) on first use.
    if s3_conn.lookup(self.BUCKET_NAME) is None:
        s3_conn.create_bucket(self.BUCKET_NAME, policy='public-read')
    # Allow CORS access (for web fonts)
    self._bucket.set_cors(self._get_cors_config())
示例14: test_500_retry
def test_500_retry(self):
    """A persistent HTTP 500 must surface as BotoServerError.

    Rewritten to use the assertRaises context manager — consistent with
    the mocked-sleep variant of this test — instead of the
    try/self.fail/except pattern, which is easy to get subtly wrong.
    """
    self.set_http_response(status_code=500)
    b = Bucket(self.service_connection, 'mybucket')
    k = b.new_key('test_failure')
    fail_file = StringIO('This will attempt to retry.')
    with self.assertRaises(BotoServerError):
        k.send_file(fail_file)
示例15: main
def main(arg):
    """Expect a 404 when copying from a bucket that does not exist.

    arg[0] and arg[1] name two buckets to create; copy_key then references
    the nonexistent bucket 'nosuchbucket' as the copy source.
    """
    # Copy source bucket does not exist
    try:
        # Dropped the dead `bucket = Bucket()` pre-assignment: its value
        # was immediately overwritten by create_bucket below.
        bucket = conn.create_bucket(arg[0])
        bucket2 = conn.create_bucket(arg[1])
        key = bucket.new_key('test.txt')
        key.set_contents_from_string("Hello World!")
        bucket2.copy_key('cptest', 'nosuchbucket', 'test.txt')
    except S3ResponseError as e:  # 'except X, e' is Python-2-only syntax
        Expectexception(e, 404)