This article collects typical usage examples of the Bucket.new_key method from Python's boto.s3.bucket module. Wondering what Bucket.new_key does, how to call it, or what real-world usage looks like? The curated examples below should help. You can also explore further examples from the containing class, boto.s3.bucket.Bucket.
The following shows 10 code examples of Bucket.new_key, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
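Before the examples, here is a minimal sketch of the method itself. Bucket.new_key(key_name) only constructs a local Key object bound to the bucket; no request is sent until one of the set_contents_from_* methods performs the actual upload. The bucket name and key below are placeholders, and the sketch assumes AWS credentials are available in the environment or boto config.

import boto

conn = boto.connect_s3()  # picks up credentials from the environment/boto config
bucket = conn.get_bucket('my-example-bucket')  # placeholder bucket name

# new_key() only builds a Key object; nothing is uploaded yet
key = bucket.new_key('hello.txt')

# the upload happens here
key.set_contents_from_string('Hello, S3!')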
Example 1: Compost
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
class Compost(object):

    def __init__(self, directory, bucket):
        self.directory = directory
        self.bucket = Bucket(connection=boto.connect_s3(), name=bucket)

    def turn(self):
        """
        'Turn' the compost, i.e. make a backup of all files in the local directory.
        """
        for filename, full_path in self._local_files():
            logger.debug('backing up {}'.format(filename))
            key = self.bucket.new_key(filename)
            key.set_contents_from_filename(full_path)

    def list(self):
        """Return a list of known backed-up files."""
        return [k.name for k in self.bucket.get_all_keys()]

    def read(self, filename):
        """
        Return the contents of the named file, or the empty string if the file does not exist.
        """
        key = self.bucket.get_key(filename)
        # get_key() returns None for a missing key, so guard before reading
        return key.get_contents_as_string() if key else ''

    def _local_files(self):
        for f in os.listdir(self.directory):
            yield f, os.path.join(self.directory, f)
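A hypothetical driver for the class above, assuming a local ./data directory and an existing bucket (both names are placeholders):

compost = Compost('./data', 'my-backup-bucket')  # placeholder directory and bucket
compost.turn()                                   # upload every local file
print(compost.list())                            # names of the backed-up keys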
Example 2: test_500_retry
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
def test_500_retry(self, sleep_mock):
    self.set_http_response(status_code=500)
    b = Bucket(self.service_connection, 'mybucket')
    k = b.new_key('test_failure')
    fail_file = StringIO('This will attempt to retry.')
    with self.assertRaises(BotoServerError):
        k.send_file(fail_file)
Example 3: test_500_retry
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
def test_500_retry(self):
    self.set_http_response(status_code=500)
    b = Bucket(self.service_connection, 'mybucket')
    k = b.new_key('test_failure')
    fail_file = StringIO('This will attempt to retry.')
    try:
        k.send_file(fail_file)
        self.fail("This shouldn't ever succeed.")
    except BotoServerError:
        pass
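Examples 2 and 3 exercise the same failure path and differ only in style: the assertRaises context manager in Example 2 is the more idiomatic unittest form, while Example 3 spells out the equivalent try/fail/except pattern by hand.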
Example 4: main
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
def main(arg):
    # Copying from a source bucket that does not exist should fail with a 404.
    # `conn` and `Expectexception` are provided by the surrounding test harness.
    try:
        bucket = conn.create_bucket(arg[0])
        bucket2 = conn.create_bucket(arg[1])
        key = bucket.new_key('test.txt')
        key.set_contents_from_string("Hello World!")
        bucket2.copy_key('cptest', 'nosuchbucket', 'test.txt')
    except S3ResponseError as e:
        Expectexception(e, 404)
Example 5: test_400_timeout
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
def test_400_timeout(self, sleep_mock):
    weird_timeout_body = "<Error><Code>RequestTimeout</Code></Error>"
    self.set_http_response(status_code=400, body=weird_timeout_body)
    b = Bucket(self.service_connection, 'mybucket')
    k = b.new_key('test_failure')
    fail_file = StringIO('This will pretend to be chunk-able.')
    # Decorate should_retry with a call counter.
    k.should_retry = counter(k.should_retry)
    self.assertEqual(k.should_retry.count, 0)
    with self.assertRaises(BotoServerError):
        k.send_file(fail_file)
    # assertEqual, not assertTrue: the intent is to check the count's value.
    self.assertEqual(k.should_retry.count, 1)
Example 6: upload
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
def upload(user, passwd, bucket, metadata, key, fd):
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
    bucket = '{}-{}'.format(user.lower(), bucket.strip('-'))
    try:
        bucket = conn.create_bucket(bucket, headers=metadata)
    except S3CreateError as e:
        if e.status == 409:
            # Bucket already exists; wrap it instead of creating it.
            bucket = Bucket(conn, bucket)
        else:
            raise
    key = bucket.new_key(key)
    key.set_contents_from_file(fd)
    return key.generate_url(0).split('?')[0]
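The final line is a common trick: generate_url(0) produces a signed URL with an immediate expiry, and splitting on '?' discards the signature query string, leaving a plain, stable URL. Such a URL only works if the key itself is publicly readable.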
Example 7: test_400_timeout
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
def test_400_timeout(self):
    weird_timeout_body = "<Error><Code>RequestTimeout</Code></Error>"
    self.set_http_response(status_code=400, body=weird_timeout_body)
    b = Bucket(self.service_connection, 'mybucket')
    k = b.new_key('test_failure')
    fail_file = StringIO('This will pretend to be chunk-able.')
    # Decorate.
    k.should_retry = counter(k.should_retry)
    self.assertEqual(k.should_retry.count, 0)
    try:
        k.send_file(fail_file)
        self.fail("This shouldn't ever succeed.")
    except BotoServerError:
        pass
    # assertEqual, not assertTrue: the intent is to check the count's value.
    self.assertEqual(k.should_retry.count, 1)
Example 8: set
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
from boto import connect_s3  # needed for the connect_s3() call below
from boto.s3.bucket import Bucket
from tilequeue.tile import deserialize_coord
from tilequeue.tile import serialize_coord
import fileinput

bucket_name = 'mapzen-tiles-assets'
key_name = 'test/integration-test-coords.txt'

coords_to_store = set()
for line in fileinput.input():
    coord = deserialize_coord(line.strip())
    assert coord
    if coord.zoom == 0:
        uplifted_coord = coord
    else:
        # Lift each coordinate to its parent tile one zoom level up.
        uplifted_coord = coord.zoomBy(-1).container()
    coords_to_store.add(uplifted_coord)

sorted_coords = sorted(coords_to_store)
coords_str = '\n'.join(map(serialize_coord, sorted_coords))

conn = connect_s3()
bucket = Bucket(conn, bucket_name)
key = bucket.new_key(key_name)
key.set_contents_from_string(
    coords_str,
    headers={'Content-Type': 'text/plain'},
)
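Because the script reads its input through fileinput, it accepts coordinates either from files named on the command line or from stdin; a hypothetical invocation (the script name is illustrative):

python store_test_coords.py coords.txt
# or equivalently: cat coords.txt | python store_test_coords.py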
Example 9: relpath
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
this_file_relative = relpath(this_file, content_directory)
print "file = " + this_file
print "relative = " + this_file_relative
print
# is this file on S3?
this_key = bucket.get_key(this_file_relative)
if not this_key:
    # upload to S3
    print this_file_relative + " is not on S3. Uploading..."
    # create a new key in the bucket
    this_key = bucket.new_key(this_file_relative)
    # upload our file as this key's contents (headers=None, replace=True)
    try:
        this_key.set_contents_from_filename(this_file, None, True)
        this_key.set_acl("public-read")
    except S3ResponseError as e:
        print "Unable to upload " + this_file_relative + " to Amazon S3."
        # S3ResponseError exposes .status and .reason, not errno/strerror
        print "S3ResponseError({0}): {1}.".format(e.status, e.reason)
# is the file now there?
this_key = bucket.get_key(this_file_relative)
if this_key:
Example 10: S3StaticFileStorage
# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import new_key [as alias]
class S3StaticFileStorage(Storage):
    BUCKET_NAME = settings.S3_STATICFILES_BUCKET
    KEY_POLICY = 'public-read'
    CHUNK_SIZE = 100 << 20  # 100 MiB

    def __init__(self):
        super(S3StaticFileStorage, self).__init__()
        self._bucket = Bucket(connection=s3_conn, name=self.BUCKET_NAME)
        self._bucket_public = Bucket(connection=s3_public_conn,
                                     name=self.BUCKET_NAME)
        if s3_conn.lookup(self.BUCKET_NAME) is None:
            s3_conn.create_bucket(self.BUCKET_NAME, policy='public-read')
        # Allow CORS access (for web fonts)
        self._bucket.set_cors(self._get_cors_config())

    def _get_cors_config(self):
        cors = CORSConfiguration()
        cors.add_rule(['GET'], ['*'])
        return cors

    def _get_key(self, name):
        key = self._bucket.get_key(name)
        if key is None:
            raise IOError('No such key')
        return key

    def _open(self, name, mode='rb'):
        if mode not in ('r', 'rb'):
            raise IOError('_open() only supports reading')
        key = self._get_key(name)
        key.open_read()
        return File(key)

    def _save(self, name, content):
        if name.endswith('.css'):
            content_type = 'text/css'
        elif name.endswith('.js'):
            content_type = 'application/javascript'
        elif name.endswith('.json'):
            content_type = 'application/json'
        elif hasattr(content.file, 'getvalue'):
            content_type = magic.from_buffer(content.file.getvalue(),
                                             mime=True)
        else:
            content_type = magic.from_file(content.file.name, mime=True)
        hdrs = {
            'Content-Type': content_type,
        }
        if content.size > self.CHUNK_SIZE:
            # Upload in chunks
            upload = self._bucket.initiate_multipart_upload(
                name, policy=self.KEY_POLICY, headers=hdrs)
            for i, buf in enumerate(content.chunks(self.CHUNK_SIZE), 1):
                upload.upload_part_from_file(StringIO(buf), i)
            upload.complete_upload()
        else:
            # Upload all at once
            key = self._bucket.new_key(name)
            key.set_contents_from_string(content.read(),
                                         policy=self.KEY_POLICY, headers=hdrs)
        return name

    def get_available_name(self, name):
        return name

    def get_valid_name(self, name):
        return name

    def delete(self, name):
        self._bucket.delete_key(name)

    def exists(self, name):
        key = self._bucket.get_key(name)
        return key is not None

    def listdir(self, path):
        path = path.lstrip('/')
        return ([], [key.name for key in self._bucket.list(prefix=path)])

    def modified_time(self, name):
        key = self._get_key(name)
        stamp = dateutil.parser.parse(key.last_modified)
        # Convert to naive datetime in local time, as FileSystemStorage does
        return stamp.astimezone(tzlocal()).replace(tzinfo=None)

    def size(self, name):
        key = self._get_key(name)
        return key.size

    def url(self, name):
        key = self._bucket_public.new_key(name)
        return key.generate_url(0, query_auth=False)
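Note that url() calls new_key() purely to build a Key object locally; generate_url(0, query_auth=False) then yields an unsigned, non-expiring URL without any network round-trip. In a Django project, a storage class like this is typically activated via the STATICFILES_STORAGE setting; the dotted path below is illustrative and depends on where the class actually lives:

# settings.py (hypothetical module path)
STATICFILES_STORAGE = 'myproject.storage.S3StaticFileStorage'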