This article collects and organizes typical code examples of the Python method boto.s3.connection.Key.set_acl. If you have been wondering what Key.set_acl does, how to call it, or what real-world code that uses it looks like, the hand-picked examples below should help. You can also explore further usage examples for the class the method belongs to, boto.s3.connection.Key.
Five code examples of the Key.set_acl method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code examples.
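Before diving into the examples, here is a minimal sketch of the pattern they all share: upload an object and then grant anonymous read access with a canned ACL. The bucket name and file name are placeholders, and AWS credentials are assumed to come from the usual boto environment variables.

# Minimal sketch of Key.set_acl; 'my-example-bucket' and 'hello.txt' are placeholders.
import boto
from boto.s3.key import Key   # the examples below import Key via boto.s3.connection

conn = boto.connect_s3()                        # credentials taken from the environment
bucket = conn.get_bucket('my-example-bucket')   # an existing bucket
key = Key(bucket)
key.key = 'hello.txt'
key.set_contents_from_filename('hello.txt')     # upload the local file
key.set_acl('public-read')                      # canned ACL: anyone may read the object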
Example 1: upload_file
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_acl [as alias]
def upload_file(conn, full_path):
    b = Bucket(conn, BUCKET)
    k = Key(b)
    k.key = full_path
    # Far-future Expires header, roughly 25 years out, in HTTP date format
    expires = datetime.utcnow() + timedelta(days=(25 * 365))
    expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
    k.set_metadata("Content-Type", mimetypes.guess_type(full_path)[0])
    k.set_metadata("Expires", expires)
    # max-age of about 25 years, expressed in seconds (86400 seconds per day)
    k.set_metadata("Cache-Control", "max-age={0}, public".format(25 * 365 * 86400))
    k.set_contents_from_filename(full_path)
    k.set_acl('public-read')
    print "{} -> http://s3.amazonaws.com/yaluandmike/{}".format(full_path, full_path)
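The example above hand-builds the public URL around a hard-coded bucket name. As a small follow-up sketch (assuming the same module-level conn and BUCKET names used by upload_file), boto can also generate an unsigned URL for an object that has just been made public-read:

# Sketch only: build the public URL via boto instead of string formatting.
# Assumes the same module-level `conn` and `BUCKET` used by upload_file above.
def public_url(conn, full_path):
    bucket = conn.get_bucket(BUCKET)
    key = bucket.get_key(full_path)
    # query_auth=False drops the signature, which is fine once the key is 'public-read'
    return key.generate_url(expires_in=0, query_auth=False)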
Example 2: upload
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_acl [as alias]
def upload(self, local_filepath, aws_filepath):
    """
    Uploads `local_filepath` to `aws_filepath`.
    Returns the published URL for the file.
    """
    logging.info('Publishing %s to %s' % (
        local_filepath, aws_filepath))
    key = Key(bucket=self.bucket, name=aws_filepath)
    key.key = aws_filepath
    key.set_contents_from_filename(local_filepath)
    key.set_acl('public-read')
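The snippet assumes the surrounding object already holds an open bucket in self.bucket. Purely as an illustration of what that setup might look like (the class name S3Publisher and its constructor are hypothetical, not taken from the original project):

# Hypothetical setup for the `self.bucket` attribute used above; names are illustrative only.
from boto.s3.connection import S3Connection

class S3Publisher(object):
    def __init__(self, aws_access_key_id, aws_secret_access_key, bucket_name):
        conn = S3Connection(aws_access_key_id, aws_secret_access_key)
        self.bucket = conn.get_bucket(bucket_name)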
Example 3: s3_put_directory
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_acl [as alias]
def s3_put_directory(s3_dir, local_dir):
    # Append " (DRY RUN)" to the log message only in dry-run mode; the parentheses
    # keep the conditional expression from swallowing the whole string.
    logger.info("Putting file://%s to s3://%s/%s" % (
        os.path.abspath(local_dir), aws_bucket.name, s3_dir) +
        (" (DRY RUN)" if settings.S3_DRY_RUN else ""))
    if not settings.S3_DRY_RUN:
        for dirname, dirnames, filenames in os.walk(local_dir):
            for filename in filenames:
                if filename == ".DS_Store":
                    continue
                filepath = os.path.join(dirname, filename)
                relpath = os.path.relpath(filepath, local_dir)
                key = os.path.join(s3_dir, relpath)
                aws_key = aws_bucket.get_key(key)
                if aws_key:
                    # Assume the file content is unchanged if the MD5 hashes match.
                    if aws_key.etag.strip("\"") == calc_file_md5(filepath):
                        continue
                else:
                    aws_key = Key(aws_bucket, key)
                aws_key.set_contents_from_filename(filepath)
                aws_key.set_acl('public-read')
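calc_file_md5 is a helper from the surrounding project and is not shown on this page. A plausible implementation that matches the ETag comparison above might look like the sketch below; note that an S3 ETag equals the MD5 digest only for objects uploaded in a single part.

# Hypothetical stand-in for the project's calc_file_md5 helper; not part of boto.
import hashlib

def calc_file_md5(filepath, chunk_size=8192):
    md5 = hashlib.md5()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()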
Example 4: post
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_acl [as alias]
def post():
    data = request.form
    files = request.files
    # adds a modicum of security...
    code = data.get('code')
    if code != os.environ.get('SEKKRIT_CODE'):
        err = Response(response="{'error':'unauthorized'}", status=401, mimetype="application/json")
        return err
    f = files.get('image')
    b = conn.get_bucket(S3_BUCKET)
    k = Key(b)
    path = data.get('filename')
    k.key = path
    k.set_contents_from_file(f)
    k.set_acl("public-read")
    # update the list of URLs stored in redis
    get_latest()
    # return the file name because reasons
    return json.dumps({"file": path})
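For context, a hypothetical client call to this handler might look like the following; the URL, the secret value, and the file names are placeholders rather than details from the original application.

# Hypothetical client for the Flask-style handler above; URL and values are placeholders.
import requests

resp = requests.post(
    'http://example.com/post',
    data={'code': 'the-sekkrit-code', 'filename': 'uploads/cat.jpg'},
    files={'image': open('cat.jpg', 'rb')},
)
print resp.text   # '{"file": "uploads/cat.jpg"}'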
Example 5: process_file
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_acl [as alias]
def process_file(aws_conn, filepath):
    mtime = get_mtime(filepath)
    name_200 = add_size_name(filepath, '200')
    name_800 = add_size_name(filepath, '800')
    mtime_200 = get_mtime(name_200)
    mtime_800 = get_mtime(name_800)
    im = None
    # Regenerate the resized copies only when the original is newer than them.
    if mtime_200 is None or mtime_200 < mtime:
        try:
            im = Image.open(filepath)
        except:
            return None
        generate_200(im, name_200)
    if mtime_800 is None or mtime_800 < mtime:
        if im is None:
            try:
                im = Image.open(filepath)
            except:
                return None
        generate_800(im, name_800)
    names = {
        'original': filepath,
        'thumbnail': name_200,
        'display': name_800,
    }
    b = Bucket(aws_conn, BUCKET)
    image_result = {}
    for image_type, name in names.items():
        aws_tag_path = add_size_name(name, 's3t') + '.meta'
        aws_key_path = name[len(GALLERY_DIR):].strip('/')
        image_result[image_type] = {
            'url': 'http://s3.amazonaws.com/{}/{}'.format(
                BUCKET,
                aws_key_path)
        }
        # The .meta tag file caches the resolution and records the last successful upload.
        if not is_newer(name, aws_tag_path):
            try:
                resolution = load_data(aws_tag_path)
                resolution['width']
            except:
                resolution = get_resolution(name)
                save_data(aws_tag_path, resolution)
            image_result[image_type].update(resolution)
            continue
        resolution = get_resolution(name)
        image_result[image_type].update(resolution)
        save_data(aws_tag_path, resolution)
        s3key = b.get_key(aws_key_path)
        mtime = get_mtime(name)
        if s3key and s3key.last_modified:
            print datetime.datetime(*parsedate(s3key.last_modified)[:6])
            print mtime
            # Skip the upload when the copy already on S3 is newer than the local file.
            if datetime.datetime(*parsedate(s3key.last_modified)[:6]) > mtime:
                with open(aws_tag_path, 'a'):
                    os.utime(aws_tag_path, None)
                continue
        print 'Sending {} to S3'.format(name)
        k = Key(b)
        k.key = aws_key_path
        expires = datetime.datetime.utcnow() + datetime.timedelta(days=25 * 365)
        expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
        k.set_metadata("Content-Type", mimetypes.guess_type(name)[0])
        k.set_metadata("Expires", expires)
        k.set_metadata("Cache-Control", "max-age={0}, public".format(86400 * 365 * 25))
        k.set_contents_from_filename(name)
        k.set_acl('public-read')
        # Touch the tag file to record that this variant is now in sync with S3.
        with open(aws_tag_path, 'a'):
            os.utime(aws_tag_path, None)
    photo_age = get_photo_age(filepath)
    image_result['caption'] = get_caption(filepath)
    return photo_age, image_result
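process_file relies on several small project helpers that are not reproduced on this page (get_mtime, add_size_name, get_resolution, and so on). The sketch below shows one plausible shape for two of them, only to make the mtime comparison and the derived file names above easier to follow; the real project may define them differently.

# Hypothetical versions of two helpers used above; the originals are not shown on this page.
import os
import datetime

def get_mtime(path):
    """Return the file's modification time as a naive UTC datetime, or None if it is missing."""
    if not os.path.exists(path):
        return None
    return datetime.datetime.utcfromtimestamp(os.path.getmtime(path))

def add_size_name(path, suffix):
    """Insert a size suffix before the extension, e.g. photo.jpg -> photo_200.jpg."""
    root, ext = os.path.splitext(path)
    return '{}_{}{}'.format(root, suffix, ext)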