This article collects typical code examples of the Python method boto.s3.connection.Key.set_contents_from_filename. If you are wondering what Key.set_contents_from_filename does, how to call it, or want to see it used in real code, the curated examples here may help. You can also explore further usage of its containing class, boto.s3.connection.Key.
Below are 12 code examples of Key.set_contents_from_filename, ordered by popularity.
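Before the individual examples, here is a minimal sketch of the overall call pattern. The credentials, bucket name, and paths below are placeholders, not values taken from any example on this page:

from boto.s3.connection import S3Connection
from boto.s3.key import Key  # Key's home module; the examples below import it via boto.s3.connection

# Placeholder credentials and names -- substitute your own.
conn = S3Connection('ACCESS_KEY', 'SECRET_KEY')
bucket = conn.get_bucket('my-bucket')

k = Key(bucket)
k.key = 'remote/name.txt'                        # object key inside the bucket
k.set_contents_from_filename('/tmp/local.txt')   # reads and uploads the local file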
Example 1: uploadFileToS3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def uploadFileToS3(self, filename):
'''Uploads file to S3.'''
destDir = '' # Root folder of the S3 bucket
destpath = os.path.join(destDir, os.path.basename(filename))
    print('Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name))
k = Key(self.bucket)
k.key = destpath
k.set_contents_from_filename(filename, reduced_redundancy=True)
Example 2: transfer_files
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def transfer_files(self):
from boto.s3.connection import S3Connection
from boto.s3.connection import Key
conn = S3Connection(self.extra_args['aws_access_key'],
self.extra_args['aws_secret_key'])
bucket = conn.get_bucket(self.extra_args['s3_bucket'])
for fname in self.files:
key = Key(bucket)
key.key = os.path.basename(fname)
key.set_contents_from_filename(fname)
Example 3: uploadFileToS3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def uploadFileToS3(self, filename):
"""Uploads file to S3."""
destDir = "" # Root folder of the S3 bucket
destpath = os.path.join(destDir, os.path.basename(filename))
print "Uploading %s to Amazon S3 bucket %s" % (filename, self.bucket_name)
k = Key(self.bucket)
k.key = destpath
k.set_contents_from_filename(filename, reduced_redundancy=True)
    print()  # This newline is needed so the compiled binary's path is printed on its own line.
Example 4: upload_file
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def upload_file(conn, full_path):
b = Bucket(conn, BUCKET)
k = Key(b)
k.key = full_path
expires = datetime.utcnow() + timedelta(days=(25 * 365))
expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
k.set_metadata("Content-Type", mimetypes.guess_type(full_path)[0])
k.set_metadata("Expires", expires)
k.set_metadata("Cache-Control", "max-age={0}, public".format(25 * 365 * 36400))
k.set_contents_from_filename(full_path)
k.set_acl('public-read')
print "{} -> http://s3.amazonaws.com/yaluandmike/{}".format(full_path, full_path)
Example 5: push_s3_file
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def push_s3_file(self, bucket, src=None, key=None):
"""
Upload a file to an S3 bucket
"""
if not src: src = self.working_file
if not key: key = os.path.basename(src)
conn = boto.connect_s3(self.aws_id, self.aws_key)
b = conn.create_bucket(bucket)
k = Key(b)
k.key = key
k.set_contents_from_filename(src)
self.working_file = '%s/%s' % (bucket, key)
return self.working_file
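A caveat on this example: create_bucket() creates the bucket when it does not already exist, and can raise S3CreateError when the name is owned by another account. If the upload should instead fail fast on a missing bucket, get_bucket() is the alternative; a hedged sketch:

from boto.exception import S3ResponseError

def get_existing_bucket(conn, bucket_name):
    '''Sketch: fail fast instead of silently creating a missing bucket.'''
    try:
        return conn.get_bucket(bucket_name)  # raises S3ResponseError if absent
    except S3ResponseError:
        raise RuntimeError('S3 bucket %r does not exist' % bucket_name)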
Example 6: upload
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def upload(self, local_filepath, aws_filepath):
"""
Uploads `local_filepath` to `aws_filepath`.
Returns the published URL for the file.
"""
logging.info('Publishing %s to %s' % (
local_filepath, aws_filepath))
key = Key(bucket=self.bucket, name=aws_filepath)
key.key = aws_filepath
key.set_contents_from_filename(local_filepath)
key.set_acl('public-read')
Example 7: upload
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def upload():
s3_conn = s3()
# bucket = s3_conn.create_bucket('distributed-web-crawler')
bucket = Bucket(s3_conn, 'distributed-web-crawler')
k = Key(bucket)
k.key = 'list_links_a.txt'
k.set_contents_from_filename('input_links_a.txt')
os.remove('input_links_a.txt')
s3_conn.close()
Example 8: upload_content
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def upload_content(bucket=None, key_name=None,
                   data_type=kUploadContentType.String, data=None):
    bucket = get_bucket(bucket)
    bucketKey = Key(bucket)
    bucketKey.key = key_name
    try:
        if data_type == kUploadContentType.String:
            bucketKey.set_contents_from_string(data)
        elif data_type == kUploadContentType.File:
            bucketKey.set_contents_from_file(data)
        elif data_type == kUploadContentType.FileName:
            bucketKey.set_contents_from_filename(data)
        elif data_type == kUploadContentType.Stream:
            bucketKey.set_contents_from_stream(data)
        return True
    except Exception:
        return False
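Example 8 dispatches over the set_contents_from_* family on Key. For reference, the three most common variants can also be called directly; a self-contained sketch, where the bucket argument and /tmp/data.bin are placeholders:

from boto.s3.key import Key

def upload_variants(bucket, key_name):
    '''Sketch: common set_contents_from_* variants for one key.'''
    k = Key(bucket)
    k.key = key_name

    # Upload an in-memory string.
    k.set_contents_from_string('hello, world')

    # Upload from an already-open file object (reads from the current position).
    with open('/tmp/data.bin', 'rb') as fp:
        k.set_contents_from_file(fp)

    # Open and upload a path in one call -- the method this page documents.
    k.set_contents_from_filename('/tmp/data.bin')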
Example 9: s3_put_directory
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def s3_put_directory(s3_dir, local_dir):
    message = "Putting file://%s to s3://%s/%s" % (
        os.path.abspath(local_dir), aws_bucket.name, s3_dir)
    if settings.S3_DRY_RUN:
        message += " (DRY RUN)"
    logger.info(message)
if not settings.S3_DRY_RUN:
for dirname, dirnames, filenames in os.walk(local_dir):
for filename in filenames:
if filename == ".DS_Store":
continue
filepath = os.path.join(dirname, filename)
relpath = os.path.relpath(filepath, local_dir)
key = os.path.join(s3_dir, relpath)
aws_key = aws_bucket.get_key(key)
if aws_key:
# assume the content of file did not change if md5 hashes are consistent.
if aws_key.etag.strip("\"") == calc_file_md5(filepath):
continue
else:
aws_key = Key(aws_bucket, key)
aws_key.set_contents_from_filename(filepath)
aws_key.set_acl('public-read')
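The ETag-versus-MD5 comparison above is sound for objects uploaded with set_contents_from_filename, since boto sends them in a single PUT and S3 then uses the MD5 hex digest as the ETag; it does not hold for multipart uploads. The calc_file_md5 helper is not shown in the example; a minimal hypothetical stand-in:

import hashlib

def calc_file_md5(filepath):
    '''Hypothetical stand-in: hex MD5 of a file, read in chunks.'''
    md5 = hashlib.md5()
    with open(filepath, 'rb') as fp:
        for chunk in iter(lambda: fp.read(8192), b''):
            md5.update(chunk)
    return md5.hexdigest()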
Example 10: sync_dir
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def sync_dir(self, directory):
k = Key(self.bucket_id)
tree = self._get_tree(directory)
for fname in tree:
k.key = fname
k.set_contents_from_filename(fname)
Example 11: process_file
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
def process_file(aws_conn, filepath):
mtime = get_mtime(filepath)
name_200 = add_size_name(filepath, '200')
name_800 = add_size_name(filepath, '800')
mtime_200 = get_mtime(name_200)
mtime_800 = get_mtime(name_800)
im = None
if mtime_200 is None or mtime_200 < mtime:
        try:
            im = Image.open(filepath)
        except Exception:
            return None
generate_200(im, name_200)
if mtime_800 is None or mtime_800 < mtime:
if im is None:
            try:
                im = Image.open(filepath)
            except Exception:
                return None
generate_800(im, name_800)
names = {
'original': filepath,
'thumbnail': name_200,
'display': name_800,
}
b = Bucket(aws_conn, BUCKET)
image_result = {}
for image_type, name in names.items():
aws_tag_path = add_size_name(name, 's3t') + '.meta'
aws_key_path = name[len(GALLERY_DIR):].strip('/')
image_result[image_type] = {
'url': 'http://s3.amazonaws.com/{}/{}'.format(
BUCKET,
aws_key_path)
}
if not is_newer(name, aws_tag_path):
try:
resolution = load_data(aws_tag_path)
resolution['width']
            except Exception:
resolution = get_resolution(name)
save_data(aws_tag_path, resolution)
image_result[image_type].update(resolution)
continue
resolution = get_resolution(name)
        image_result[image_type].update(resolution)
save_data(aws_tag_path, resolution)
s3key = b.get_key(aws_key_path)
mtime = get_mtime(name)
if s3key and s3key.last_modified:
            print(datetime.datetime(*parsedate(s3key.last_modified)[:6]))
            print(mtime)
if datetime.datetime(*parsedate(s3key.last_modified)[:6]) > mtime:
with open(aws_tag_path, 'a'):
os.utime(aws_tag_path, None)
continue
        print('Sending {} to S3'.format(name))
k = Key(b)
k.key = aws_key_path
expires = datetime.datetime.utcnow() + datetime.timedelta(days=25 * 365)
expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
k.set_metadata("Content-Type", mimetypes.guess_type(name)[0])
k.set_metadata("Expires", expires)
k.set_metadata("Cache-Control", "max-age={0}, public".format(86400 * 365 * 25))
k.set_contents_from_filename(name)
k.set_acl('public-read')
with open(aws_tag_path, 'a'):
os.utime(aws_tag_path, None)
photo_age = get_photo_age(filepath)
image_result['caption'] = get_caption(filepath)
return photo_age, image_result
Example 12: getArgument
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import set_contents_from_filename [as alias]
# MAGIC * Use the **boto** library in python to programmatically write data to S3.
# MAGIC * Below is sample code - replace with your own values.
# MAGIC * In this example, a file is downloaded from a URL.
# MAGIC * You can also generate the local files you write to S3 any other way you'd like.
# COMMAND ----------
url_to_retrieve = getArgument("1. URL_TO_RETRIEVE", "https://github.com/apache/spark/blob/master/README.md")
tmpfile = getArgument("2. TMP_FILE_PATH", "/tmp/spark_README.md")
s3_filename = getArgument("3. S3_FILE_PATHNAME", "/my-s3-bucket/spark_README.md")
# COMMAND ----------
# Download a url and save to a local file.
import urllib.request  # Python 3; on Python 2 this was urllib.urlretrieve
urllib.request.urlretrieve(url_to_retrieve, tmpfile)
# COMMAND ----------
# Note: boto should be available.
from boto.s3.connection import S3Connection
from boto.s3.connection import Key
# Open a connection to S3.
conn = S3Connection(ACCESS_KEY, SECRET_KEY)
bucket = conn.get_bucket(AWS_BUCKET_NAME)
k = Key(bucket)
k.key = s3_filename
k.set_contents_from_filename(tmpfile)
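To verify the upload from the same notebook, the key can be checked and read back; a hypothetical follow-up cell (the local path is a placeholder):
# COMMAND ----------
# Sanity check: confirm the object exists, then round-trip it to a new local file.
print(k.exists())
k.get_contents_to_filename('/tmp/spark_README_roundtrip.md')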