本文整理汇总了Python中boto.s3.key.Key类的典型用法代码示例。如果您正苦于以下问题:Python key.Key类的具体用法?Python key.Key怎么用?Python key.Key使用的例子?那么,这里精选的代码示例或许可以为您提供帮助。您也可以进一步了解该类所在模块boto.s3.key
的用法示例。
在下文中一共展示了key.Key类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: upload_output_to_s3
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def upload_output_to_s3(job, job_vars):
    """
    Upload the job's output tarball to S3 using boto.

    WARNING: ~/.boto credentials are necessary for this to succeed!

    job_vars: tuple Tuple of dictionaries: input_args and ids
    """
    import boto
    from boto.s3.key import Key
    input_args, ids = job_vars
    work_dir = job.fileStore.getLocalTempDir()
    sample_uuid = input_args['uuid']
    # s3_dir has the shape "<bucket>[/<optional/sub/dir>]"; everything after
    # the first slash is the in-bucket prefix (empty if there is no slash).
    bucket_name, _, bucket_dir = input_args['s3_dir'].partition('/')
    # Materialize the tarball locally before handing it to boto.
    tarball = return_input_paths(job, work_dir, ids, 'uuid.tar.gz')
    connection = boto.connect_s3()
    destination = Key(connection.get_bucket(bucket_name))
    destination.key = os.path.join(bucket_dir, sample_uuid + '.tar.gz')
    destination.set_contents_from_filename(tarball)
示例2: upload
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def upload(self, file_name, key_name):
    """Upload file_name to S3 as key_name, skipping when an identical key exists."""
    if self.s3_exists(key_name):
        remote_md5 = self.s3_md5hex(key_name)
        local_md5 = file_md5hash(file_name)
        # Identical checksum on both sides: nothing to do.
        if remote_md5 and local_md5 == remote_md5:
            logging.warning("Key %s already exists with same checksum (%s), skipping" % (key_name, remote_md5))
            return
        logging.debug("Key %s already exists but the local file checksum differs (local:%s, s3:%s). Re-uploading" % (
            key_name,
            local_md5,
            remote_md5
        ))
    file_size = self.get_file_size(file_name)
    # Large enough files go through the multipart path.
    use_multipart = file_size >= self.multipart_min_bytes and file_size >= self.chunk_bytes
    if use_multipart:
        self.upload_multipart(file_name, key_name, file_size)
        return
    self._uploads[file_name] = {
        "complete": False,
        "multipart": False,
        "result": self.start(file_name, key_name, file_size)
    }
示例3: get_records
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def get_records(id_, iterator):
    """
    Stream Common Crawl WAT records from S3 and yield only those whose
    HTML metadata contains at least one link to creativecommons.org.

    :param id_: partition id (unused here; kept for the caller's mapPartitionsWithIndex-style interface)
    :param iterator: iterable of S3 key URIs within the 'commoncrawl' bucket
    :return: generator of parsed JSON record dicts
    """
    conn = boto.connect_s3(anon=True, host='s3.amazonaws.com')
    bucket = conn.get_bucket('commoncrawl')
    for uri in iterator:
        key_ = Key(bucket, uri)
        _file = warc.WARCFile(fileobj=GzipStreamFile(key_))
        for record in _file:
            if record['Content-Type'] != 'application/json':
                continue
            record = json.loads(record.payload.read())
            try:
                links = record['Envelope']['Payload-Metadata']['HTTP-Response-Metadata']['HTML-Metadata']['Links']
                # Use a list comprehension rather than filter(): on Python 3
                # filter() returns an iterator, so the original len() test
                # would raise TypeError. A link missing 'url' raises KeyError,
                # swallowed below exactly as before.
                cc_links = [link for link in links if "creativecommons.org" in link['url']]
                if cc_links:
                    yield record
            except KeyError:
                # Records without the nested HTML-Metadata/Links path are skipped.
                pass
示例4: push_file
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def push_file(src_path, s3_path, verbose=False, overwrite=False):
    """
    Upload the local file src_path to S3 at s3_path with a public-read ACL.

    :param src_path: local path of the file to upload
    :param s3_path: destination key in the bucket
    :param verbose: print progress messages
    :param overwrite: allow replacing an existing key
    :raises Exception: if the key already exists and overwrite is False
    """
    key = _get_key(s3_path)
    if key is not None:
        if not overwrite:
            raise Exception('File already at %s' % s3_path)
        if verbose:
            # print() with a single argument is valid on both Python 2 and 3;
            # the original py2-only print statement broke py3 compatibility.
            print('Overwriting existing %s.' % s3_path)
    key = Key(_get_bucket(), s3_path)
    # Set an explicit Content-Type for the common formats; the extensions
    # are mutually exclusive, so an elif chain is equivalent and clearer.
    if s3_path.endswith(('.TIF', '.tif')):
        key.content_type = 'image/tiff'
    elif s3_path.endswith('.jpg'):
        key.content_type = 'image/jpeg'
    elif s3_path.endswith('.txt'):
        key.content_type = 'text/plain'
    bytes_uploaded = key.set_contents_from_filename(src_path, policy='public-read')
    if verbose:
        print('Uploaded %d bytes from %s to %s.' % (
            bytes_uploaded, src_path, s3_path))
示例5: handle_s3
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def handle_s3(tmp_path, start_time):
    """
    Upload the '<start_time>.zip' backup archive from the project's temp
    folder to 'backups/<start_time>.zip' in the configured S3 bucket.

    :param tmp_path: unused here; kept for the caller's interface
    :param start_time: timestamp string used to name the archive
    """
    print("Sending to S3.")
    file_name = '{0}.zip'.format(start_time)
    file_path = os.path.join(settings.BASE_DIR, 'files', 'temp', file_name)
    END_POINT = settings.END_POINT
    S3_HOST = settings.S3_HOST
    UPLOADED_FILENAME = 'backups/{0}.zip'.format(start_time)
    # include folders in file path. If it doesn't exist, it will be created
    s3 = boto.s3.connect_to_region(END_POINT,
                                   aws_access_key_id=settings.S3_ACCESS_KEY,
                                   aws_secret_access_key=settings.S3_SECRET_KEY,
                                   host=S3_HOST)
    bucket = s3.get_bucket(settings.S3_BUCKET_NAME)
    k = Key(bucket)
    k.key = UPLOADED_FILENAME
    # 'with' guarantees the handle is closed even if the upload raises;
    # the original opened the file and never closed it.
    with open(file_path, 'rb') as f:
        k.set_contents_from_file(f, cb=mycb, num_cb=200)
示例6: setUp
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def setUp(self):
    """Create one known key in the test bucket and record names for the tests."""
    import boto
    from boto.exception import NoAuthHandlerFound
    from boto.s3.key import Key
    keys = ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']
    # Skip the whole test case unless AWS credentials are present in the env.
    if any(name not in os.environ for name in keys):
        self.skipTest('To test s3, set %s in .env' % ' and '.join(keys))
    self.bucket_name = os.environ.get('AWS_TEST_BUCKET', 'drf-to-s3-test')
    conn = boto.connect_s3()
    bucket = conn.get_bucket(self.bucket_name)
    seeded = Key(bucket)
    seeded.key = "%s%s.txt" % (str(uuid.uuid4()), self.prefix)
    seeded.set_contents_from_string('This is a test of S3')
    # Remember both an existing key (with its etag) and a key that does not exist.
    self.existing_key = seeded.key
    self.existing_key_etag = seeded.etag
    self.bucket = bucket
    self.nonexisting_key = "%s%s.txt" % (str(uuid.uuid4()), self.prefix)
    self.new_key = None
示例7: test_copy_fails_with_mismatched_etag_after_subsequent_update
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def test_copy_fails_with_mismatched_etag_after_subsequent_update(self):
    """Overwrite the key so its etag changes, then assert copy rejects the stale etag."""
    from boto.s3.key import Key
    from drf_to_s3 import s3
    stale_etag = self.existing_key_etag
    # Rewriting the object's contents changes its etag on S3.
    updated = Key(self.bucket)
    updated.key = self.existing_key
    updated.set_contents_from_string('Another test')
    with self.assertRaises(s3.ObjectNotFoundException):
        s3.copy(src_bucket=self.bucket_name,
                src_key=self.existing_key,
                dst_bucket=self.bucket_name,
                dst_key=self.nonexisting_key,
                src_etag=stale_etag,
                validate_src_etag=True)
示例8: insert
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def insert(self, report):
    """
    create a json string from an NmapReport instance
    and push it to S3 bucket.

    :param NmapReport: obj to insert
    :rtype: string
    :return: str id

    :raises Exception: when serialization or the S3 write fails
    :todo: Add tagging option
    """
    try:
        oid = ObjectId()
        mykey = Key(self.bucket)
        mykey.key = str(oid)
        strjsonnmapreport = json.dumps(report, cls=ReportEncoder)
        mykey.set_contents_from_string(strjsonnmapreport)
    except Exception:
        # The original bare 'except:' would also trap SystemExit and
        # KeyboardInterrupt; catch only real errors before re-raising.
        raise Exception("Failed to add nmap object in s3 bucket")
    return str(oid)
示例9: get
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def get(self, str_report_id=None):
    """
    select a NmapReport by Id.

    :param str: id
    :rtype: NmapReport
    :return: NmapReport object, or None when the id is invalid or not found
    """
    # Guard clause: anything but a proper string id yields no report.
    # (isinstance already rejects None, matching the original's double check.)
    if not isinstance(str_report_id, str):
        return None
    try:
        s3_key = Key(self.bucket)
        s3_key.key = str_report_id
        report_dict = json.loads(s3_key.get_contents_as_string())
        return NmapParser.parse_fromdict(report_dict)
    except S3ResponseError:
        # Missing key (or other S3 error) -> behave as "not found".
        return None
示例10: make_request
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def make_request(self, method, bucket='', key='', headers=None, data='',
                 query_args=None, sender=None, override_num_retries=None,
                 retry_handler=None):
    """
    Build S3-specific request and auth paths, then delegate to the base
    connection's make_request.

    ``bucket`` and ``key`` may be passed as strings or as Bucket/Key
    objects; objects are reduced to their ``.name`` first.
    """
    if isinstance(bucket, self.bucket_class):
        bucket = bucket.name
    if isinstance(key, Key):
        key = key.name
    # The configured calling format decides how bucket/key map onto the
    # request path and host (path-style vs. virtual-hosted addressing).
    path = self.calling_format.build_path_base(bucket, key)
    boto.log.debug('path=%s' % path)
    # auth_path is built separately from path — presumably it is the form
    # used for request signing; confirm against boto's connection internals.
    auth_path = self.calling_format.build_auth_path(bucket, key)
    boto.log.debug('auth_path=%s' % auth_path)
    host = self.calling_format.build_host(self.server_name(), bucket)
    if query_args:
        # Query args are appended to both paths so they stay in sync.
        path += '?' + query_args
        boto.log.debug('path=%s' % path)
        auth_path += '?' + query_args
        boto.log.debug('auth_path=%s' % auth_path)
    return super(S3Connection, self).make_request(
        method, path, headers,
        data, host, auth_path, sender,
        override_num_retries=override_num_retries,
        retry_handler=retry_handler
    )
示例11: _copy
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def _copy(self, filename, id_=-1):  # type: (str, int) -> None
    """Copy a file or folder to the bucket.

    Does not yet support chunking.

    Args:
        filename: Path to copy.
        id_: Value stored in the key's 'jamf-package-id' metadata.
    """
    bucket_key = os.path.basename(filename)
    # Skip the upload when a key with this name is already in the bucket.
    if self.bucket.get_key(bucket_key):
        print("Already exists")
        return
    new_key = Key(self.bucket)
    new_key.key = bucket_key
    new_key.set_metadata('jamf-package-id', id_)
    new_key.set_contents_from_filename(filename)
示例12: upload_to_s3
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def upload_to_s3(css_file):
    """
    Upload a local CSS bundle into the 'webpack_bundles/' folder of the
    configured AWS bucket and make it publicly readable.

    :param css_file: local path of the file to upload
    """
    bucket_name = settings.AWS_BUCKET_NAME
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    folder = 'webpack_bundles/'
    # Fetch the bucket once; the original issued a second, redundant
    # get_bucket call for the same bucket.
    bucket = conn.get_bucket(bucket_name=bucket_name)
    filename = css_file.split('/')[-1]
    # 'with' closes the handle deterministically; the original leaked it.
    with open(css_file, 'r') as file_obj:
        content = file_obj.read()
    key = folder + filename
    mime = mimetypes.guess_type(filename)[0]
    k = Key(bucket)
    k.key = key  # folder + filename
    k.set_metadata("Content-Type", mime)
    k.set_contents_from_string(content)
    # The original's public_read flag was hard-coded True, so the ACL is
    # applied unconditionally.
    k.set_acl("public-read")
示例13: set_profile_image
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def set_profile_image(self, file_path, file_name, content_type):
    """
    Takes a local path, name and content-type, which are parameters passed in by
    nginx upload module. Converts to RGB, resizes to thumbnail and uploads to S3.
    Returns False if some conditions aren't met, such as error making thumbnail
    or content type is one we don't support.
    """
    accepted = ('image/gif', 'image/jpeg', 'image/jpg', 'image/png')
    if content_type not in accepted:
        return False
    thumb_buffer = cStringIO.StringIO()
    # 100 * 2 px square — presumably sized for 2x-density displays; confirm.
    if not transform_to_square_thumbnail(file_path, 100 * 2, thumb_buffer):
        return False
    s3_key = Key(S3Bucket())
    s3_key.key = "account/%s/profile.jpg" % (self.id)
    s3_key.set_metadata('Content-Type', 'image/jpeg')
    s3_key.set_metadata('Cache-Control', 'max-age=86400')
    s3_key.set_contents_from_string(thumb_buffer.getvalue())
    s3_key.set_acl('public-read')
    self.profile_image = 1
    self.save()
    return True
示例14: _delete_file
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def _delete_file(self, filename, obj):
    """Delete filename from its S3 bucket, or fall back to local-storage deletion."""
    storage_type = getattr(obj, self.storage_type_field, '')
    bucket_name = getattr(obj, self.bucket_name_field, '')
    # No S3 metadata recorded on the object -> defer to the default handler.
    if not storage_type or not bucket_name:
        return super(S3FileUploadField, self)._delete_file(filename)
    if storage_type != 's3':
        raise ValueError(
            'Storage type "%s" is invalid, the only supported storage type'
            ' (apart from default local storage) is s3.' % storage_type)
    connection = S3Connection(self.access_key_id, self.access_key_secret)
    target_bucket = connection.get_bucket(bucket_name)
    stale_key = Key(target_bucket)
    stale_key.key = self._get_s3_path(filename)
    try:
        target_bucket.delete_key(stale_key)
    except S3ResponseError:
        # Best-effort delete: a missing key or S3 error is ignored.
        pass
示例15: _delete_thumbnail
# 需要导入模块: from boto.s3 import key [as 别名]
# 或者: from boto.s3.key import Key [as 别名]
def _delete_thumbnail(self, filename, storage_type, bucket_name):
    """Delete the thumbnail for filename from S3, or locally when no S3 info is given."""
    # Without both pieces of S3 metadata, fall back to local deletion.
    if not storage_type or not bucket_name:
        self._delete_thumbnail_local(filename)
        return
    if storage_type != 's3':
        raise ValueError(
            'Storage type "%s" is invalid, the only supported storage type'
            ' (apart from default local storage) is s3.' % storage_type)
    connection = S3Connection(self.access_key_id, self.access_key_secret)
    target_bucket = connection.get_bucket(bucket_name)
    thumb_key = Key(target_bucket)
    thumb_key.key = self._get_s3_path(self.thumbnail_fn(filename))
    try:
        target_bucket.delete_key(thumb_key)
    except S3ResponseError:
        # Best-effort delete: ignore S3 errors for a missing thumbnail.
        pass
# Saving