This article collects typical usage examples of boto.s3.connection.S3Connection in Python. If you have been wondering what connection.S3Connection does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore the boto.s3.connection module for further context.
Below are 15 code examples of connection.S3Connection, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
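As a quick orientation before the examples, here is a minimal sketch of the basic S3Connection workflow. The bucket and key names are placeholders, not taken from any example below:

from boto.s3.connection import S3Connection
from boto.s3.key import Key

# With no arguments, credentials are read from the environment or ~/.boto
conn = S3Connection()
bucket = conn.get_bucket('my-bucket')   # 'my-bucket' is a hypothetical name
k = Key(bucket)
k.key = 'path/to/file.txt'              # hypothetical key
k.set_contents_from_string('hello')     # upload
print(k.get_contents_as_string())       # download
bucket.delete_key(k)                    # clean up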
Example 1: test_bwa
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def test_bwa():
work_dir = tempfile.mkdtemp()
create_config(work_dir)
create_manifest(work_dir)
# Call Pipeline
try:
subprocess.check_call(['toil-bwa', 'run',
os.path.join(work_dir, 'jstore'),
'--manifest', os.path.join(work_dir, 'manifest.txt'),
'--config', os.path.join(work_dir, 'config.txt'),
'--retryCount', '1'])
finally:
shutil.rmtree(work_dir)
    # Clean up the test artifact that the pipeline uploaded to S3
    conn = S3Connection()
b = Bucket(conn, 'cgl-driver-projects')
k = Key(b)
k.key = 'test/ci/ci_test.bam'
k.delete()
Example 2: test_exome
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def test_exome():
workdir = tempfile.mkdtemp()
create_config_and_manifest(workdir)
# Call Pipeline
try:
base_command = ['toil-exome', 'run',
'--config', os.path.join(workdir, 'config-toil-exome.yaml'),
os.path.join(workdir, 'jstore')]
# Run with manifest
subprocess.check_call(base_command + ['--manifest', os.path.join(workdir, 'manifest-toil-exome.tsv')])
finally:
shutil.rmtree(workdir)
conn = S3Connection()
b = Bucket(conn, 'cgl-driver-projects')
k = Key(b)
k.key = 'test/ci/exome-ci-test.tar.gz'
k.delete()
Example 3: download_from_s3_url
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def download_from_s3_url(file_path, url):
    from urlparse import urlparse  # Python 2; use urllib.parse on Python 3
from boto.s3.connection import S3Connection
s3 = S3Connection()
try:
parsed_url = urlparse(url)
if not parsed_url.netloc or not parsed_url.path.startswith('/'):
raise RuntimeError("An S3 URL must be of the form s3:/BUCKET/ or "
"s3://BUCKET/KEY. '%s' is not." % url)
bucket = s3.get_bucket(parsed_url.netloc)
key = bucket.get_key(parsed_url.path[1:])
key.get_contents_to_filename(file_path)
finally:
s3.close()
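A hypothetical call to the function above, reusing the bucket and key from Example 1:

download_from_s3_url('/tmp/ci_test.bam', 's3://cgl-driver-projects/test/ci/ci_test.bam')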
Example 4: connect
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def connect(self): # pylint: disable=missing-return-doc,missing-return-type-doc
"""Connect to the S3 bucket, but only on non-WSL Linux systems."""
if not (platform.system() == "Linux" and "Microsoft" not in platform.release()):
return False
EC2_PROFILE = None if isEC2VM() else "laniakea" # pylint: disable=invalid-name
try:
conn = S3Connection(profile_name=EC2_PROFILE)
self.bucket = conn.get_bucket(self.bucket_name)
return True
except boto.provider.ProfileNotFoundError:
print(f'Unable to connect via boto using profile name "{EC2_PROFILE}" in ~/.boto')
return False
except boto.exception.S3ResponseError:
print(f'Unable to connect to the following bucket "{self.bucket_name}", please check your credentials.')
return False
Example 5: _get_boto_connection
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def _get_boto_connection():
from boto.s3.connection import S3Connection
class _v19_S3Connection(S3Connection):
"""A dummy S3Connection wrapper that doesn't do any synchronous download"""
def _mexe(self, method, bucket, key, headers, *args, **kwargs):
return headers
class _v20_S3Connection(S3Connection):
"""A dummy S3Connection wrapper that doesn't do any synchronous download"""
def _mexe(self, http_request, *args, **kwargs):
http_request.authorize(connection=self)
return http_request.headers
try:
import boto.auth
except ImportError:
_S3Connection = _v19_S3Connection
else:
_S3Connection = _v20_S3Connection
return _S3Connection
Example 6: _get_bucket
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def _get_bucket(self):
if isinstance(self.config.origin, S3Origin):
if not self._bucket:
bucket_dns_name = self.config.origin.dns_name
bucket_name = bucket_dns_name.replace('.s3.amazonaws.com', '')
from boto.s3.connection import S3Connection
s3 = S3Connection(self.connection.aws_access_key_id,
self.connection.aws_secret_access_key,
proxy=self.connection.proxy,
proxy_port=self.connection.proxy_port,
proxy_user=self.connection.proxy_user,
proxy_pass=self.connection.proxy_pass)
self._bucket = s3.get_bucket(bucket_name)
self._bucket.distribution = self
self._bucket.set_key_class(self._object_class)
return self._bucket
else:
raise NotImplementedError('Unable to get_objects on CustomOrigin')
Example 7: __s3_connect__
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def __s3_connect__(self):
"""
    connect to s3 - currently returns both an S3Connection and a boto3 client because
    they seem to offer different functionality - uploading files vs. generating signed
    urls. It seems pretty silly that this is the case - so feel free to fix it.
:return:
"""
# Adam has created keys which always work - had trouble with sending out emails otherwise
param_file = open("/app/config/aws.yml","rb")
param_details = yaml.load(param_file)
id_ = param_details["aws_access_key_id"]
key = param_details["aws_secret_access_key"]
    conn = S3Connection(id_, key)
# s3 = boto3.resource("s3",aws_access_key_id=id_,aws_secret_access_key=key)
client = boto3.client(
's3',
aws_access_key_id=id_,
aws_secret_access_key=key,
)
return conn,client
Example 8: upload_to_s3
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def upload_to_s3(css_file):
bucket_name = settings.AWS_BUCKET_NAME
conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
folder = 'webpack_bundles/'
bucket = conn.get_bucket(bucket_name=bucket_name)
filename = css_file.split('/')[-1]
    with open(css_file, 'r') as file_obj:
        content = file_obj.read()
key = folder + filename
mime = mimetypes.guess_type(filename)[0]
k = Key(bucket)
k.key = key # folder + filename
k.set_metadata("Content-Type", mime)
k.set_contents_from_string(content)
    k.set_acl("public-read")  # the original public_read flag was hard-coded to True
Example 9: files_to_s3
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def files_to_s3(files, bucket_name):
'''
INPUT (1) list 'files': all files to upload to s3 bucket
(2) string 'bucket_name': name of bucket to dump into
writes all files to s3 bucket using threads
'''
AWS_KEY = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET = os.environ['AWS_SECRET_ACCESS_KEY']
def upload(myfile):
conn = S3Connection(aws_access_key_id = AWS_KEY, aws_secret_access_key = AWS_SECRET)
bucket = conn.get_bucket(bucket_name)
        bucket.new_key(myfile).set_contents_from_filename(myfile)  # returns bytes written, not a Key
return myfile
    # Start one thread per file, then wait for every upload to complete
    threads = [threading.Thread(target=upload, args=(fname,)) for fname in files]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
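A hypothetical call, assuming AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are exported in the environment; the file and bucket names are placeholders:

files_to_s3(['scans/img_001.png', 'scans/img_002.png'], 'my-upload-bucket')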
Example 10: post
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def post(self, request):
super().post(request)
key = get_upload_path(self.project.id_label, self.form.cleaned_data["filename"])
datafile = ProjectDataFile(
user=self.project_member.member.user,
file=key,
metadata=self.form.cleaned_data["metadata"],
direct_sharing_project=self.project,
)
datafile.save()
datafile.datatypes.set(self.form.cleaned_data["datatypes"])
s3 = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
url = s3.generate_url(
expires_in=settings.INCOMPLETE_FILE_EXPIRATION_HOURS * 60 * 60,
method="PUT",
bucket=settings.AWS_STORAGE_BUCKET_NAME,
key=key,
)
return Response({"id": datafile.id, "url": url}, status=status.HTTP_201_CREATED)
Example 11: _delete_file
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def _delete_file(self, filename, obj):
storage_type = getattr(obj, self.storage_type_field, '')
bucket_name = getattr(obj, self.bucket_name_field, '')
if not (storage_type and bucket_name):
return super(S3FileUploadField, self)._delete_file(filename)
if storage_type != 's3':
raise ValueError(
'Storage type "%s" is invalid, the only supported storage type'
' (apart from default local storage) is s3.' % storage_type)
conn = S3Connection(self.access_key_id, self.access_key_secret)
bucket = conn.get_bucket(bucket_name)
path = self._get_s3_path(filename)
k = Key(bucket)
k.key = path
try:
bucket.delete_key(k)
except S3ResponseError:
pass
Example 12: _save_file
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def _save_file(self, temp_file, filename):
if not (self.storage_type and self.bucket_name):
return self._save_file_local(temp_file, filename)
if self.storage_type != 's3':
raise ValueError(
'Storage type "%s" is invalid, the only supported storage type'
' (apart from default local storage) is s3.'
% self.storage_type)
conn = S3Connection(self.access_key_id, self.access_key_secret)
bucket = conn.get_bucket(self.bucket_name)
path = self._get_s3_path(filename)
k = bucket.new_key(path)
k.set_contents_from_string(temp_file.getvalue())
k.set_acl(self.acl)
return filename
Example 13: _delete_thumbnail
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def _delete_thumbnail(self, filename, storage_type, bucket_name):
if not (storage_type and bucket_name):
self._delete_thumbnail_local(filename)
return
if storage_type != 's3':
raise ValueError(
'Storage type "%s" is invalid, the only supported storage type'
' (apart from default local storage) is s3.' % storage_type)
conn = S3Connection(self.access_key_id, self.access_key_secret)
bucket = conn.get_bucket(bucket_name)
path = self._get_s3_path(self.thumbnail_fn(filename))
k = Key(bucket)
k.key = path
try:
bucket.delete_key(k)
except S3ResponseError:
pass
Example 14: __init__
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def __init__(self, access_key_id, secret_access_key, bucket=None, host=None,
policy=None, encrypt_key=False, prefix=''):
policy = policy or CANNED_ACL_PUBLIC_READ
assert policy in [CANNED_ACL_PUBLIC_READ, CANNED_ACL_PRIVATE], (
"Key policy must be %s or %s" % (CANNED_ACL_PUBLIC_READ, CANNED_ACL_PRIVATE))
    self._policy = policy
self._encrypt_key = encrypt_key
if bucket is None:
bucket = 'filedepot-%s' % (access_key_id.lower(),)
kw = {}
if host is not None:
kw['host'] = host
self._conn = S3Connection(access_key_id, secret_access_key, **kw)
bucket = self._conn.lookup(bucket) or self._conn.create_bucket(bucket)
self._bucket_driver = BucketDriver(bucket, prefix)
Example 15: CopyToS3
# Required import: from boto.s3 import connection [as alias]
# Or: from boto.s3.connection import S3Connection [as alias]
def CopyToS3(s3_bucket, s3_folder, file_list, force, verbose):
aws_access_key = os.environ.get('AWS_ACCESSKEYID')
aws_secret_access_key = os.environ.get('AWS_SECRETACCESSKEY')
conn = S3Connection(aws_access_key, aws_secret_access_key)
mybucket = conn.get_bucket(s3_bucket)
k = Key(mybucket)
for f in file_list:
fname = os.path.basename(f)
k.key = os.path.join(s3_folder, fname)
# Check if it already exists
possible_key = mybucket.get_key(k.key)
if force or not possible_key:
            if verbose:
                print("storing to s3:", mybucket, k.key)
            k.set_contents_from_filename(f)
            mybucket.set_acl('public-read', k.key)
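A hypothetical invocation, assuming AWS_ACCESSKEYID and AWS_SECRETACCESSKEY are set in the environment; the bucket, folder, and file names are placeholders:

CopyToS3('my-bucket', 'reports/2020', ['out/a.csv', 'out/b.csv'], force=False, verbose=True)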