This article collects typical usage examples of the Python boto.s3.connection.Key class. If you are wondering what the Key class does or how to use it, the curated examples below should help.
The following section presents 15 code examples of the Key class, sorted by popularity by default.
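Before the individual examples, here is a minimal sketch of the workflow most of them follow: bind a Key to a bucket, set its key name, then upload, download, or delete the object. The credentials, bucket name, and object names below are placeholders invented for illustration, not values taken from the examples.

from boto.s3.connection import S3Connection, Key

# Connect and open a bucket (placeholder credentials and bucket name)
conn = S3Connection('MY_ACCESS_KEY_ID', 'MY_SECRET_ACCESS_KEY')
bucket = conn.get_bucket('my-example-bucket')

# Upload: bind a Key to the bucket, name the object, write its contents
k = Key(bucket)
k.key = 'examples/hello.txt'
k.set_contents_from_string('hello world')

# Download the same object to a local file
k.get_contents_to_filename('hello_local.txt')

# Delete the object when it is no longer needed
k.delete()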
Example 1: test_bwa
def test_bwa():
    work_dir = tempfile.mkdtemp()
    create_config(work_dir)
    create_manifest(work_dir)
    # Call Pipeline
    try:
        subprocess.check_call(
            [
                "toil-bwa",
                "run",
                os.path.join(work_dir, "jstore"),
                "--manifest",
                os.path.join(work_dir, "manifest.txt"),
                "--config",
                os.path.join(work_dir, "config.txt"),
                "--retryCount",
                "1",
            ]
        )
    finally:
        # Clean up the local work directory and the test output on S3
        shutil.rmtree(work_dir)
        conn = S3Connection()
        b = Bucket(conn, "cgl-driver-projects")
        k = Key(b)
        k.key = "test/ci/ci_test.bam"
        k.delete()
Example 2: screenshot
def screenshot(request, val):
    if val == 1:
        conn = S3Connection('##', '##')
        bucket = conn.get_bucket('lheston-bucket')
        k = Key(bucket)
        k.key = '//lab3' + request + '_toS3.png'
        driver = webdriver.PhantomJS()  # or add to your PATH
        driver.set_window_size(1024, 768)  # optional
        driver.get(request)
        driver.save_screenshot('tempfile.png')
        driver.quit()  # quit() must be called, not merely referenced
        file1 = open('tempfile.png', 'rb')
        k.set_contents_from_file(file1)
        file1.close()
        os.remove('tempfile.png')  # remove the temp file only after the upload
        return str(request + '_toS3.png')
    elif val == 2:
        text = '/lab3' + request
        conn = S3Connection('##', '##')
        S3_BUCKET_NAME = 'lheston-bucket'
        bucket = Bucket(conn, S3_BUCKET_NAME)
        bucket = bucket.delete_key(text)
        #bucket.delete_key('/lab3/' + request.split(':')[1])
        #k = Key(b)
        #k.name = k.get_key(text)
        #b.delete_key(k)
        #b.delete_key('//lab3' + request.split(':')[1] + '_toS3.png')
    else:
        return str('incorrect input')
Example 3: upload_to_s3
def upload_to_s3(fp, name):
    conn = _get_s3_connection()
    bucket = conn.create_bucket('muxlist')
    k = Key(bucket)
    k.key = name
    k.set_contents_from_file(fp)
    return 'http://muxlist.s3.amazonaws.com/%s' % name
Example 4: test_upload_and_download_with_encryption
def test_upload_and_download_with_encryption(tmpdir):
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    s3_dir = 's3://cgl-driver-projects/test'
    s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
    # Download the file
    url = 'https://s3-us-west-2.amazonaws.com/cgl-driver-projects/test/upload_file'
    download_url(url=url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
    download_fpath = os.path.join(work_dir, 'download_file')
    assert os.path.exists(download_fpath)
    assert filecmp.cmp(upload_fpath, download_fpath)
    # Delete the Key
    conn = S3Connection()
    b = Bucket(conn, 'cgl-driver-projects')
    k = Key(b)
    k.key = 'test/upload_file'
    k.delete()
Example 5: test_upload_and_download_with_encryption
def test_upload_and_download_with_encryption(tmpdir):
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    from contextlib import closing  # needed for closing() below
    from uuid import uuid4          # needed for uuid4() below
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
Example 6: download_file_s3
def download_file_s3(aws_path, aws_config, local_folder=DATA_DL_PATH):
    """ Download a file from an S3 bucket and save it in the local folder. """
    # remove the prefix and extract the S3 bucket, folder, and file name
    m = re.match(S3_PREFIX, aws_path)
    split = aws_path[len(m.group()):].split('/')
    s3_bucket = split.pop(0)
    s3_folder = '/'.join(split[:-1])
    keyname = split[-1]
    # create the local folder if necessary
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname
    if os.path.isfile(path):
        logger.warning('file %s already exists!' % path)
        return path
    conn = S3Connection(aws_config.access_key, aws_config.secret_key)
    bucket = conn.get_bucket(s3_bucket)
    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname
    logger.debug('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)
    logger.info('file saved at %s' % path)
    return path
Example 7: mapper
def mapper(self, _, line):
    line = line.rstrip().split('\t', 1)[-1]
    filename = line.rsplit('/', 1)[-1]
    first_rec = None
    f = open(filename, 'w')
    for i in xrange(10):
        try:
            conn = boto.connect_s3(anon=True)
            bucket = conn.get_bucket('aws-publicdatasets')
            key = Key(bucket, line)
            key.get_contents_to_file(f)
            f.close()
            records = warc.WARCFile(fileobj=gzip.open(filename, 'rb'))
            break
        except Exception as e:
            continue
    else:
        logger.error('10 attempts to get file {} failed, skipping...'.format(filename))
        return
    try:
        for i, record in enumerate(records):
            if record.type != 'response':
                _ = record.payload.read()
                continue
            for key, value in self.process_record(record):
                yield key, value
            self.increment_counter('commoncrawl', 'processed_records', 1)
    except Exception:
        logger.error(traceback.format_exc())
        self.increment_counter('errors', 'general', 1)
    finally:
        f.close()
        os.unlink(filename)
Example 8: delete
def delete():
    #try:
    songid = int(request.args.get('songid'))
    song = Song.query.filter_by(id=songid).first()
    songdata = song.songdata  # capture the value before the row is deleted below
    votes = Vote.query.filter_by(songdata=songdata).all()
    for x in votes:
        db.session.delete(x)
    db.session.commit()
    db.session.delete(song)
    db.session.commit()
    try:
        conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
        b = Bucket(conn, S3_BUCKET_NAME)
        k = Key(b)
        k.key = songdata.lower() + '.mp3'
        b.delete_key(k)
    except:
        pass
    return render_template('notice.html', message="Delete successful.", redirect="/")
Example 9: WriteDataStringtoS3
def WriteDataStringtoS3(string, game, msg_type, S3_bucket):
    today_YYYMMDD, today_hhmmss = datetime.now().strftime('%Y%m%d'), datetime.now().strftime('%H-%M-%S')
    S3_path = '/data/' + game + '/' + msg_type + '/' + today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'
    k = Key(S3_bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
Example 10: getGroupsTrackerKey
def getGroupsTrackerKey(self):
    groups_tracker_key_name = self.getGroupsTrackerKeyName()
    bucket = getHDISBucket()
    groups_tracker_key = bucket.get_key(groups_tracker_key_name)
    if not groups_tracker_key:
        groups_tracker_key = Key(bucket)
        groups_tracker_key.key = groups_tracker_key_name
    return groups_tracker_key
Example 11: uploadStrToS3
def uploadStrToS3(self, destDir, filename, contents):
    '''Uploads a string to an S3 file.'''
    print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)
    k2 = Key(self.bucket)
    k2.key = os.path.join(destDir, filename)
    k2.set_contents_from_string(contents, reduced_redundancy=True)
    print  # This newline is needed to get the path of the compiled binary printed on a newline.
Example 12: uploadFileToS3
def uploadFileToS3(self, filename):
    '''Uploads file to S3.'''
    destDir = ''  # Root folder of the S3 bucket
    destpath = os.path.join(destDir, os.path.basename(filename))
    print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)
    k = Key(self.bucket)
    k.key = destpath
    k.set_contents_from_filename(filename, reduced_redundancy=True)
Example 13: pull_s3_file
def pull_s3_file(self, bucket, key, dst):
    """
    Get a file from an S3 bucket
    """
    conn = boto.connect_s3(self.aws_id, self.aws_key)
    b = conn.create_bucket(bucket)
    k = Key(b)
    k.key = key
    k.get_contents_to_filename(dst)
Example 14: delete_img_aws
def delete_img_aws(instance, **kwargs):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    b = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    img_k = Key(b)
    img_thumb_k = Key(b)
    img_k.key = instance.image.name
    img_thumb_k.key = instance.image_thumb.name
    b.delete_key(img_k)
    b.delete_key(img_thumb_k)
Example 15: user_thumbnail_delete
def user_thumbnail_delete(sender, instance, **kwargs):
    logging.debug('Firing pre-delete signal...')
    gif = get_object_or_404(Gif, pk=instance.id)
    f = str(gif.thumbnail)
    filename = f[f.rfind('/') + 1:]
    s3conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(s3conn, settings.AWS_STORAGE_BUCKET_NAME)
    key_obj = Key(bucket)
    key_obj.key = 'thumbs/' + filename
    bucket.delete_key(key_obj.key)