This page collects typical code examples of Key.key from Python's boto.s3.connection module (strictly speaking, key is an attribute of the Key class rather than a method). If you are unsure what Key.key does or how to use it, the hand-picked examples below should help. You can also explore other usages of the boto.s3.connection.Key class it belongs to.
Fifteen code examples of Key.key are shown below, ordered by popularity by default. You can upvote the ones you like or find useful; your feedback helps the system recommend better Python code examples.
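Before the individual examples, a minimal sketch of the pattern they all share may help: create a Key bound to a bucket, assign a name to its key attribute, then read, write, or delete the object. The credentials, bucket name, and key name below are placeholders.

from boto.s3.connection import S3Connection, Key

conn = S3Connection('<access-key>', '<secret-key>')
bucket = conn.get_bucket('my-example-bucket')   # placeholder bucket name
k = Key(bucket)
k.key = 'path/to/object.txt'                    # Key.key names the S3 object
k.set_contents_from_string('hello')             # upload
print(k.get_contents_as_string())               # download
k.delete()                                      # remove the object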
Example 1: delete_img_aws
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def delete_img_aws(instance, **kwargs):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    b = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    img_k = Key(b)
    img_thumb_k = Key(b)
    img_k.key = instance.image.name
    img_thumb_k.key = instance.image_thumb.name
    b.delete_key(img_k)
    b.delete_key(img_thumb_k)
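Judging from the (instance, **kwargs) signature, this function is presumably wired up as a Django post_delete signal receiver. A possible hookup, with a hypothetical model name, might look like:

from django.db.models.signals import post_delete
post_delete.connect(delete_img_aws, sender=GalleryImage)  # GalleryImage is a hypothetical model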
Example 2: delete
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def delete(self):
    conn = S3Connection(settings.AWS_ACCESS_KEY, settings.AWS_SECRET_KEY)
    bucket = conn.get_bucket(
        settings.AWS_IMAGE_BUCKET,
        validate=False
    )
    key = Key(bucket)
    key.key = self.key
    optimized_key = Key(bucket)
    optimized_key.key = self.optimized_key
    thumbnail_key = Key(bucket)
    thumbnail_key.key = self.thumbnail_key
    super(Photo, self).delete()
    bucket.delete_keys([key, optimized_key, thumbnail_key])
Example 3: test_s3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def test_s3(self):
    title = "Dino Test"
    link = "http://example.com"
    description = "My dino test"
    base_url = "http://example.com/items/"
    aws_key = os.environ['MYDINOSAUR_AWS_ACCESS_KEY']
    aws_secret = os.environ['MYDINOSAUR_AWS_SECRET_KEY']
    s3_bucket = os.environ['MYDINOSAUR_S3_BUCKET']
    dino = mydinosaur.MyS3Dinosaur(':memory:',
                                   title=title,
                                   link=link,
                                   description=description,
                                   base_url=base_url,
                                   aws_access_key=aws_key,
                                   aws_secret_key=aws_secret,
                                   s3_bucket=s3_bucket)
    dino.update("hello there")
    # make sure that stuff got uploaded
    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(s3_bucket)
    k = Key(bucket)
    k.key = 'rss.xml'
    feed = feedparser.parse(k.get_contents_as_string())
    self.assertEqual(len(feed.entries), 1)
    k = Key(bucket)
    k.key = '1.html'
    html = k.get_contents_as_string()
    self.assertIn('<!-- MyDinosaur default html template -->', html)
    # now, update with media!
    filehandle = StringIO("Hello there.\n")
    dino.update_with_media('this is a test', filehandle,
                           media_type="text/plain", ext="txt")
    k = Key(bucket)
    k.key = 'rss.xml'
    feed = feedparser.parse(k.get_contents_as_string())
    self.assertEqual(len(feed.entries), 2)
    # ensure that media was uploaded
    media_url = feed.entries[0].enclosures[0].url
    key_name = urlparse(media_url).path.split('/')[-1]
    k = Key(bucket)
    k.key = key_name
    contents = k.get_contents_as_string()
    self.assertEqual(contents, "Hello there.\n")
Example 4: screenshot
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def screenshot(request, val):
    if val == 1:
        conn = S3Connection('##', '##')
        bucket = conn.get_bucket('lheston-bucket')
        k = Key(bucket)
        k.key = '//lab3' + request + '_toS3.png'
        driver = webdriver.PhantomJS()  # or add to your PATH
        driver.set_window_size(1024, 768)  # optional
        driver.get(request)
        driver.save_screenshot('tempfile.png')
        driver.quit()  # the original wrote driver.quit without parentheses, which never actually calls the method
        file1 = open('tempfile.png', 'rb')
        os.remove('tempfile.png')  # the open handle keeps the data readable on POSIX
        k.set_contents_from_file(file1)
        return str(request + '_toS3.png')
    elif val == 2:
        text = '/lab3' + request
        conn = S3Connection('##', '##')
        S3_BUCKET_NAME = 'lheston-bucket'
        bucket = Bucket(conn, S3_BUCKET_NAME)
        bucket = bucket.delete_key(text)
        #bucket.delete_key('/lab3/' + request.split(':')[1])
        #k = Key(b)
        #k.name = k.get_key(text)
        #b.delete_key(k)
        #k.name = k.get_key(text)
        #b.delete_key(k)
        #b.delete_key('//lab3' + request.split(':')[1] + '_toS3.png')
    else:
        return str('incorrect input')
Example 5: test_bwa
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def test_bwa():
    work_dir = tempfile.mkdtemp()
    create_config(work_dir)
    create_manifest(work_dir)
    # Call Pipeline
    try:
        subprocess.check_call(
            [
                "toil-bwa",
                "run",
                os.path.join(work_dir, "jstore"),
                "--manifest",
                os.path.join(work_dir, "manifest.txt"),
                "--config",
                os.path.join(work_dir, "config.txt"),
                "--retryCount",
                "1",
            ]
        )
    finally:
        shutil.rmtree(work_dir)
        conn = S3Connection()
        b = Bucket(conn, "cgl-driver-projects")
        k = Key(b)
        k.key = "test/ci/ci_test.bam"
        k.delete()
Example 6: upload_to_s3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def upload_to_s3(fp, name):
    conn = _get_s3_connection()
    bucket = conn.create_bucket('muxlist')
    k = Key(bucket)
    k.key = name
    k.set_contents_from_file(fp)
    return 'http://muxlist.s3.amazonaws.com/%s' % name
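A possible call site for upload_to_s3, assuming _get_s3_connection() is configured elsewhere in the project; the local file name and key are made up for illustration:

with open('track.mp3', 'rb') as fp:  # hypothetical local file
    url = upload_to_s3(fp, 'uploads/track.mp3')
print(url)  # -> http://muxlist.s3.amazonaws.com/uploads/track.mp3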
Example 7: download_file_s3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def download_file_s3(aws_path, aws_config, local_folder=DATA_DL_PATH):
    """ Download a file from an S3 bucket and save it in the local folder. """
    # remove the prefix and extract the S3 bucket, folder, and file name
    m = re.match(S3_PREFIX, aws_path)
    split = aws_path[len(m.group()):].split('/')
    s3_bucket = split.pop(0)
    s3_folder = '/'.join(split[:-1])
    keyname = split[-1]
    # create the local folder if necessary
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname
    if os.path.isfile(path):
        logger.warning('file %s already exists!' % path)
        return path
    conn = S3Connection(aws_config.access_key, aws_config.secret_key)
    bucket = conn.get_bucket(s3_bucket)
    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname
    logger.debug('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)
    logger.info('file saved at %s' % path)
    return path
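A hedged usage sketch for download_file_s3; the AwsConfig namedtuple and the S3 path are hypothetical stand-ins for whatever configuration object and data the surrounding project actually uses (S3_PREFIX, DATA_DL_PATH, ensure_directory, and logger are assumed to be defined in that module):

from collections import namedtuple

AwsConfig = namedtuple('AwsConfig', ['access_key', 'secret_key'])  # hypothetical credentials holder
cfg = AwsConfig('<access-key>', '<secret-key>')
local_path = download_file_s3('s3://my-example-bucket/some/folder/data.csv', cfg)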
Example 8: delete
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def delete():
    #try:
    songid = int(request.args.get('songid'))
    song = Song.query.filter_by(id=songid).first()
    songdata = song.songdata  # capture before the row is deleted (the original referenced a bare `songdata` that was never defined)
    votes = Vote.query.filter_by(songdata=songdata).all()
    for x in votes:
        db.session.delete(x)
    db.session.commit()
    db.session.delete(song)
    db.session.commit()
    try:
        conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
        b = Bucket(conn, S3_BUCKET_NAME)
        k = Key(b)
        k.key = songdata.lower() + '.mp3'
        b.delete_key(k)
    except:
        pass
    return render_template('notice.html', message="Delete successful.", redirect="/")
Example 9: test_upload_and_download_with_encryption
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def test_upload_and_download_with_encryption(tmpdir):
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    s3_dir = 's3://cgl-driver-projects/test'
    s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
    # Download the file
    url = 'https://s3-us-west-2.amazonaws.com/cgl-driver-projects/test/upload_file'
    download_url(url=url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
    download_fpath = os.path.join(work_dir, 'download_file')
    assert os.path.exists(download_fpath)
    assert filecmp.cmp(upload_fpath, download_fpath)
    # Delete the Key
    conn = S3Connection()
    b = Bucket(conn, 'cgl-driver-projects')
    k = Key(b)
    k.key = 'test/upload_file'
    k.delete()
Example 10: add_bucket
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def add_bucket(self, bucket_name, access, zonename, create_date):
    try:
        bucket_count = len(self.conn.get_all_buckets())
        # print 'bucket_count is %s' % bucket_count
        if bucket_count < self.bucket_limit:
            self.conn.create_bucket(bucket_name)
            b = self.conn.get_bucket(bucket_name)
            try:
                # k = b.new_key('create_info')
                # k.set_contents_from_string("{'bucket_name':'%s','zonename':'%s','access':'%s','create_date':'%s'}" % (bucket_name, zonename, access, create_date))
                k1 = Key(b)
                k1.key = 'create_info'
                # k1.set_metadata('Bucket_Name', bucket_name)
                # Note: metadata key names must not contain underscores here; with one, the request fails with a 403 error.
                k1.set_metadata('BucketName', bucket_name)
                k1.set_metadata('ZoneName', zonename)
                k1.set_metadata('Access', access)
                k1.set_metadata('CreateDate', create_date)
                k1.set_contents_from_string('')
            except Exception as e:
                print 'failed to write create_info:', e
            return True
        else:
            return False
    except Exception as e:
        return False
Example 11: test_upload_and_download_with_encryption
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def test_upload_and_download_with_encryption(tmpdir):
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
Example 12: WriteDataStringtoS3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def WriteDataStringtoS3(string, game, msg_type, S3_bucket):
    today_YYYMMDD, today_hhmmss = datetime.now().strftime('%Y%m%d'), datetime.now().strftime('%H-%M-%S')
    S3_path = '/data/' + game + '/' + msg_type + '/' + today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'
    k = Key(S3_bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
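Note that the function expects an already-opened bucket object, not a bucket name. A hedged call sketch, with placeholder credentials, bucket, and payload:

conn = S3Connection('<access-key>', '<secret-key>')
bucket = conn.get_bucket('my-game-logs')  # placeholder bucket
WriteDataStringtoS3('{"event": "login"}', 'mygame', 'events', bucket)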
Example 13: uploadStrToS3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def uploadStrToS3(self, destDir, filename, contents):
    '''Uploads a string to an S3 file.'''
    print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)
    k2 = Key(self.bucket)
    k2.key = os.path.join(destDir, filename)
    k2.set_contents_from_string(contents, reduced_redundancy=True)
    print  # This newline is needed to get the path of the compiled binary printed on a newline.
Example 14: getGroupsTrackerKey
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def getGroupsTrackerKey(self):
    groups_tracker_key_name = self.getGroupsTrackerKeyName()
    bucket = getHDISBucket()
    groups_tracker_key = bucket.get_key(groups_tracker_key_name)
    if not groups_tracker_key:
        groups_tracker_key = Key(bucket)
        groups_tracker_key.key = groups_tracker_key_name
    return groups_tracker_key
Example 15: uploadFileToS3
# Required import: from boto.s3.connection import Key [as alias]
# Or: from boto.s3.connection.Key import key [as alias]
def uploadFileToS3(self, filename):
    '''Uploads file to S3.'''
    destDir = ''  # Root folder of the S3 bucket
    destpath = os.path.join(destDir, os.path.basename(filename))
    print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)
    k = Key(self.bucket)
    k.key = destpath
    k.set_contents_from_filename(filename, reduced_redundancy=True)