This article collects typical usage examples of the Python function tests.integration.aws. If you are wondering what the aws function does, how to call it, or what real-world uses look like, the selected code samples below should help.
The 15 code examples of the aws function shown below are taken from integration tests and are ordered by popularity by default.
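All of the examples share one pattern: aws() takes a complete AWS CLI command as a single string, runs it, and returns a result object that the test then inspects. The sketch below only illustrates that pattern, based on the attributes the examples themselves use (rc, stdout, json) and the env_vars keyword seen in Examples 5, 7, and 8; the real helper in tests.integration may have a different signature.

from tests.integration import aws

def demo_aws_helper(env_vars=None):
    # Run an AWS CLI command as a subprocess and inspect the result.
    # env_vars (optional) lets a test point the CLI at scratch
    # credentials/config, as the 'configure set' examples do.
    p = aws('s3 ls', env_vars=env_vars)
    assert p.rc == 0      # exit code of the CLI process
    print(p.stdout)       # captured standard output

    # Commands that emit JSON can be parsed via p.json (see Example 12).
    p = aws('dynamodb list-tables', env_vars=env_vars)
    assert isinstance(p.json, dict)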
Example 1: tearDown
def tearDown(self):
    if os.path.exists(self.filename1):
        os.remove(self.filename1)
    aws('s3 rb --force s3://%s' % self.bucket_name)
    aws('s3 rb --force s3://%s' % self.bucket_name2)
    if os.path.exists(self.filename2):
        os.remove(self.filename2)
Example 2: test_cp_to_and_from_s3
def test_cp_to_and_from_s3(self):
    # This tests the ability to put a single file in s3
    # and then download it locally again.
    bucket_name = self.create_bucket()
    # Copy the file into the bucket.
    foo_txt = self.files.create_file('foo.txt', 'this is foo.txt')
    p = aws('s3 cp %s s3://%s/foo.txt' % (foo_txt, bucket_name))
    self.assert_no_errors(p)
    # Make sure the object is in the bucket.
    self.assertTrue(self.key_exists(bucket_name, key_name='foo.txt'))
    self.assertEqual(
        self.get_key_contents(bucket_name, key_name='foo.txt'),
        'this is foo.txt')
    self.assertEqual(
        self.content_type_for_key(bucket_name, key_name='foo.txt'),
        'text/plain')
    # Make a new name for the file and copy it locally.
    full_path = self.files.full_path('bar.txt')
    p = aws('s3 cp s3://%s/foo.txt %s' % (bucket_name, full_path))
    self.assert_no_errors(p)
    with open(full_path, 'r') as f:
        self.assertEqual(f.read(), 'this is foo.txt')
Example 3: test_exclude_filter_with_delete
def test_exclude_filter_with_delete(self):
    # Test for: https://github.com/aws/aws-cli/issues/778
    bucket_name = self.create_bucket()
    first = self.files.create_file('foo.txt', 'contents')
    second = self.files.create_file('bar.py', 'contents')
    p = aws("s3 sync %s s3://%s/" % (self.files.rootdir, bucket_name))
    self.assert_no_errors(p)
    self.assertTrue(self.key_exists(bucket_name, key_name='bar.py'))
    os.remove(second)
    # We now have the same state as specified in the bug:
    # local           remote
    # -----           ------
    #
    # foo.txt         foo.txt
    #                 bar.py
    #
    # If we now run --exclude '*.py' --delete, then we should *not*
    # delete bar.py on the remote side.
    p = aws("s3 sync %s s3://%s/ --exclude '*.py' --delete" % (
        self.files.rootdir, bucket_name))
    self.assert_no_errors(p)
    self.assertTrue(
        self.key_exists(bucket_name, key_name='bar.py'),
        ("The exclude filter was not applied to the --delete pass on "
         "the receiving end; the 'bar.py' file was deleted even though "
         "it was excluded."))
Example 4: test_sync_with_delete_option_with_same_prefix
def test_sync_with_delete_option_with_same_prefix(self):
    # Test for issue 440 (https://github.com/aws/aws-cli/issues/440)
    # First, we need to create a directory structure that has a dir with
    # the same prefix as some of the files:
    #
    #  test/foo.txt
    #  test-123.txt
    #  test-321.txt
    #  test.txt
    bucket_name = self.create_bucket()
    # create test/foo.txt
    nested_dir = os.path.join(self.files.rootdir, 'test')
    os.mkdir(nested_dir)
    self.files.create_file(os.path.join(nested_dir, 'foo.txt'),
                           contents='foo.txt contents')
    # Then create test-123.txt, test-321.txt, test.txt.
    self.files.create_file('test-123.txt', 'test-123.txt contents')
    self.files.create_file('test-321.txt', 'test-321.txt contents')
    self.files.create_file('test.txt', 'test.txt contents')
    # Now sync this content up to s3.
    p = aws('s3 sync %s s3://%s/' % (self.files.rootdir, bucket_name))
    # Now here's the issue.  If we try to sync the contents down
    # with the --delete flag we should *not* see any output; the
    # sync operation should determine that nothing is different and
    # therefore do nothing.  We can just use --dryrun to show the issue.
    p = aws('s3 sync s3://%s/ %s --dryrun' % (
        bucket_name, self.files.rootdir))
    # These assertion methods will give better error messages than just
    # checking if the output is empty.
    self.assertNotIn('download:', p.stdout)
    self.assertNotIn('delete:', p.stdout)
    self.assertEqual('', p.stdout)
Example 5: test_set_with_empty_config_file
def test_set_with_empty_config_file(self):
    with open(self.config_filename, 'w'):
        pass
    aws('configure set region us-west-1', env_vars=self.env_vars)
    self.assertEqual(
        '[default]\n'
        'region = us-west-1\n', self.get_config_file_contents())
Example 6: test_mb_rb
def test_mb_rb(self):
    p = aws('s3 mb s3://%s' % self.bucket_name)
    self.assert_no_errors(p)
    response = self.list_buckets()
    self.assertIn(self.bucket_name, [b['Name'] for b in response])
    p = aws('s3 rb s3://%s' % self.bucket_name)
    self.assert_no_errors(p)
Example 7: test_set_with_updating_value
def test_set_with_updating_value(self):
    self.set_config_file_contents(
        '[default]\n'
        'region = us-west-2\n')
    aws('configure set region us-west-1', env_vars=self.env_vars)
    self.assertEqual(
        '[default]\n'
        'region = us-west-1\n', self.get_config_file_contents())
Example 8: test_set_with_commented_out_field
def test_set_with_commented_out_field(self):
    self.set_config_file_contents(
        '#[preview]\n'
        ';cloudsearch = true\n')
    aws('configure set preview.cloudsearch true', env_vars=self.env_vars)
    self.assertEqual(
        '#[preview]\n'
        ';cloudsearch = true\n'
        '[preview]\n'
        'cloudsearch = true\n', self.get_config_file_contents())
Example 9: test_mv_local_to_s3
def test_mv_local_to_s3(self):
    bucket_name = self.create_bucket()
    full_path = self.files.create_file('foo.txt', 'this is foo.txt')
    aws('s3 mv %s s3://%s/foo.txt' % (full_path,
                                      bucket_name))
    # When we move an object, the local file is gone:
    self.assertTrue(not os.path.exists(full_path))
    # And now resides in s3.
    contents = self.get_key_contents(bucket_name, 'foo.txt')
    self.assertEqual(contents, 'this is foo.txt')
Example 10: test_basic_exclude_filter_for_single_file
def test_basic_exclude_filter_for_single_file(self):
    full_path = self.files.create_file('foo.txt', 'this is foo.txt')
    # With no exclude we should upload the file.
    p = aws('s3 cp %s s3://random-bucket-name/ --dryrun' % full_path)
    self.assert_no_errors(p)
    self.assertIn('(dryrun) upload:', p.stdout)
    p2 = aws("s3 cp %s s3://random-bucket-name/ --dryrun --exclude '*'"
             % full_path)
    self.assert_no_files_would_be_uploaded(p2)
Example 11: test_mv_s3_to_s3
def test_mv_s3_to_s3(self):
    from_bucket = self.create_bucket()
    to_bucket = self.create_bucket()
    self.put_object(from_bucket, 'foo.txt', 'this is foo.txt')
    aws('s3 mv s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
    contents = self.get_key_contents(to_bucket, 'foo.txt')
    self.assertEqual(contents, 'this is foo.txt')
    # And verify that the object no longer exists in the from_bucket.
    self.assertTrue(not self.key_exists(from_bucket, key_name='foo.txt'))
Example 12: test_json_param_parsing
def test_json_param_parsing(self):
    # This is covered by unit tests in botocore, but this is a sanity
    # check that we get a json response from a json service.
    p = aws('swf list-domains --registration-status REGISTERED')
    self.assertEqual(p.rc, 0)
    self.assertIsInstance(p.json, dict)
    p = aws('dynamodb list-tables')
    self.assertEqual(p.rc, 0)
    self.assertIsInstance(p.json, dict)
Example 13: test_cp_s3_s3_multipart
def test_cp_s3_s3_multipart(self):
    from_bucket = self.create_bucket()
    to_bucket = self.create_bucket()
    file_contents = 'abcd' * (1024 * 1024 * 10)
    self.put_object(from_bucket, 'foo.txt', file_contents)
    aws('s3 cp s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
    contents = self.get_key_contents(to_bucket, 'foo.txt')
    self.assertEqual(contents, file_contents)
    self.assertTrue(self.key_exists(from_bucket, key_name='foo.txt'))
Example 14: test_mv_s3_to_s3_multipart
def test_mv_s3_to_s3_multipart(self):
    from_bucket = self.create_bucket()
    to_bucket = self.create_bucket()
    file_contents = 'abcd' * (1024 * 1024 * 10)
    self.put_object(from_bucket, 'foo.txt', file_contents)
    aws('s3 mv s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
    contents = self.get_key_contents(to_bucket, 'foo.txt')
    self.assertEqual(contents, file_contents)
    # And verify that the object no longer exists in the from_bucket.
    self.assertTrue(not self.key_exists(from_bucket, key_name='foo.txt'))
Example 15: test_mv_s3_to_local
def test_mv_s3_to_local(self):
    bucket_name = self.create_bucket()
    self.put_object(bucket_name, 'foo.txt', 'this is foo.txt')
    full_path = self.files.full_path('foo.txt')
    self.assertTrue(self.key_exists(bucket_name, key_name='foo.txt'))
    aws('s3 mv s3://%s/foo.txt %s' % (bucket_name, full_path))
    self.assertTrue(os.path.exists(full_path))
    with open(full_path, 'r') as f:
        self.assertEqual(f.read(), 'this is foo.txt')
    # The s3 file should not be there anymore.
    self.assertTrue(not self.key_exists(bucket_name, key_name='foo.txt'))