This article collects typical usage examples of the Python function wal_e.blobstore.s3.calling_format.from_store_name. If you are wondering what from_store_name does, how to call it, or what it looks like in real code, the curated examples below may help.
Fifteen code examples of from_store_name are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
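Before diving into the examples, here is a minimal usage sketch pieced together from the tests below. It is only a sketch: it assumes the wal-e import layout used in these examples, the bucket name is a placeholder, and the credential environment variables are the same ones the tests read.

import os

from wal_e.blobstore import s3
from wal_e.blobstore.s3 import calling_format

# Resolve the calling format, region, and endpoint for a bucket name.
# Dotted or mixed-case names degrade to OrdinaryCallingFormat, as the
# examples below demonstrate.
cinfo = calling_format.from_store_name('my-example-bucket')  # placeholder name

# cinfo is a CallingInfo; its repr exposes the resolved pieces:
# the calling format class, the region, and the ordinary endpoint (if any).

# Open a boto S3 connection using the resolved calling information,
# following the credential pattern from Example 6.
creds = s3.Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                       os.getenv('AWS_SECRET_ACCESS_KEY'),
                       os.getenv('AWS_SECURITY_TOKEN'))
conn = cinfo.connect(creds)
conn.get_all_buckets()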
Example 1: test_str_repr_call_info
def test_str_repr_call_info(monkeypatch):
    """Ensure CallingInfo renders sensibly.

    Try a few cases sensitive to the bucket name.
    """
    monkeypatch.setenv('AWS_REGION', 'us-east-1')

    cinfo = calling_format.from_store_name('hello-world')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(hello-world, "
        "<class 'boto.s3.connection.SubdomainCallingFormat'>, "
        "'us-east-1', None)"
    )

    cinfo = calling_format.from_store_name('hello.world')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(hello.world, "
        "<class 'boto.s3.connection.OrdinaryCallingFormat'>, "
        "'us-east-1', u's3-external-1.amazonaws.com')"
    )

    cinfo = calling_format.from_store_name('Hello-World')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(Hello-World, "
        "<class 'boto.s3.connection.OrdinaryCallingFormat'>, "
        "'us-east-1', u's3-external-1.amazonaws.com')"
    )
Example 2: test_str_repr_call_info
def test_str_repr_call_info():
    """Ensure CallingInfo renders sensibly.

    Try a few cases sensitive to the bucket name.
    """
    if boto.__version__ <= '2.2.0':
        pytest.skip('Class name output is unstable on older boto versions')

    cinfo = calling_format.from_store_name('hello-world')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(hello-world, "
        "<class 'boto.s3.connection.SubdomainCallingFormat'>, "
        "None, None)"
    )

    cinfo = calling_format.from_store_name('hello.world')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(hello.world, "
        "<class 'boto.s3.connection.OrdinaryCallingFormat'>, "
        "None, None)"
    )

    cinfo = calling_format.from_store_name('Hello-World')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(Hello-World, "
        "<class 'boto.s3.connection.OrdinaryCallingFormat'>, "
        "'us-standard', 's3.amazonaws.com')"
    )
Example 3: test_bogus_region
def test_bogus_region(monkeypatch):
    # Raises an error when it is necessary to resolve a hostname for a
    # bucket but no such region is found in the AWS endpoint
    # dictionary.
    monkeypatch.setenv('AWS_REGION', 'not-a-valid-region-name')

    with pytest.raises(wal_e.exception.UserException) as e:
        calling_format.from_store_name('forces.OrdinaryCallingFormat')

    assert e.value.msg == 'Could not resolve host for AWS_REGION'
    assert e.value.detail == (
        'AWS_REGION is set to "not-a-valid-region-name".')

    # Doesn't raise an error when it is unnecessary to resolve a
    # hostname for the given bucket name.
    monkeypatch.setenv('AWS_REGION', 'not-a-valid-region-name')
    calling_format.from_store_name('subdomain-format-acceptable')
Example 4: test_cipher_suites
def test_cipher_suites():
    # Imported for its side effects of setting up ssl cipher suites
    # and gevent.
    from wal_e import cmd

    # Quiet pyflakes.
    assert cmd

    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))
    cinfo = calling_format.from_store_name('irrelevant')
    conn = cinfo.connect(creds)

    # Warm up the pool and the connection in it; new_http_connection
    # seems to be a more natural choice, but leaves the '.sock'
    # attribute null.
    conn.get_all_buckets()

    # Set up 'port' keyword argument for newer Botos that require it.
    spec = inspect.getargspec(conn._pool.get_http_connection)
    kw = {'host': 's3.amazonaws.com',
          'is_secure': True}

    if 'port' in spec.args:
        kw['port'] = 443

    htcon = conn._pool.get_http_connection(**kw)
    chosen_cipher_suite = htcon.sock.cipher()[0].split('-')

    # Test for the expected cipher suite.
    #
    # This can change or vary on different platforms somewhat
    # harmlessly, but do the simple thing and insist on an exact match
    # for now.
    assert chosen_cipher_suite == ['AES256', 'SHA']
Example 5: test_sigv4_only_region
def test_sigv4_only_region(tmpdir, monkeypatch):
    monkeypatch.setenv('AWS_REGION', 'eu-central-1')
    sigv4_check_apply()

    bucket_name = bucket_name_mangle('sigv4')
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))
    cinfo = calling_format.from_store_name(bucket_name)
    conn = cinfo.connect(creds)

    try:
        conn.create_bucket(bucket_name, location='eu-central-1')
    except boto.exception.S3CreateError:
        pass

    source = unicode(tmpdir.join('source'))
    contents = 'abcdefghijklmnopqrstuvwxyz\n' * 100

    with open(source, 'wb') as f:
        f.write(contents)

    data_url = 's3://{0}/data'.format(bucket_name)
    with open(source) as f:
        uri_put_file(creds, data_url, f)

    results = uri_get_file(creds, data_url)
    assert contents == results
Example 6: prepare_s3_default_test_bucket
def prepare_s3_default_test_bucket():
    # Check credentials are present: this procedure should not be
    # called otherwise.
    if no_real_s3_credentials():
        assert False

    bucket_name = bucket_name_mangle('waletdefwuy')

    creds = s3.Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                           os.getenv('AWS_SECRET_ACCESS_KEY'),
                           os.getenv('AWS_SECURITY_TOKEN'))

    cinfo = calling_format.from_store_name(bucket_name, region='us-west-1')
    conn = cinfo.connect(creds)

    def _clean():
        bucket = conn.get_bucket(bucket_name)
        bucket.delete_keys(key.name for key in bucket.list())

    try:
        conn.create_bucket(bucket_name, location=Location.USWest)
    except boto.exception.S3CreateError as e:
        if e.status == 409:
            # Conflict: bucket already present.  Re-use it, but
            # clean it out first.
            _clean()
        else:
            raise
    else:
        # Success
        _clean()

    return bucket_name
Example 7: test_us_standard_default_for_bogus
def test_us_standard_default_for_bogus():
    """Test degradation to us-standard for all weird bucket names.

    Such bucket names are not supported outside of us-standard by
    WAL-E.
    """
    for bn in SUBDOMAIN_BOGUS:
        cinfo = calling_format.from_store_name(bn)
        assert cinfo.region == 'us-standard'
Example 8: test_cert_validation_sensitivity
def test_cert_validation_sensitivity():
    """Test degradation of dotted bucket names to OrdinaryCallingFormat.

    Although legal bucket names with SubdomainCallingFormat, these
    kinds of bucket names run afoul of certificate validation, and so
    they are forced to fall back to OrdinaryCallingFormat.
    """
    for bn in SUBDOMAIN_OK:
        if '.' not in bn:
            cinfo = calling_format.from_store_name(bn)
            assert (cinfo.calling_format ==
                    boto.s3.connection.SubdomainCallingFormat)
        else:
            assert '.' in bn
            cinfo = calling_format.from_store_name(bn)
            assert (cinfo.calling_format == connection.OrdinaryCallingFormat)
            assert cinfo.region is None
            assert cinfo.ordinary_endpoint is None
Example 9: test_get_location_errors
def test_get_location_errors(monkeypatch):
    """Simulate situations where get_location fails.

    Exercise both the case where IAM refuses the privilege to get the
    bucket location and where some other S3ResponseError is raised
    instead.
    """
    bucket_name = 'wal-e.test.403.get.location'

    def just_403(self):
        raise boto.exception.S3ResponseError(status=403,
                                             reason=None, body=None)

    def unhandled_404(self):
        raise boto.exception.S3ResponseError(status=404,
                                             reason=None, body=None)

    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    with FreshBucket(bucket_name,
                     calling_format=connection.OrdinaryCallingFormat()):
        cinfo = calling_format.from_store_name(bucket_name)

        # Provoke a 403 when trying to get the bucket location.
        monkeypatch.setattr(boto.s3.bucket.Bucket, 'get_location', just_403)
        cinfo.connect(creds)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat

        cinfo = calling_format.from_store_name(bucket_name)

        # Provoke an unhandled S3ResponseError, in this case 404 not
        # found.
        monkeypatch.setattr(boto.s3.bucket.Bucket, 'get_location',
                            unhandled_404)

        with pytest.raises(boto.exception.S3ResponseError) as e:
            cinfo.connect(creds)

        assert e.value.status == 404
Example 10: validate_bucket
def validate_bucket():
    """Validate the eu-central-1 bucket's existence.

    This is done using the subdomain that points to eu-central-1.
    """
    sigv4_check_apply()

    cinfo = calling_format.from_store_name(bucket_name)
    conn = cinfo.connect(creds)
    conn.get_bucket(bucket_name, validate=True)
Example 11: test_cert_validation_sensitivity
def test_cert_validation_sensitivity(monkeypatch):
    """Test degradation of dotted bucket names to OrdinaryCallingFormat.

    Although legal bucket names with SubdomainCallingFormat, these
    kinds of bucket names run afoul of certificate validation, and so
    they are forced to fall back to OrdinaryCallingFormat.
    """
    monkeypatch.setenv('AWS_REGION', 'us-east-1')

    for bn in SUBDOMAIN_OK:
        if '.' not in bn:
            cinfo = calling_format.from_store_name(bn)
            assert (cinfo.calling_format ==
                    boto.s3.connection.SubdomainCallingFormat)
        else:
            assert '.' in bn
            cinfo = calling_format.from_store_name(bn)
            assert (cinfo.calling_format == connection.OrdinaryCallingFormat)
            assert cinfo.region == 'us-east-1'
            assert cinfo.ordinary_endpoint == u's3-external-1.amazonaws.com'
Example 12: create_bucket_if_not_exists
def create_bucket_if_not_exists():
    """Create a bucket via path-based API calls.

    This is because the preferred "$BUCKETNAME.s3.amazonaws"
    subdomain doesn't yet exist for a non-existent bucket.
    """
    monkeypatch.setenv("WALE_S3_ENDPOINT",
                       "https+path://s3-eu-central-1.amazonaws.com")
    cinfo = calling_format.from_store_name(bucket_name)
    conn = cinfo.connect(creds)

    try:
        conn.create_bucket(bucket_name, location="eu-central-1")
    except boto.exception.S3CreateError:
        pass

    monkeypatch.delenv("WALE_S3_ENDPOINT")
Example 13: test_classic_get_location
def test_classic_get_location():
    """Exercise get_location on an s3-classic bucket."""
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    bucket_name = 'wal-e-test.classic.get.location'
    cinfo = calling_format.from_store_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create()
        conn = cinfo.connect(creds)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat
        assert conn.host == 's3.amazonaws.com'
Example 14: test_classic_get_location
def test_classic_get_location():
    """Exercise get_location on an s3-classic bucket."""
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    bucket_name = ('wal-e-test.classic.get.location.' +
                   aws_access_key_id.lower())
    cinfo = calling_format.from_store_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create()
        conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat
        assert conn.host == 's3.amazonaws.com'
Example 15: test_subdomain_compatible
def test_subdomain_compatible():
    """Exercise a case where connecting is region-oblivious."""
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    bucket_name = 'wal-e-test-us-west-1-no-dots'
    cinfo = calling_format.from_store_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3-us-west-1.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(creds)

        assert cinfo.region is None
        assert cinfo.calling_format is connection.SubdomainCallingFormat
        assert isinstance(conn.calling_format,
                          connection.SubdomainCallingFormat)