This article collects typical usage examples of Python's botocore.client.Config. If you are unsure what client.Config does, how to call it, or what real-world usage looks like, the curated examples below may help. You can also explore the other members of the botocore.client module.
The following 15 code examples of client.Config are listed, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
Example 1: proxy_response
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def proxy_response(req):
    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3', config=Config(signature_version='s3v4'))
    bucket = s3.Bucket(BUCKET_NAME)
    file_name = str(uuid4())
    obj = bucket.put_object(
        Key=file_name,
        Body=req.content,
        ACL="authenticated-read",
        ContentType=req.headers["content-type"]
    )
    url = s3_client.generate_presigned_url(
        "get_object",
        Params={
            "Bucket": BUCKET_NAME,
            "Key": file_name},
        ExpiresIn=120
    )
    return redirect(url, 303)
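The part of Example 1 that depends on client.Config is the presigned URL: newer AWS regions accept only Signature Version 4, so the client is built with signature_version='s3v4' before calling generate_presigned_url. Below is a minimal standalone sketch of just that step; the bucket and key names are placeholders, not taken from the example:

import boto3
from botocore.client import Config

# Hypothetical bucket and key, for illustration only.
s3_client = boto3.client('s3', config=Config(signature_version='s3v4'))
url = s3_client.generate_presigned_url(
    'get_object',
    Params={'Bucket': 'my-bucket', 'Key': 'some/object.txt'},
    ExpiresIn=120,  # the URL stays valid for two minutes
)
print(url)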
Example 2: _save_chunk
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def _save_chunk(self, data, chunk_info, executor=None):
    # Key name
    key_name = f"{self.strax_unique_key}/{chunk_info['chunk_i']:06d}"
    # Save chunk via temporary file
    with tempfile.SpooledTemporaryFile() as f:
        filesize = strax.save_file(f,
                                   data=data,
                                   compressor=self.md['compressor'])
        f.seek(0)
        self.s3.upload_fileobj(f,
                               BUCKET_NAME,
                               key_name,
                               Config=self.config)
    return dict(key_name=key_name, filesize=filesize), None
Example 3: s3_cleanup
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def s3_cleanup(bucket, cluster_name, user_name):
    s3_res = boto3.resource('s3', config=Config(signature_version='s3v4'))
    client = boto3.client('s3', config=Config(signature_version='s3v4'), region_name=os.environ['aws_region'])
    try:
        client.head_bucket(Bucket=bucket)
    except:
        print("There is no bucket {} or you do not have permission to access it".format(bucket))
        sys.exit(0)
    try:
        resource = s3_res.Bucket(bucket)
        prefix = user_name + '/' + cluster_name + "/"
        for i in resource.objects.filter(Prefix=prefix):
            s3_res.Object(resource.name, i.key).delete()
    except Exception as err:
        # format_exc() returns the traceback as a string; print_exc() would return None
        logging.info("Unable to clean S3 bucket: " + str(err) + "\n Traceback: " + traceback.format_exc())
        append_result(str({"error": "Unable to clean S3 bucket",
                           "error_message": str(err) + "\n Traceback: " + traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
Example 4: get_object_count
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def get_object_count(bucket, prefix):
    try:
        s3_cli = boto3.client('s3', config=Config(signature_version='s3v4'),
                              region_name=args.region)
        content = s3_cli.get_paginator('list_objects')
        file_list = []
        try:
            for i in content.paginate(Bucket=bucket, Delimiter='/',
                                      Prefix=prefix):
                for file in i.get('Contents'):
                    file_list.append(file.get('Key'))
            count = len(file_list)
        except:
            print("{} does not exist yet. Waiting...".format(prefix))
            count = 0
        return count
    except Exception as err:
        # format_exc() returns the traceback as a string; print_exc() would return None
        logging.error("Unable to get objects from s3: " +
                      str(err) + "\n Traceback: " +
                      traceback.format_exc())
Example 5: download_file_from_s3
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def download_file_from_s3(bucket_name: str, key: str, local_path: str) -> None:
    """
    Downloads a file from S3 anonymously.

    :param bucket_name: S3 bucket name
    :param key: S3 object key name
    :param local_path: local file path to download to
    """
    verify_ssl = get_verify_ssl()
    if not os.path.isfile(local_path):
        client = boto3.client(
            "s3", config=Config(signature_version=UNSIGNED), verify=verify_ssl
        )
        try:
            logger.info("Downloading S3 data file...")
            total = client.head_object(Bucket=bucket_name, Key=key)["ContentLength"]
            with ProgressPercentage(client, bucket_name, key, total) as Callback:
                client.download_file(bucket_name, key, local_path, Callback=Callback)
        except ClientError:
            raise KeyError(f"File {key} not available in {bucket_name} bucket.")
    else:
        logger.info(f"Reusing cached file {local_path}...")
Example 6: assume_role
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def assume_role(cls, role_arn, principal_arn, saml_response, duration=3600):
    '''Assumes the desired role using the given saml_response, which should be base64-encoded.
    Duration is in seconds.
    :param role_arn: role Amazon Resource Name
    :param principal_arn: principal name
    :param saml_response: SAML object to assume the role with
    :param duration: session duration (default: 3600)
    :return: AWS session token
    '''
    # Assume role with new SAML
    conn = boto3.client('sts', config=client.Config(signature_version=botocore.UNSIGNED, user_agent=cls.USER_AGENT, region_name=None))
    aws_session_token = conn.assume_role_with_saml(
        RoleArn=role_arn,
        PrincipalArn=principal_arn,
        SAMLAssertion=saml_response,
        DurationSeconds=duration,
    )
    return aws_session_token
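The dictionary returned by assume_role_with_saml contains a Credentials mapping with temporary keys. A hedged sketch of how the returned value might be consumed to build a regular boto3 session (this continuation is not part of the original example):

import boto3

# Assuming aws_session_token is the dict returned by assume_role above.
creds = aws_session_token['Credentials']
session = boto3.Session(
    aws_access_key_id=creds['AccessKeyId'],
    aws_secret_access_key=creds['SecretAccessKey'],
    aws_session_token=creds['SessionToken'],
)
s3 = session.client('s3')  # any client created here uses the assumed role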
Example 7: _make_boto3_athena_client
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def _make_boto3_athena_client(container):
    region = container.get_parameter('aws_region')
    logger = container.get('logger')
    config = botocore_client.Config(
        connect_timeout=5,
        read_timeout=5,
        region_name=region
    )
    session_kwargs = {}
    try:
        session = boto3.Session(**session_kwargs)
        return session.client(
            'athena',
            config=config,
        )
    except ProfileNotFound:
        logger.error('AWS Athena Connection via Profile Failed')
Example 8: _make_boto3_kinesis_client
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def _make_boto3_kinesis_client(container):
    region = container.get_parameter('aws_region')
    logger = container.get('logger')
    config = botocore_client.Config(
        connect_timeout=5,
        read_timeout=5,
        region_name=region
    )
    session_kwargs = {}
    try:
        session = boto3.Session(**session_kwargs)
        return session.client('kinesis', config=config)
    except ProfileNotFound:
        logger.error('AWS Kinesis Connection via Profile Failed')
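Examples 7 and 8 build an identical Config for two different services. Since a Config instance simply bundles client options, one instance can be passed to several clients; a small sketch of that consolidation, with the region name as a placeholder:

import boto3
from botocore.client import Config

shared_config = Config(connect_timeout=5, read_timeout=5, region_name='us-east-1')

session = boto3.Session()
athena = session.client('athena', config=shared_config)
kinesis = session.client('kinesis', config=shared_config)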
Example 9: upload_export_tarball
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str:
    def percent_callback(bytes_transferred: Any) -> None:
        sys.stdout.write('.')
        sys.stdout.flush()

    # We use the avatar bucket, because it's world-readable.
    key = self.avatar_bucket.Object(os.path.join("exports", generate_random_token(32),
                                                 os.path.basename(tarball_path)))
    key.upload_file(tarball_path, Callback=percent_callback)

    session = botocore.session.get_session()
    config = Config(signature_version=botocore.UNSIGNED)
    public_url = session.create_client('s3', config=config).generate_presigned_url(
        'get_object',
        Params={
            'Bucket': self.avatar_bucket.name,
            'Key': key.key,
        },
        ExpiresIn=0,
    )
    return public_url
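The subtle part of this example is combining botocore.UNSIGNED with ExpiresIn=0: because the client never signs requests, generate_presigned_url returns the object's plain public URL with no signature query parameters, which is why the world-readable avatar bucket is used. A standalone sketch of just that step, with a placeholder bucket, key, and region:

import botocore
import botocore.session
from botocore.client import Config

session = botocore.session.get_session()
s3 = session.create_client('s3', region_name='us-east-1',
                           config=Config(signature_version=botocore.UNSIGNED))
public_url = s3.generate_presigned_url(
    'get_object',
    Params={'Bucket': 'my-public-bucket', 'Key': 'exports/archive.tar.gz'},
    ExpiresIn=0,
)
# public_url is a plain https URL to the object, with no auth parameters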
Example 10: get_s3_client
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def get_s3_client(unsigned=True):
    """Return a boto3 S3 client with optional unsigned config.

    Parameters
    ----------
    unsigned : Optional[bool]
        If True, the client will be using unsigned mode in which public
        resources can be accessed without credentials. Default: True

    Returns
    -------
    botocore.client.S3
        A client object to AWS S3.
    """
    if unsigned:
        return boto3.client('s3', config=Config(signature_version=UNSIGNED))
    else:
        return boto3.client('s3')
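A possible usage of the helper above: listing a few keys of a publicly readable bucket without credentials. The bucket name is a placeholder for any public bucket:

s3 = get_s3_client(unsigned=True)
response = s3.list_objects_v2(Bucket='some-public-bucket', MaxKeys=10)
for obj in response.get('Contents', []):
    print(obj['Key'], obj['Size'])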
Example 11: test_only_dynamodb_calls_are_traced
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def test_only_dynamodb_calls_are_traced():
    """Test that only a single subsegment is created for other AWS services.

    As the pynamodb patch applies the botocore patch as well, we need
    to ensure that only one subsegment is created for all calls not
    made by PynamoDB. As PynamoDB calls botocore differently than the
    botocore patch expects, we also just get a single subsegment per
    PynamoDB call.
    """
    session = botocore.session.get_session()
    s3 = session.create_client('s3', region_name='us-west-2',
                               config=Config(signature_version=UNSIGNED))
    try:
        s3.get_bucket_location(Bucket='mybucket')
    except ClientError:
        pass

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    assert subsegments[0].name == 's3'
    assert len(subsegments[0].subsegments) == 0
Example 12: __init__
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def __init__(self, handle_task=lambda t, i: None, **kwargs):
    """Will not be called if used as a mixin. Provides just the expected variables.

    Args:
        handle_task (callable): Callable to process task input and send success or
                                failure
        kwargs: Arguments for heaviside.utils.create_session
    """
    session, _ = create_session(**kwargs)
    # DP NOTE: read_timeout is needed so that the long poll for tasking doesn't
    #          time out client side before AWS returns that there is no work
    self.client = session.client('stepfunctions', config=Config(read_timeout=70))
    self.log = logging.getLogger(__name__)

    self.name = None
    self.arn = None
    self.handle_task = handle_task
    self.max_concurrent = 0
    self.poll_delay = 1
    self.polling = False
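The read_timeout=70 is the point of this example: GetActivityTask holds the connection open for up to 60 seconds when there is no work, and botocore's default read timeout is also 60 seconds, so without the larger value the client could give up just before AWS replies. A hedged sketch of the kind of long poll such a client serves; the activity ARN and worker name are placeholders:

import boto3
from botocore.client import Config

sfn = boto3.client('stepfunctions', config=Config(read_timeout=70))
task = sfn.get_activity_task(
    activityArn='arn:aws:states:us-east-1:123456789012:activity:example',
    workerName='example-worker',
)
if task.get('taskToken'):
    # process task['input'] here, then report the result
    sfn.send_task_success(taskToken=task['taskToken'], output='{}')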
Example 13: register_domain
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def register_domain(domain=None, region=None):
    client = boto3.client(
        'swf',
        region_name=region or config.REGION,
        config=Config(connect_timeout=config.CONNECT_TIMEOUT,
                      read_timeout=config.READ_TIMEOUT))
    # register domain for Mass
    try:
        res = client.register_domain(
            name=domain or config.DOMAIN,
            description='The SWF domain for Mass',
            workflowExecutionRetentionPeriodInDays=str(
                int(math.ceil(float(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT) / 60 / 60 / 24)))
        )
    except ClientError:
        # DomainAlreadyExists
        pass
Example 14: register_activity_type
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def register_activity_type(domain=None, region=None):
    client = boto3.client(
        'swf',
        region_name=region or config.REGION,
        config=Config(connect_timeout=config.CONNECT_TIMEOUT,
                      read_timeout=config.READ_TIMEOUT))
    # register activity type for Cmd
    try:
        res = client.register_activity_type(
            domain=domain or config.DOMAIN,
            name=config.ACTIVITY_TYPE_FOR_ACTION['name'],
            version=config.ACTIVITY_TYPE_FOR_ACTION['version'],
            description='The SWF activity type for Cmd of Mass.',
            defaultTaskStartToCloseTimeout=str(config.ACTIVITY_TASK_START_TO_CLOSE_TIMEOUT),
            defaultTaskHeartbeatTimeout=str(config.ACTIVITY_HEARTBEAT_TIMEOUT),
            defaultTaskList={'name': config.ACTIVITY_TASK_LIST},
            defaultTaskPriority='1',
            defaultTaskScheduleToStartTimeout=str(config.ACTIVITY_TASK_START_TO_CLOSE_TIMEOUT),
            defaultTaskScheduleToCloseTimeout=str(config.ACTIVITY_TASK_START_TO_CLOSE_TIMEOUT)
        )
    except ClientError:
        # TypeAlreadyExists
        pass
Example 15: iter_workflow_execution_history
# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def iter_workflow_execution_history(workflow_id, run_id, reverse_order=False, ignore_decision_task=True):
    client = boto3.client(
        'swf',
        region_name=config.REGION,
        config=Config(connect_timeout=config.CONNECT_TIMEOUT,
                      read_timeout=config.READ_TIMEOUT))
    paginator = client.get_paginator('get_workflow_execution_history')
    for res in paginator.paginate(
            domain=config.DOMAIN,
            execution={
                'workflowId': workflow_id,
                'runId': run_id
            },
            reverseOrder=reverse_order
    ):
        for event in res['events']:
            if ignore_decision_task and event['eventType'].startswith('DecisionTask'):
                continue
            yield event
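A possible way to consume the generator above, printing the non-decision events of a single run; the workflow and run IDs are placeholders:

for event in iter_workflow_execution_history('example-workflow-id', 'example-run-id'):
    print(event['eventId'], event['eventType'], event['eventTimestamp'])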