

Python client.Config Code Examples

This article collects typical usage examples of botocore.client.Config in Python (Config is a class in the botocore.client module). If you are unsure what client.Config does, how to use it, or want concrete examples, the curated snippets below may help. You can also explore the rest of botocore.client for further usage examples.


The following presents 15 code examples of client.Config, ordered by popularity by default.
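
As a quick orientation before the examples: Config bundles client-level settings such as the region, connect/read timeouts, retry behaviour, and the signature version, and is passed to boto3.client (or session.client) via the config keyword argument. The sketch below is illustrative only and is not taken from any of the projects listed; the bucket and key names are placeholders.

import boto3
from botocore import UNSIGNED
from botocore.client import Config

# Bundle client-level settings: region, timeouts, retry behaviour,
# and the signature version used to sign (or skip signing) requests.
config = Config(
    region_name='us-east-1',
    connect_timeout=5,
    read_timeout=60,
    retries={'max_attempts': 3},
    signature_version=UNSIGNED,   # anonymous access to public objects
)

s3 = boto3.client('s3', config=config)
s3.download_file('some-public-bucket', 'data/file.bin', '/tmp/file.bin')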

Example 1: proxy_response

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def proxy_response(req):
    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3', config=Config(signature_version='s3v4'))

    bucket = s3.Bucket(BUCKET_NAME)
    file_name = str(uuid4())

    obj = bucket.put_object(
        Key=file_name,
        Body=req.content,
        ACL="authenticated-read",
        ContentType=req.headers["content-type"]
    )

    url = s3_client.generate_presigned_url(
        "get_object",
        Params={
            "Bucket": BUCKET_NAME,
            "Key": file_name},
        ExpiresIn=120
    )
    return redirect(url, 303) 
Author: stevenleeg, Project: geemusic, Lines of code: 24, Source file: controllers.py

Example 2: _save_chunk

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def _save_chunk(self, data, chunk_info, executor=None):
        # Keyname
        key_name = f"{self.strax_unique_key}/{chunk_info['chunk_i']:06d}"

        # Save chunk via temporary file
        with tempfile.SpooledTemporaryFile() as f:
            filesize = strax.save_file(f,
                                       data=data,
                                       compressor=self.md['compressor'])
            f.seek(0)
            self.s3.upload_fileobj(f,
                                   BUCKET_NAME,
                                   key_name,
                                   Config=self.config)

        return dict(key_name=key_name, filesize=filesize), None 
Author: AxFoundation, Project: strax, Lines of code: 18, Source file: s3.py

Example 3: s3_cleanup

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def s3_cleanup(bucket, cluster_name, user_name):
    s3_res = boto3.resource('s3', config=Config(signature_version='s3v4'))
    client = boto3.client('s3', config=Config(signature_version='s3v4'), region_name=os.environ['aws_region'])
    try:
        client.head_bucket(Bucket=bucket)
    except Exception:
        print("There is no bucket {} or you do not have permission to access it".format(bucket))
        sys.exit(0)
    try:
        resource = s3_res.Bucket(bucket)
        prefix = user_name + '/' + cluster_name + "/"
        for i in resource.objects.filter(Prefix=prefix):
            s3_res.Object(resource.name, i.key).delete()
    except Exception as err:
        # traceback.format_exc() returns the traceback as a string;
        # print_exc() returns None and cannot be concatenated to the message.
        logging.info("Unable to clean S3 bucket: " + str(err) + "\n Traceback: " + traceback.format_exc())
        append_result(str({"error": "Unable to clean S3 bucket",
                           "error_message": str(err) + "\n Traceback: " + traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
Author: apache, Project: incubator-dlab, Lines of code: 20, Source file: actions_lib.py

Example 4: get_object_count

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def get_object_count(bucket, prefix):
    try:
        s3_cli = boto3.client('s3', config=Config(signature_version='s3v4'),
                              region_name=args.region)
        content = s3_cli.get_paginator('list_objects')
        file_list = []
        try:
            for i in content.paginate(Bucket=bucket, Delimiter='/',
                                      Prefix=prefix):
                for file in i.get('Contents'):
                    file_list.append(file.get('Key'))
            count = len(file_list)
        except Exception:
            print("{} still does not exist. Waiting...".format(prefix))
            count = 0
        return count
    except Exception as err:
        # Use traceback.format_exc(), which returns a string; print_exc()
        # returns None and would break the string concatenation below.
        logging.error("Unable to get objects from s3: " +
                      str(err) + "\n Traceback: " +
                      traceback.format_exc())
Author: apache, Project: incubator-dlab, Lines of code: 22, Source file: dataengine-service_create.py

Example 5: download_file_from_s3

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def download_file_from_s3(bucket_name: str, key: str, local_path: str) -> None:
    """
    Downloads file from S3 anonymously
    :param bucket_name: S3 Bucket name
    :param key: S3 File key name
    :param local_path: Local file path to download as
    """
    verify_ssl = get_verify_ssl()
    if not os.path.isfile(local_path):
        client = boto3.client(
            "s3", config=Config(signature_version=UNSIGNED), verify=verify_ssl
        )

        try:
            logger.info("Downloading S3 data file...")
            total = client.head_object(Bucket=bucket_name, Key=key)["ContentLength"]
            with ProgressPercentage(client, bucket_name, key, total) as Callback:
                client.download_file(bucket_name, key, local_path, Callback=Callback)
        except ClientError:
            raise KeyError(f"File {key} not available in {bucket_name} bucket.")

    else:
        logger.info(f"Reusing cached file {local_path}...") 
Author: twosixlabs, Project: armory, Lines of code: 25, Source file: utils.py

Example 6: assume_role

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def assume_role(cls, role_arn, principal_arn, saml_response, duration=3600):
        ''' Assumes the desired role using the saml_response given. The response should be b64 encoded.
            Duration is in seconds
            :param role_arn: role amazon resource name
            :param principal_arn: principal name
            :param saml_response: SAML object to assume role with
            :param duration: session duration (default: 3600)
            :return: AWS session token
        '''
        # Assume role with new SAML
        conn = boto3.client('sts', config=client.Config(signature_version=botocore.UNSIGNED, user_agent=cls.USER_AGENT, region_name=None))
        aws_session_token = conn.assume_role_with_saml(
            RoleArn=role_arn,
            PrincipalArn=principal_arn,
            SAMLAssertion=saml_response,
            DurationSeconds=duration,
        )
        return aws_session_token 
Author: cyberark, Project: shimit, Lines of code: 21, Source file: aws.py

Example 7: _make_boto3_athena_client

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def _make_boto3_athena_client(container):
    region = container.get_parameter('aws_region')
    logger = container.get('logger')

    config = botocore_client.Config(
        connect_timeout=5,
        read_timeout=5,
        region_name=region
    )

    session_kwargs = {}
    try:
        session = boto3.Session(**session_kwargs)
        return session.client(
            'athena',
            config=config,
        )
    except ProfileNotFound:
        logger.error('AWS Athena Connection via Profile Failed') 
Author: airbnb, Project: streamalert, Lines of code: 21, Source file: services.py

Example 8: _make_boto3_kinesis_client

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def _make_boto3_kinesis_client(container):
    region = container.get_parameter('aws_region')
    logger = container.get('logger')

    config = botocore_client.Config(
        connect_timeout=5,
        read_timeout=5,
        region_name=region
    )

    session_kwargs = {}
    try:
        session = boto3.Session(**session_kwargs)
        return session.client('kinesis', config=config)
    except ProfileNotFound:
        logger.error('AWS Kinesis Connection via Profile Failed') 
Author: airbnb, Project: streamalert, Lines of code: 18, Source file: services.py

Example 9: upload_export_tarball

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str:
        def percent_callback(bytes_transferred: Any) -> None:
            sys.stdout.write('.')
            sys.stdout.flush()

        # We use the avatar bucket, because it's world-readable.
        key = self.avatar_bucket.Object(os.path.join("exports", generate_random_token(32),
                                                     os.path.basename(tarball_path)))

        key.upload_file(tarball_path, Callback=percent_callback)

        session = botocore.session.get_session()
        config = Config(signature_version=botocore.UNSIGNED)

        public_url = session.create_client('s3', config=config).generate_presigned_url(
            'get_object',
            Params={
                'Bucket': self.avatar_bucket.name,
                'Key': key.key,
            },
            ExpiresIn=0,
        )
        return public_url 
Author: zulip, Project: zulip, Lines of code: 25, Source file: upload.py

Example 10: get_s3_client

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def get_s3_client(unsigned=True):
    """Return a boto3 S3 client with optional unsigned config.

    Parameters
    ----------
    unsigned : Optional[bool]
        If True, the client will be using unsigned mode in which public
        resources can be accessed without credentials. Default: True

    Returns
    -------
    botocore.client.S3
        A client object to AWS S3.
    """
    if unsigned:
        return boto3.client('s3', config=Config(signature_version=UNSIGNED))
    else:
        return boto3.client('s3') 
Author: sorgerlab, Project: indra, Lines of code: 20, Source file: aws.py

Example 11: test_only_dynamodb_calls_are_traced

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def test_only_dynamodb_calls_are_traced():
    """Test only a single subsegment is created for other AWS services.

    As the pynamodb patch applies the botocore patch as well, we need
    to ensure that only one subsegment is created for all calls not
    made by PynamoDB. As PynamoDB calls botocore differently than the
    botocore patch expects we also just get a single subsegment per
    PynamoDB call.
    """
    session = botocore.session.get_session()
    s3 = session.create_client('s3', region_name='us-west-2',
                               config=Config(signature_version=UNSIGNED))
    try:
        s3.get_bucket_location(Bucket='mybucket')
    except ClientError:
        pass

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    assert subsegments[0].name == 's3'
    assert len(subsegments[0].subsegments) == 0 
Author: aws, Project: aws-xray-sdk-python, Lines of code: 23, Source file: test_pynamodb.py

Example 12: __init__

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def __init__(self, handle_task = lambda t, i: None, **kwargs):
        """Will not be called if used as a mixin. Provides just the expected variables.
        
        Args:
            handle_task (callable) : Callable to process task input and send success or
                                     failure
            kwargs : Arguments for heaviside.utils.create_session
        """
        session, _ = create_session(**kwargs)
        # DP NOTE: read_timeout is needed so that the long poll for tasking doesn't
        #          timeout client side before AWS returns that there is no work
        self.client = session.client('stepfunctions', config=Config(read_timeout=70))
        self.log = logging.getLogger(__name__)
        self.name = None
        self.arn = None
        self.handle_task = handle_task
        self.max_concurrent = 0
        self.poll_delay = 1
        self.polling = False 
Author: jhuapl-boss, Project: heaviside, Lines of code: 21, Source file: activities.py

Example 13: register_domain

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def register_domain(domain=None, region=None):
    client = boto3.client(
        'swf',
        region_name=region or config.REGION,
        config=Config(connect_timeout=config.CONNECT_TIMEOUT,
                      read_timeout=config.READ_TIMEOUT))

    # register domain for Mass
    try:
        res = client.register_domain(
            name=domain or config.DOMAIN,
            description='The SWF domain for Mass',
            workflowExecutionRetentionPeriodInDays=str(
                int(math.ceil(float(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT) / 60 / 60 / 24)))
        )
    except ClientError:
        # DomainAlreadyExists
        pass 
Author: KKBOX, Project: mass, Lines of code: 20, Source file: utils.py

Example 14: register_activity_type

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def register_activity_type(domain=None, region=None):
    client = boto3.client(
        'swf',
        region_name=region or config.REGION,
        config=Config(connect_timeout=config.CONNECT_TIMEOUT,
                      read_timeout=config.READ_TIMEOUT))

    # register activity type for Cmd
    try:
        res = client.register_activity_type(
            domain=domain or config.DOMAIN,
            name=config.ACTIVITY_TYPE_FOR_ACTION['name'],
            version=config.ACTIVITY_TYPE_FOR_ACTION['version'],
            description='The SWF activity type for Cmd of Mass.',
            defaultTaskStartToCloseTimeout=str(config.ACTIVITY_TASK_START_TO_CLOSE_TIMEOUT),
            defaultTaskHeartbeatTimeout=str(config.ACTIVITY_HEARTBEAT_TIMEOUT),
            defaultTaskList={'name': config.ACTIVITY_TASK_LIST},
            defaultTaskPriority='1',
            defaultTaskScheduleToStartTimeout=str(config.ACTIVITY_TASK_START_TO_CLOSE_TIMEOUT),
            defaultTaskScheduleToCloseTimeout=str(config.ACTIVITY_TASK_START_TO_CLOSE_TIMEOUT)
        )
    except ClientError:
        # TypeAlreadyExists
        pass 
Author: KKBOX, Project: mass, Lines of code: 26, Source file: utils.py

Example 15: iter_workflow_execution_history

# Required import: from botocore import client [as alias]
# Or: from botocore.client import Config [as alias]
def iter_workflow_execution_history(workflow_id, run_id, reverse_order=False, ignore_decision_task=True):
    client = boto3.client(
        'swf',
        region_name=config.REGION,
        config=Config(connect_timeout=config.CONNECT_TIMEOUT,
                      read_timeout=config.READ_TIMEOUT))
    paginator = client.get_paginator('get_workflow_execution_history')
    for res in paginator.paginate(
        domain=config.DOMAIN,
        execution={
            'workflowId': workflow_id,
            'runId': run_id
        },
        reverseOrder=reverse_order
    ):
        for event in res['events']:
            if ignore_decision_task and event['eventType'].startswith('DecisionTask'):
                continue
            yield event 
Author: KKBOX, Project: mass, Lines of code: 21, Source file: test_mass.py


Note: The botocore.client.Config examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution and use should follow the license of the corresponding project. Please do not republish without permission.