

Python exception.S3ResponseError Code Examples

This article collects typical usage examples of the Python exception class boto.exception.S3ResponseError. If you are wondering how exception.S3ResponseError is used in practice, the curated code examples below may help. You can also explore further usage examples from the boto.exception module.


The sections below show 15 code examples of exception.S3ResponseError, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code examples.
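
Before diving into the individual examples, here is a minimal sketch of the pattern most of them share: import S3ResponseError, attempt an S3 operation, and inspect the exception's status, reason, or error code. The connection setup and bucket name below are assumptions for illustration, not taken from any of the examples.

import boto
from boto.exception import S3ResponseError

conn = boto.connect_s3()  # credentials resolved from the environment or boto config
try:
    bucket = conn.get_bucket('example-bucket')  # hypothetical bucket name
except S3ResponseError as e:
    # S3ResponseError exposes the HTTP status, reason phrase and S3 error code
    print(e.status, e.reason, e.error_code)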

Example 1: __init__

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def __init__(self, config, ssh_ca_section, ca_key):
        super(S3Authority, self).__init__(ca_key)

        try:
            # Get a valid S3 bucket
            bucket = ssh_ca.get_config_value(
                config, ssh_ca_section, 'bucket', required=True)

            # Get a valid AWS region
            region = ssh_ca.get_config_value(
                config, ssh_ca_section, 'region', required=True)

            self.s3_conn = boto.s3.connect_to_region(region)
            self.ssh_bucket = self.s3_conn.get_bucket(bucket)
        except S3ResponseError as e:
            if e.code == "AccessDenied":
                raise ssh_ca.SSHCAInvalidConfiguration("Access denied to S3")
            raise 
Author: cloudtools, Project: ssh-ca, Lines: 20, Source: s3.py

Example 2: enumerate_keys

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def enumerate_keys(bucket, bucket_type, wordlist_path):
    """Enumerate bucket keys."""
    keys = []

    with open(wordlist_path) as wordlist:
        wordlist_keys = wordlist.read()
        key_list = wordlist_keys.split('\n')

    if bucket_type != 'azure':
        for key in key_list:
            try:
                key = bucket.get_key(key).key
                keys.append(key)
            except (S3ResponseError, AttributeError):
                continue
    else:
        bucket = bucket if bucket.endswith('/') else bucket + '/'
        for key in key_list:
            try:
                request = urllib_req.Request(bucket + key)
                urllib_req.urlopen(request, timeout=20)
                keys.append(key)
            except (HTTPError, URLError):
                continue
    return keys 
Author: VirtueSecurity, Project: aws-extender-cli, Lines: 27, Source: aws_extender_cli.py

Example 3: load_from_s3_file

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def load_from_s3_file(s3_uri):
    """Load data from S3
    Useful for loading small config or schema files

    :param s3_uri: path to S3 uri
    :returns: file contents
    """
    _, _, path = s3_uri.partition('://')
    bucket_name, _, key_name = path.partition('/')

    # if region is in a bucket name, put that region first
    def preferred_region(item):
        return item.name not in bucket_name

    boto_creds = get_boto_creds()
    for region in sorted(boto.s3.regions(), key=preferred_region):
        try:
            conn = boto.s3.connect_to_region(region.name, **boto_creds)
            return _load_from_s3_region(conn, bucket_name, key_name)
        except S3ResponseError as e:
            # skip to next region if access is not allowed from this one
            if e.status not in [403, 301]:
                raise
    raise ValueError("{0}: No valid region found".format(s3_uri)) 
Author: Yelp, Project: mycroft, Lines: 26, Source: util.py
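
A hypothetical call to load_from_s3_file, assuming the URI points at a small config or schema file; the helper tries each region in turn until one grants access:

# hypothetical S3 URI
schema_text = load_from_s3_file('s3://example-config-bucket/schemas/events.yaml')
print(schema_text)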

Example 4: test_load_from_s3_file

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def test_load_from_s3_file(region, s3_uri, expected_content):
    def side_effect_func(*args, **kwargs):
        region_name = args[0]
        for r in S3_REGIONS:
            if r.name == region_name:
                if r.status != 200:
                    raise S3ResponseError(r.status, r.status)
                return region_name
        raise ValueError('Unknown region: {0}'.format(region_name))

    with patch('boto.s3.regions', autospec=True) as mock_regions:
        mock_regions.return_value = S3_REGIONS
        with staticconf.testing.MockConfiguration(MOCK_CONFIG):
            with patch('boto.s3.connect_to_region', autospec=True) as mock_connect:
                mock_connect.side_effect = side_effect_func
                with patch('sherlock.common.util._load_from_s3_region',
                           autospec=True) as mock_load:
                    mock_load.return_value = expected_content
                    result = load_from_s3_file(s3_uri)
                    mock_regions.assert_called_once_with()
                    assert mock_connect.call_count <= len(S3_REGIONS)
                    assert mock_load.call_count == 1
                    assert (mock_load.call_args)[0][0] == 'us-west-2'
                    assert result == expected_content 
Author: Yelp, Project: mycroft, Lines: 26, Source: test_util.py

Example 5: enable_bucket_cors

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def enable_bucket_cors(bucket):
    """ For direct upload to work, the bucket needs to enable
    cross-origin request scripting. """
    try:
        cors_cfg = bucket.get_cors()
    except S3ResponseError:
        cors_cfg = CORSConfiguration()
    rules = [r.id for r in cors_cfg]
    changed = False
    if 'spendb_put' not in rules:
        cors_cfg.add_rule(['PUT', 'POST'], '*',
                          allowed_header='*',
                          id='spendb_put',
                          max_age_seconds=3000,
                          expose_header='x-amz-server-side-encryption')
        changed = True
    if 'spendb_get' not in rules:
        cors_cfg.add_rule('GET', '*', id='spendb_get')
        changed = True

    if changed:
        bucket.set_cors(cors_cfg) 
Author: openspending, Project: spendb, Lines: 24, Source: upload.py

Example 6: get

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def get(self, str_report_id=None):
        """
            select a NmapReport by Id.

            :param str: id
            :rtype: NmapReport
            :return: NmapReport object
        """
        nmapreport = None
        if str_report_id is not None and isinstance(str_report_id, str):
            try:
                mykey = Key(self.bucket)
                mykey.key = str_report_id
                nmapreportjson = json.loads(mykey.get_contents_as_string())
                nmapreport = NmapParser.parse_fromdict(nmapreportjson)
            except S3ResponseError:
                pass
        return nmapreport 
Author: imiyoo2010, Project: teye_scanner_for_book, Lines: 20, Source: s3.py

Example 7: decode_blob

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def decode_blob(self, value):
        if not value:
            return None
        match = re.match("^s3:\/\/([^\/]*)\/(.*)$", value)
        if match:
            s3 = self.manager.get_s3_connection()
            bucket = s3.get_bucket(match.group(1), validate=False)
            try:
                key = bucket.get_key(match.group(2))
            except S3ResponseError as e:
                if e.reason != "Forbidden":
                    raise
                return None
        else:
            return None
        if key:
            return Blob(file=key, id="s3://%s/%s" % (key.bucket.name, key.name))
        else:
            return None 
Author: VirtueSecurity, Project: aws-extender, Lines: 21, Source: sdbmanager.py

Example 8: delete_bucket

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def delete_bucket(self, bucket, headers=None):
        """
        Removes an S3 bucket.

        In order to remove the bucket, it must first be empty. If the bucket is
        not empty, an ``S3ResponseError`` will be raised.

        :type bucket_name: string
        :param bucket_name: The name of the bucket

        :type headers: dict
        :param headers: Additional headers to pass along with the request to
            AWS.
        """
        response = self.make_request('DELETE', bucket, headers=headers)
        body = response.read()
        if response.status != 204:
            raise self.provider.storage_response_error(
                response.status, response.reason, body) 
Author: VirtueSecurity, Project: aws-extender, Lines: 21, Source: connection.py
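
The docstring above notes that a bucket must be empty before it can be removed; otherwise an S3ResponseError is raised. A hedged usage sketch of handling that case (connection setup and bucket name are assumptions, not part of the example):

import boto
from boto.exception import S3ResponseError

conn = boto.connect_s3()
try:
    conn.delete_bucket('example-bucket')  # hypothetical bucket name
except S3ResponseError as e:
    if e.error_code == 'BucketNotEmpty':
        # S3 refuses to delete a bucket that still contains keys
        print('Bucket is not empty; delete its keys first')
    else:
        raise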

Example 9: test_copy_mode_bypass

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def test_copy_mode_bypass(self, copy_key_mock):
        copy_key_mock.side_effect = S3ResponseError(None, meta())
        # given
        self.s3_conn.create_bucket('dest_bucket')
        options = create_s3_bypass_simple_config()

        # when:
        with closing(S3Bypass(options, meta())) as bypass:
            bypass.execute()

        # then:
        bucket = self.s3_conn.get_bucket('dest_bucket')
        key = next(iter(bucket.list('some_prefix/')))
        self.assertEqual('some_prefix/test_key', key.name)
        self.assertEqual(self.data, json.loads(key.get_contents_as_string()))
        self.assertEqual(bypass.total_items, 2, 'Bypass got an incorrect number of total items') 
Author: scrapinghub, Project: exporters, Lines: 18, Source: test_bypass_s3.py

Example 10: _check_write_consistency

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def _check_write_consistency(self):
        from boto.exception import S3ResponseError
        for key_info in self.get_metadata('keys_written'):
            try:
                key = self.bucket.get_key(key_info['key_name'])
                if not key:
                    raise InconsistentWriteState('Key {} not found in bucket'.format(
                        key_info['key_name']))
                if str(key.content_length) != str(key_info['size']):
                    raise InconsistentWriteState(
                        'Key {} has unexpected size. (expected {} - got {})'.format(
                            key_info['key_name'], key_info['size'], key.content_length))
                if self.save_metadata:
                    if str(key.get_metadata('total')) != str(key_info['number_of_records']):
                        raise InconsistentWriteState(
                            'Unexpected number of records for key {}. ('
                            'expected {} - got {})'.format(key_info['key_name'],
                                                           key_info['number_of_records'],
                                                           key.get_metadata('total')))
            except S3ResponseError:
                self.logger.warning(
                    'Skipping consistency check for key {}. Probably due to lack of '
                    'read permissions'.format(key_info['key_name']))
        self.logger.info('Consistency check passed') 
Author: scrapinghub, Project: exporters, Lines: 26, Source: s3_writer.py

Example 11: _delete_file

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def _delete_file(self, filename, obj):
        storage_type = getattr(obj, self.storage_type_field, '')
        bucket_name = getattr(obj, self.bucket_name_field, '')

        if not (storage_type and bucket_name):
            return super(S3FileUploadField, self)._delete_file(filename)

        if storage_type != 's3':
            raise ValueError(
                'Storage type "%s" is invalid, the only supported storage type'
                ' (apart from default local storage) is s3.' % storage_type)

        conn = S3Connection(self.access_key_id, self.access_key_secret)
        bucket = conn.get_bucket(bucket_name)

        path = self._get_s3_path(filename)
        k = Key(bucket)
        k.key = path

        try:
            bucket.delete_key(k)
        except S3ResponseError:
            pass 
Author: Jaza, Project: flask-admin-s3-upload, Lines: 25, Source: flask_admin_s3_upload.py

Example 12: _delete_thumbnail

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def _delete_thumbnail(self, filename, storage_type, bucket_name):
        if not (storage_type and bucket_name):
            self._delete_thumbnail_local(filename)
            return

        if storage_type != 's3':
            raise ValueError(
                'Storage type "%s" is invalid, the only supported storage type'
                ' (apart from default local storage) is s3.' % storage_type)

        conn = S3Connection(self.access_key_id, self.access_key_secret)
        bucket = conn.get_bucket(bucket_name)

        path = self._get_s3_path(self.thumbnail_fn(filename))
        k = Key(bucket)
        k.key = path

        try:
            bucket.delete_key(k)
        except S3ResponseError:
            pass

    # Saving 
Author: Jaza, Project: flask-admin-s3-upload, Lines: 25, Source: flask_admin_s3_upload.py

Example 13: disable_static_site

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def disable_static_site(client, args):
    """
    Disables static site for a bucket
    """
    parser = argparse.ArgumentParser(PLUGIN_BASE+' du')

    parser.add_argument('bucket', metavar='BUCKET', type=str, nargs='?',
                        help="The bucket to disable static site for.")

    parsed = parser.parse_args(args)

    # get the bucket
    try:
        bucket = client.get_bucket(parsed.bucket)
    except S3ResponseError:
        print('No bucket named '+parsed.bucket)
        sys.exit(2)

    # make the site
    bucket.delete_website_configuration()
    print('Website configuration deleted for {}'.format(parsed.bucket)) 
Author: linode, Project: linode-cli, Lines: 23, Source: obj.py

Example 14: download

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def download(directory):
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
    try:
        print("Downloading {} tarball from S3...".format(friendly_name))
        key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        open(NEED_TO_UPLOAD_MARKER, 'a').close()
        print(err)
        raise SystemExit("Cached {} download failed!".format(friendly_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(friendly_name)) 
Author: google, Project: rekall, Lines: 14, Source: s3_cache.py

Example 15: copy

# Required import: from boto import exception [as alias]
# Or: from boto.exception import S3ResponseError [as alias]
def copy(src_bucket, src_key, dst_bucket, dst_key, src_etag=None, validate_src_etag=False):
    '''
    Copy a key from one bucket to another.

    If validate_src_etag is True, the ETag must match. Raises
    ObjectNotFoundException if the key does not exist,
    or the ETag doesn't match.

    We return the same error in either case, since a mismatched
    ETag might mean the user wasn't the last to upload the object.
    If the bucket is private they may not even know it exists.
    By returning the same error, we avoid giving out extra
    information.

    '''
    import boto
    from boto.exception import S3ResponseError
    conn = boto.connect_s3()
    bucket = conn.get_bucket(dst_bucket)
    if validate_src_etag:
        headers = {
            'x-amz-copy-source-if-match': src_etag,
        }
    else:
        headers = {}
    try:
        bucket.copy_key(
            new_key_name=dst_key,
            src_bucket_name=src_bucket,
            src_key_name=src_key,
            headers=headers
        )
    except S3ResponseError as e:
        if e.status in [status.HTTP_404_NOT_FOUND, status.HTTP_412_PRECONDITION_FAILED]:
            raise ObjectNotFoundException()
        else:
            raise 
Author: metabolize, Project: drf-to-s3, Lines: 39, Source: s3.py
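
A hypothetical call to the copy helper above, passing the source ETag so the copy only succeeds if the object has not changed since the ETag was recorded (bucket names, keys and the ETag value are made up for illustration):

try:
    copy(
        src_bucket='uploads-bucket',
        src_key='incoming/report.csv',
        dst_bucket='archive-bucket',
        dst_key='archived/report.csv',
        src_etag='"d41d8cd98f00b204e9800998ecf8427e"',
        validate_src_etag=True,
    )
except ObjectNotFoundException:
    # the helper deliberately reports a missing key and a mismatched ETag the same way
    print('Source object missing or modified since the ETag was recorded')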


Note: The boto.exception.S3ResponseError examples in this article were compiled by 纯净天空 from open-source code and documentation hosted on GitHub, MSDocs and similar platforms. The snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. For distribution and use, please refer to each project's License. Do not reproduce without permission.