本文整理汇总了Python中awscli.customizations.s3.utils.RequestParamsMapper类的典型用法代码示例。如果您正苦于以下问题:Python RequestParamsMapper类的具体用法?Python RequestParamsMapper怎么用?Python RequestParamsMapper使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了RequestParamsMapper类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_upload_part
def test_upload_part(self):
    """UploadPart mapping should carry over only the SSE-C settings."""
    request_params = {}
    RequestParamsMapper.map_upload_part_params(
        request_params, self.cli_params)
    expected = {
        'SSECustomerAlgorithm': 'AES256',
        'SSECustomerKey': 'my-sse-c-key',
    }
    self.assertEqual(request_params, expected)
示例2: create_multipart_upload
def create_multipart_upload(self):
    """Start a multipart upload for the destination and return its id."""
    bucket, key = find_bucket_key(self.dest)
    request_params = {'Bucket': bucket, 'Key': key}
    # Content type and any CLI-supplied request options are injected
    # before the call is made.
    self._inject_content_type(request_params)
    RequestParamsMapper.map_create_multipart_upload_params(
        request_params, self.parameters)
    response = self.client.create_multipart_upload(**request_params)
    return response['UploadId']
示例3: _handle_upload
def _handle_upload(self, body):
    """Issue a single PutObject of ``body`` to the destination key."""
    bucket, key = find_bucket_key(self.dest)
    request_params = {
        'Bucket': bucket,
        'Key': key,
        'Body': body,
    }
    # Fill in content type plus any CLI-supplied request options.
    self._inject_content_type(request_params)
    RequestParamsMapper.map_put_object_params(request_params, self.parameters)
    self.client.put_object(**request_params)
示例4: set_size_from_s3
def set_size_from_s3(self):
    """Issue a ``HeadObject`` on the source key and record its size."""
    bucket, key = find_bucket_key(self.src)
    request_params = {'Bucket': bucket, 'Key': key}
    RequestParamsMapper.map_head_object_params(
        request_params, self.parameters)
    response = self.client.head_object(**request_params)
    # ContentLength comes back as a string/number; normalize to int.
    self.size = int(response['ContentLength'])
示例5: test_put_object
def test_put_object(self):
    """PutObject mapping should carry SSE-C, KMS key, and SSE settings."""
    request_params = {}
    RequestParamsMapper.map_put_object_params(
        request_params, self.cli_params)
    expected = {
        'SSECustomerAlgorithm': 'AES256',
        'SSECustomerKey': 'my-sse-c-key',
        'SSEKMSKeyId': 'my-kms-key',
        'ServerSideEncryption': 'AES256',
    }
    self.assertEqual(request_params, expected)
示例6: download
def download(self):
    """Fetch the source object with a single ``GetObject`` and save it
    to the destination via ``save_file``.
    """
    bucket, key = find_bucket_key(self.src)
    request_params = {'Bucket': bucket, 'Key': key}
    RequestParamsMapper.map_get_object_params(
        request_params, self.parameters)
    response = self.client.get_object(**request_params)
    save_file(
        self.dest, response, self.last_update, self.is_stream)
示例7: test_create_multipart_upload
def test_create_multipart_upload(self):
    """CreateMultipartUpload mapping should carry SSE-C, KMS, and SSE."""
    request_params = {}
    RequestParamsMapper.map_create_multipart_upload_params(
        request_params, self.cli_params)
    expected = {
        'SSECustomerAlgorithm': 'AES256',
        'SSECustomerKey': 'my-sse-c-key',
        'SSEKMSKeyId': 'my-kms-key',
        'ServerSideEncryption': 'AES256',
    }
    self.assertEqual(request_params, expected)
示例8: copy
def copy(self):
    """Server-side copy of the source object to the destination key."""
    bucket, key = find_bucket_key(self.dest)
    request_params = {
        'Bucket': bucket,
        'Key': key,
        'CopySource': self.src,
    }
    # Inject content type and any CLI-supplied request options.
    self._inject_content_type(request_params)
    RequestParamsMapper.map_copy_object_params(
        request_params, self.parameters)
    self.client.copy_object(**request_params)
示例9: test_upload_part_copy
def test_upload_part_copy(self):
    """UploadPartCopy mapping should carry SSE-C for both source and
    destination sides of the copy.
    """
    request_params = {}
    RequestParamsMapper.map_upload_part_copy_params(
        request_params, self.cli_params)
    expected = {
        'CopySourceSSECustomerAlgorithm': 'AES256',
        'CopySourceSSECustomerKey': 'my-sse-c-copy-source-key',
        'SSECustomerAlgorithm': 'AES256',
        'SSECustomerKey': 'my-sse-c-key',
    }
    self.assertEqual(request_params, expected)
示例10: test_create_multipart_upload
def test_create_multipart_upload(self):
    """CreateMultipartUpload mapping should carry SSE-C, KMS, and SSE."""
    request_params = {}
    RequestParamsMapper.map_create_multipart_upload_params(
        request_params, self.cli_params)
    self.assertEqual(
        request_params,
        {'SSECustomerAlgorithm': 'AES256',
         'SSECustomerKey': 'my-sse-c-key',
         'SSEKMSKeyId': 'my-kms-key',
         'ServerSideEncryption': 'AES256'})
示例11: test_put_object
def test_put_object(self):
    """PutObject mapping should carry SSE-C, KMS key, and SSE settings."""
    request_params = {}
    RequestParamsMapper.map_put_object_params(
        request_params, self.cli_params)
    self.assertEqual(
        request_params,
        {'SSECustomerAlgorithm': 'AES256',
         'SSECustomerKey': 'my-sse-c-key',
         'SSEKMSKeyId': 'my-kms-key',
         'ServerSideEncryption': 'AES256'})
示例12: __call__
def __call__(self):
    """Perform one UploadPartCopy for this task's part number.

    Computes the byte range this part covers, waits for the shared
    upload id, issues the ``upload_part_copy`` call, announces the
    resulting ETag to the upload context, and reports progress on the
    result queue.  On failure the whole multipart upload is cancelled.
    """
    LOGGER.debug("Uploading part copy %s for filename: %s",
                 self._part_number, self._filename.src)
    total_file_size = self._filename.size
    # Part numbers are 1-based, so part N starts at (N-1) * chunk_size.
    start_range = (self._part_number - 1) * self._chunk_size
    if self._is_last_part(self._part_number):
        # Last part: range runs to the final byte of the object.
        end_range = total_file_size - 1
    else:
        end_range = start_range + self._chunk_size - 1
    range_param = 'bytes=%s-%s' % (start_range, end_range)
    try:
        # Blocks until the task that created the multipart upload has
        # published the upload id.
        LOGGER.debug("Waiting for upload id.")
        upload_id = self._upload_context.wait_for_upload_id()
        bucket, key = find_bucket_key(self._filename.dest)
        src_bucket, src_key = find_bucket_key(self._filename.src)
        params = {'Bucket': bucket, 'Key': key,
                  'PartNumber': self._part_number,
                  'UploadId': upload_id,
                  'CopySource': {'Bucket': src_bucket, 'Key': src_key},
                  'CopySourceRange': range_param}
        # Merge in any CLI-supplied request options (SSE-C etc.).
        RequestParamsMapper.map_upload_part_copy_params(
            params, self._params)
        response_data = self._filename.client.upload_part_copy(**params)
        # The ETag comes back wrapped in double quotes; strip them.
        etag = response_data['CopyPartResult']['ETag'][1:-1]
        self._upload_context.announce_finished_part(
            etag=etag, part_number=self._part_number)
        message = print_operation(self._filename, 0)
        result = {'message': message, 'total_parts': self._total_parts(),
                  'error': False}
        self._result_queue.put(PrintTask(**result))
    except UploadCancelledError as e:
        # We don't need to do anything in this case. The task
        # has been cancelled, and the task that cancelled the
        # task has already queued a message.
        LOGGER.debug("Not uploading part copy, task has been cancelled.")
    except Exception as e:
        # Any other failure aborts the whole multipart upload and
        # surfaces the error through the result queue.
        LOGGER.debug('Error during upload part copy: %s', e,
                     exc_info=True)
        message = print_operation(self._filename, failed=True,
                                  dryrun=False)
        message += '\n' + str(e)
        result = {'message': message, 'error': True}
        self._result_queue.put(PrintTask(**result))
        self._upload_context.cancel_upload()
    else:
        LOGGER.debug("Copy part number %s completed for filename: %s",
                     self._part_number, self._filename.src)
示例13: _download_part
def _download_part(self):
    """Download one byte range of the source object and queue the writes.

    Retries the GetObject call up to ``TOTAL_ATTEMPTS`` times on
    timeout/socket errors and incomplete reads; raises
    ``RetriesExeededError`` once the attempts are exhausted.
    """
    total_file_size = self._filename.size
    # Part numbers here are 0-based: part N starts at N * chunk_size.
    start_range = self._part_number * self._chunk_size
    if self._part_number == int(total_file_size / self._chunk_size) - 1:
        # Last part: leave the end empty so the range is open-ended
        # ('bytes=start-') and picks up any trailing bytes.
        end_range = ''
    else:
        end_range = start_range + self._chunk_size - 1
    range_param = 'bytes=%s-%s' % (start_range, end_range)
    LOGGER.debug("Downloading bytes range of %s for file %s", range_param,
                 self._filename.dest)
    bucket, key = find_bucket_key(self._filename.src)
    params = {'Bucket': bucket,
              'Key': key,
              'Range': range_param}
    # Merge in any CLI-supplied request options (SSE-C etc.).
    RequestParamsMapper.map_get_object_params(params, self._params)
    for i in range(self.TOTAL_ATTEMPTS):
        try:
            LOGGER.debug("Making GetObject requests with byte range: %s",
                         range_param)
            response_data = self._client.get_object(**params)
            LOGGER.debug("Response received from GetObject")
            body = response_data['Body']
            # Hand the streaming body off to the writer queue, then
            # mark this part complete and report progress.
            self._queue_writes(body)
            self._context.announce_completed_part(self._part_number)
            message = print_operation(self._filename, 0)
            total_parts = int(self._filename.size / self._chunk_size)
            result = {'message': message, 'error': False,
                      'total_parts': total_parts}
            self._result_queue.put(PrintTask(**result))
            LOGGER.debug("Task complete: %s", self)
            return
        except (socket.timeout, socket.error, ReadTimeoutError) as e:
            LOGGER.debug("Timeout error caught, retrying request, "
                         "(attempt %s / %s)", i, self.TOTAL_ATTEMPTS,
                         exc_info=True)
            continue
        except IncompleteReadError as e:
            LOGGER.debug("Incomplete read detected: %s, (attempt %s / %s)",
                         e, i, self.TOTAL_ATTEMPTS)
            continue
    # All attempts failed; note the exception name's spelling matches
    # the project-defined class.
    raise RetriesExeededError("Maximum number of attempts exceeded: %s" %
                              self.TOTAL_ATTEMPTS)
示例14: test_upload_part
def test_upload_part(self):
    """UploadPart mapping should carry over only the SSE-C settings."""
    request_params = {}
    RequestParamsMapper.map_upload_part_params(
        request_params, self.cli_params)
    self.assertEqual(
        request_params,
        {'SSECustomerAlgorithm': 'AES256',
         'SSECustomerKey': 'my-sse-c-key'})
示例15: test_delete_object
def test_delete_object(self):
    """DeleteObject mapping should carry the RequestPayer setting."""
    request_params = {}
    RequestParamsMapper.map_delete_object_params(
        request_params, self.cli_params)
    expected = {'RequestPayer': 'requester'}
    self.assertEqual(request_params, expected)