This page collects typical usage examples of the Python method awscli.customizations.s3.utils.RequestParamsMapper.map_head_object_params. If you are unsure what RequestParamsMapper.map_head_object_params does, how to call it, or where it fits in real code, the selected examples below should help. You can also browse the other usage examples of its containing class, awscli.customizations.s3.utils.RequestParamsMapper.
Four code examples of RequestParamsMapper.map_head_object_params are listed below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
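Judging from the examples on this page, the method takes an (initially empty) request-parameter dict and a dict of CLI-style parameters, and fills in the HeadObject request parameters in place. The snippet below is a minimal sketch only; the sse_c / sse_c_key values and the resulting keys are inferred from Example 1 rather than taken from the library's documentation:

from awscli.customizations.s3.utils import RequestParamsMapper

params = {}
cli_params = {'sse_c': 'AES256', 'sse_c_key': 'my-sse-c-key'}
# Map the CLI-style SSE-C settings onto the HeadObject request parameters.
RequestParamsMapper.map_head_object_params(params, cli_params)
# params is now expected to be:
# {'SSECustomerAlgorithm': 'AES256', 'SSECustomerKey': 'my-sse-c-key'}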
Example 1: test_head_object
# Required import: from awscli.customizations.s3.utils import RequestParamsMapper [as alias]
# Or: from awscli.customizations.s3.utils.RequestParamsMapper import map_head_object_params [as alias]
def test_head_object(self):
    params = {}
    RequestParamsMapper.map_head_object_params(params, self.cli_params)
    self.assertEqual(
        params,
        {'SSECustomerAlgorithm': 'AES256',
         'SSECustomerKey': 'my-sse-c-key'}
    )
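The fixture that populates self.cli_params is not shown on this page. Judging from the assertion, it is presumably set up roughly like this (a guess based on the expected output, not the actual test setUp):

self.cli_params = {'sse_c': 'AES256', 'sse_c_key': 'my-sse-c-key'}

In other words, the test checks that the CLI-style sse_c / sse_c_key values are translated into the SSECustomerAlgorithm / SSECustomerKey parameters that the HeadObject API expects.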
Example 2: set_size_from_s3
# Required import: from awscli.customizations.s3.utils import RequestParamsMapper [as alias]
# Or: from awscli.customizations.s3.utils.RequestParamsMapper import map_head_object_params [as alias]
def set_size_from_s3(self):
    """
    This runs a ``HeadObject`` on the s3 object and sets the size.
    """
    bucket, key = find_bucket_key(self.src)
    params = {'Bucket': bucket,
              'Key': key}
    RequestParamsMapper.map_head_object_params(params, self.parameters)
    response_data = self.client.head_object(**params)
    self.size = int(response_data['ContentLength'])
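For context, find_bucket_key comes from the same awscli.customizations.s3.utils module and splits an S3 path of the form 'bucket/key' into its two components. A rough, hypothetical usage (the path below is made up for illustration):

from awscli.customizations.s3.utils import find_bucket_key

bucket, key = find_bucket_key('mybucket/photos/2021/cat.jpg')
# bucket == 'mybucket'
# key == 'photos/2021/cat.jpg'

Mapping the SSE-C parameters into the HeadObject call matters here because S3 will not return metadata for an SSE-C encrypted object unless the matching customer key is supplied with the request.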
Example 3: test_head_object
# Required import: from awscli.customizations.s3.utils import RequestParamsMapper [as alias]
# Or: from awscli.customizations.s3.utils.RequestParamsMapper import map_head_object_params [as alias]
def test_head_object(self):
    params = {}
    RequestParamsMapper.map_head_object_params(params, self.cli_params)
    self.assertEqual(params, {"SSECustomerAlgorithm": "AES256", "SSECustomerKey": "my-sse-c-key"})
Example 4: run
# Required import: from awscli.customizations.s3.utils import RequestParamsMapper [as alias]
# Or: from awscli.customizations.s3.utils.RequestParamsMapper import map_head_object_params [as alias]
def run(self):
    """
    This function wires together all of the generators and completes
    the command. First a dictionary is created that is indexed first by
    the command name. Then using the instruction, another dictionary
    can be indexed to obtain the objects corresponding to the
    particular instruction for that command. To begin the wiring,
    either a ``FileFormat`` or ``TaskInfo`` object, depending on the
    command, is put into a list. Then the function enters a while loop
    that pops off an instruction. It then determines the object needed
    and calls the call function of the object using the list as the input.
    Depending on the number of objects in the input list and the number
    of components in the list corresponding to the instruction, the call
    method of the component can be called two different ways. If the
    number of inputs is equal to the number of components, a 1:1 mapping of
    inputs to components is used when calling the call function. If
    there are more inputs than components, then a 2:1 mapping of inputs to
    components is used where the component call method takes two inputs
    instead of one. Whatever files are yielded from the call function
    are appended to a list and used as the input for the next repetition
    of the while loop until there are no more instructions.
    """
    src = self.parameters["src"]
    dest = self.parameters["dest"]
    paths_type = self.parameters["paths_type"]
    files = FileFormat().format(src, dest, self.parameters)
    rev_files = FileFormat().format(dest, src, self.parameters)
    cmd_translation = {}
    cmd_translation["locals3"] = {"cp": "upload", "sync": "upload", "mv": "move"}
    cmd_translation["s3s3"] = {"cp": "copy", "sync": "copy", "mv": "move"}
    cmd_translation["s3local"] = {"cp": "download", "sync": "download", "mv": "move"}
    cmd_translation["s3"] = {"rm": "delete", "mb": "make_bucket", "rb": "remove_bucket"}
    result_queue = queue.Queue()
    operation_name = cmd_translation[paths_type][self.cmd]
    fgen_kwargs = {
        "client": self._source_client,
        "operation_name": operation_name,
        "follow_symlinks": self.parameters["follow_symlinks"],
        "page_size": self.parameters["page_size"],
        "result_queue": result_queue,
    }
    rgen_kwargs = {
        "client": self._client,
        "operation_name": "",
        "follow_symlinks": self.parameters["follow_symlinks"],
        "page_size": self.parameters["page_size"],
        "result_queue": result_queue,
    }
    fgen_request_parameters = {}
    fgen_head_object_params = {}
    fgen_request_parameters["HeadObject"] = fgen_head_object_params
    fgen_kwargs["request_parameters"] = fgen_request_parameters
    # SSE-C may be needed for HeadObject for copies/downloads/deletes
    # If the operation is s3 to s3, the FileGenerator should use the
    # copy source key and algorithm. Otherwise, use the regular
    # SSE-C key and algorithm. Note the reverse FileGenerator does
    # not need any of these because it is used only for sync operations
    # which only use ListObjects which does not require HeadObject.
    RequestParamsMapper.map_head_object_params(fgen_head_object_params, self.parameters)
    if paths_type == "s3s3":
        RequestParamsMapper.map_head_object_params(
            fgen_head_object_params,
            {
                "sse_c": self.parameters.get("sse_c_copy_source"),
                "sse_c_key": self.parameters.get("sse_c_copy_source_key"),
            },
        )
    file_generator = FileGenerator(**fgen_kwargs)
    rev_generator = FileGenerator(**rgen_kwargs)
    taskinfo = [
        TaskInfo(src=files["src"]["path"], src_type="s3", operation_name=operation_name, client=self._client)
    ]
    stream_dest_path, stream_compare_key = find_dest_path_comp_key(files)
    stream_file_info = [
        FileInfo(
            src=files["src"]["path"],
            dest=stream_dest_path,
            compare_key=stream_compare_key,
            src_type=files["src"]["type"],
            dest_type=files["dest"]["type"],
            operation_name=operation_name,
            client=self._client,
            is_stream=True,
        )
    ]
    file_info_builder = FileInfoBuilder(self._client, self._source_client, self.parameters)
    s3handler = S3Handler(
        self.session, self.parameters, runtime_config=self._runtime_config, result_queue=result_queue
    )
    s3_stream_handler = S3StreamHandler(self.session, self.parameters, result_queue=result_queue)
    sync_strategies = self.choose_sync_strategies()
    command_dict = {}
    if self.cmd == "sync":
        # ......... the rest of this method is omitted here .........
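The two map_head_object_params calls in this example mean that, for an S3-to-S3 copy, the copy-source SSE-C settings overwrite the ones mapped from the regular sse_c / sse_c_key parameters, since the HeadObject is issued against the source object. A condensed, illustrative-only sketch of that effect (the parameter values are invented; the resulting dict is inferred from Example 1):

from awscli.customizations.s3.utils import RequestParamsMapper

head_object_params = {}
cli_params = {
    'sse_c': 'AES256', 'sse_c_key': 'dest-object-key',
    'sse_c_copy_source': 'AES256', 'sse_c_copy_source_key': 'source-object-key',
}
RequestParamsMapper.map_head_object_params(head_object_params, cli_params)
# Re-map with the copy-source values, as the s3s3 branch above does.
RequestParamsMapper.map_head_object_params(
    head_object_params,
    {'sse_c': cli_params.get('sse_c_copy_source'),
     'sse_c_key': cli_params.get('sse_c_copy_source_key')})
# head_object_params is now expected to be:
# {'SSECustomerAlgorithm': 'AES256', 'SSECustomerKey': 'source-object-key'}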