本文整理汇总了Python中ray.tune.function方法的典型用法代码示例。如果您正苦于以下问题:Python tune.function方法的具体用法?Python tune.function怎么用?Python tune.function使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类ray.tune
的用法示例。
在下文中一共展示了tune.function方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _rsync_func
# 需要导入模块: from ray import tune [as 别名]
# 或者: from ray.tune import function [as 别名]
def _rsync_func(local_dir, remote_uri):
    """rsync data from worker to a remote location (by default the driver).

    Args:
        local_dir: local directory whose contents are synced.
        remote_uri: string of the form "host:ssh_key:remote_dir"; the remote
            directory itself may contain ':' characters, so only the first two
            fields are split off.

    Raises:
        subprocess.CalledProcessError: if either the remote mkdir or the
            rsync command exits non-zero.
    """
    # SOMEDAY: This function blocks until syncing completes, which is unfortunate.
    # If we instead specified a shell command, ray.tune._LogSyncer would run it asynchronously.
    # But we need to do a two-stage command, creating the directories first, because rsync will
    # balk if destination directory does not exist; so no easy way to do that.
    remote_host, ssh_key, *remainder = remote_uri.split(":")
    remote_dir = ":".join(remainder)  # remote directory may contain :
    remote_dir = shlex.quote(remote_dir)  # make safe for SSH/rsync call
    ssh_command = ["ssh", "-o", "StrictHostKeyChecking=no", "-i", ssh_key]
    # Create the destination directory first; rsync fails if it is missing.
    ssh_mkdir = ssh_command + [remote_host, "mkdir", "-p", remote_dir]
    subprocess.run(ssh_mkdir, check=True)
    rsync = [
        "rsync",
        "-rlptv",
        "-e",
        " ".join(ssh_command),
        f"{local_dir}/",
        f"{remote_host}:{remote_dir}",
    ]
    # FIX: the rsync exit status was previously ignored (no check=True), so a
    # failed sync passed silently. Fail loudly, consistent with the mkdir above.
    subprocess.run(rsync, check=True)
示例2: parse_search_config
# 需要导入模块: from ray import tune [as 别名]
# 或者: from ray.tune import function [as 别名]
def parse_search_config(self, search_config: Dict) -> Dict:
    """Replace sampling-strategy specs in *search_config* with ray.tune samplers.

    Entries that are not dicts are passed through unchanged; dict entries are
    converted in place according to their 'sampling strategy' key.

    Raises:
        KeyError: if a dict entry names an unknown sampling strategy.
    """
    # Strategies that draw from a (low, high) pair taken from spec['bounds'].
    bounded_strategies = {
        'loguniform': RandomSearch.random_loguniform,
        'integer': RandomSearch.random_integer,
        'uniform': RandomSearch.random_uniform,
    }
    for name, spec in search_config.items():
        if not isinstance(spec, dict):
            # Already a concrete value / sampler; keep as-is.
            search_config[name] = spec
            continue
        strategy = spec['sampling strategy']
        if strategy in bounded_strategies:
            low, high = spec['bounds'][0], spec['bounds'][1]
            search_config[name] = function(bounded_strategies[strategy](low, high))
        elif strategy == 'choice':
            search_config[name] = function(RandomSearch.random_choice(spec['choices']))
        else:
            raise KeyError(f"sampling strategy {spec['sampling strategy']} does not exist")
    return search_config
示例3: tune_example
# 需要导入模块: from ray import tune [as 别名]
# 或者: from ray.tune import function [as 别名]
def tune_example(num_replicas=1, use_gpu=False):
    """Run a small 2-sample, 2-iteration tune sweep over TFTrainable.

    Args:
        num_replicas: number of training replicas per trial.
        use_gpu: whether each replica should use a GPU.

    Returns:
        The config of the trial with the lowest validation_loss.
    """
    trial_config = {
        "model_creator": tune.function(simple_model),
        "data_creator": tune.function(simple_dataset),
        "num_replicas": num_replicas,
        "use_gpu": use_gpu,
        "trainer_config": create_config(batch_size=128),
    }
    analysis = tune.run(
        TFTrainable,
        num_samples=2,
        config=trial_config,
        stop={"training_iteration": 2},
        verbose=1,
    )
    return analysis.get_best_config(metric="validation_loss", mode="min")
示例4: test_tune_train
# 需要导入模块: from ray import tune [as 别名]
# 或者: from ray.tune import function [as 别名]
def test_tune_train(ray_start_2_cpus, num_replicas):  # noqa: F811
    """Smoke-test that TFTrainable runs under tune for two iterations."""
    trainable_config = {
        "model_creator": tune.function(simple_model),
        "data_creator": tune.function(simple_dataset),
        "num_replicas": num_replicas,
        "use_gpu": False,
        "trainer_config": SIMPLE_CONFIG,
        "num_cpus_per_worker": 1,
    }
    stop_criteria = {"training_iteration": 2}
    tune.run(
        TFTrainable,
        num_samples=2,
        config=trainable_config,
        stop=stop_criteria,
        verbose=1,
    )
示例5: generate_experiment
# 需要导入模块: from ray import tune [as 别名]
# 或者: from ray.tune import function [as 别名]
def generate_experiment(trainable_class, variant_spec, command_line_args):
    """Build an (experiment_id, tune experiment dict) pair.

    Combines the variant spec with command-line overrides: log directory,
    per-trial resources, optional video-save frequency, checkpointing, and an
    optional trial-name template.
    """
    algorithm_params = variant_spec.get('algorithm_params')
    log_root = command_line_args.log_dir or algorithm_params.get('log_dir')
    local_dir = os.path.join(log_root, algorithm_params.get('domain'))
    experiment_id = algorithm_params.get('exp_name')

    resources_per_trial = _normalize_trial_resources(
        command_line_args.resources_per_trial,
        command_line_args.trial_cpus,
        command_line_args.trial_gpus,
        command_line_args.trial_extra_cpus,
        command_line_args.trial_extra_gpus)

    variant_spec = add_command_line_args_to_variant_spec(
        variant_spec, command_line_args)
    if command_line_args.video_save_frequency is not None:
        assert 'algorithm_params' in variant_spec
        variant_spec['algorithm_params']['kwargs']['video_save_frequency'] = (
            command_line_args.video_save_frequency)

    def _make_trial_name_creator(template=None):
        # No template -> let tune use its default trial names.
        if not template:
            return None
        return tune.function(lambda trial: template.format(trial=trial))

    run_params = variant_spec['run_params']
    experiment = {
        'run': trainable_class,
        'resources_per_trial': resources_per_trial,
        'config': variant_spec,
        'local_dir': local_dir,
        'num_samples': command_line_args.num_samples,
        'upload_dir': command_line_args.upload_dir,
        'checkpoint_freq': run_params['checkpoint_frequency'],
        'checkpoint_at_end': run_params['checkpoint_at_end'],
        'trial_name_creator': _make_trial_name_creator(
            command_line_args.trial_name_template),
        'restore': command_line_args.restore,  # Defaults to None
    }
    return experiment_id, experiment