This page collects typical usage examples of the Python function hyperopt.hp.qloguniform. If you are unsure what hp.qloguniform does, how to call it, or what real-world code using it looks like, the curated examples below may help. You can also browse further usage examples for the module it belongs to, hyperopt.hp.
Ten code examples of hp.qloguniform are shown below, ordered by popularity.
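Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below, assuming only hyperopt and numpy are installed) of how hp.qloguniform(label, low, high, q) is typically used with fmin: the bounds are given on the log scale, and each sample is round(exp(uniform(low, high)) / q) * q.

import numpy as np
from hyperopt import fmin, hp, tpe

# Search space: layer width drawn log-uniformly from [16, 1024], quantized to multiples of 16.
space = {'n_units': hp.qloguniform('n_units', np.log(16), np.log(1024), q=16)}

def objective(params):
    # Dummy objective for illustration only: prefer smaller layers.
    return params['n_units'] / 1024.0

best = fmin(objective, space, algo=tpe.suggest, max_evals=20)
print(best)  # e.g. {'n_units': 48.0}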
Example 1: test_write_uniform
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def test_write_uniform(self):
    a = configuration_space.UniformFloatHyperparameter("a", 0, 1)
    expected = ('a', 'param_0 = hp.uniform("a", 0.0, 1.0)')
    value = self.pyll_writer.write_hyperparameter(a, None)
    self.assertEqual(expected, value)

    # The hyperparameter name has to be converted separately because
    # otherwise the parameter values are converted at object construction
    # time.
    a = configuration_space.UniformFloatHyperparameter("a", 1, 10, base=10)
    a.name = self.pyll_writer.convert_name(a)
    expected = ('LOG10_a', 'param_1 = hp.uniform("LOG10_a", 0.0, 1.0)')
    value = self.pyll_writer.write_hyperparameter(a, None)
    self.assertEqual(expected, value)

    nhid1 = configuration_space.UniformFloatHyperparameter(
        "nhid1", 16, 1024, q=16, base=np.e)
    expected = ('nhid1', 'param_2 = hp.qloguniform('
                '"nhid1", 2.0794540416, 6.93925394604, 16.0)')
    value = self.pyll_writer.write_hyperparameter(nhid1, None)
    self.assertEqual(expected, value)
Example 2: __init__
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def __init__(self):
    self.search_space = {
        'learning_rate': hp.loguniform('learning_rate', np.log(0.00001), np.log(0.1)),
        'L1_flag': hp.choice('L1_flag', [True, False]),
        'hidden_size': scope.int(hp.qloguniform('hidden_size', np.log(8), np.log(256), 1)),
        'batch_size': scope.int(hp.qloguniform('batch_size', np.log(8), np.log(4096), 1)),
        'margin': hp.uniform('margin', 0.0, 10.0),
        'optimizer': hp.choice('optimizer', ["adam", "sgd", 'rms']),
        'epochs': hp.choice('epochs', [500])  # always train for 500 epochs
    }
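As a standalone illustration of the space above (a hedged sketch using plain hyperopt, outside the original class), scope.int(...) casts the float produced by hp.qloguniform to an integer when the space is sampled:

import numpy as np
from hyperopt import hp
from hyperopt.pyll import scope
from hyperopt.pyll.stochastic import sample

space = {
    'hidden_size': scope.int(hp.qloguniform('hidden_size', np.log(8), np.log(256), 1)),
    'batch_size': scope.int(hp.qloguniform('batch_size', np.log(8), np.log(4096), 1)),
}
print(sample(space))  # e.g. {'batch_size': 512, 'hidden_size': 64}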
Example 3: test_read_qloguniform
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def test_read_qloguniform(self):
    # 0 float
    # 1   hyperopt_param
    # 2     Literal{nhid1}
    # 3     qloguniform
    # 4       Literal{2.77258872224}
    # 5       Literal{6.9314718056}
    # 6       q =
    # 7       Literal{16}
    qloguniform = hp.qloguniform('nhid1', np.log(16), np.log(1024), q=16). \
        inputs()[0].inputs()[1]
    ret = self.pyll_reader.read_qloguniform(qloguniform, 'nhid1')
    expected = configuration_space.UniformFloatHyperparameter(
        'nhid1', 16, 1024, q=16, base=np.e)
    self.assertEqual(expected, ret)
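The numbered comment block above mirrors the printed pyll expression graph. The following standalone sketch (an illustrative assumption, not part of the test class) shows the same .inputs() navigation used in the test to reach the raw qloguniform node:

import numpy as np
from hyperopt import hp

expr = hp.qloguniform('nhid1', np.log(16), np.log(1024), q=16)
print(expr)                           # prints the numbered apply tree shown in the comment
node = expr.inputs()[0].inputs()[1]   # float -> hyperopt_param -> qloguniform
print(node.name)                      # 'qloguniform'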
Example 4: test_write_loguniform_int
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def test_write_loguniform_int(self):
    c_int = configuration_space.UniformIntegerHyperparameter(
        "c_int", 1, 10, base=np.e)
    expected = ("c_int", 'param_0 = pyll.scope.int(hp.qloguniform('
                '"c_int", -0.69312718076, 2.35137525716, 1.0))')
    value = self.pyll_writer.write_hyperparameter(c_int, None)
    self.assertEqual(expected, value)
Example 5: test_write_qloguniform
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def test_write_qloguniform(self):
    d = configuration_space.UniformFloatHyperparameter("d", 0.1, 3, q=0.1,
                                                        base=np.e)
    expected = ("d", 'param_0 = hp.qloguniform("d", -2.99373427089, '
                '1.11514159062, 0.1)')
    value = self.pyll_writer.write_hyperparameter(d, None)
    self.assertEqual(expected, value)
Example 6: test_write_qloguniform_int
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def test_write_qloguniform_int(self):
    d_int_1 = configuration_space.UniformIntegerHyperparameter(
        "d_int", 1, 3, q=1.0, base=np.e)
    expected = ("d_int", 'param_0 = pyll.scope.int(hp.qloguniform('
                '"d_int", -0.69312718076, 1.2527629685, 1.0))')
    value = self.pyll_writer.write_hyperparameter(d_int_1, None)
    self.assertEqual(expected, value)

    d_int_2 = configuration_space.UniformIntegerHyperparameter(
        "d_int", 1, 3, q=2.0, base=np.e)
    expected = ("d_int", 'param_1 = pyll.scope.int(hp.qloguniform('
                '"d_int", -0.69312718076, 1.2527629685, 2.0))')
    value = self.pyll_writer.write_hyperparameter(d_int_2, None)
    self.assertEqual(expected, value)
Example 7: tpe_configspace
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def tpe_configspace(self):
    from hyperopt import hp
    import numpy as np

    space = {
        'l_rate': hp.loguniform('l_rate', np.log(1e-6), np.log(1e-1)),
        'burn_in': hp.uniform('burn_in', 0, .8),
        'n_units_1': hp.qloguniform('n_units_1', np.log(16), np.log(512), 1),
        'n_units_2': hp.qloguniform('n_units_2', np.log(16), np.log(512), 1),
        'mdecay': hp.uniform('mdecay', 0, 1)
    }
    return space
Example 8: tpe_configspace
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def tpe_configspace(self):
    import numpy as np
    from hyperopt import hp

    space = {
        'learning_rate': hp.loguniform('learning_rate', np.log(1e-7), np.log(1e-1)),
        'batch_size': hp.qloguniform('batch_size', np.log(8), np.log(256), 1),
        'n_units_1': hp.qloguniform('n_units_1', np.log(8), np.log(128), 1),
        'n_units_2': hp.qloguniform('n_units_2', np.log(8), np.log(128), 1),
        'discount': hp.uniform('discount', 0, 1),
        'likelihood_ratio_clipping': hp.uniform('likelihood_ratio_clipping', 0, 1),
        'entropy_regularization': hp.uniform('entropy_regularization', 0, 1)
    }
    return space
Example 9: get_hyperopt_dimensions
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def get_hyperopt_dimensions(api_config):
    """Helper routine to set up the hyperopt search space in the constructor.

    Takes api_config as an argument so this can be static.
    """
    # The order of iteration probably makes no difference, but to be safe and
    # consistent with space.py, iterate over the parameter names in sorted order.
    param_list = sorted(api_config.keys())

    space = {}
    round_to_values = {}
    for param_name in param_list:
        param_config = api_config[param_name]

        param_type = param_config["type"]
        param_space = param_config.get("space", None)
        param_range = param_config.get("range", None)
        param_values = param_config.get("values", None)

        # Some setup for the case that a whitelist of values is provided:
        values_only_type = param_type in ("cat", "ordinal")
        if (param_values is not None) and (not values_only_type):
            assert param_range is None
            param_values = np.unique(param_values)
            param_range = (param_values[0], param_values[-1])
            round_to_values[param_name] = interp1d(
                param_values, param_values, kind="nearest", fill_value="extrapolate"
            )

        if param_type == "int":
            low, high = param_range
            if param_space in ("log", "logit"):
                space[param_name] = hp.qloguniform(param_name, np.log(low), np.log(high), 1)
            else:
                space[param_name] = hp.quniform(param_name, low, high, 1)
        elif param_type == "bool":
            assert param_range is None
            assert param_values is None
            space[param_name] = hp.choice(param_name, (False, True))
        elif param_type in ("cat", "ordinal"):
            assert param_range is None
            space[param_name] = hp.choice(param_name, param_values)
        elif param_type == "real":
            low, high = param_range
            if param_space in ("log", "logit"):
                space[param_name] = hp.loguniform(param_name, np.log(low), np.log(high))
            else:
                space[param_name] = hp.uniform(param_name, low, high)
        else:
            assert False, "type %s not handled in API" % param_type

    return space, round_to_values
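A hypothetical usage sketch follows: the api_config layout copies the conventions read by the function above, the parameter names are invented for illustration, and the module-level imports of the original project (numpy as np, scipy.interpolate.interp1d, hyperopt.hp) are assumed to be in place.

# Hypothetical api_config for illustration; keys follow the conventions parsed above.
api_config = {
    "learning_rate": {"type": "real", "space": "log", "range": (1e-4, 1e-1)},
    "max_depth": {"type": "int", "space": "linear", "range": (1, 10)},
    "booster": {"type": "cat", "values": ["gbtree", "dart"]},
    "use_bias": {"type": "bool"},
}
space, round_to_values = get_hyperopt_dimensions(api_config)
# 'learning_rate' becomes hp.loguniform, 'max_depth' hp.quniform,
# 'booster' hp.choice over its values, and 'use_bias' hp.choice over (False, True).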
Example 10: visitSearchSpaceNumber
# Required import: from hyperopt import hp [as alias]
# Or: from hyperopt.hp import qloguniform [as alias]
def visitSearchSpaceNumber(self, space: SearchSpaceNumber, path: str, counter=None):
    label = self.mk_label(path, counter)

    if space.pgo is not None:
        return scope.pgo_sample(space.pgo, hp.quniform(label, 0, len(space.pgo) - 1, 1))

    dist = "uniform"
    if space.distribution:
        dist = space.distribution

    if space.maximum is None:
        raise SearchSpaceError(path, f"maximum not specified for a number with distribution {dist}")
    max = space.getInclusiveMax()

    # These distributions need only a maximum.
    if dist == "integer":
        if not space.discrete:
            raise SearchSpaceError(path, "integer distribution specified for a non discrete numeric type")
        return hp.randint(label, max)

    if space.minimum is None:
        raise SearchSpaceError(path, f"minimum not specified for a number with distribution {dist}")
    min = space.getInclusiveMin()

    if dist == "uniform":
        if space.discrete:
            return scope.int(hp.quniform(label, min, max, 1))
        else:
            return hp.uniform(label, min, max)
    elif dist == "loguniform":
        # For log distributions, hyperopt requires that we provide the log of the min/max.
        if min <= 0:
            raise SearchSpaceError(path, f"minimum of 0 specified with a {dist} distribution. This is not allowed; please set it (possibly using minimumForOptimizer) to be positive")
        if min > 0:
            min = math.log(min)
        if max > 0:
            max = math.log(max)
        if space.discrete:
            return scope.int(hp.qloguniform(label, min, max, 1))
        else:
            return hp.loguniform(label, min, max)
    else:
        raise SearchSpaceError(path, f"Unknown distribution type: {dist}")
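A brief standalone sketch (plain hyperopt, outside the visitor class) of why the visitor passes math.log(min) and math.log(max): hp.loguniform and hp.qloguniform sample exp(uniform(low, high)), so the bounds must already be in log space.

import math
from hyperopt import hp
from hyperopt.pyll.stochastic import sample

# Passing math.log(8) and math.log(256) keeps the draws inside [8, 256];
# with q=1 each draw is rounded to the nearest whole number.
space = hp.qloguniform('batch_size', math.log(8), math.log(256), 1)
print(sample(space))  # a float such as 32.0, always within [8, 256]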