This article collects typical usage examples of the Python method skopt.gp_minimize. If you are wondering what skopt.gp_minimize does or how to use it, the curated code examples below may help. You can also explore other usage examples from the skopt module where this method lives.
Shown below are 15 code examples of the skopt.gp_minimize method, sorted by popularity by default.
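Before the examples, here is a minimal, self-contained sketch of what a typical skopt.gp_minimize call looks like. The quadratic objective, the bounds, and the parameter values are illustrative assumptions and are not taken from any of the examples below.

from skopt import gp_minimize

def objective(x):
    # x is a list with one value per search dimension (toy objective assumed for this sketch)
    return (x[0] - 0.5) ** 2

res = gp_minimize(objective,        # function to minimize
                  [(-2.0, 2.0)],    # bounds for each search dimension
                  n_calls=15,       # total number of objective evaluations
                  random_state=0)   # fix the seed for reproducibility
print(res.x, res.fun)               # best parameters and best objective value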
Example 1: run_measure
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def run_measure(initial_point_generator, n_initial_points=10):
    start = time.time()
    # n_repeats must be set to a much higher value to obtain meaningful results.
    n_repeats = 1
    res = run(gp_minimize, initial_point_generator,
              n_initial_points=n_initial_points, n_repeats=n_repeats)
    duration = time.time() - start
    # print("%s %s: %.2f s" % (initial_point_generator,
    #                          str(init_point_gen_kwargs),
    #                          duration))
    return res
#############################################################################
# Objective
# =========
#
# The objective of this example is to find one of these minima in as
# few iterations as possible. One iteration is defined as one call
# to the :class:`benchmarks.hart6` function.
#
# We will evaluate each model several times, using a different seed for the
# random number generator each time, and then compare the average performance
# of these models. This makes the comparison more robust against models that
# get "lucky".
Example 2: test_mixed_categoricals
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_mixed_categoricals(initgen):
    space = Space([
        Categorical(name="x", categories=["1", "2", "3"]),
        Categorical(name="y", categories=[4, 5, 6]),
        Real(name="z", low=1.0, high=5.0)
    ])

    def objective(param_list):
        x = param_list[0]
        y = param_list[1]
        z = param_list[2]
        loss = int(x) + y * z
        return loss

    res = gp_minimize(objective, space, n_calls=12, random_state=1,
                      initial_point_generator=initgen)
    assert res["x"] in [['1', 4, 1.0], ['2', 4, 1.0]]
Example 3: test_evaluate_min_params
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_evaluate_min_params():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      noise=1e-8,
                      n_calls=8,
                      n_random_starts=3,
                      random_state=1)

    x_min, f_min = expected_minimum(res, random_state=1)
    x_min2, f_min2 = expected_minimum_random_sampling(res,
                                                      n_random_starts=1000,
                                                      random_state=1)
    plots.plot_gaussian_process(res)
    assert _evaluate_min_params(res, params='result') == res.x
    assert _evaluate_min_params(res, params=[1.]) == [1.]
    assert _evaluate_min_params(res, params='expected_minimum',
                                random_state=1) == x_min
    assert _evaluate_min_params(res, params='expected_minimum',
                                n_minimum_search=20,
                                random_state=1) == x_min
    assert _evaluate_min_params(res, params='expected_minimum_random',
                                n_minimum_search=1000,
                                random_state=1) == x_min2
Example 4: optimize
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def optimize(n_trials):
    """Run the gp_minimize function."""
    dimensions = [(4, 10),       # batch size
                  (1e-3, 1e-1),  # learning rate
                  (0.7, 1.5),    # std mult
                  (3, 6),        # filter_size
                  (2, 4),        # n_rings
                  (0.5, 1.5)]    # phase_preconditioner
    x0 = [5, 1e-2, 1., 5, 2, 1.]
    print(gp_minimize(wrapper_function, dimensions, x0=x0, n_calls=n_trials,
                      verbose=True, callback=dump))
Example 5: optimize_threshold
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def optimize_threshold(self, xtrain, ytrain, xval, yval):
    ytrain_pred = self.predict_labels(xtrain, raw_prob=True)
    yval_pred = self.predict_labels(xval, raw_prob=True)
    self.opt_threshold = 0.5
    ytrain_pred_labels = self.get_labels_from_prob(ytrain_pred, threshold=self.opt_threshold)
    yval_pred_labels = self.get_labels_from_prob(yval_pred, threshold=self.opt_threshold)
    train_f1_score = f1_score(ytrain_pred_labels, ytrain)
    val_f1_score = f1_score(yval_pred_labels, yval)
    print(f"train f1 score: {train_f1_score}, val f1 score: {val_f1_score}")

    f1_train_partial = partial(self.get_f1score_for_optimization,
                               y_true=ytrain.copy(), y_pred=ytrain_pred.copy(),
                               ismin=True)
    n_searches = 50
    dim_0 = Real(low=0.2, high=0.8, name='dim_0')
    dimensions = [dim_0]
    search_result = gp_minimize(func=f1_train_partial,
                                dimensions=dimensions,
                                acq_func='gp_hedge',  # probabilistically mix acquisition functions
                                n_calls=n_searches,
                                # n_jobs=n_cpu,
                                verbose=False)

    self.opt_threshold = search_result.x
    if isinstance(self.opt_threshold, list):
        self.opt_threshold = self.opt_threshold[0]
    self.optimum_threshold_filename = f"model_threshold_{'_'.join(str(v) for k, v in model_params.items())}.npy"
    np.save(os.path.join(f"{model_params['model_save_dir']}", self.optimum_threshold_filename),
            self.opt_threshold)

    train_f1_score = self.get_f1score_for_optimization(self.opt_threshold, y_true=ytrain, y_pred=ytrain_pred)
    val_f1_score = self.get_f1score_for_optimization(self.opt_threshold, y_true=yval, y_pred=yval_pred)
    print(f"optimized train f1 score: {train_f1_score}, optimized val f1 score: {val_f1_score}")
Example 6: check_minimize
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def check_minimize(func, y_opt, bounds, acq_optimizer, acq_func,
                   margin, n_calls, n_initial_points=10, init_gen="random"):
    r = gp_minimize(func, bounds, acq_optimizer=acq_optimizer,
                    acq_func=acq_func, n_initial_points=n_initial_points,
                    n_calls=n_calls, random_state=1,
                    initial_point_generator=init_gen,
                    noise=1e-10)
    assert r.fun < y_opt + margin
Example 7: test_n_jobs
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_n_jobs():
    r_single = gp_minimize(bench3, [(-2.0, 2.0)], acq_optimizer="lbfgs",
                           acq_func="EI", n_calls=2, n_initial_points=1,
                           random_state=1, noise=1e-10)
    r_double = gp_minimize(bench3, [(-2.0, 2.0)], acq_optimizer="lbfgs",
                           acq_func="EI", n_calls=2, n_initial_points=1,
                           random_state=1, noise=1e-10, n_jobs=2)
    assert_array_equal(r_single.x_iters, r_double.x_iters)
Example 8: test_gpr_default
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_gpr_default():
    """Smoke test that gp_minimize does not fail for default values."""
    gp_minimize(branin, ((-5.0, 10.0), (0.0, 15.0)), n_initial_points=1,
                n_calls=2)
Example 9: test_use_given_estimator
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_use_given_estimator():
    """Test that gp_minimize does not use default estimator if one is passed
    in explicitly."""
    domain = [(1.0, 2.0), (3.0, 4.0)]
    noise_correct = 1e+5
    noise_fake = 1e-10
    estimator = cook_estimator("GP", domain, noise=noise_correct)
    res = gp_minimize(branin, domain, n_calls=1, n_initial_points=1,
                      base_estimator=estimator, noise=noise_fake)
    assert res['models'][-1].noise == noise_correct
Example 10: test_use_given_estimator_with_max_model_size
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_use_given_estimator_with_max_model_size():
    """Test that gp_minimize uses the given estimator and keeps only
    model_queue_size models when a maximum model queue size is set."""
    domain = [(1.0, 2.0), (3.0, 4.0)]
    noise_correct = 1e+5
    noise_fake = 1e-10
    estimator = cook_estimator("GP", domain, noise=noise_correct)
    res = gp_minimize(branin, domain, n_calls=1, n_initial_points=1,
                      base_estimator=estimator, noise=noise_fake,
                      model_queue_size=1)
    assert len(res['models']) == 1
    assert res['models'][-1].noise == noise_correct
Example 11: test_mixed_categoricals2
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_mixed_categoricals2(initgen):
    space = Space([
        Categorical(name="x", categories=["1", "2", "3"]),
        Categorical(name="y", categories=[4, 5, 6])
    ])

    def objective(param_list):
        x = param_list[0]
        y = param_list[1]
        loss = int(x) + y
        return loss

    res = gp_minimize(objective, space, n_calls=12, random_state=1,
                      initial_point_generator=initgen)
    assert res["x"] == ['1', 4]
Example 12: test_dump_and_load
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_dump_and_load():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      acq_func="LCB",
                      n_calls=2,
                      n_random_starts=0,
                      random_state=1)

    # Test normal dumping and loading
    with tempfile.TemporaryFile() as f:
        dump(res, f)
        f.seek(0)
        res_loaded = load(f)
    check_optimization_results_equality(res, res_loaded)
    assert "func" in res_loaded.specs["args"]

    # Test dumping without objective function
    with tempfile.TemporaryFile() as f:
        dump(res, f, store_objective=False)
        f.seek(0)
        res_loaded = load(f)
    check_optimization_results_equality(res, res_loaded)
    assert not ("func" in res_loaded.specs["args"])

    # Delete the objective function and dump the modified object
    del res.specs["args"]["func"]
    with tempfile.TemporaryFile() as f:
        dump(res, f, store_objective=False)
        f.seek(0)
        res_loaded = load(f)
    check_optimization_results_equality(res, res_loaded)
    assert not ("func" in res_loaded.specs["args"])
Example 13: test_expected_minimum
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_expected_minimum():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      noise=1e-8,
                      n_calls=8,
                      n_random_starts=3,
                      random_state=1)

    x_min, f_min = expected_minimum(res, random_state=1)
    x_min2, f_min2 = expected_minimum(res, random_state=1)

    assert f_min <= res.fun  # true since noise ~= 0.0
    assert x_min == x_min2
    assert f_min == f_min2
Example 14: test_expected_minimum_random_sampling
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_expected_minimum_random_sampling():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      noise=1e-8,
                      n_calls=8,
                      n_random_starts=3,
                      random_state=1)

    x_min, f_min = expected_minimum_random_sampling(res, random_state=1)
    x_min2, f_min2 = expected_minimum_random_sampling(res, random_state=1)

    assert f_min <= res.fun  # true since noise ~= 0.0
    assert x_min == x_min2
    assert f_min == f_min2
Example 15: test_plots_work_without_cat
# Required module: import skopt [as alias]
# Or: from skopt import gp_minimize [as alias]
def test_plots_work_without_cat():
    """Basic smoke tests to make sure plotting doesn't crash."""
    SPACE = [
        Integer(1, 20, name='max_depth'),
        Integer(2, 100, name='min_samples_split'),
        Integer(5, 30, name='min_samples_leaf'),
        Integer(1, 30, name='max_features'),
    ]

    def objective(params):
        clf = DecisionTreeClassifier(random_state=3,
                                     **{dim.name: val
                                        for dim, val in zip(SPACE, params)
                                        if dim.name != 'dummy'})
        return -np.mean(cross_val_score(
            clf, *load_breast_cancer(return_X_y=True)))

    res = gp_minimize(objective, SPACE, n_calls=10, random_state=3)
    plots.plot_convergence(res)
    plots.plot_evaluations(res)
    plots.plot_objective(res)
    plots.plot_objective(res,
                         minimum='expected_minimum')
    plots.plot_objective(res,
                         sample_source='expected_minimum',
                         n_minimum_search=10)
    plots.plot_objective(res, sample_source='result')
    plots.plot_regret(res)
    # TODO: Compare plots to known good results?
    # Look into how matplotlib does this.