

Python bayes_opt.BayesianOptimization Code Examples

This article collects typical usage examples of the Python class bayes_opt.BayesianOptimization. If you are unsure how bayes_opt.BayesianOptimization is used in practice, or are looking for concrete examples from real code, the curated snippets below should help. You can also explore further usage examples from the bayes_opt package.


The sections below present 15 code examples of bayes_opt.BayesianOptimization, sorted by popularity by default.
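Before diving into the examples, here is a minimal, self-contained usage sketch of the bayes_opt API; the black-box function and bounds are illustrative placeholders, not taken from any example on this page:

from bayes_opt import BayesianOptimization

def black_box(x, y):
    # Any function returning a scalar to be MAXIMIZED; here a simple paraboloid.
    return -x ** 2 - (y - 1) ** 2 + 1

optimizer = BayesianOptimization(
    f=black_box,
    pbounds={"x": (-2, 2), "y": (-3, 3)},  # box bounds for each named parameter
    random_state=1,
)
optimizer.maximize(init_points=2, n_iter=10)  # 2 random probes, then 10 Bayesian steps
print(optimizer.max)  # best observed target and the parameters that produced it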

Example 1: bo

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def bo(self) -> BayesianOptimization:
        if self._bo is None:
            bounds = {f"x{i}": (0.0, 1.0) for i in range(self.dimension)}
            self._bo = BayesianOptimization(self._fake_function, bounds, random_state=self._rng)
            if self.gp_parameters is not None:
                self._bo.set_gp_params(**self.gp_parameters)
            # init
            init = self.initialization
            if self.middle_point:
                self._bo.probe([0.5] * self.dimension, lazy=True)
            elif init is None:
                self._bo._queue.add(self._bo._space.random_sample())
            if init is not None:
                init_budget = int(np.sqrt(self.budget) if self.init_budget is None else self.init_budget)
                init_budget -= self.middle_point
                if init_budget > 0:
                    sampler = {"Hammersley": sequences.HammersleySampler, "LHS": sequences.LHSSampler, "random": sequences.RandomSampler}[
                        init
                    ](self.dimension, budget=init_budget, scrambling=(init == "Hammersley"), random_state=self._rng)
                    for point in sampler:
                        self._bo.probe(point, lazy=True)
        return self._bo 
Developer: facebookresearch, Project: nevergrad, Lines: 24, Source: optimizerlib.py

Example 2: optimize_svc

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def optimize_svc(data, targets):
    """Apply Bayesian Optimization to SVC parameters."""
    def svc_crossval(expC, expGamma):
        """Wrapper of SVC cross validation.

        Notice how we transform between regular and log scale. While this
        is not technically necessary, it greatly improves the performance
        of the optimizer.
        """
        C = 10 ** expC
        gamma = 10 ** expGamma
        return svc_cv(C=C, gamma=gamma, data=data, targets=targets)

    optimizer = BayesianOptimization(
        f=svc_crossval,
        pbounds={"expC": (-3, 2), "expGamma": (-4, -1)},
        random_state=1234,
        verbose=2
    )
    optimizer.maximize(n_iter=10)

    print("Final result:", optimizer.max) 
Developer: fmfn, Project: BayesianOptimization, Lines: 24, Source: sklearn_example.py
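The svc_cv helper called above is defined elsewhere in the original sklearn_example.py and is not shown here. A hedged sketch of a compatible implementation using scikit-learn cross-validation follows; the scoring metric, fold count, and random_state are assumptions, not taken from the original project:

from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

def svc_cv(C, gamma, data, targets):
    """Hypothetical cross-validation wrapper: mean ROC AUC of an RBF-kernel SVC."""
    estimator = SVC(C=C, gamma=gamma, random_state=2)
    return cross_val_score(estimator, data, targets, scoring="roc_auc", cv=4).mean()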

Example 3: optimize_rfc

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def optimize_rfc(data, targets):
    """Apply Bayesian Optimization to Random Forest parameters."""
    def rfc_crossval(n_estimators, min_samples_split, max_features):
        """Wrapper of RandomForest cross validation.

        Notice how we ensure n_estimators and min_samples_split are casted
        to integer before we pass them along. Moreover, to avoid max_features
        taking values outside the (0, 1) range, we also ensure it is capped
        accordingly.
        """
        return rfc_cv(
            n_estimators=int(n_estimators),
            min_samples_split=int(min_samples_split),
            max_features=max(min(max_features, 0.999), 1e-3),
            data=data,
            targets=targets,
        )

    optimizer = BayesianOptimization(
        f=rfc_crossval,
        pbounds={
            "n_estimators": (10, 250),
            "min_samples_split": (2, 25),
            "max_features": (0.1, 0.999),
        },
        random_state=1234,
        verbose=2
    )
    optimizer.maximize(n_iter=10)

    print("Final result:", optimizer.max) 
Developer: fmfn, Project: BayesianOptimization, Lines: 33, Source: sklearn_example.py
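Likewise, rfc_cv is defined elsewhere in the original sklearn_example.py. A hedged sketch of a compatible implementation; the scoring metric, fold count, and random_state are assumptions:

from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score

def rfc_cv(n_estimators, min_samples_split, max_features, data, targets):
    """Hypothetical cross-validation wrapper: mean ROC AUC of a random forest."""
    estimator = RandomForestClassifier(
        n_estimators=n_estimators,
        min_samples_split=min_samples_split,
        max_features=max_features,
        random_state=2,
    )
    return cross_val_score(estimator, data, targets, scoring="roc_auc", cv=4).mean()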

Example 4: test_logs

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def test_logs():
    import pytest
    def f(x, y):
        return -x ** 2 - (y - 1) ** 2 + 1

    optimizer = BayesianOptimization(
        f=f,
        pbounds={"x": (-2, 2), "y": (-2, 2)}
    )
    assert len(optimizer.space) == 0

    load_logs(optimizer, "./tests/test_logs.json")
    assert len(optimizer.space) == 5

    load_logs(optimizer, ["./tests/test_logs.json"])
    assert len(optimizer.space) == 5

    other_optimizer = BayesianOptimization(
        f=lambda x: -x ** 2,
        pbounds={"x": (-2, 2)}
    )
    with pytest.raises(ValueError):
        load_logs(other_optimizer, ["./tests/test_logs.json"]) 
Developer: fmfn, Project: BayesianOptimization, Lines: 25, Source: test_util.py
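For context, a log file such as ./tests/test_logs.json is produced by attaching a JSONLogger to an optimizer and can later be reloaded with load_logs. The sketch below uses an illustrative path and objective; note that in the library version used by these examples the event name is spelled Events.OPTMIZATION_STEP (compare the commented-out lines in Example 12):

from bayes_opt import BayesianOptimization
from bayes_opt.logger import JSONLogger
from bayes_opt.event import Events
from bayes_opt.util import load_logs

def f(x, y):
    return -x ** 2 - (y - 1) ** 2 + 1

optimizer = BayesianOptimization(f=f, pbounds={"x": (-2, 2), "y": (-2, 2)}, random_state=1)
logger = JSONLogger(path="./logs.json")
optimizer.subscribe(Events.OPTMIZATION_STEP, logger)  # each evaluated point is appended to the file
optimizer.maximize(init_points=2, n_iter=3)

# A fresh optimizer can later be rehydrated from the same file:
new_optimizer = BayesianOptimization(f=f, pbounds={"x": (-2, 2), "y": (-2, 2)}, random_state=1)
load_logs(new_optimizer, logs=["./logs.json"])
print(len(new_optimizer.space))  # number of points recovered from the log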

Example 5: test_register

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def test_register():
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
    assert len(optimizer.space) == 0

    optimizer.register(params={"p1": 1, "p2": 2}, target=3)
    assert len(optimizer.res) == 1
    assert len(optimizer.space) == 1

    optimizer.space.register(params={"p1": 5, "p2": 4}, target=9)
    assert len(optimizer.res) == 2
    assert len(optimizer.space) == 2

    with pytest.raises(KeyError):
        optimizer.register(params={"p1": 1, "p2": 2}, target=3)
    with pytest.raises(KeyError):
        optimizer.register(params={"p1": 5, "p2": 4}, target=9) 
Developer: fmfn, Project: BayesianOptimization, Lines: 18, Source: test_bayesian_optimization.py

Example 6: test_suggest_with_one_observation

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def test_suggest_with_one_observation():
    util = UtilityFunction(kind="ucb", kappa=5, xi=0)
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

    optimizer.register(params={"p1": 1, "p2": 2}, target=3)

    for _ in range(5):
        sample = optimizer.space.params_to_array(optimizer.suggest(util))
        assert len(sample) == optimizer.space.dim
        assert all(sample >= optimizer.space.bounds[:, 0])
        assert all(sample <= optimizer.space.bounds[:, 1])

    # suggestion = optimizer.suggest(util)
    # for _ in range(5):
    #     new_suggestion = optimizer.suggest(util)
    #     assert suggestion == new_suggestion 
Developer: fmfn, Project: BayesianOptimization, Lines: 18, Source: test_bayesian_optimization.py
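Examples 5 and 6 exercise the explicit suggest/register ("ask and tell") interface. A hedged sketch of that loop outside the test suite, with an illustrative objective (passing f=None is fine here because maximize() is never called):

from bayes_opt import BayesianOptimization, UtilityFunction

def black_box(x, y):
    return -x ** 2 - (y - 1) ** 2 + 1

optimizer = BayesianOptimization(f=None, pbounds={"x": (-2, 2), "y": (-2, 2)}, random_state=1)
util = UtilityFunction(kind="ucb", kappa=2.576, xi=0.0)

for _ in range(10):
    params = optimizer.suggest(util)                   # ask: where to evaluate next
    target = black_box(**params)                       # evaluate outside the optimizer
    optimizer.register(params=params, target=target)   # tell: feed the observation back

print(optimizer.max)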

Example 7: test_set_bounds

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def test_set_bounds():
    pbounds = {
        'p1': (0, 1),
        'p3': (0, 3),
        'p2': (0, 2),
        'p4': (0, 4),
    }
    optimizer = BayesianOptimization(target_func, pbounds, random_state=1)

    # Ignore unknown keys
    optimizer.set_bounds({"other": (7, 8)})
    assert all(optimizer.space.bounds[:, 0] == np.array([0, 0, 0, 0]))
    assert all(optimizer.space.bounds[:, 1] == np.array([1, 2, 3, 4]))

    # Update bounds accordingly
    optimizer.set_bounds({"p2": (1, 8)})
    assert all(optimizer.space.bounds[:, 0] == np.array([0, 1, 0, 0]))
    assert all(optimizer.space.bounds[:, 1] == np.array([1, 8, 3, 4])) 
Developer: fmfn, Project: BayesianOptimization, Lines: 20, Source: test_bayesian_optimization.py

Example 8: initial_queries

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def initial_queries(bo):
    """
    script which explores the initial query points of a BayesianOptimization
    instance, reports errors to Slack
    Input: instance of a BayesianOptimization
    """
    # loop to try a second time in case of error
    errcount = 0
    for i in range(2):
        try:
            bo.maximize(init_points=3, n_iter=1, kappa=5) # would be just this line without errorhandling
        except KeyboardInterrupt:
            raise
        except:
            if errcount == 1:
                text = "Exception occurred twice in initialization, aborting!"
                print(text)
                sc.api_call("chat.postMessage",channel="CA26521FW",
                    text=text,username="Botty",
                    unfurl_links="true")
                raise
            errcount += 1

    return bo 
Developer: 921kiyo, Project: 3d-dl, Lines: 26, Source: optimization.py

Example 9: bayes_optimization_cnn

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def bayes_optimization_cnn(iterations):
    """
    script to set boundaries for search space for bayesian optimization
    of the cnn parameters
    """

    gp_params = {"alpha": 1e-5}
    bo = BayesianOptimization(evaluate_cnn,
        {'learning_rate': (1e-07, 1e-03),
        'batch_size': (1, 1),
        'dropout': (0, 0),
        'dense_dim': (1.51, 1.51), # 0.51, 4.49 = 512 - 2048
        'dense_layers': (0.5001, 0.5001)}
        )
    bo.explore({'learning_rate': [1.1787686347935867e-05],
            'dropout': [0],
            'dense_layers': [1.0],
            'dense_dim': [1.51],
            'batch_size': [4.49]
            })

    bo = initial_queries(bo)
    bo = exploration(iterations,bo)

    print(bo.res['max']) 
Developer: 921kiyo, Project: 3d-dl, Lines: 27, Source: optimization.py

Example 10: __init__

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def __init__(
        self,
        parametrization: IntOrParameter,
        budget: Optional[int] = None,
        num_workers: int = 1,
        *,
        initialization: Optional[str] = None,
        init_budget: Optional[int] = None,
        middle_point: bool = False,
        utility_kind: str = "ucb",  # bayes_opt default
        utility_kappa: float = 2.576,
        utility_xi: float = 0.0,
        gp_parameters: Optional[Dict[str, Any]] = None,
    ) -> None:
        super().__init__(parametrization, budget=budget, num_workers=num_workers)
        self._transform = transforms.ArctanBound(0, 1)
        self._bo: Optional[BayesianOptimization] = None
        self._fake_function = _FakeFunction()
        # configuration
        assert initialization is None or initialization in ["random", "Hammersley", "LHS"], f"Unknown init {initialization}"
        self.initialization = initialization
        self.init_budget = init_budget
        self.middle_point = middle_point
        self.utility_kind = utility_kind
        self.utility_kappa = utility_kappa
        self.utility_xi = utility_xi
        self.gp_parameters = {} if gp_parameters is None else gp_parameters
        if isinstance(parametrization, p.Parameter) and self.gp_parameters.get("alpha", 0) == 0:
            noisy = not parametrization.descriptors.deterministic
            cont = parametrization.descriptors.continuous
            if noisy or not cont:
                warnings.warn(
                    "Dis-continuous and noisy parametrization require gp_parameters['alpha'] > 0 "
                    "(for your parametrization, continuity={cont} and noisy={noisy}).\n"
                    "Find more information on BayesianOptimization's github.\n"
                    "You should then create a new instance of optimizerlib.ParametrizedBO with appropriate parametrization.",
                    InefficientSettingsWarning,
                ) 
Developer: facebookresearch, Project: nevergrad, Lines: 40, Source: optimizerlib.py
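The warning in Example 10 suggests re-instantiating optimizerlib.ParametrizedBO with a strictly positive GP alpha for noisy or discontinuous parametrizations. A hedged sketch of what that might look like, assuming nevergrad's configured-optimizer pattern; the alpha value, dimension, budget, and objective are illustrative:

import numpy as np
from nevergrad.optimization import optimizerlib

# Configure BO with a non-zero observation noise term for the Gaussian process.
noisy_bo = optimizerlib.ParametrizedBO(gp_parameters={"alpha": 1e-3})
optimizer = noisy_bo(parametrization=2, budget=50)
recommendation = optimizer.minimize(lambda x: float(np.sum((x - 0.5) ** 2)))
print(recommendation.value)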

Example 11: hyperopt

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def hyperopt(self,
                 fn,
                 X, 
                 X_val=None,
                 X_tst=None,
                 n_trials=5,
                 init_points=5,
                 n_iter=20,
                 batch_size=64,
                 params={'n_hidden_l1': (32, 256),
                        'n_hidden_l2': (32, 256),
                        'dropout_rate': (.1, .8)}, 
                 verbose=0,
                 seed=None,
                 **kwargs):
        self.bo = bo = BayesianOptimization(
                        f=functools.partial(fn, 
                                            X, 
                                            X_val, 
                                            X_tst, 
                                            n_trials=n_trials,
                                            batch_size=batch_size, 
                                            verbose=verbose-1,
                                            seed=seed,
                                            return_probs=False),
                        pbounds=params,
                        # random_state=1,
                        **kwargs
                    )
        
        bo.maximize(
                        init_points=init_points,
                        n_iter=n_iter,
                    )
        
        print(bo.max) 
Developer: sattree, Project: gap, Lines: 38, Source: mean_pool_model.py

Example 12: hyperopt

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def hyperopt(self,
                 fn,
                 X, 
                 X_val=None,
                 X_tst=None,
                 init_points=5,
                 n_iter=20,
                 batch_size=32,
                 params={'n_hidden_l1': (32, 256),
                        'n_hidden_l2': (32, 256),
                        'dropout_rate': (.1, .8)}, 
                 verbose=0,
                 **kwargs):
        self.bo = bo = BayesianOptimization(
                        f=functools.partial(fn, 
                                            X, 
                                            X_val, 
                                            X_tst, 
                                            batch_size=batch_size, 
                                            verbose=0,
                                            return_probs=False),
                        pbounds=params,
                        # random_state=1,
                        **kwargs
                    )
        
#         logger = JSONLogger(path="tmp/hyperopt/logs.json")
#         bo.subscribe(Events.OPTMIZATION_STEP, logger)
        
        bo.maximize(
                        init_points=init_points,
                        n_iter=n_iter,
                    )
        
        print(bo.max) 
Developer: sattree, Project: gap, Lines: 37, Source: ensemble.py

Example 13: test_probe_lazy

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def test_probe_lazy():
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

    optimizer.probe(params={"p1": 1, "p2": 2}, lazy=True)
    assert len(optimizer.space) == 0
    assert len(optimizer._queue) == 1

    optimizer.probe(params={"p1": 6, "p2": 2}, lazy=True)
    assert len(optimizer.space) == 0
    assert len(optimizer._queue) == 2

    optimizer.probe(params={"p1": 6, "p2": 2}, lazy=True)
    assert len(optimizer.space) == 0
    assert len(optimizer._queue) == 3 
Developer: fmfn, Project: BayesianOptimization, Lines: 16, Source: test_bayesian_optimization.py
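For completeness, points queued with probe(..., lazy=True) are only evaluated once maximize() runs. A hedged sketch with an illustrative target function and bounds:

from bayes_opt import BayesianOptimization

def target_func(p1, p2):
    return -(p1 - 1) ** 2 - (p2 - 2) ** 2

optimizer = BayesianOptimization(target_func, {"p1": (0, 10), "p2": (0, 10)}, random_state=1)
optimizer.probe(params={"p1": 1, "p2": 2}, lazy=True)
print(len(optimizer._queue), len(optimizer.space))   # 1 queued, 0 evaluated

optimizer.maximize(init_points=0, n_iter=0)          # drains the queue without extra BO steps
print(len(optimizer._queue), len(optimizer.space))   # 0 queued, 1 evaluated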

Example 14: test_suggest_at_random

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def test_suggest_at_random():
    util = UtilityFunction(kind="poi", kappa=5, xi=0)
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

    for _ in range(50):
        sample = optimizer.space.params_to_array(optimizer.suggest(util))
        assert len(sample) == optimizer.space.dim
        assert all(sample >= optimizer.space.bounds[:, 0])
        assert all(sample <= optimizer.space.bounds[:, 1]) 
Developer: fmfn, Project: BayesianOptimization, Lines: 11, Source: test_bayesian_optimization.py

Example 15: test_prime_queue_all_empty

# Required import: import bayes_opt [as alias]
# Or: from bayes_opt import BayesianOptimization [as alias]
def test_prime_queue_all_empty():
    optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)
    assert len(optimizer._queue) == 0
    assert len(optimizer.space) == 0

    optimizer._prime_queue(init_points=0)
    assert len(optimizer._queue) == 1
    assert len(optimizer.space) == 0 
Developer: fmfn, Project: BayesianOptimization, Lines: 10, Source: test_bayesian_optimization.py


Note: The bayes_opt.BayesianOptimization examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers, and the copyright of the source code remains with the original authors. Please consult the corresponding project's license before redistributing or using the code; do not republish without permission.