Python hyperopt.fmin Method Code Examples

This article collects typical usage examples of the hyperopt.fmin method in Python. If you have been wondering what hyperopt.fmin does in practice, how to call it, or what real uses of it look like, the curated examples below may help. You can also explore other usage examples from the hyperopt package itself.


The following presents 15 code examples of the hyperopt.fmin method, sorted by popularity by default.
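
Before the examples, here is a minimal, self-contained sketch of the basic fmin call (a toy quadratic objective written for this article, not taken from any of the projects below). fmin minimizes an objective function over a search space and returns the best parameter values it found:

from hyperopt import fmin, tpe, hp, Trials

trials = Trials()
best = fmin(
    fn=lambda x: (x - 3) ** 2,         # objective: any callable returning a loss to minimize
    space=hp.uniform('x', -10, 10),    # search space built from hp.* expressions
    algo=tpe.suggest,                  # TPE, hyperopt's usual suggestion algorithm
    max_evals=100,                     # evaluation budget
    trials=trials,                     # records every evaluation for later inspection
)
print(best)  # e.g. {'x': 3.002...}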

Example 1: test_compilefn_train_test_split

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def test_compilefn_train_test_split(tmpdir):
    db_name = "test"
    exp_name = "test2"
    fn = CompileFN(db_name, exp_name,
                   data_fn=data.data,
                   model_fn=model.build_model,
                   optim_metric="acc",
                   optim_metric_mode="max",
                   # eval
                   valid_split=.5,
                   stratified=False,
                   random_state=True,
                   save_dir="/tmp/")
    hyper_params = {
        "data": {},
        "shared": {"max_features": 100, "maxlen": 20},
        "model": {"filters": hp.choice("m_filters", (2, 5)),
                  "hidden_dims": 3,
                  },
        "fit": {"epochs": 1}
    }
    fn_test(fn, hyper_params, tmp_dir=str(tmpdir))
    trials = Trials()
    best = fmin(fn, hyper_params, trials=trials, algo=tpe.suggest, max_evals=2)
    assert isinstance(best, dict) 
Author: Avsecz, Project: kopt, Lines of code: 27, Source: test_hyopt.py

Example 2: optimize_hyperparam

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def optimize_hyperparam(self, X, y, test_size=.2, n_eval=100):
        X_trn, X_val, y_trn, y_val = train_test_split(X, y, test_size=test_size, shuffle=self.shuffle)

        def objective(hyperparams):
            model = XGBModel(n_estimators=self.n_est, **self.params, **hyperparams)
            model.fit(X=X_trn, y=y_trn,
                      eval_set=[(X_val, y_val)],
                      eval_metric=self.metric,
                      early_stopping_rounds=self.n_stop,
                      verbose=False)
            score = model.evals_result()['validation_0'][self.metric][model.best_iteration] * self.loss_sign

            return {'loss': score, 'status': STATUS_OK, 'model': model}

        trials = Trials()
        best = hyperopt.fmin(fn=objective, space=self.space, trials=trials,
                             algo=tpe.suggest, max_evals=n_eval, verbose=1,
                             rstate=self.random_state)

        hyperparams = space_eval(self.space, best)
        return hyperparams, trials 
Author: jeongyoonlee, Project: Kaggler, Lines of code: 23, Source: automl.py
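
A usage note (editorial, not part of the Kaggler source): because the objective above attaches the fitted model to its result dict, the best model can be recovered from the returned Trials object afterwards via hyperopt's best_trial property:

# Hypothetical follow-up to optimize_hyperparam above; clf stands in for an
# instance of the class this method belongs to:
hyperparams, trials = clf.optimize_hyperparam(X, y)
best_model = trials.best_trial['result']['model']   # the model stored by the objective
best_loss = trials.best_trial['result']['loss']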

Example 3: run

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def run(self):
        logger_all.debug("Instantiating MongoTrials object.")
        trials = MongoTrials(
            as_mongo_str(os.path.join(self.mongo_url, "jobs")), exp_key=self.exp_key
        )
        logger_all.debug("Calling fmin.")
        fmin(
            fn=self.objective_hyperopt,
            space=self.space,
            algo=self.algo,
            max_evals=self.max_evals,
            trials=trials,
            show_progressbar=self.show_progressbar,
        )
        # queue.put uses pickle so remove attribute containing thread.lock
        if hasattr(trials, "handle"):
            logger_all.debug("fmin returned. Deleting Trial handle for pickling.")
            del trials.handle
        logger_all.debug("Putting Trials in Queue.")
        self.queue.put(trials) 
Author: YosefLab, Project: scVI, Lines of code: 22, Source: autotune.py
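
For context (this follows hyperopt's documented MongoTrials workflow and is not code from the scVI project): an fmin call backed by MongoTrials only enqueues jobs in MongoDB, so separate worker processes must be launched, typically with hyperopt's hyperopt-mongo-worker command-line tool, to actually evaluate them:

# A sketch of the documented pattern; host, port, and database name are placeholders.
import math
from hyperopt import fmin, tpe, hp
from hyperopt.mongoexp import MongoTrials

trials = MongoTrials('mongo://localhost:27017/my_db/jobs', exp_key='exp1')
# fmin blocks here until workers, started separately with
#   hyperopt-mongo-worker --mongo=localhost:27017/my_db
# have completed max_evals evaluations. The objective must be importable by the
# workers, hence a library function rather than a lambda.
best = fmin(fn=math.sin, space=hp.uniform('x', -2, 2),
            algo=tpe.suggest, max_evals=10, trials=trials)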

Example 4: optimize

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def optimize(self):
        """Function that performs bayesian optimization"""
        trials = Trials()

        self._best_result = fmin(fn=self._get_loss, space=self.search_space, trials=trials,
                                 algo=tpe.suggest, max_evals=self.max_evals)
        
        columns = list(self.search_space.keys())   
        results = pd.DataFrame(columns=['iteration'] + columns + ['loss'])
        
        for idx, trial in enumerate(trials.trials):
            row = [idx]
            translated_eval = space_eval(self.search_space, {k: v[0] for k, v in trial['misc']['vals'].items()})
            for k in columns:
                row.append(translated_eval[k])
            row.append(trial['result']['loss'])
            results.loc[idx] = row

        path = self.config_local.path_result / self.model_name
        path.mkdir(parents=True, exist_ok=True)
        results.to_csv(str(path / "trials.csv"), index=False)
        
        self._logger.info(results)
        self._logger.info('Found golden setting:')
        self._logger.info(space_eval(self.search_space, self._best_result)) 
Author: Sujit-O, Project: pykg2vec, Lines of code: 27, Source: bayesian_optimizer.py

Example 5: _fmin

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def _fmin(self, trials):
        # new version of hyperopt has keyword argument `show_progressbar` that
        # breaks doctests, so here's a workaround
        fmin_kwargs = dict(
            fn=self._run,
            space=self._params.hyper_space,
            algo=hyperopt.tpe.suggest,
            max_evals=self._num_runs,
            trials=trials
        )
        try:
            hyperopt.fmin(
                **fmin_kwargs,
                show_progressbar=False
            )
        except TypeError:
            hyperopt.fmin(**fmin_kwargs) 
Author: NTMC-Community, Project: MatchZoo-py, Lines of code: 19, Source: tuner.py
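
An alternative to the try/except workaround above (an editorial sketch, not from the MatchZoo-py source) is to check up front whether the installed hyperopt version accepts the keyword, by inspecting fmin's signature:

import inspect

import hyperopt
from hyperopt import hp

fmin_kwargs = dict(
    fn=lambda x: x ** 2,
    space=hp.uniform('x', -1, 1),
    algo=hyperopt.tpe.suggest,
    max_evals=10,
)
# Only pass show_progressbar when this hyperopt version supports it:
if 'show_progressbar' in inspect.signature(hyperopt.fmin).parameters:
    fmin_kwargs['show_progressbar'] = False
hyperopt.fmin(**fmin_kwargs)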

Example 6: run

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def run(self):
        trials = Trials()
        best = fmin(self._obj, self.model_param_space._build_space(),
                tpe.suggest, self.max_evals, trials)
        best_params = space_eval(self.model_param_space._build_space(), best)
        best_params = self.model_param_space._convert_into_param(best_params)
        trial_loss = np.asarray(trials.losses(), dtype=float)
        best_ind = np.argmin(trial_loss)
        best_ap = trial_loss[best_ind]
        best_loss = trials.trial_attachments(trials.trials[best_ind])["loss"]
        best_acc = trials.trial_attachments(trials.trials[best_ind])["acc"]
        self.logger.info("-" * 50)
        self.logger.info("Best Average Precision: %.3f" % best_ap)
        self.logger.info("with Loss %.3f, Accuracy %.3f" % (best_loss, best_acc))
        self.logger.info("Best Param:")
        self.task._print_param_dict(best_params)
        self.logger.info("-" * 50) 
Author: billy-inn, Project: HRERE, Lines of code: 19, Source: task.py

Example 7: params_search

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def params_search(self):
        """
 ˜      function to search params
        """
        def objective(args):
            logger.info(f"Params : {args}")
            try:
                self.params = args
                self.exchange = BitMexBackTest()
                self.exchange.on_update(self.bin_size, self.strategy)
                profit_factor = self.exchange.win_profit/self.exchange.lose_loss
                logger.info(f"Profit Factor : {profit_factor}")
                ret = {
                    'status': STATUS_OK,
                    'loss': 1/profit_factor
                }
            except Exception as e:
                ret = {
                    'status': STATUS_FAIL
                }

            return ret

        trials = Trials()
        best_params = fmin(objective, self.options(), algo=tpe.suggest, trials=trials, max_evals=200)
        logger.info(f"Best params is {best_params}")
        logger.info(f"Best profit factor is {1/trials.best_trial['result']['loss']}") 
Author: noda-sin, Project: ebisu, Lines of code: 29, Source: bot.py

Example 8: run

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def run(self):
        trials = hyperopt.Trials()
        hyperopt.fmin(fn=lambda kwargs: {'loss': self.train(kwargs), 'status': hyperopt.STATUS_OK},
                      space=self.search_space,
                      algo=hyperopt.tpe.suggest,
                      max_evals=self.num_eval,
                      trials=trials,
                      verbose=10)

        # from the trials, get the values for every parameter
        # set the number of iter to None as they are not changed in Hyperopt
        # and zip the loss
        self.history.extend(zip([(
            {name: val[0] for name, val in params["misc"]["vals"].items()}, None)
            for params in trials.trials], trials.losses()))
        return self.history[int(np.argmin([val[1] for val in self.history]))] 
Author: araffin, Project: robotics-rl-srl, Lines of code: 18, Source: hyperparam_search.py

Example 9: test_compilefn_cross_val

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def test_compilefn_cross_val(tmpdir):
    db_name = "test"
    exp_name = "test2"
    fn = CompileFN(db_name, exp_name,
                   cv_n_folds=3,
                   stratified=False,
                   random_state=True,
                   data_fn=data.data,
                   model_fn=model.build_model,
                   optim_metric="loss",
                   optim_metric_mode="min",
                   save_dir="/tmp/")
    hyper_params = {
        "data": {},
        "shared": {"max_features": 100, "maxlen": 20},
        "model": {"filters": hp.choice("m_filters", (2, 5)),
                  "hidden_dims": 3,
                  },
        "fit": {"epochs": 1}
    }
    fn_test(fn, hyper_params, tmp_dir=str(tmpdir))
    trials = Trials()
    best = fmin(fn, hyper_params, trials=trials, algo=tpe.suggest, max_evals=2)
    assert isinstance(best, dict) 
Author: Avsecz, Project: kopt, Lines of code: 26, Source: test_hyopt.py

Example 10: run

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def run():

    param_space = {

            'w0': 1.0,
            'w1': hp.quniform('w1', 0.01, 2.0, 0.01),
            'max_evals': 800
            }
    
    
    trial_counter = 0
    trials = Trials()
    objective = lambda p: hyperopt_wrapper(p)
    best_params = fmin(objective, param_space, algo=tpe.suggest,\
        trials = trials, max_evals=param_space["max_evals"])
    
    print('best parameters: ')
    for k, v in best_params.items():
        print("%s: %s" % (k, v))
    
    trial_loss = np.asarray(trials.losses(), dtype=float)
    best_loss = min(trial_loss)
    print('best loss: ', best_loss)
Author: Cisco-Talos, Project: fnc-1, Lines of code: 25, Source: average.py

Example 11: run_hyperopt

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def run_hyperopt(self, max_eval, space):
        """
        Runs the hyperopt trainer
        :param max_eval: (int) max evaluations to carry out when running hyperopt
        :param space: (dict) dictionary of the hyperparameter space to explore
        :return: dictionary of best fit models by dna
        """
        # Reset run parameters
        self._max_eval = max_eval
        self._results = {}
        self._eval_idx = 0

        # Hyperopt is picky about the function handle
        def model_handle(params):
            return self.model(params)

        # Run the hyperparameter optimization
        _ = fmin(fn=model_handle, space=space, algo=tpe.suggest, max_evals=max_eval)
        return self._results 
Author: HugoCMU, Project: pirateAI, Lines of code: 21, Source: hyperopt_trainer.py

Example 12: recommendNextParameters

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def recommendNextParameters(self, hyperparameterSpace, results, currentTrials, lockedValues=None):
        if lockedValues is None:
            lockedValues = {}

        rstate = numpy.random.RandomState(seed=int(random.randint(1, 2 ** 32 - 1)))

        trials = self.convertResultsToTrials(hyperparameterSpace, results)

        space = Hyperparameter(hyperparameterSpace).createHyperoptSpace(lockedValues)

        params = {}
        def sample(parameters):
            nonlocal params
            params = parameters
            return {"loss": 0.5, 'status': 'ok'}

        hyperopt.fmin(fn=sample,
                      space=space,
                      algo=functools.partial(hyperopt.rand.suggest),
                      max_evals=1,
                      trials=trials,
                      rstate=rstate,
                      show_progressbar=False)
        return params 
Author: electricbrainio, Project: hypermax, Lines of code: 26, Source: random_search_optimizer.py
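
A note on the pattern above (it recurs in Example 13, which differs only in the suggestion algorithm): fmin is run for a single evaluation against a dummy objective that merely records the suggested parameters and returns a constant loss. This coaxes hyperopt's suggestion algorithm into emitting exactly one new candidate, conditioned on the previously observed trials, while the real evaluation happens outside hyperopt.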

Example 13: recommendNextParameters

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def recommendNextParameters(self, hyperparameterSpace, results, currentTrials, lockedValues=None):
        if lockedValues is None:
            lockedValues = {}

        rstate = numpy.random.RandomState(seed=int(random.randint(1, 2 ** 32 - 1)))

        trials = self.convertResultsToTrials(hyperparameterSpace, results)

        space = Hyperparameter(hyperparameterSpace).createHyperoptSpace(lockedValues)

        params = {}
        def sample(parameters):
            nonlocal params
            params = parameters
            return {"loss": 0.5, 'status': 'ok'}

        hyperopt.fmin(fn=sample,
                      space=space,
                      algo=functools.partial(hyperopt.tpe.suggest, n_EI_candidates=24, gamma=0.25),
                      max_evals=1,
                      trials=trials,
                      rstate=rstate,
                      show_progressbar=False)
        return params 
Author: electricbrainio, Project: hypermax, Lines of code: 26, Source: tpe_optimizer.py

Example 14: hyperopt_lightgbm_basic

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def hyperopt_lightgbm_basic(X, y, params, config, max_evals=50):
    X_train, X_test, y_train, y_test = data_split_by_time(X, y, test_size=0.2)
    X_train, X_val, y_train, y_val = data_split_by_time(X_train, y_train, test_size=0.3)  # split the training data further into train/validation
    train_data = lgb.Dataset(X_train, label=y_train)
    val_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "learning_rate": hp.loguniform("learning_rate", np.log(0.01), np.log(0.5)),
        #"forgetting_factor": hp.loguniform("forgetting_factor", 0.01, 0.1)
        #"max_depth": hp.choice("max_depth", [-1, 2, 3, 4, 5, 6]),
        "max_depth": hp.choice("max_depth", [1, 2, 3, 4, 5, 6]),
        "num_leaves": hp.choice("num_leaves", np.linspace(10, 200, 50, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.5, 1.0, 0.1),
        "bagging_fraction": hp.quniform("bagging_fraction", 0.5, 1.0, 0.1),
        "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 50, 10, dtype=int)),
        "reg_alpha": hp.uniform("reg_alpha", 0, 2),
        "reg_lambda": hp.uniform("reg_lambda", 0, 2),
        "min_child_weight": hp.uniform('min_child_weight', 0.5, 10),
    }

    def objective(hyperparams):
        model = lgb.train({**params, **hyperparams}, train_data, 100,
                        val_data, early_stopping_rounds=30, verbose_eval=0)
        pred = model.predict(X_test)
        score = roc_auc_score(y_test, pred)
        return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=max_evals, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams 
Author: DominickZhang, Project: KDDCup2019_admin, Lines of code: 37, Source: automl.py

Example 15: hyperopt_lightgbm

# Required import: import hyperopt [as alias]
# Or: from hyperopt import fmin [as alias]
def hyperopt_lightgbm(X_train: pd.DataFrame, y_train: pd.Series, params: Dict, config: Config, max_evals=10):
    X_train, X_test, y_train, y_test = data_split_by_time(X_train, y_train, test_size=0.2)
    X_train, X_val, y_train, y_val = data_split_by_time(X_train, y_train, test_size=0.3)
    train_data = lgb.Dataset(X_train, label=y_train)
    valid_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "learning_rate": hp.loguniform("learning_rate", np.log(0.01), np.log(0.5)),
        #"max_depth": hp.choice("max_depth", [-1, 2, 3, 4, 5, 6]),
        "max_depth": hp.choice("max_depth", [1, 2, 3, 4, 5, 6]),
        "num_leaves": hp.choice("num_leaves", np.linspace(10, 200, 50, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.5, 1.0, 0.1),
        "bagging_fraction": hp.quniform("bagging_fraction", 0.5, 1.0, 0.1),
        "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 50, 10, dtype=int)),
        "reg_alpha": hp.uniform("reg_alpha", 0, 2),
        "reg_lambda": hp.uniform("reg_lambda", 0, 2),
        "min_child_weight": hp.uniform('min_child_weight', 0.5, 10),
    }

    def objective(hyperparams):
        if config.time_left() < 50:
            return {'status': STATUS_FAIL}
        else:
            model = lgb.train({**params, **hyperparams}, train_data, 100,
                          valid_data, early_stopping_rounds=10, verbose_eval=0)
            pred = model.predict(X_test)
            score = roc_auc_score(y_test, pred)

            #score = model.best_score["valid_0"][params["metric"]]

            # in classification, less is better
            return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=max_evals, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams 
Author: DominickZhang, Project: KDDCup2019_admin, Lines of code: 43, Source: automl.py


Note: The hyperopt.fmin method examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and any distribution or use should follow the license of the corresponding project. Do not reproduce without permission.