

Python hyperopt.STATUS_OK Attribute Code Examples

This article collects typical usage examples of the hyperopt.STATUS_OK attribute in Python. If you are unsure what hyperopt.STATUS_OK does or how to use it in practice, the curated examples below may help. You can also explore further usage examples from the hyperopt package.


The following presents 15 code examples of the hyperopt.STATUS_OK attribute, sorted by popularity by default.
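Before the examples, here is a minimal, self-contained sketch of the pattern these snippets share: the objective function returns a dict containing a 'loss' value and 'status': STATUS_OK, and hyperopt.fmin minimizes that loss over a search space with the TPE algorithm. The search space and the quadratic objective below are illustrative placeholders only, not taken from any of the projects listed below.

from hyperopt import fmin, tpe, hp, Trials, STATUS_OK

# Illustrative search space: a single continuous hyperparameter named 'x'.
space = hp.uniform('x', -10, 10)

def objective(x):
    # Each trial reports its loss and a status flag; STATUS_OK marks the trial as successful.
    return {'loss': (x - 3) ** 2, 'status': STATUS_OK}

trials = Trials()
best = fmin(fn=objective, space=space, algo=tpe.suggest, max_evals=50, trials=trials)
print(best)  # e.g. {'x': 2.97...}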

Example 1: optimize_hyperparam

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def optimize_hyperparam(self, X, y, test_size=.2, n_eval=100):
        X_trn, X_val, y_trn, y_val = train_test_split(X, y, test_size=test_size, shuffle=self.shuffle)

        def objective(hyperparams):
            model = XGBModel(n_estimators=self.n_est, **self.params, **hyperparams)
            model.fit(X=X_trn, y=y_trn,
                      eval_set=[(X_val, y_val)],
                      eval_metric=self.metric,
                      early_stopping_rounds=self.n_stop,
                      verbose=False)
            score = model.evals_result()['validation_0'][self.metric][model.best_iteration] * self.loss_sign

            return {'loss': score, 'status': STATUS_OK, 'model': model}

        trials = Trials()
        best = hyperopt.fmin(fn=objective, space=self.space, trials=trials,
                             algo=tpe.suggest, max_evals=n_eval, verbose=1,
                             rstate=self.random_state)

        hyperparams = space_eval(self.space, best)
        return hyperparams, trials 
Author: jeongyoonlee | Project: Kaggler | Lines: 23 | Source: automl.py

Example 2: _obj

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def _obj(self, param_dict):
        self.trial_counter += 1
        param_dict = self.model_param_space._convert_int_param(param_dict)
        learner = Learner(self.learner_name, param_dict)
        suffix = "_[Id@%s]"%str(self.trial_counter)
        if self.task_mode == "single":
            self.task = Task(learner, self.feature, suffix, self.logger, self.verbose, self.plot_importance)
        elif self.task_mode == "stacking":
            self.task = StackingTask(learner, self.feature, suffix, self.logger, self.verbose, self.refit_once)
        self.task.go()
        ret = {
            "loss": self.task.rmse_cv_mean,
            "attachments": {
                "std": self.task.rmse_cv_std,
            },
            "status": STATUS_OK,
        }
        return ret 
Author: ChenglongChen | Project: kaggle-HomeDepot | Lines: 20 | Source: task.py

Example 3: objective

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def objective(x):
    config = deepcopy(x)
    for h in cs.get_hyperparameters():
        if type(h) == ConfigSpace.hyperparameters.OrdinalHyperparameter:
            
            config[h.name] = h.sequence[int(x[h.name])]

        elif type(h) == ConfigSpace.hyperparameters.UniformIntegerHyperparameter:

            config[h.name] = int(x[h.name])
    y, c = b.objective_function(config)

    return {
        'config': config,
        'loss': y,
        'cost': c,
        'status': STATUS_OK} 
Author: automl | Project: nas_benchmarks | Lines: 19 | Source: run_tpe.py

Example 4: function_to_minimize

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def function_to_minimize(hyperparams, gamma='auto', decision_function='ovr'):
    decision_function = hyperparams['decision_function']
    gamma = hyperparams['gamma']
    global current_eval 
    global max_evals
    print( "#################################")
    print( "       Evaluation {} of {}".format(current_eval, max_evals))
    print( "#################################")
    start_time = time.time()
    try:
        accuracy = train(epochs=HYPERPARAMS.epochs_during_hyperopt, decision_function=decision_function, gamma=gamma)
        training_time = int(round(time.time() - start_time))
        current_eval += 1
        train_history.append({'accuracy':accuracy, 'decision_function':decision_function, 'gamma':gamma, 'time':training_time})
    except Exception as e:
        print( "#################################")
        print( "Exception during training: {}".format(str(e)))
        print( "Saving train history in train_history.npy")
        np.save("train_history.npy", train_history)
        exit()
    return {'loss': -accuracy, 'time': training_time, 'status': STATUS_OK}

# launch the hyperparameter search
Author: amineHorseman | Project: facial-expression-recognition-svm | Lines: 25 | Source: optimize_parameters.py

Example 5: params_search

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def params_search(self):
        """
 ˜      function to search params
        """
        def objective(args):
            logger.info(f"Params : {args}")
            try:
                self.params = args
                self.exchange = BitMexBackTest()
                self.exchange.on_update(self.bin_size, self.strategy)
                profit_factor = self.exchange.win_profit/self.exchange.lose_loss
                logger.info(f"Profit Factor : {profit_factor}")
                ret = {
                    'status': STATUS_OK,
                    'loss': 1/profit_factor
                }
            except Exception as e:
                ret = {
                    'status': STATUS_FAIL
                }

            return ret

        trials = Trials()
        best_params = fmin(objective, self.options(), algo=tpe.suggest, trials=trials, max_evals=200)
        logger.info(f"Best params is {best_params}")
        logger.info(f"Best profit factor is {1/trials.best_trial['result']['loss']}") 
Author: noda-sin | Project: ebisu | Lines: 29 | Source: bot.py

Example 6: score

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def score(params):
    # Define the function to minimize for a given set of parameters
    # For hyperparameter search, this is the score obtained by training and evaluating the model with those parameters
    model = MLP(params)
    model.fit(tr_x, tr_y, va_x, va_y)
    va_pred = model.predict(va_x)
    score = log_loss(va_y, va_pred)
    print(f'params: {params}, logloss: {score:.4f}')

    # Record the parameters and score
    history.append((params, score))

    return {'loss': score, 'status': STATUS_OK}


# Run the parameter search with hyperopt
Author: ghmagazine | Project: kagglebook | Lines: 18 | Source: ch06-03-hopt_nn.py

Example 7: score

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def score(params):
    # Define the evaluation metric to minimize for a given set of parameters
    # Specifically, return the score obtained by training the model with those parameters and evaluating its predictions

    # Cast max_depth to an integer
    params['max_depth'] = int(params['max_depth'])

    # Assume a Model class has been defined elsewhere
    # The Model class trains with fit and outputs predicted probabilities with predict
    model = Model(params)
    model.fit(tr_x, tr_y, va_x, va_y)
    va_pred = model.predict(va_x)
    score = log_loss(va_y, va_pred)
    print(f'params: {params}, logloss: {score:.4f}')

    # Record the parameters and score
    history.append((params, score))

    return {'loss': score, 'status': STATUS_OK}


# Define the parameter space to search
Author: ghmagazine | Project: kagglebook | Lines: 24 | Source: ch06-01-hopt.py

Example 8: hyperopt_model

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def hyperopt_model(self, params):
        """
        A Hyperopt-friendly wrapper for build_model
        """
        # skip building this model if hyperparameter combination already attempted
        for i in self.hyperopt_trials.results:
            if 'memo' in i:
                if params == i['memo']:
                    return {'loss': i['loss'], 'status': STATUS_OK, 'memo': 'repeat'}
        if self.itercount > self.hp_maxit:
            return {'loss': 0.0, 'status': STATUS_FAIL, 'memo': 'max iters reached'}
        error_test, error_valid = self.build_model(params)
        self.itercount += 1
        if np.isnan(error_valid):
            return {'loss': 1e5, 'status': STATUS_FAIL, 'memo': 'nan'}
        else:
            return {'loss': error_valid, 'status': STATUS_OK, 'memo': params} 
Author: CCQC | Project: PES-Learn | Lines: 19 | Source: neural_network.py

Example 9: run

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def run(self):
        trials = hyperopt.Trials()
        hyperopt.fmin(fn=lambda kwargs: {'loss': self.train(kwargs), 'status': hyperopt.STATUS_OK},
                      space=self.search_space,
                      algo=hyperopt.tpe.suggest,
                      max_evals=self.num_eval,
                      trials=trials,
                      verbose=10)

        # from the trials, get the values for every parameter
        # set the number of iter to None as they are not changed in Hyperopt
        # and zip the loss
        self.history.extend(zip([(
            {name: val[0] for name, val in params["misc"]["vals"].items()}, None)
            for params in trials.trials], trials.losses()))
        return self.history[int(np.argmin([val[1] for val in self.history]))] 
Author: araffin | Project: robotics-rl-srl | Lines: 18 | Source: hyperparam_search.py

Example 10: create_model

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def create_model(x_train):
    network = scgen.VAEArith(x_dimension=x_train.X.shape[1],
                             z_dimension={{choice([10, 20, 50, 75, 100])}},
                             learning_rate={{choice([0.1, 0.01, 0.001, 0.0001])}},
                             alpha={{choice([0.1, 0.01, 0.001, 0.0001, 0.00001, 0.000001])}},
                             dropout_rate={{choice([0.2, 0.25, 0.5, 0.75, 0.8])}},
                             model_path=f"./")

    result = network.train(x_train,
                           n_epochs={{choice([100, 150, 200, 250])}},
                           batch_size={{choice([32, 64, 128, 256])}},
                           verbose=2,
                           shuffle=True,
                           save=False)
    best_loss = np.amin(result.history['loss'])
    print('Best Loss of model:', best_loss)
    return {'loss': best_loss, 'status': STATUS_OK, 'model': network.vae_model} 
Author: theislab | Project: scgen | Lines: 19 | Source: hyperoptim.py

Example 11: fit

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def fit(self, X_train, y_train):
        optimizer_instance = self.optimizer(estimator=self.estimator, **self.args_to_optimizer)
        trained_optimizer1 = optimizer_instance.fit(X_train, y_train)
        results = trained_optimizer1.summary()
        results = results[results['status'] == STATUS_OK]  # Consider only successful trials
        results = results.sort_values(by=['loss'], axis=0)
        k = min(self.k, results.shape[0])
        top_k_pipelines = results.iloc[0:k]
        pipeline_tuples=[]
        for pipeline_name in top_k_pipelines.index:
            pipeline_instance = trained_optimizer1.get_pipeline(pipeline_name)
            pipeline_tuple = (pipeline_name, pipeline_instance)
            pipeline_tuples.append(pipeline_tuple)
        voting = VotingClassifier(estimators=pipeline_tuples)
        args_to_optimizer = copy.copy(self.args_to_optimizer)
        try:
            del args_to_optimizer['max_evals']
        except KeyError:
            pass
        args_to_optimizer['max_evals'] = 1 #Currently, voting classifier has no useful hyperparameters to tune.
        optimizer_instance2 = self.optimizer(estimator=voting, **args_to_optimizer)
        trained_optimizer2 = optimizer_instance2.fit(X_train, y_train)
        self._best_estimator = trained_optimizer2.get_pipeline()
        return self 
Author: IBM | Project: lale | Lines: 26 | Source: topk_voting_classifier.py

Example 12: model

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def model(X_train, Y_train, X_test, Y_test):
    model = Sequential()
    model.add(Dense({{choice([15, 512, 1024])}}, input_dim=8, init='uniform', activation='softplus'))
    model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense({{choice([256, 512, 1024])}}))
    model.add(Activation({{choice(['relu', 'sigmoid','softplus'])}}))
    model.add(Dropout({{uniform(0, 1)}}))
    
    model.add(Dense(1, init='uniform', activation='sigmoid'))

    model.compile(loss='mse', metrics=['accuracy'],
                  optimizer={{choice(['rmsprop', 'adam', 'sgd'])}})

    model.fit(X_train, Y_train,
              batch_size={{choice([10, 50, 100])}},
              nb_epoch={{choice([1, 50])}},
              show_accuracy=True,
              verbose=2,
              validation_data=(X_test, Y_test))
    score, acc = model.evaluate(X_test, Y_test, verbose=0)
    print('Test accuracy:', acc)
    return {'loss': -acc, 'status': STATUS_OK, 'model': model} 
Author: 54chen | Project: deep | Lines: 24 | Source: testh.py

Example 13: model

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def model(X_train, X_test, Y_train, Y_test):
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation('relu'))
    model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense({{choice([400, 512, 600])}}))
    model.add(Activation('relu'))
    model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense(10))
    model.add(Activation('softmax'))

    rms = RMSprop()
    model.compile(loss='categorical_crossentropy', optimizer=rms, metrics=['accuracy'])

    nb_epoch = 10
    batch_size = 128

    model.fit(X_train, Y_train,
              batch_size=batch_size, nb_epoch=nb_epoch,
              verbose=2,
              validation_data=(X_test, Y_test))

    score, acc = model.evaluate(X_test, Y_test, verbose=0)

    return {'loss': -acc, 'status': STATUS_OK, 'model': model} 
Author: maxpumperla | Project: hyperas | Lines: 27 | Source: mnist_ensemble.py

Example 14: model

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def model(X_train, Y_train, X_test, Y_test):
    inputs = Input(shape=(784,))

    x = Dense({{choice([20, 30, 40])}}, activation='relu')(inputs)
    x = Dense(64, activation='relu')(x)
    predictions = Dense(10, activation='softmax')(x)
    model = Model(inputs=inputs, outputs=predictions)

    rms = RMSprop()
    model.compile(loss='categorical_crossentropy', optimizer=rms, metrics=['accuracy'])

    model.fit(X_train, Y_train,
              batch_size={{choice([64, 128])}},
              epochs=1,
              verbose=2,
              validation_data=(X_test, Y_test))
    score, acc = model.evaluate(X_test, Y_test, verbose=0)
    print('Test accuracy:', acc)
    return {'loss': -acc, 'status': STATUS_OK, 'model': model} 
Author: maxpumperla | Project: hyperas | Lines: 21 | Source: test_functional_api.py

Example 15: create_model

# Required import: import hyperopt [as alias]
# Or: from hyperopt import STATUS_OK [as alias]
def create_model(x_train, y_train, x_test, y_test):
    model = Sequential()
    model.add(Dense(44, input_shape=(784,)))
    model.add(Activation({{choice(['relu', 'sigmoid'])}}))
    model.add(Dense(44))
    model.add(Activation({{choice(['relu', 'sigmoid'])}}))
    model.add(Dense(10))

    model.compile(loss='mae', metrics=['mse'], optimizer="adam")

    es = EarlyStopping(monitor='val_loss', min_delta=1e-5, patience=10)
    rlr = ReduceLROnPlateau(factor=0.1, patience=10)
    _ = model.fit(x_train, y_train, epochs=1, verbose=0, callbacks=[es, rlr],
                  batch_size=24, validation_data=(x_test, y_test))

    mae, mse = model.evaluate(x_test, y_test, verbose=0)
    print('MAE:', mae)
    return {'loss': mae, 'status': STATUS_OK, 'model': model} 
Author: maxpumperla | Project: hyperas | Lines: 20 | Source: test_lr_plateau.py


Note: The hyperopt.STATUS_OK attribute examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please consult the license of the corresponding project before distributing or using the code; do not reproduce without permission.