

Python sklearn.XGBClassifier Code Examples

This article collects typical usage examples of xgboost.sklearn.XGBClassifier in Python. If you are unsure how to use XGBClassifier in practice, the curated code examples below should help. You can also explore further usage examples from the containing module, xgboost.sklearn.


Six code examples of sklearn.XGBClassifier are shown below, sorted by popularity by default.
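Before diving into the examples, here is a minimal, self-contained sketch of the typical XGBClassifier workflow; the synthetic dataset and hyperparameter values are illustrative assumptions rather than anything drawn from the examples below:

# Minimal sketch: fit an XGBClassifier and score it on held-out data.
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from xgboost.sklearn import XGBClassifier

X, y = make_classification(n_samples=500, n_features=20, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

clf = XGBClassifier(n_estimators=100, learning_rate=0.1, max_depth=3)
clf.fit(X_train, y_train)
print("accuracy:", clf.score(X_test, y_test))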

Example 1: _build_model

# Required import: from xgboost import sklearn [as alias]
# Or: from xgboost.sklearn import XGBClassifier [as alias]
def _build_model(self, model_name, params=None):
        # Resolve the SVM kernel once so both branches can use it: fall back to a
        # chi-squared kernel unless 'linear' or 'rbf' was explicitly requested.
        kernel_function = (self.model_kernel
                           if self.model_kernel in ('linear', 'rbf')
                           else chi2_kernel)
        if params is None:
            if model_name == 'xgb':
                self.model = XGBClassifier(n_estimators=100, learning_rate=0.02)
            elif model_name == 'svm':
                self.model = SVC(C=1, kernel=kernel_function, gamma=1, probability=True)
            elif model_name == 'lr':
                self.model = LR(C=1, penalty='l1', tol=1e-6)
        else:
            if model_name == 'xgb':
                self.model = XGBClassifier(n_estimators=1000, learning_rate=0.02, **params)
            elif model_name == 'svm':
                # Fix: the original referenced kernel_function here before defining it.
                self.model = SVC(C=1, kernel=kernel_function, gamma=1, probability=True)
            elif model_name == 'lr':
                self.model = LR(C=1, penalty='l1', tol=1e-6)

        log.l.info('=======> built the model {} done'.format(model_name)) 
Author: qijiezhao, Project: Video-Highlight-Detection, Lines: 20, Source: classifier.py
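This snippet relies on names defined elsewhere in its host module. A plausible set of imports, with a minimal stand-in for the project-specific log.l logger (an assumption, since the real helper is not shown):

# Imports assumed by _build_model above; `log.l` is mocked with the stdlib.
import logging

from sklearn.linear_model import LogisticRegression as LR
from sklearn.metrics.pairwise import chi2_kernel
from sklearn.svm import SVC
from xgboost.sklearn import XGBClassifier

logging.basicConfig(level=logging.INFO)

class _Log:
    l = logging.getLogger('classifier')  # mirrors the project's log.l usage

log = _Log()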

Example 2: modelfit

# Required import: from xgboost import sklearn [as alias]
# Or: from xgboost.sklearn import XGBClassifier [as alias]
import pandas as pd
from xgboost.sklearn import XGBClassifier

def modelfit(params, x, y):
    # Fit the algorithm on the data
    print("fit")
    alg = XGBClassifier(**params)
    alg.fit(x, y, verbose=True)
    # Note: recent xgboost versions expose get_booster() instead of booster().
    feat_imp = pd.Series(alg.get_booster().get_fscore()).sort_values(ascending=False)
    print(feat_imp) 
Author: jeffheaton, Project: jh-kaggle-util, Lines: 9, Source: tune_xgboost.py
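A minimal usage sketch for modelfit; the parameter dict and synthetic data are illustrative assumptions, not values from the original project:

from sklearn.datasets import make_classification

X, y = make_classification(n_samples=200, n_features=10, random_state=0)
params = {'n_estimators': 50, 'max_depth': 3, 'learning_rate': 0.1}
modelfit(params, X, y)  # prints the fitted model's feature-importance scores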

Example 3: init_XGBoost_withSettings

# Required import: from xgboost import sklearn [as alias]
# Or: from xgboost.sklearn import XGBClassifier [as alias]
def init_XGBoost_withSettings(self):
        """
        Initializes an XGBoost classifier with preset multiclass settings.
        Returns the unfitted XGBClassifier instance.
        """

        ##########################     Initial Machine Learning Using XGBoost classification   ##########################
        ##########################     Optional
        model = XGBClassifier(
            max_depth=3,
            objective="multi:softmax",  # error evaluation for multiclass training
            num_class=5,
            n_gpus=0,  # passed through to the booster; ignored by CPU-only builds
            n_jobs=-1
            # Further hyperparameters that could be exposed here:
            # gamma, reg_alpha, subsample, colsample_bytree,
            # n_estimators, learning_rate, min_child_weight
        )
        print(
            "init_XGBoost_withSettings has been called; it initializes an XGBoost"
            " classifier with: max_depth=3, objective='multi:softmax', num_class=5,"
            " n_gpus=0, n_jobs=-1"
        )
        print("model coming out of init_XGBoost_withSettings() function is:", model)
        return model 
Author: JustinGOSSES, Project: predictatops, Lines: 32, Source: trainclasses.py
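A quick, self-contained sketch of exercising such a model on synthetic five-class data; here the wrapper is left to infer the multiclass objective from the labels, and the dataset is an illustrative assumption:

import numpy as np
from sklearn.datasets import make_classification
from xgboost.sklearn import XGBClassifier

# Stand-in for the model returned by init_XGBoost_withSettings.
model = XGBClassifier(max_depth=3, n_jobs=-1)

X, y = make_classification(n_samples=300, n_features=8, n_informative=6,
                           n_classes=5, random_state=0)
model.fit(X, y)
print(np.unique(model.predict(X)))  # predictions span the five classes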

Example 4: gridsearch_run

# Required import: from xgboost import sklearn [as alias]
# Or: from xgboost.sklearn import XGBClassifier [as alias]
# Imports assumed by this snippet; set_gridsearch_params is a project
# helper that returns the parameter grid (a sketch follows this example).
from sklearn.model_selection import GridSearchCV, KFold
from xgboost.sklearn import XGBClassifier

def gridsearch_run(X_train, y_train):

    # Default classifier which will be tuned
    xgb_model = XGBClassifier(
        n_estimators=100,
        max_depth=8,
        min_child_weight=1,
        gamma=0,
        subsample=0.5,
        colsample_bytree=0.5,
        learning_rate=0.1,  # a coarse value is fine for grid search
        objective='multi:softprob',
        silent=True,  # replaced by `verbosity` in xgboost >= 1.0
        nthread=1,
        num_class=3
        )

    # A parameter grid for XGBoost
    params = set_gridsearch_params()

    clf = GridSearchCV(xgb_model,
        params,
        cv=list(KFold(n_splits=5, shuffle=True).split(X_train)),  # at least 5 splits
        verbose=2,
        scoring='neg_log_loss',
        n_jobs=-1
        )

    grid_result = clf.fit(X_train, y_train.values.ravel())

    print("\n\nBest score: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
    means = grid_result.cv_results_['mean_test_score']
    stds = grid_result.cv_results_['std_test_score']
    params = grid_result.cv_results_['params']
    print("\nStats:")
    for mean, stdev, param in zip(means, stds, params):
        print("%f (%f) with: %r" % (mean, stdev, param))

### Train - test and save 
Author: illi4, Project: Crypto_trading_robot, Lines: 41, Source: ml_workflow.py
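The set_gridsearch_params helper is project-specific and not shown in this excerpt; a plausible sketch of what such a helper returns (the grid values are illustrative assumptions) is:

def set_gridsearch_params():
    # Illustrative grid only; the original project's values are not shown here.
    return {
        'max_depth': [4, 6, 8],
        'min_child_weight': [1, 5],
        'subsample': [0.5, 0.8],
        'colsample_bytree': [0.5, 0.8],
    }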

Example 5: xgb_classifier

# Required import: from xgboost import sklearn [as alias]
# Or: from xgboost.sklearn import XGBClassifier [as alias]
def xgb_classifier(self, assign=True, **kwargs):
        """
        Supervised learning classifier. By default it is created as:
                        GBC(n_estimators=100)

        Keyword arguments are passed straight through, i.e.:
                        GBC(**kwargs)

        Note the import fallback used by the module:
            try:
                from xgboost.sklearn import XGBClassifier as GBC
            except ImportError:
                from sklearn.ensemble import GradientBoostingClassifier as GBC


        :param assign: whether to keep the classifier on the instance
                       (default True, i.e. self.clf = clf)
        :param kwargs: with keyword arguments:    GBC(**kwargs)
                       without keyword arguments: GBC(n_estimators=100)

        :return: the instantiated GBC object
        """
        if kwargs:  # **kwargs is always a dict, so a truthiness check suffices
            clf = GBC(**kwargs)
        else:
            clf = GBC(n_estimators=100)
        if assign:
            self.clf = clf
        return clf 
Author: bbfamily, Project: abu, Lines: 30, Source: ABuMLCreater.py
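The import fallback described in that docstring is a useful pattern on its own: the module prefers xgboost but degrades gracefully to scikit-learn's gradient boosting when xgboost is not installed. A minimal standalone sketch (the synthetic dataset is an illustrative assumption):

# Fallback import: prefer xgboost, degrade to sklearn's GradientBoosting.
try:
    from xgboost.sklearn import XGBClassifier as GBC
except ImportError:
    from sklearn.ensemble import GradientBoostingClassifier as GBC

from sklearn.datasets import make_classification

X, y = make_classification(n_samples=200, random_state=0)
clf = GBC(n_estimators=100)
clf.fit(X, y)
print(type(clf).__name__, clf.score(X, y))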

Example 6: tune_params

# Required import: from xgboost import sklearn [as alias]
# Or: from xgboost.sklearn import XGBClassifier [as alias]
def tune_params(self):
        """
        tune specified (and default) parameters
        """
        self._start_time = time.time()
        self.default_params() # set default parameters
        self.score_init() # set initial score
        iround = 0
        while iround<self.max_rounds:
            print('\nLearning rate for iteration %i: %f.' %(iround+1,self._params['learning_rate']))
            while self._step<5:
                istep_time = time.time()
                if self._step==0:
                    xgb = XGBClassifier(**self._params)
                    self.get_n_estimators(xgb)
                else:
                    self.apply_gridsearch(XGBClassifier(**self._params))
                self.print_progress(istep_time,iround=iround,max_rounds=self.max_rounds) # print params and performance
                self._step+=1
            
            # store model each iteration
            self._params_iround[iround] = {}
            for key,value in self._params.items():
                self._params_iround[iround][key] = value
            self._params_iround[iround]['model_score'] = self._temp_score
            
            # check if max_runtime is breached
            if (time.time() - self._start_time) > self.max_runtime:
                print('Tuning stopped after iteration %i. Max runtime of %i sec exceeded.'
                            %(iround+1,self.max_runtime))
                return
            
            # early stopping criterion
            if (iround>=self.running_rounds and 
                    self.best_score==self._params_iround[max(0,iround-self.running_rounds)]['model_score']):
                print('Tuning stopped after iteration %i. No model improvement for %i consecutive rounds.'
                            %(iround+1,self.running_rounds))
                return
            
            # update learning rate and reset n_estimators for next iteration
            if iround<self.max_rounds-1:
                self.update_learning_rate()
            
            if self._stop_learning:
                print('Tuning stopped after iteration %i. Minimum learning rate %f reached.'
                            %(iround+1,self._min_learning_rate))
                return
            
            self._step=0
            iround+=1
        
        return 
Author: arnaudvl, Project: ml-parameter-optimization, Lines: 54, Source: xgb_tune.py
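The get_n_estimators step above is not shown in this excerpt, but the pattern it refers to is standard: fix the learning rate and let xgboost's cross-validation choose the number of boosting rounds via early stopping. A minimal sketch of that idea, with an illustrative dataset and parameters (not taken from the original project):

import xgboost as xgb
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=500, random_state=0)
dtrain = xgb.DMatrix(X, label=y)

params = {'objective': 'binary:logistic', 'learning_rate': 0.1, 'max_depth': 5}
cv_result = xgb.cv(params, dtrain, num_boost_round=1000, nfold=5,
                   early_stopping_rounds=50, metrics='logloss')
print('chosen n_estimators:', len(cv_result))  # rounds kept before stopping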


Note: the xgboost.sklearn.XGBClassifier examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by various developers; copyright remains with the original authors, and distribution and use should follow each project's license. Please do not reproduce without permission.