This article collects typical usage examples of the Python method sklearn.ensemble.ExtraTreesRegressor.predict_proba. If you are wondering what ExtraTreesRegressor.predict_proba does or how to use it, the curated code example below may help. You can also explore further usage examples of the class that provides this method, sklearn.ensemble.ExtraTreesRegressor.
The following shows 1 code example of the ExtraTreesRegressor.predict_proba method.
Example 1: ExtraTreesRegressor
# Required import: from sklearn.ensemble import ExtraTreesRegressor [as alias]
# Or: from sklearn.ensemble.ExtraTreesRegressor import predict_proba [as alias]
import numpy as np
from sklearn.ensemble import ExtraTreesRegressor as ETR
# ParamSklearnRegressionAlgorithm is the regression base class shipped with the
# ParamSklearn package; its import is not shown in this excerpt.
class ExtraTreesRegressor(ParamSklearnRegressionAlgorithm):
def __init__(self, n_estimators, criterion, min_samples_leaf,
min_samples_split, max_features,
max_leaf_nodes_or_max_depth="max_depth",
bootstrap=False, max_leaf_nodes=None, max_depth="None",
oob_score=False, n_jobs=1, random_state=None, verbose=0):
self.n_estimators = int(n_estimators)
self.estimator_increment = 10
        if criterion not in ("mse",):
            raise ValueError("'criterion' is not in ('mse',): "
                             "%s" % criterion)
self.criterion = criterion
if max_leaf_nodes_or_max_depth == "max_depth":
self.max_leaf_nodes = None
if max_depth == "None":
self.max_depth = None
else:
self.max_depth = int(max_depth)
#if use_max_depth == "True":
# self.max_depth = int(max_depth)
#elif use_max_depth == "False":
# self.max_depth = None
else:
if max_leaf_nodes == "None":
self.max_leaf_nodes = None
else:
self.max_leaf_nodes = int(max_leaf_nodes)
self.max_depth = None
self.min_samples_leaf = int(min_samples_leaf)
self.min_samples_split = int(min_samples_split)
self.max_features = float(max_features)
if bootstrap == "True":
self.bootstrap = True
elif bootstrap == "False":
self.bootstrap = False
self.oob_score = oob_score
self.n_jobs = int(n_jobs)
self.random_state = random_state
self.verbose = int(verbose)
self.estimator = None
def fit(self, X, y, refit=False):
if self.estimator is None or refit:
self.iterative_fit(X, y, n_iter=1, refit=refit)
while not self.configuration_fully_fitted():
self.iterative_fit(X, y, n_iter=1)
return self
def iterative_fit(self, X, y, n_iter=1, refit=False):
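        # Grow the ensemble incrementally: warm_start=True keeps the trees that
        # are already fitted, so each call only adds n_iter more estimators.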
if refit:
self.estimator = None
if self.estimator is None:
num_features = X.shape[1]
max_features = int(
float(self.max_features) * (np.log(num_features) + 1))
# Use at most half of the features
max_features = max(1, min(int(X.shape[1] / 2), max_features))
self.estimator = ETR(
n_estimators=0, criterion=self.criterion,
max_depth=self.max_depth,
min_samples_split=self.min_samples_split,
min_samples_leaf=self.min_samples_leaf,
bootstrap=self.bootstrap,
max_features=max_features, max_leaf_nodes=self.max_leaf_nodes,
oob_score=self.oob_score, n_jobs=self.n_jobs,
verbose=self.verbose,
random_state=self.random_state,
warm_start=True
)
tmp = self.estimator # TODO copy ?
tmp.n_estimators += n_iter
        tmp.fit(X, y)
self.estimator = tmp
return self
def configuration_fully_fitted(self):
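        # Fully fitted once the warm-started ensemble has grown to the
        # requested number of trees (self.n_estimators).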
if self.estimator is None:
return False
return not len(self.estimator.estimators_) < self.n_estimators
def predict(self, X):
if self.estimator is None:
raise NotImplementedError
return self.estimator.predict(X)
def predict_proba(self, X):
if self.estimator is None:
raise NotImplementedError()
return self.estimator.predict_proba(X)
@staticmethod
def get_properties(dataset_properties=None):
        #......... part of the code is omitted here .........
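For reference, the minimal sketch below reproduces the warm_start pattern that iterative_fit() relies on, using scikit-learn's ExtraTreesRegressor directly; the synthetic data, variable names, and increment size are illustrative assumptions, not part of the example above. Note that scikit-learn's ExtraTreesRegressor does not implement predict_proba (only ExtraTreesClassifier does), so the wrapper's predict_proba() would raise an AttributeError at runtime.

# Minimal sketch (illustrative data and names) of the warm_start pattern
# that iterative_fit() relies on.
import numpy as np
from sklearn.ensemble import ExtraTreesRegressor as ETR

rng = np.random.RandomState(0)
X = rng.rand(200, 10)
y = X[:, 0] + 0.1 * rng.randn(200)

est = ETR(n_estimators=0, warm_start=True, random_state=0)
for _ in range(5):
    est.n_estimators += 10   # grow the forest by 10 trees per iteration
    est.fit(X, y)            # warm_start keeps the trees already fitted

print(len(est.estimators_))  # 50 trees after 5 increments
print(est.predict(X[:3]))    # regression predictions
# There is no est.predict_proba: scikit-learn regressors do not expose it.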