This article compiles typical usage examples of sklearn.linear_model.HuberRegressor in Python. If you are wondering what linear_model.HuberRegressor is, how to use it, or want to see it in real code, the hand-picked examples below may help. You can also explore the containing module, sklearn.linear_model, for related estimators.
Ten code examples of linear_model.HuberRegressor are shown below, ordered by popularity by default.
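Before the collected examples, here is a minimal, self-contained sketch (not taken from any project below; the data and variable names are purely illustrative) showing how HuberRegressor fits a line through data contaminated by outliers:

import numpy as np
from sklearn.linear_model import HuberRegressor

# Synthetic 1-D data: y = 3*x + 2 with Gaussian noise, plus a few gross outliers.
rng = np.random.RandomState(0)
X = rng.uniform(-5, 5, size=(200, 1))
y = 3.0 * X.ravel() + 2.0 + rng.normal(scale=0.5, size=200)
y[:10] += 30.0  # corrupt a handful of targets

# epsilon controls where the loss switches from squared to linear;
# values close to 1.0 are most robust to outliers, 1.35 is the default.
model = HuberRegressor(epsilon=1.35, max_iter=1000)
model.fit(X, y)
print(model.coef_, model.intercept_)  # close to [3.0] and 2.0 despite the outliers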
Example 1: ensure_many_models
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def ensure_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR

    import warnings
    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    for learner in [GradientBoostingRegressor, RandomForestRegressor, MLPRegressor,
                    ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor,
                    KNeighborsRegressor, SVR, LinearSVR]:
        learner = learner()
        learner_name = str(learner).split("(", maxsplit=1)[0]
        with self.subTest("Test fit using {learner}".format(learner=learner_name)):
            model = self.estimator.__class__(learner)
            model.fit(self.data_lin["X"], self.data_lin["a"], self.data_lin["y"])
            self.assertTrue(True)  # Fit did not crash
Example 2: getModels
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def getModels():
    result = []
    result.append("LinearRegression")
    result.append("BayesianRidge")
    result.append("ARDRegression")
    result.append("ElasticNet")
    result.append("HuberRegressor")
    result.append("Lasso")
    result.append("LassoLars")
    result.append("Ridge")
    result.append("SGDRegressor")
    result.append("SVR")
    result.append("MLPClassifier")
    result.append("KNeighborsClassifier")
    result.append("SVC")
    result.append("GaussianProcessClassifier")
    result.append("DecisionTreeClassifier")
    result.append("RandomForestClassifier")
    result.append("AdaBoostClassifier")
    result.append("GaussianNB")
    result.append("LogisticRegression")
    result.append("QuadraticDiscriminantAnalysis")
    return result
Example 3: fit
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def fit(candidate_data, reference_data):
    '''Tries a variety of robust fitting methods in what is considered
    descending order of how good the fits are with this type of data set
    (found empirically).

    :param list candidate_data: A 1D list or array representing only the image
        data of the candidate band
    :param list reference_data: A 1D list or array representing only the image
        data of the reference band
    :returns: A gain and an offset (tuple of floats)
    '''
    try:
        logging.debug('Robust: Trying HuberRegressor with epsilon 1.01')
        gain, offset = _huber_regressor(
            candidate_data, reference_data, 1.01)
    except Exception:
        try:
            logging.debug('Robust: Trying HuberRegressor with epsilon 1.05')
            gain, offset = _huber_regressor(
                candidate_data, reference_data, 1.05)
        except Exception:
            try:
                logging.debug('Robust: Trying HuberRegressor with epsilon 1.1')
                gain, offset = _huber_regressor(
                    candidate_data, reference_data, 1.1)
            except Exception:
                try:
                    logging.debug('Robust: Trying HuberRegressor with epsilon '
                                  '1.35')
                    gain, offset = _huber_regressor(
                        candidate_data, reference_data, 1.35)
                except Exception:
                    logging.debug('Robust: Trying RANSAC')
                    gain, offset = _ransac_regressor(
                        candidate_data, reference_data)
    return gain, offset
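The nested try/except cascade above can also be expressed as a loop over the candidate epsilon values, falling back to RANSAC only if every Huber fit fails. The following is just an equivalent sketch, assuming _huber_regressor and _ransac_regressor behave as defined in this project (see Example 4); the name fit_compact is made up for illustration:

import logging

def fit_compact(candidate_data, reference_data):
    # Try progressively less aggressive epsilon values; the first success wins.
    for epsilon in (1.01, 1.05, 1.1, 1.35):
        try:
            logging.debug('Robust: Trying HuberRegressor with epsilon %s', epsilon)
            return _huber_regressor(candidate_data, reference_data, epsilon)
        except Exception:
            continue
    # All Huber fits failed; fall back to RANSAC.
    logging.debug('Robust: Trying RANSAC')
    return _ransac_regressor(candidate_data, reference_data)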
Example 4: _huber_regressor
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def _huber_regressor(candidate_data, reference_data, epsilon, max_iter=10000):
    model = linear_model.HuberRegressor(epsilon=epsilon, max_iter=max_iter)
    model.fit(numpy.array([[c] for c in candidate_data]),
              numpy.array(reference_data))
    gain = model.coef_  # one-element array for single-feature input
    offset = model.intercept_
    return gain, offset
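The list comprehension passed to fit() above simply turns the 1-D candidate data into the two-dimensional (n_samples, 1) array that scikit-learn estimators expect. With NumPy an equivalent, slightly more idiomatic reshape looks like this (the sample values are only illustrative):

import numpy

candidate_data = [0.1, 0.4, 0.9]
X = numpy.asarray(candidate_data).reshape(-1, 1)  # shape (3, 1), same as [[c] for c in candidate_data]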
Example 5: ensure_many_models
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def ensure_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR
    from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
    from sklearn.neural_network import MLPClassifier
    from sklearn.neighbors import KNeighborsClassifier

    import warnings
    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    data = self.create_uninformative_ox_dataset()
    for propensity_learner in [GradientBoostingClassifier(n_estimators=10),
                               RandomForestClassifier(n_estimators=100),
                               MLPClassifier(hidden_layer_sizes=(5,)),
                               KNeighborsClassifier(n_neighbors=20)]:
        weight_model = IPW(propensity_learner)
        propensity_learner_name = str(propensity_learner).split("(", maxsplit=1)[0]
        for outcome_learner in [GradientBoostingRegressor(n_estimators=10), RandomForestRegressor(n_estimators=10),
                                MLPRegressor(hidden_layer_sizes=(5,)),
                                ElasticNet(), RANSACRegressor(), HuberRegressor(), PassiveAggressiveRegressor(),
                                KNeighborsRegressor(), SVR(), LinearSVR()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit & predict using {} & {}".format(propensity_learner_name,
                                                                        outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)
                model.estimate_individual_outcome(data["X"], data["a"])
                self.assertTrue(True)  # Fit did not crash
Example 6: test_model_huber_regressor
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def test_model_huber_regressor(self):
    model, X = fit_regression_model(linear_model.HuberRegressor())
    model_onnx = convert_sklearn(
        model, "huber regressor",
        [("input", FloatTensorType([None, X.shape[1]]))])
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        basename="SklearnHuberRegressor-Dec4",
        allow_failure="StrictVersion("
                      "onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example 7: test_objectmapper
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.linear_model.ARDRegression, lm.ARDRegression)
    self.assertIs(df.linear_model.BayesianRidge, lm.BayesianRidge)
    self.assertIs(df.linear_model.ElasticNet, lm.ElasticNet)
    self.assertIs(df.linear_model.ElasticNetCV, lm.ElasticNetCV)
    self.assertIs(df.linear_model.HuberRegressor, lm.HuberRegressor)
    self.assertIs(df.linear_model.Lars, lm.Lars)
    self.assertIs(df.linear_model.LarsCV, lm.LarsCV)
    self.assertIs(df.linear_model.Lasso, lm.Lasso)
    self.assertIs(df.linear_model.LassoCV, lm.LassoCV)
    self.assertIs(df.linear_model.LassoLars, lm.LassoLars)
    self.assertIs(df.linear_model.LassoLarsCV, lm.LassoLarsCV)
    self.assertIs(df.linear_model.LassoLarsIC, lm.LassoLarsIC)
    self.assertIs(df.linear_model.LinearRegression, lm.LinearRegression)
    self.assertIs(df.linear_model.LogisticRegression, lm.LogisticRegression)
    self.assertIs(df.linear_model.LogisticRegressionCV, lm.LogisticRegressionCV)
    self.assertIs(df.linear_model.MultiTaskLasso, lm.MultiTaskLasso)
    self.assertIs(df.linear_model.MultiTaskElasticNet, lm.MultiTaskElasticNet)
    self.assertIs(df.linear_model.MultiTaskLassoCV, lm.MultiTaskLassoCV)
    self.assertIs(df.linear_model.MultiTaskElasticNetCV, lm.MultiTaskElasticNetCV)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuit, lm.OrthogonalMatchingPursuit)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuitCV, lm.OrthogonalMatchingPursuitCV)
    self.assertIs(df.linear_model.PassiveAggressiveClassifier, lm.PassiveAggressiveClassifier)
    self.assertIs(df.linear_model.PassiveAggressiveRegressor, lm.PassiveAggressiveRegressor)
    self.assertIs(df.linear_model.Perceptron, lm.Perceptron)
    self.assertIs(df.linear_model.RandomizedLasso, lm.RandomizedLasso)
    self.assertIs(df.linear_model.RandomizedLogisticRegression, lm.RandomizedLogisticRegression)
    self.assertIs(df.linear_model.RANSACRegressor, lm.RANSACRegressor)
    self.assertIs(df.linear_model.Ridge, lm.Ridge)
    self.assertIs(df.linear_model.RidgeClassifier, lm.RidgeClassifier)
    self.assertIs(df.linear_model.RidgeClassifierCV, lm.RidgeClassifierCV)
    self.assertIs(df.linear_model.RidgeCV, lm.RidgeCV)
    self.assertIs(df.linear_model.SGDClassifier, lm.SGDClassifier)
    self.assertIs(df.linear_model.SGDRegressor, lm.SGDRegressor)
    self.assertIs(df.linear_model.TheilSenRegressor, lm.TheilSenRegressor)
Example 8: getSKLearnModel
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def getSKLearnModel(modelName):
    if modelName == 'LinearRegression':
        model = linear_model.LinearRegression()
    elif modelName == 'BayesianRidge':
        model = linear_model.BayesianRidge()
    elif modelName == 'ARDRegression':
        model = linear_model.ARDRegression()
    elif modelName == 'ElasticNet':
        model = linear_model.ElasticNet()
    elif modelName == 'HuberRegressor':
        model = linear_model.HuberRegressor()
    elif modelName == 'Lasso':
        model = linear_model.Lasso()
    elif modelName == 'LassoLars':
        model = linear_model.LassoLars()
    elif modelName == 'Ridge':
        model = linear_model.Ridge()
    elif modelName == 'SGDRegressor':
        model = linear_model.SGDRegressor()
    elif modelName == 'SVR':
        model = SVR()
    elif modelName == 'MLPClassifier':
        model = MLPClassifier()
    elif modelName == 'KNeighborsClassifier':
        model = KNeighborsClassifier()
    elif modelName == 'SVC':
        model = SVC()
    elif modelName == 'GaussianProcessClassifier':
        model = GaussianProcessClassifier()
    elif modelName == 'DecisionTreeClassifier':
        model = DecisionTreeClassifier()
    elif modelName == 'RandomForestClassifier':
        model = RandomForestClassifier()
    elif modelName == 'AdaBoostClassifier':
        model = AdaBoostClassifier()
    elif modelName == 'GaussianNB':
        model = GaussianNB()
    elif modelName == 'LogisticRegression':
        model = linear_model.LogisticRegression()
    elif modelName == 'QuadraticDiscriminantAnalysis':
        model = QuadraticDiscriminantAnalysis()
    return model
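Examples 2 and 8 appear designed to be used together: getModels() supplies the supported model names and getSKLearnModel() instantiates each one. A hedged usage sketch, under the assumption that both functions live in the same module:

for name in getModels():
    model = getSKLearnModel(name)        # e.g. name 'HuberRegressor' -> linear_model.HuberRegressor()
    print(name, type(model).__name__)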
Example 9: lets_try
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import linear_model, svm
from sklearn.ensemble import AdaBoostRegressor, BaggingRegressor, RandomForestRegressor
from sklearn.metrics import make_scorer, r2_score
from sklearn.model_selection import KFold, cross_val_score

def lets_try(train, labels):
    results = {}

    def test_model(clf):
        cv = KFold(n_splits=5, shuffle=True, random_state=45)
        r2 = make_scorer(r2_score)
        r2_val_score = cross_val_score(clf, train, labels, cv=cv, scoring=r2)
        scores = [r2_val_score.mean()]
        return scores

    clf = linear_model.LinearRegression()
    results["Linear"] = test_model(clf)
    clf = linear_model.Ridge()
    results["Ridge"] = test_model(clf)
    clf = linear_model.BayesianRidge()
    results["Bayesian Ridge"] = test_model(clf)
    clf = linear_model.HuberRegressor()
    results["Huber"] = test_model(clf)
    clf = linear_model.Lasso(alpha=1e-4)
    results["Lasso"] = test_model(clf)
    clf = BaggingRegressor()
    results["Bagging"] = test_model(clf)
    clf = RandomForestRegressor()
    results["RandomForest"] = test_model(clf)
    clf = AdaBoostRegressor()
    results["AdaBoost"] = test_model(clf)
    clf = svm.SVR()
    results["SVM RBF"] = test_model(clf)
    clf = svm.SVR(kernel="linear")
    results["SVM Linear"] = test_model(clf)

    results = pd.DataFrame.from_dict(results, orient='index')
    results.columns = ["R Square Score"]
    # results = results.sort_values(by="R Square Score", ascending=False)
    results.plot(kind="bar", title="Model Scores")
    axes = plt.gca()
    axes.set_ylim([0.5, 1])
    return results
Example 10: test_many_models
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import HuberRegressor [as alias]
def test_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR
    from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
    from sklearn.neural_network import MLPClassifier
    from sklearn.neighbors import KNeighborsClassifier

    import warnings
    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    data = self.create_uninformative_ox_dataset()
    for propensity_learner in [GradientBoostingClassifier(n_estimators=10),
                               RandomForestClassifier(n_estimators=100),
                               MLPClassifier(hidden_layer_sizes=(5,)),
                               KNeighborsClassifier(n_neighbors=20)]:
        weight_model = IPW(propensity_learner)
        propensity_learner_name = str(propensity_learner).split("(", maxsplit=1)[0]

        # Outcome learners expected to fit without error.
        for outcome_learner in [GradientBoostingRegressor(n_estimators=10),
                                RandomForestRegressor(n_estimators=10),
                                RANSACRegressor(), HuberRegressor(), SVR(), LinearSVR()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit using {} & {}".format(propensity_learner_name, outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)
                self.assertTrue(True)  # Fit did not crash

        # Outcome learners that do not support sample weights; fitting should fail.
        for outcome_learner in [MLPRegressor(hidden_layer_sizes=(5,)), ElasticNet(),
                                PassiveAggressiveRegressor(), KNeighborsRegressor()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit using {} & {}".format(propensity_learner_name, outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                with self.assertRaises(TypeError):
                    # Joffe forces learning with sample_weights;
                    # not all ML models support that, so the call should fail.
                    model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)