This article collects typical usage examples of sklearn.linear_model.PassiveAggressiveRegressor in Python. If you are wondering what linear_model.PassiveAggressiveRegressor does, how to use it, or where to find real-world examples of it, the curated code samples below should help. You can also explore the sklearn.linear_model module, where this class lives, for further usage examples.
Below are 12 code examples of linear_model.PassiveAggressiveRegressor, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
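Before turning to the collected examples, here is a minimal sketch of the basic fit / predict / partial_fit workflow. The synthetic data and hyperparameter values are illustrative choices for demonstration only, not taken from any of the examples below.

import numpy as np
from sklearn.linear_model import PassiveAggressiveRegressor

# Illustrative synthetic regression data (hypothetical, for demonstration only).
rng = np.random.RandomState(0)
X = rng.randn(100, 3)
y = X @ np.array([1.5, -2.0, 0.5]) + 0.1 * rng.randn(100)

reg = PassiveAggressiveRegressor(C=1.0, max_iter=100, tol=1e-3, random_state=0)
reg.fit(X, y)                  # batch training
print(reg.predict(X[:5]))      # predictions for the first five samples
reg.partial_fit(X, y)          # the estimator also supports incremental (online) updates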
Example 1: ensure_many_models
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def ensure_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR

    import warnings
    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    for learner in [GradientBoostingRegressor, RandomForestRegressor, MLPRegressor,
                    ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor,
                    KNeighborsRegressor, SVR, LinearSVR]:
        learner = learner()
        learner_name = str(learner).split("(", maxsplit=1)[0]
        with self.subTest("Test fit using {learner}".format(learner=learner_name)):
            model = self.estimator.__class__(learner)
            model.fit(self.data_lin["X"], self.data_lin["a"], self.data_lin["y"])
            self.assertTrue(True)  # Fit did not crash
Example 2: test_regressor_mse
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_regressor_mse():
    y_bin = y.copy()
    y_bin[y != 1] = -1

    for data in (X, X_csr):
        for fit_intercept in (True, False):
            for average in (False, True):
                reg = PassiveAggressiveRegressor(
                    C=1.0, fit_intercept=fit_intercept,
                    random_state=0, average=average, max_iter=5)
                reg.fit(data, y_bin)
                pred = reg.predict(data)
                assert_less(np.mean((pred - y_bin) ** 2), 1.7)

                if average:
                    assert hasattr(reg, 'average_coef_')
                    assert hasattr(reg, 'average_intercept_')
                    assert hasattr(reg, 'standard_intercept_')
                    assert hasattr(reg, 'standard_coef_')

# 0.23. warning about tol not having its correct default value.
Example 3: test_regressor_partial_fit
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_regressor_partial_fit():
    y_bin = y.copy()
    y_bin[y != 1] = -1

    for data in (X, X_csr):
        for average in (False, True):
            reg = PassiveAggressiveRegressor(
                C=1.0, fit_intercept=True, random_state=0,
                average=average, max_iter=100)
            for t in range(50):
                reg.partial_fit(data, y_bin)
            pred = reg.predict(data)
            assert_less(np.mean((pred - y_bin) ** 2), 1.7)

            if average:
                assert hasattr(reg, 'average_coef_')
                assert hasattr(reg, 'average_intercept_')
                assert hasattr(reg, 'standard_intercept_')
                assert hasattr(reg, 'standard_coef_')

# 0.23. warning about tol not having its correct default value.
Example 4: test_model_passive_aggressive_regressor
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_model_passive_aggressive_regressor(self):
    model, X = fit_regression_model(
        linear_model.PassiveAggressiveRegressor())
    model_onnx = convert_sklearn(
        model, "passive aggressive regressor",
        [("input", FloatTensorType([None, X.shape[1]]))])
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        verbose=False,
        basename="SklearnPassiveAggressiveRegressor-Dec4",
        allow_failure="StrictVersion("
                      "onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example 5: test_regressor_mse
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_regressor_mse():
    y_bin = y.copy()
    y_bin[y != 1] = -1

    for data in (X, X_csr):
        for fit_intercept in (True, False):
            for average in (False, True):
                reg = PassiveAggressiveRegressor(
                    C=1.0, fit_intercept=fit_intercept,
                    random_state=0, average=average, max_iter=5)
                reg.fit(data, y_bin)
                pred = reg.predict(data)
                assert_less(np.mean((pred - y_bin) ** 2), 1.7)

                if average:
                    assert_true(hasattr(reg, 'average_coef_'))
                    assert_true(hasattr(reg, 'average_intercept_'))
                    assert_true(hasattr(reg, 'standard_intercept_'))
                    assert_true(hasattr(reg, 'standard_coef_'))
Example 6: test_regressor_partial_fit
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_regressor_partial_fit():
    y_bin = y.copy()
    y_bin[y != 1] = -1

    for data in (X, X_csr):
        for average in (False, True):
            reg = PassiveAggressiveRegressor(
                C=1.0, fit_intercept=True, random_state=0,
                average=average, max_iter=100)
            for t in range(50):
                reg.partial_fit(data, y_bin)
            pred = reg.predict(data)
            assert_less(np.mean((pred - y_bin) ** 2), 1.7)

            if average:
                assert_true(hasattr(reg, 'average_coef_'))
                assert_true(hasattr(reg, 'average_intercept_'))
                assert_true(hasattr(reg, 'standard_intercept_'))
                assert_true(hasattr(reg, 'standard_coef_'))
Example 7: test_regressor_correctness
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_regressor_correctness():
    y_bin = y.copy()
    y_bin[y != 1] = -1

    for loss in ("epsilon_insensitive", "squared_epsilon_insensitive"):
        reg1 = MyPassiveAggressive(
            C=1.0, loss=loss, fit_intercept=True, n_iter=2)
        reg1.fit(X, y_bin)

        for data in (X, X_csr):
            reg2 = PassiveAggressiveRegressor(
                C=1.0, tol=None, loss=loss, fit_intercept=True, max_iter=2,
                shuffle=False)
            reg2.fit(data, y_bin)

            assert_array_almost_equal(reg1.w, reg2.coef_.ravel(), decimal=2)
Example 8: test_regressor_correctness
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_regressor_correctness(loss):
    y_bin = y.copy()
    y_bin[y != 1] = -1

    reg1 = MyPassiveAggressive(
        C=1.0, loss=loss, fit_intercept=True, n_iter=2)
    reg1.fit(X, y_bin)

    for data in (X, X_csr):
        reg2 = PassiveAggressiveRegressor(
            C=1.0, tol=None, loss=loss, fit_intercept=True, max_iter=2,
            shuffle=False)
        reg2.fit(data, y_bin)

        assert_array_almost_equal(reg1.w, reg2.coef_.ravel(), decimal=2)
Example 9: test_regressor_undefined_methods
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_regressor_undefined_methods():
    reg = PassiveAggressiveRegressor(max_iter=100)
    for meth in ("transform",):
        assert_raises(AttributeError, lambda x: getattr(reg, x), meth)
Example 10: ensure_many_models
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def ensure_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR
    from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
    from sklearn.neural_network import MLPClassifier
    from sklearn.neighbors import KNeighborsClassifier

    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    data = self.create_uninformative_ox_dataset()

    for propensity_learner in [GradientBoostingClassifier(n_estimators=10),
                               RandomForestClassifier(n_estimators=100),
                               MLPClassifier(hidden_layer_sizes=(5,)),
                               KNeighborsClassifier(n_neighbors=20)]:
        weight_model = IPW(propensity_learner)
        propensity_learner_name = str(propensity_learner).split("(", maxsplit=1)[0]
        for outcome_learner in [GradientBoostingRegressor(n_estimators=10), RandomForestRegressor(n_estimators=10),
                                MLPRegressor(hidden_layer_sizes=(5,)),
                                ElasticNet(), RANSACRegressor(), HuberRegressor(), PassiveAggressiveRegressor(),
                                KNeighborsRegressor(), SVR(), LinearSVR()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)

            with self.subTest("Test fit & predict using {} & {}".format(propensity_learner_name,
                                                                        outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)
                model.estimate_individual_outcome(data["X"], data["a"])
                self.assertTrue(True)  # Fit did not crash
Example 11: test_objectmapper
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.linear_model.ARDRegression, lm.ARDRegression)
    self.assertIs(df.linear_model.BayesianRidge, lm.BayesianRidge)
    self.assertIs(df.linear_model.ElasticNet, lm.ElasticNet)
    self.assertIs(df.linear_model.ElasticNetCV, lm.ElasticNetCV)
    self.assertIs(df.linear_model.HuberRegressor, lm.HuberRegressor)
    self.assertIs(df.linear_model.Lars, lm.Lars)
    self.assertIs(df.linear_model.LarsCV, lm.LarsCV)
    self.assertIs(df.linear_model.Lasso, lm.Lasso)
    self.assertIs(df.linear_model.LassoCV, lm.LassoCV)
    self.assertIs(df.linear_model.LassoLars, lm.LassoLars)
    self.assertIs(df.linear_model.LassoLarsCV, lm.LassoLarsCV)
    self.assertIs(df.linear_model.LassoLarsIC, lm.LassoLarsIC)
    self.assertIs(df.linear_model.LinearRegression, lm.LinearRegression)
    self.assertIs(df.linear_model.LogisticRegression, lm.LogisticRegression)
    self.assertIs(df.linear_model.LogisticRegressionCV, lm.LogisticRegressionCV)
    self.assertIs(df.linear_model.MultiTaskLasso, lm.MultiTaskLasso)
    self.assertIs(df.linear_model.MultiTaskElasticNet, lm.MultiTaskElasticNet)
    self.assertIs(df.linear_model.MultiTaskLassoCV, lm.MultiTaskLassoCV)
    self.assertIs(df.linear_model.MultiTaskElasticNetCV, lm.MultiTaskElasticNetCV)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuit, lm.OrthogonalMatchingPursuit)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuitCV, lm.OrthogonalMatchingPursuitCV)
    self.assertIs(df.linear_model.PassiveAggressiveClassifier, lm.PassiveAggressiveClassifier)
    self.assertIs(df.linear_model.PassiveAggressiveRegressor, lm.PassiveAggressiveRegressor)
    self.assertIs(df.linear_model.Perceptron, lm.Perceptron)
    self.assertIs(df.linear_model.RandomizedLasso, lm.RandomizedLasso)
    self.assertIs(df.linear_model.RandomizedLogisticRegression, lm.RandomizedLogisticRegression)
    self.assertIs(df.linear_model.RANSACRegressor, lm.RANSACRegressor)
    self.assertIs(df.linear_model.Ridge, lm.Ridge)
    self.assertIs(df.linear_model.RidgeClassifier, lm.RidgeClassifier)
    self.assertIs(df.linear_model.RidgeClassifierCV, lm.RidgeClassifierCV)
    self.assertIs(df.linear_model.RidgeCV, lm.RidgeCV)
    self.assertIs(df.linear_model.SGDClassifier, lm.SGDClassifier)
    self.assertIs(df.linear_model.SGDRegressor, lm.SGDRegressor)
    self.assertIs(df.linear_model.TheilSenRegressor, lm.TheilSenRegressor)
Example 12: test_many_models
# Required module import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import PassiveAggressiveRegressor [as alias]
def test_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR
    from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
    from sklearn.neural_network import MLPClassifier
    from sklearn.neighbors import KNeighborsClassifier

    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    data = self.create_uninformative_ox_dataset()

    for propensity_learner in [GradientBoostingClassifier(n_estimators=10),
                               RandomForestClassifier(n_estimators=100),
                               MLPClassifier(hidden_layer_sizes=(5,)),
                               KNeighborsClassifier(n_neighbors=20)]:
        weight_model = IPW(propensity_learner)
        propensity_learner_name = str(propensity_learner).split("(", maxsplit=1)[0]

        for outcome_learner in [GradientBoostingRegressor(n_estimators=10),
                                RandomForestRegressor(n_estimators=10),
                                RANSACRegressor(), HuberRegressor(), SVR(), LinearSVR()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit using {} & {}".format(propensity_learner_name, outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)
                self.assertTrue(True)  # Fit did not crash

        for outcome_learner in [MLPRegressor(hidden_layer_sizes=(5,)), ElasticNet(),
                                PassiveAggressiveRegressor(), KNeighborsRegressor()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit using {} & {}".format(propensity_learner_name, outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                with self.assertRaises(TypeError):
                    # Joffe forces learning with sample_weights,
                    # not all ML models support that and so calling should fail
                    model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)