This article collects typical usage examples of the Python method sklearn.linear_model.Lars. If you have been asking yourself: what exactly does linear_model.Lars do, how is it used, and what does it look like in practice? Then the curated method examples below may help. You can also explore further usage of the module the method belongs to, sklearn.linear_model.
The following presents 15 code examples of linear_model.Lars, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
Example 1: test_multitarget
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_multitarget():
    # Assure that estimators receiving multidimensional y do the right thing
    Y = np.vstack([y, y ** 2]).T
    n_targets = Y.shape[1]
    estimators = [
        linear_model.LassoLars(),
        linear_model.Lars(),
        # regression test for gh-1615
        linear_model.LassoLars(fit_intercept=False),
        linear_model.Lars(fit_intercept=False),
    ]

    for estimator in estimators:
        estimator.fit(X, Y)
        Y_pred = estimator.predict(X)
        alphas, active, coef, path = (estimator.alphas_, estimator.active_,
                                      estimator.coef_, estimator.coef_path_)
        for k in range(n_targets):
            estimator.fit(X, Y[:, k])
            y_pred = estimator.predict(X)
            assert_array_almost_equal(alphas[k], estimator.alphas_)
            assert_array_almost_equal(active[k], estimator.active_)
            assert_array_almost_equal(coef[k], estimator.coef_)
            assert_array_almost_equal(path[k], estimator.coef_path_)
            assert_array_almost_equal(Y_pred[:, k], y_pred)
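The X and y fixtures this test relies on are defined elsewhere in the scikit-learn test file. To run the core idea standalone, a minimal sketch (the synthetic data below is an assumption, not the original fixtures):

import numpy as np
from sklearn import linear_model

rng = np.random.RandomState(0)
X = rng.randn(50, 5)           # 50 samples, 5 features (assumed data)
y = X.dot(rng.randn(5))        # a noiseless linear target

Y = np.vstack([y, y ** 2]).T   # stack two targets column-wise
est = linear_model.Lars().fit(X, Y)
print(est.coef_.shape)         # (2, 5): one coefficient row per target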
Example 2: get_model
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def get_model(PARAMS):
    '''Get model according to parameters'''
    model_dict = {
        'LinearRegression': LinearRegression(),
        'Ridge': Ridge(),
        'Lars': Lars(),
        'ARDRegression': ARDRegression()
    }
    if not model_dict.get(PARAMS['model_name']):
        LOG.exception('Not supported model!')
        exit(1)

    model = model_dict[PARAMS['model_name']]
    model.normalize = bool(PARAMS['normalize'])
    return model
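A hypothetical call site for get_model; the PARAMS dict below is an assumption that mirrors the keys the function reads. Note that the normalize parameter was removed from scikit-learn estimators in recent releases, so this assignment only has an effect on older versions:

PARAMS = {'model_name': 'Lars', 'normalize': 0}  # hypothetical parameters
model = get_model(PARAMS)
print(type(model).__name__)                      # Lars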
Example 3: test_simple
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_simple():
    # Principle of Lars is to keep covariances tied and decreasing;
    # also test verbose output
    from io import StringIO
    import sys
    old_stdout = sys.stdout
    try:
        sys.stdout = StringIO()

        _, _, coef_path_ = linear_model.lars_path(
            X, y, method='lar', verbose=10)

        sys.stdout = old_stdout

        for i, coef_ in enumerate(coef_path_.T):
            res = y - np.dot(X, coef_)
            cov = np.dot(X.T, res)
            C = np.max(abs(cov))
            eps = 1e-3
            ocur = len(cov[C - eps < abs(cov)])
            if i < X.shape[1]:
                assert ocur == i + 1
            else:
                # no more than max_pred variables can go into the active set
                assert ocur == X.shape[1]
    finally:
        sys.stdout = old_stdout
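The invariant this test checks — LARS keeps the absolute covariances of active features tied at the maximum and decreasing along the path — can be observed directly from lars_path. A quick sketch with assumed synthetic data:

import numpy as np
from sklearn import linear_model

rng = np.random.RandomState(0)
X = rng.randn(40, 4)
y = X.dot(np.array([1., 0., -2., 0.])) + 0.01 * rng.randn(40)

alphas, active, coef_path = linear_model.lars_path(X, y, method='lar')
assert np.all(np.diff(alphas) <= 0)  # alphas decrease monotonically
print(active)                        # order in which features became active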
Example 4: test_lars_lstsq
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lars_lstsq():
    # Test that Lars gives least square solution at the end
    # of the path
    X1 = 3 * X  # use un-normalized dataset
    clf = linear_model.LassoLars(alpha=0.)
    clf.fit(X1, y)
    # Avoid FutureWarning about default value change when numpy >= 1.14
    rcond = None if LooseVersion(np.__version__) >= '1.14' else -1
    coef_lstsq = np.linalg.lstsq(X1, y, rcond=rcond)[0]
    assert_array_almost_equal(clf.coef_, coef_lstsq)
Example 5: test_lasso_gives_lstsq_solution
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lasso_gives_lstsq_solution():
    # Test that Lars Lasso gives least square solution at the end
    # of the path
    _, _, coef_path_ = linear_model.lars_path(X, y, method='lasso')
    coef_lstsq = np.linalg.lstsq(X, y)[0]
    assert_array_almost_equal(coef_lstsq, coef_path_[:, -1])
Example 6: test_lasso_lars_vs_lasso_cd_ill_conditioned2
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lasso_lars_vs_lasso_cd_ill_conditioned2():
    # Create an ill-conditioned situation in which the LARS has to go
    # far in the path to converge, and check that LARS and coordinate
    # descent give the same answers
    # Note it used to be the case that Lars had to use the drop-for-good
    # strategy for this, but this is no longer the case with the
    # equality_tolerance checks
    X = [[1e20, 1e20, 0],
         [-1e-32, 0, 0],
         [1, 1, 1]]
    y = [10, 10, 1]
    alpha = .0001

    def objective_function(coef):
        return (1. / (2. * len(X)) * linalg.norm(y - np.dot(X, coef)) ** 2
                + alpha * linalg.norm(coef, 1))

    lars = linear_model.LassoLars(alpha=alpha, normalize=False)
    assert_warns(ConvergenceWarning, lars.fit, X, y)
    lars_coef_ = lars.coef_
    lars_obj = objective_function(lars_coef_)

    coord_descent = linear_model.Lasso(alpha=alpha, tol=1e-4, normalize=False)
    cd_coef_ = coord_descent.fit(X, y).coef_
    cd_obj = objective_function(cd_coef_)

    assert_less(lars_obj, cd_obj * (1. + 1e-8))
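The quantity being compared here is the standard lasso objective, (1/2n)·||y − Xw||² + α·||w||₁. Written as a standalone helper (a sketch restating the test's inline function, nothing more):

import numpy as np
from scipy import linalg

def lasso_objective(X, y, coef, alpha):
    # (1 / 2n) * ||y - X w||_2^2 + alpha * ||w||_1
    n_samples = len(X)
    return (linalg.norm(y - np.dot(X, coef)) ** 2 / (2. * n_samples)
            + alpha * linalg.norm(coef, 1))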
Example 7: test_lars_add_features
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lars_add_features():
    # assure that at least some features get added if necessary
    # test for 6d2b4c
    # Hilbert matrix
    n = 5
    H = 1. / (np.arange(1, n + 1) + np.arange(n)[:, np.newaxis])
    clf = linear_model.Lars(fit_intercept=False).fit(
        H, np.arange(n))
    assert np.all(np.isfinite(clf.coef_))
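The Hilbert matrix is a classic ill-conditioned matrix, which is exactly what makes it a good stress test here. A quick check of its conditioning (same n = 5 as above):

import numpy as np

n = 5
H = 1. / (np.arange(1, n + 1) + np.arange(n)[:, np.newaxis])
print(np.linalg.cond(H))  # roughly 4.8e5 already at n = 5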
Example 8: _create_regressor
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def _create_regressor(self):
    if self.mode == 'default':
        return Lars()
    if self.mode == 'lasso':
        return LassoLars(alpha=self.alpha)
    raise ValueError('Unexpected mode ' + self.mode + '. Expected "default" or "lasso"')
Example 9: test_model_lars
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_model_lars(self):
    model, X = fit_regression_model(linear_model.Lars())
    model_onnx = convert_sklearn(
        model, "lars",
        [("input", FloatTensorType([None, X.shape[1]]))])
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        basename="SklearnLars-Dec4",
        allow_failure="StrictVersion("
                      "onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example 10: test_objectmapper
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.linear_model.ARDRegression, lm.ARDRegression)
    self.assertIs(df.linear_model.BayesianRidge, lm.BayesianRidge)
    self.assertIs(df.linear_model.ElasticNet, lm.ElasticNet)
    self.assertIs(df.linear_model.ElasticNetCV, lm.ElasticNetCV)
    self.assertIs(df.linear_model.HuberRegressor, lm.HuberRegressor)
    self.assertIs(df.linear_model.Lars, lm.Lars)
    self.assertIs(df.linear_model.LarsCV, lm.LarsCV)
    self.assertIs(df.linear_model.Lasso, lm.Lasso)
    self.assertIs(df.linear_model.LassoCV, lm.LassoCV)
    self.assertIs(df.linear_model.LassoLars, lm.LassoLars)
    self.assertIs(df.linear_model.LassoLarsCV, lm.LassoLarsCV)
    self.assertIs(df.linear_model.LassoLarsIC, lm.LassoLarsIC)
    self.assertIs(df.linear_model.LinearRegression, lm.LinearRegression)
    self.assertIs(df.linear_model.LogisticRegression, lm.LogisticRegression)
    self.assertIs(df.linear_model.LogisticRegressionCV, lm.LogisticRegressionCV)
    self.assertIs(df.linear_model.MultiTaskLasso, lm.MultiTaskLasso)
    self.assertIs(df.linear_model.MultiTaskElasticNet, lm.MultiTaskElasticNet)
    self.assertIs(df.linear_model.MultiTaskLassoCV, lm.MultiTaskLassoCV)
    self.assertIs(df.linear_model.MultiTaskElasticNetCV, lm.MultiTaskElasticNetCV)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuit, lm.OrthogonalMatchingPursuit)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuitCV, lm.OrthogonalMatchingPursuitCV)
    self.assertIs(df.linear_model.PassiveAggressiveClassifier, lm.PassiveAggressiveClassifier)
    self.assertIs(df.linear_model.PassiveAggressiveRegressor, lm.PassiveAggressiveRegressor)
    self.assertIs(df.linear_model.Perceptron, lm.Perceptron)
    self.assertIs(df.linear_model.RandomizedLasso, lm.RandomizedLasso)
    self.assertIs(df.linear_model.RandomizedLogisticRegression, lm.RandomizedLogisticRegression)
    self.assertIs(df.linear_model.RANSACRegressor, lm.RANSACRegressor)
    self.assertIs(df.linear_model.Ridge, lm.Ridge)
    self.assertIs(df.linear_model.RidgeClassifier, lm.RidgeClassifier)
    self.assertIs(df.linear_model.RidgeClassifierCV, lm.RidgeClassifierCV)
    self.assertIs(df.linear_model.RidgeCV, lm.RidgeCV)
    self.assertIs(df.linear_model.SGDClassifier, lm.SGDClassifier)
    self.assertIs(df.linear_model.SGDRegressor, lm.SGDRegressor)
    self.assertIs(df.linear_model.TheilSenRegressor, lm.TheilSenRegressor)
Example 11: test_simple
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_simple():
    # Principle of Lars is to keep covariances tied and decreasing;
    # also test verbose output
    from sklearn.externals.six.moves import cStringIO as StringIO
    import sys
    old_stdout = sys.stdout
    try:
        sys.stdout = StringIO()

        alphas_, active, coef_path_ = linear_model.lars_path(
            diabetes.data, diabetes.target, method="lar", verbose=10)

        sys.stdout = old_stdout

        for (i, coef_) in enumerate(coef_path_.T):
            res = y - np.dot(X, coef_)
            cov = np.dot(X.T, res)
            C = np.max(abs(cov))
            eps = 1e-3
            ocur = len(cov[C - eps < abs(cov)])
            if i < X.shape[1]:
                assert_true(ocur == i + 1)
            else:
                # no more than max_pred variables can go into the active set
                assert_true(ocur == X.shape[1])
    finally:
        sys.stdout = old_stdout
Example 12: test_lars_lstsq
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lars_lstsq():
    # Test that Lars gives least square solution at the end
    # of the path
    X1 = 3 * diabetes.data  # use un-normalized dataset
    clf = linear_model.LassoLars(alpha=0.)
    clf.fit(X1, y)
    coef_lstsq = np.linalg.lstsq(X1, y)[0]
    assert_array_almost_equal(clf.coef_, coef_lstsq)
Example 13: test_lasso_gives_lstsq_solution
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lasso_gives_lstsq_solution():
    # Test that Lars Lasso gives least square solution at the end
    # of the path
    alphas_, active, coef_path_ = linear_model.lars_path(X, y, method="lasso")
    coef_lstsq = np.linalg.lstsq(X, y)[0]
    assert_array_almost_equal(coef_lstsq, coef_path_[:, -1])
Example 14: test_lars_precompute
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lars_precompute():
    # Check for different values of precompute
    X, y = diabetes.data, diabetes.target
    G = np.dot(X.T, X)

    for classifier in [linear_model.Lars, linear_model.LarsCV,
                       linear_model.LassoLarsIC]:
        clf = classifier(precompute=G)
        output_1 = ignore_warnings(clf.fit)(X, y).coef_
        for precompute in [True, False, 'auto', None]:
            clf = classifier(precompute=precompute)
            output_2 = clf.fit(X, y).coef_
            assert_array_almost_equal(output_1, output_2, decimal=8)
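Supplying a precomputed Gram matrix can save time when the same X is fit repeatedly. A sketch of the two equivalent calls this test exercises (synthetic data assumed; fit_intercept=False so the supplied Gram matches the un-centered X):

import numpy as np
from sklearn import linear_model

rng = np.random.RandomState(0)
X = rng.randn(100, 8)
y = X.dot(rng.randn(8))

G = np.dot(X.T, X)  # Gram matrix, shape (8, 8)
coef_gram = linear_model.Lars(precompute=G, fit_intercept=False).fit(X, y).coef_
coef_auto = linear_model.Lars(precompute='auto', fit_intercept=False).fit(X, y).coef_
np.testing.assert_allclose(coef_gram, coef_auto, atol=1e-8)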
Example 15: test_lars_n_nonzero_coefs
# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import Lars [as alias]
def test_lars_n_nonzero_coefs(verbose=False):
    lars = linear_model.Lars(n_nonzero_coefs=6, verbose=verbose)
    lars.fit(X, y)
    assert_equal(len(lars.coef_.nonzero()[0]), 6)
    # The path should be of length 6 + 1 in a Lars going down to 6
    # non-zero coefs
    assert_equal(len(lars.alphas_), 7)
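In the same spirit, a standalone sketch of capping the active set size (synthetic data is an assumption): n_nonzero_coefs=6 stops the path after six features have entered, so alphas_ holds 6 + 1 entries.

import numpy as np
from sklearn import linear_model

rng = np.random.RandomState(0)
X = rng.randn(60, 10)
y = X.dot(rng.randn(10))

lars = linear_model.Lars(n_nonzero_coefs=6).fit(X, y)
print(np.count_nonzero(lars.coef_))  # 6
print(len(lars.alphas_))             # 7: one alpha per step plus the endpoint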