This page collects typical usage examples of the Python method sklearn.linear_model.coordinate_descent.ElasticNet.set_params. If you are unsure how ElasticNet.set_params is used, or are looking for concrete calls to learn from, the curated code examples below may help. You can also read more about the class that defines this method, sklearn.linear_model.coordinate_descent.ElasticNet.
The sections below show 5 code examples of ElasticNet.set_params, ordered by popularity by default. The snippets are excerpted from test suites, so they rely on helpers such as build_dataset and the assert_* utilities defined in their source projects.
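Before the collected examples, here is a minimal, self-contained sketch (not taken from the examples below, and using a tiny made-up dataset) of what set_params does: it updates hyperparameters in place and returns the estimator itself. It uses the public import path sklearn.linear_model, which exposes the same ElasticNet class as the coordinate_descent module.

# A minimal sketch of ElasticNet.set_params on synthetic data (illustration only).
import numpy as np
from sklearn.linear_model import ElasticNet

X = np.array([[0.0], [1.0], [2.0], [3.0]])
y = np.array([0.0, 1.0, 2.0, 3.0])

clf = ElasticNet(alpha=1.0, l1_ratio=0.5)
clf.set_params(alpha=0.1, max_iter=500)   # update hyperparameters before fitting
assert clf.get_params()["alpha"] == 0.1   # set_params only changes the stored parameters
clf.fit(X, y)                             # the new values take effect on the next fit
print(clf.coef_)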
Example 1: test_warm_start
# Required import: from sklearn.linear_model.coordinate_descent import ElasticNet [as alias]
# Or: from sklearn.linear_model.coordinate_descent.ElasticNet import set_params [as alias]
def test_warm_start():
    X, y, _, _ = build_dataset()
    clf = ElasticNet(alpha=1.0, max_iter=50, warm_start=True)
    clf.fit(X, y)
    clf.set_params(alpha=0.1)
    clf.fit(X, y)  # warm-started: continues from the alpha=1.0 solution
    # A cold fit at alpha=0.1, run to convergence, should reach the same coefficients.
    clf2 = ElasticNet(alpha=0.1, max_iter=500)
    clf2.fit(X, y)
    assert_array_almost_equal(clf2.coef_, clf.coef_)
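As a follow-up to Example 1, here is a hedged sketch of the typical pattern behind warm_start: sweep alpha from large to small with set_params, letting each fit start from the previous solution. The dataset is synthetic and only for illustration.

import numpy as np
from sklearn.linear_model import ElasticNet

rng = np.random.RandomState(0)
X = rng.randn(100, 20)
y = X @ rng.randn(20) + 0.1 * rng.randn(100)

clf = ElasticNet(warm_start=True, max_iter=10000)
coefs = []
for alpha in [1.0, 0.5, 0.1, 0.05, 0.01]:
    clf.set_params(alpha=alpha)   # keep the previous coef_ as the starting point
    clf.fit(X, y)
    coefs.append(clf.coef_.copy())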
Example 2: test_enet_toy
# Required import: from sklearn.linear_model.coordinate_descent import ElasticNet [as alias]
# Or: from sklearn.linear_model.coordinate_descent.ElasticNet import set_params [as alias]
def test_enet_toy():
    """
    Test ElasticNet for various values of alpha and l1_ratio.

    Actually, the parameter alpha = 0 should not be allowed. However,
    we test it as a border case.

    ElasticNet is tested with and without a precomputed Gram matrix.
    """
    X = np.array([[-1.], [0.], [1.]])
    Y = [-1, 0, 1]          # just a straight line
    T = [[2.], [3.], [4.]]  # test sample

    # this should be the same as lasso
    clf = ElasticNet(alpha=1e-8, l1_ratio=1.0)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [1])
    assert_array_almost_equal(pred, [2, 3, 4])
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.3, max_iter=100,
                     precompute=False)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=True)
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=np.dot(X.T, X))
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.5)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.45454], 3)
    assert_array_almost_equal(pred, [0.9090, 1.3636, 1.8181], 3)
    assert_almost_equal(clf.dual_gap_, 0)
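Expanding on the "this should be the same as lasso" case above, here is a hedged sketch: with l1_ratio=1.0, ElasticNet and Lasso optimize the same objective for any alpha, so on this toy line the two estimators should agree.

import numpy as np
from sklearn.linear_model import ElasticNet, Lasso

X = np.array([[-1.], [0.], [1.]])
Y = [-1, 0, 1]

enet = ElasticNet(alpha=0.5, l1_ratio=1.0).fit(X, Y)
lasso = Lasso(alpha=0.5).fit(X, Y)
np.testing.assert_allclose(enet.coef_, lasso.coef_, rtol=1e-6)   # same penalty, same solution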
Example 3: test_enet_small
# Required import: from sklearn.linear_model.coordinate_descent import ElasticNet [as alias]
# Or: from sklearn.linear_model.coordinate_descent.ElasticNet import set_params [as alias]
def test_enet_small():
    """Toy tests with generated X and Y"""
    # TODO: add \theta prior knowledge here and test the output
    X = np.array([[-1.], [0.], [1.]])
    Y = [-1, 0, 1]          # a straight line
    T = [[2.], [3.], [4.]]  # test sample

    # this should be the same as lasso
    clf = ElasticNet(alpha=1e-8, l1_ratio=1.0)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [1])
    assert_array_almost_equal(pred, [2, 3, 4])
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.3, max_iter=100,
                     precompute=False)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=True)
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=np.dot(X.T, X))
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.5)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.45454], 3)
    assert_array_almost_equal(pred, [0.9090, 1.3636, 1.8181], 3)
    assert_almost_equal(clf.dual_gap_, 0)
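One detail these examples rely on implicitly: following the scikit-learn convention, set_params returns the estimator itself, so calls can be chained. A hedged sketch on the same toy line:

import numpy as np
from sklearn.linear_model import ElasticNet

X = np.array([[-1.], [0.], [1.]])
Y = [-1, 0, 1]

clf = ElasticNet(alpha=0.5, l1_ratio=0.5)
pred = clf.set_params(alpha=1e-8, l1_ratio=1.0).fit(X, Y).predict([[2.], [3.], [4.]])
print(pred)   # approximately [2, 3, 4], as in the near-lasso case above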
Example 4: test_warm_start
# Required import: from sklearn.linear_model.coordinate_descent import ElasticNet [as alias]
# Or: from sklearn.linear_model.coordinate_descent.ElasticNet import set_params [as alias]
def test_warm_start():
    X, y, _, _ = build_dataset()

    # Test that explicit warm restart...
    clf = ElasticNet(alpha=1.0, max_iter=50)
    clf.fit(X, y)

    clf2 = ElasticNet(alpha=0.1, max_iter=50)
    clf2.fit(X, y, coef_init=clf.coef_.copy())

    # ... and implicit warm restart are equivalent.
    clf3 = ElasticNet(alpha=1.0, max_iter=50, warm_start=True)
    clf3.fit(X, y)
    assert_array_almost_equal(clf3.coef_, clf.coef_)

    clf3.set_params(alpha=0.1)
    clf3.fit(X, y)
    assert_array_almost_equal(clf3.coef_, clf2.coef_)
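A hedged sketch of why the explicit and implicit restarts above are equivalent: set_params only updates hyperparameters and does not touch the fitted coef_, so with warm_start=True the next fit starts from that stored solution (synthetic data, for illustration only).

import numpy as np
from sklearn.linear_model import ElasticNet

rng = np.random.RandomState(1)
X = rng.randn(50, 10)
y = X @ rng.randn(10)

clf = ElasticNet(alpha=1.0, warm_start=True).fit(X, y)
coef_before = clf.coef_.copy()

clf.set_params(alpha=0.1)                       # hyperparameter changed...
assert np.array_equal(clf.coef_, coef_before)   # ...but the fitted solution is untouched
clf.fit(X, y)                                   # this fit is initialized from coef_before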
Example 5: test_warm_start_convergence
# Required import: from sklearn.linear_model.coordinate_descent import ElasticNet [as alias]
# Or: from sklearn.linear_model.coordinate_descent.ElasticNet import set_params [as alias]
def test_warm_start_convergence():
    X, y, _, _ = build_dataset()
    model = ElasticNet(alpha=1e-3, tol=1e-3).fit(X, y)
    n_iter_reference = model.n_iter_

    # This dataset is not trivial enough for the model to converge in one pass.
    assert_greater(n_iter_reference, 2)

    # Check that n_iter_ is invariant to multiple calls to fit
    # when warm_start=False, all else being equal.
    model.fit(X, y)
    n_iter_cold_start = model.n_iter_
    assert_equal(n_iter_cold_start, n_iter_reference)

    # Fit the same model again, using a warm start: the optimizer just performs
    # a single pass before checking that it has already converged.
    model.set_params(warm_start=True)
    model.fit(X, y)
    n_iter_warm_start = model.n_iter_
    assert_equal(n_iter_warm_start, 1)
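A hedged follow-up sketch that measures the same effect across a whole alpha grid: with warm_start=True, the total number of coordinate-descent passes is usually much smaller than fitting each alpha from scratch (synthetic data; exact counts will vary).

import numpy as np
from sklearn.linear_model import ElasticNet

rng = np.random.RandomState(42)
X = rng.randn(200, 50)
y = X @ rng.randn(50) + 0.5 * rng.randn(200)

alphas = np.logspace(0, -3, 10)
cold = ElasticNet(tol=1e-4, max_iter=10000)
warm = ElasticNet(tol=1e-4, max_iter=10000, warm_start=True)

cold_iters = [cold.set_params(alpha=a).fit(X, y).n_iter_ for a in alphas]
warm_iters = [warm.set_params(alpha=a).fit(X, y).n_iter_ for a in alphas]
print(sum(cold_iters), sum(warm_iters))   # the warm-started total is usually much smaller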