This article collects typical usage examples of the Python method sklearn.utils.estimator_checks.check_estimator. If you are wondering what estimator_checks.check_estimator does or how to use it, the curated code examples below may help. You can also explore the rest of the module the method lives in, sklearn.utils.estimator_checks.
The following shows 15 code examples of estimator_checks.check_estimator, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
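As a quick orientation before the project-specific examples, here is a minimal, self-contained sketch (not taken from any of the examples below) of the bare call; it assumes a reasonably recent scikit-learn, where an estimator instance rather than a class is passed:

# Minimal usage sketch: run scikit-learn's common API checks against an
# estimator instance; check_estimator raises on the first failing check.
from sklearn.linear_model import LogisticRegression
from sklearn.utils.estimator_checks import check_estimator

check_estimator(LogisticRegression())  # silent if all common checks pass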
Example 1: test_MultiTaskLassoCV
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_MultiTaskLassoCV():
"""Test that our MultitaskLassoCV behaves like sklearn's."""
X, y = build_dataset(n_samples=30, n_features=50, n_targets=3)
params = dict(eps=1e-2, n_alphas=10, tol=1e-10, cv=2, n_jobs=1,
fit_intercept=False, verbose=2)
clf = MultiTaskLassoCV(**params)
clf.fit(X, y)
clf2 = sklearn_MultiTaskLassoCV(**params)
clf2.max_iter = 10000 # increase max_iter bc of low tol
clf2.fit(X, y)
np.testing.assert_allclose(clf.mse_path_, clf2.mse_path_,
atol=1e-4, rtol=1e-04)
np.testing.assert_allclose(clf.alpha_, clf2.alpha_,
atol=1e-4, rtol=1e-04)
np.testing.assert_allclose(clf.coef_, clf2.coef_,
atol=1e-4, rtol=1e-04)
# check_estimator tests float32 so using tol < 1e-7 causes precision
# issues
clf.tol = 1e-5
check_estimator(clf)
Example 2: test_MultiTaskLasso
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_MultiTaskLasso(fit_intercept):
"""Test that our MultiTaskLasso behaves as sklearn's."""
X, Y = build_dataset(n_samples=20, n_features=30, n_targets=10)
alpha_max = np.max(norm(X.T.dot(Y), axis=1)) / X.shape[0]
alpha = alpha_max / 2.
params = dict(alpha=alpha, fit_intercept=fit_intercept, tol=1e-10,
normalize=True)
clf = MultiTaskLasso(**params)
clf.verbose = 2
clf.fit(X, Y)
clf2 = sklearn_MultiTaskLasso(**params)
clf2.fit(X, Y)
np.testing.assert_allclose(clf.coef_, clf2.coef_, rtol=1e-5)
if fit_intercept:
np.testing.assert_allclose(clf.intercept_, clf2.intercept_)
clf.tol = 1e-7
check_estimator(clf)
Example 3: test_LassoCV
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_LassoCV(sparse_X, fit_intercept, positive):
"""Test that our LassoCV behaves like sklearn's LassoCV."""
X, y = build_dataset(n_samples=20, n_features=30, sparse_X=sparse_X)
params = dict(eps=0.05, n_alphas=10, tol=1e-10, cv=2,
fit_intercept=fit_intercept, positive=positive, verbose=2,
n_jobs=-1)
clf = LassoCV(**params)
clf.fit(X, y)
clf2 = sklearn_LassoCV(**params, max_iter=10000)
clf2.fit(X, y)
np.testing.assert_allclose(clf.mse_path_, clf2.mse_path_, atol=1e-4)
np.testing.assert_allclose(clf.alpha_, clf2.alpha_)
np.testing.assert_allclose(clf.coef_, clf2.coef_, atol=1e-5)
# TODO this one is slow (3s * 8 tests). Pass an instance and icnrease tol
# check_estimator(LassoCV)
Example 4: _validate_estimator
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def _validate_estimator(self, default=None):
"""Check the value of alpha and beta and clustering algorithm.
"""
check_parameter(self.alpha, low=0, high=1, param_name='alpha',
include_left=False, include_right=False)
check_parameter(self.beta, low=1, param_name='beta',
include_left=False)
if self.clustering_estimator is not None:
self.clustering_estimator_ = self.clustering_estimator
else:
self.clustering_estimator_ = default
# make sure the base clustering algorithm is valid
if self.clustering_estimator_ is None:
raise ValueError("clustering algorithm cannot be None")
if self.check_estimator:
check_estimator(self.clustering_estimator_)
Example 5: __init__
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def __init__(self, hidden_units=(256,), batch_size=128, n_epochs=300,
             keep_prob=1.0, hidden_activation=tf.nn.relu,
             encoding_activation=None,
             output_activation=None, random_state=None,
             learning_rate=1e-3, loss='mse', sigmoid_indices=None,
             softmax_indices=None):
    super(AutoencoderManyEpochs, self).__init__(
        hidden_units=hidden_units, batch_size=batch_size,
        n_epochs=n_epochs, keep_prob=keep_prob,
        hidden_activation=hidden_activation,
        encoding_activation=encoding_activation,
        output_activation=output_activation, random_state=random_state,
        learning_rate=learning_rate, loss=loss,
        sigmoid_indices=sigmoid_indices, softmax_indices=softmax_indices,
    )
# The subset invariance part of check_estimator seems to fail due to
# small numerical differences (e.g., 1e-6). The test failure is difficult to
# replicate outside of travis, but I was able to get the test to fail locally
# by changing atol in sklearn.utils.check_methods_subset_invariance from 1e-7
# to 1e-10. This simply skips that part of check_estimator.
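The skip described in the comment above is project-specific. As a general illustration (an assumption on my part, not code from this project), recent scikit-learn versions offer ways to leave out an individual check; one option, while check_estimator still accepts generate_only (roughly scikit-learn 0.22 through 1.5), is to iterate over the generated (estimator, check) pairs and drop check_methods_subset_invariance by name:

# A hedged sketch, not from the original project: run all common checks
# except the subset-invariance one. Assumes check_estimator(...,
# generate_only=True) is available (added around 0.22, deprecated in 1.6).
from functools import partial

from sklearn.utils.estimator_checks import check_estimator


def _check_name(check):
    # unwrap nested functools.partial objects to reach the underlying check
    while isinstance(check, partial):
        check = check.func
    return check.__name__


def run_checks_without_subset_invariance(estimator):
    # generate_only=True yields (estimator, check) pairs instead of running them
    for est, check in check_estimator(estimator, generate_only=True):
        if _check_name(check) == 'check_methods_subset_invariance':
            continue  # skip the numerically flaky subset-invariance check
        check(est)

Newer releases (roughly 0.24 onward) also let an estimator declare expected failures through tags, for example an _xfail_checks entry returned by _more_tags, which check_estimator then skips with a SkipTestWarning.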
Example 6: test_GroupLasso
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_GroupLasso(sparse_X):
    n_features = 50
    X, y = build_dataset(
        n_samples=11, n_features=n_features, sparse_X=sparse_X)
    tol = 1e-8
    clf = GroupLasso(alpha=0.8, groups=10, tol=tol)
    clf.fit(X, y)
    np.testing.assert_array_less(clf.dual_gap_, tol)
    clf.tol = 1e-6
    clf.groups = 1  # unsatisfying, but sklearn's checks fit with only 5 features
    check_estimator(clf)
Example 7: test_LogisticRegression
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_LogisticRegression(sparse_X):
    np.random.seed(1409)
    X, y = build_dataset(
        n_samples=30, n_features=60, sparse_X=sparse_X)
    y = np.sign(y)
    alpha_max = norm(X.T.dot(y), ord=np.inf) / 2
    C = 30. / alpha_max
    tol = 1e-8
    clf1 = LogisticRegression(C=C, tol=tol, verbose=2)
    clf1.fit(X, y)
    clf2 = sklearn_Logreg(
        C=C, penalty='l1', solver='liblinear', fit_intercept=False, tol=tol)
    clf2.fit(X, y)
    np.testing.assert_allclose(clf1.coef_, clf2.coef_, rtol=1e-3, atol=1e-5)
    # this uses float32 so we increase the tol else there are precision issues
    clf1.tol = 1e-4
    check_estimator(clf1)

    # multinomial test, need to have a slightly lower tol
    # for results to be comparable
    y = np.random.choice(4, len(y))
    clf3 = LogisticRegression(C=C, tol=tol, verbose=2)
    clf3.fit(X, y)
    clf4 = sklearn_Logreg(
        C=C, penalty='l1', solver='liblinear', fit_intercept=False, tol=tol)
    clf4.fit(X, y)
    np.testing.assert_allclose(clf3.coef_, clf4.coef_, rtol=1e-3, atol=1e-3)
    clf3.tol = 1e-3
    check_estimator(clf3)
Example 8: test_transformer
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_transformer():
    # With defaults this can fail because in the low sample size case
    # some of the bootstrap samples can have zero cases of the positive class
    return check_estimator(
        StabilitySelection(n_bootstrap_iterations=10, sample_fraction=1.0))
Example 9: test_ols_estimators
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_ols_estimators():
"""[Utils] OLS: check estimators."""
if check_estimator is not None:
check_estimator(OLS)
Example 10: test_logistic_regression_estimators
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_logistic_regression_estimators():
"""[Utils] LogisticRegression: check estimators."""
if check_estimator is not None:
check_estimator(LogisticRegression)
Example 11: test_scale_estimators
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_scale_estimators():
"""[Utils] Scale: check estimators"""
if check_estimator is not None:
check_estimator(Scale)
Example 12: test_init_mixin
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_init_mixin():
"""[Utils] InitMixin: test mixin."""
if check_estimator is not None:
check_estimator(TestMixin)
Example 13: __init__
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def __init__(self, n_clusters=8, contamination=0.1,
             clustering_estimator=None, alpha=0.9, beta=5,
             use_weights=False, check_estimator=False, random_state=None,
             n_jobs=1):
    super(CBLOF, self).__init__(contamination=contamination)
    self.n_clusters = n_clusters
    self.clustering_estimator = clustering_estimator
    self.alpha = alpha
    self.beta = beta
    self.use_weights = use_weights
    self.check_estimator = check_estimator
    self.random_state = random_state
    self.n_jobs = n_jobs
# noinspection PyIncorrectDocstring
Example 14: test_sklearn_estimator
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_sklearn_estimator(self):
    # check_estimator(self.clf)
    pass
Example 15: test_check_estimator_transformer_no_mixin
# Required import: from sklearn.utils import estimator_checks [as alias]
# Or: from sklearn.utils.estimator_checks import check_estimator [as alias]
def test_check_estimator_transformer_no_mixin():
    # check that TransformerMixin is not required for transformer tests to run
    assert_raises_regex(AttributeError, '.*fit_transform.*',
                        check_estimator, BadTransformerWithoutMixin())