

Python base.LinearRegression Code Examples

This article collects typical usage examples of Python's sklearn.linear_model.base.LinearRegression. If you are wondering what base.LinearRegression does, how to call it, or what real-world uses look like, the curated code examples below should help. You can also explore further usage examples from the containing module, sklearn.linear_model.base.


Fourteen code examples of base.LinearRegression are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Python code examples.
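
Before the examples, here is a minimal sketch of the basic import and fit/predict workflow. It assumes an older scikit-learn release in which sklearn.linear_model.base is still importable; recent releases moved this module to a private path, so new code should import LinearRegression from sklearn.linear_model directly.

import numpy as np
from sklearn.linear_model.base import LinearRegression  # older releases only

# Fit y = 2x on a tiny dataset and predict a new point.
X = np.array([[1.0], [2.0], [3.0]])
y = np.array([2.0, 4.0, 6.0])

reg = LinearRegression().fit(X, y)
print(reg.coef_, reg.intercept_)   # approximately [2.0] and 0.0
print(reg.predict([[4.0]]))        # approximately [8.0]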

Example 1: test_raises_value_error_if_sample_weights_greater_than_1d

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_raises_value_error_if_sample_weights_greater_than_1d():
    # Sample weights must be either scalar or 1D

    n_sampless = [2, 3]
    n_featuress = [3, 2]

    for n_samples, n_features in zip(n_sampless, n_featuress):
        X = rng.randn(n_samples, n_features)
        y = rng.randn(n_samples)
        sample_weights_OK = rng.randn(n_samples) ** 2 + 1
        sample_weights_OK_1 = 1.
        sample_weights_OK_2 = 2.

        reg = LinearRegression()

        # make sure the "OK" sample weights actually work
        reg.fit(X, y, sample_weights_OK)
        reg.fit(X, y, sample_weights_OK_1)
        reg.fit(X, y, sample_weights_OK_2) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 21, Source file: test_base.py
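
The excerpt above stops at the well-formed weights. A hedged sketch of the error path the test name refers to (assuming, as in scikit-learn's own validation, that a 2-D sample_weight is rejected with a ValueError) might look like this:

import numpy as np
import pytest
from sklearn.linear_model import LinearRegression

rng = np.random.RandomState(0)
X = rng.randn(3, 2)
y = rng.randn(3)
sample_weights_not_OK = rng.randn(3, 1) ** 2 + 1  # 2-D weights: not allowed

with pytest.raises(ValueError):
    LinearRegression().fit(X, y, sample_weight=sample_weights_not_OK)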

Example 2: test_linear_regression

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_linear_regression():
    # Test LinearRegression on a simple dataset.
    # a simple dataset
    X = [[1], [2]]
    Y = [1, 2]

    reg = LinearRegression()
    reg.fit(X, Y)

    assert_array_almost_equal(reg.coef_, [1])
    assert_array_almost_equal(reg.intercept_, [0])
    assert_array_almost_equal(reg.predict(X), [1, 2])

    # test it also for degenerate input
    X = [[1]]
    Y = [0]

    reg = LinearRegression()
    reg.fit(X, Y)
    assert_array_almost_equal(reg.coef_, [0])
    assert_array_almost_equal(reg.intercept_, [0])
    assert_array_almost_equal(reg.predict(X), [0]) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 24, Source file: test_base.py

Example 3: test_fit_intercept

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_fit_intercept():
    # Test assertions on betas shape.
    X2 = np.array([[0.38349978, 0.61650022],
                   [0.58853682, 0.41146318]])
    X3 = np.array([[0.27677969, 0.70693172, 0.01628859],
                   [0.08385139, 0.20692515, 0.70922346]])
    y = np.array([1, 1])

    lr2_without_intercept = LinearRegression(fit_intercept=False).fit(X2, y)
    lr2_with_intercept = LinearRegression(fit_intercept=True).fit(X2, y)

    lr3_without_intercept = LinearRegression(fit_intercept=False).fit(X3, y)
    lr3_with_intercept = LinearRegression(fit_intercept=True).fit(X3, y)

    assert_equal(lr2_with_intercept.coef_.shape,
                 lr2_without_intercept.coef_.shape)
    assert_equal(lr3_with_intercept.coef_.shape,
                 lr3_without_intercept.coef_.shape)
    assert_equal(lr2_without_intercept.coef_.ndim,
                 lr3_without_intercept.coef_.ndim) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 22, Source file: test_base.py

Example 4: test_ridge_vs_lstsq

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_ridge_vs_lstsq():
    # On alpha=0., Ridge and OLS yield the same solution.

    rng = np.random.RandomState(0)
    # we need more samples than features
    n_samples, n_features = 5, 4
    y = rng.randn(n_samples)
    X = rng.randn(n_samples, n_features)

    ridge = Ridge(alpha=0., fit_intercept=False)
    ols = LinearRegression(fit_intercept=False)

    ridge.fit(X, y)
    ols.fit(X, y)
    assert_almost_equal(ridge.coef_, ols.coef_)

    ridge.fit(X, y)
    ols.fit(X, y)
    assert_almost_equal(ridge.coef_, ols.coef_) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 21, Source file: test_ridge.py

Example 5: test_intercept_flag

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_intercept_flag(rows=10, columns=9):
        inout = get_random_array(rows, columns)
        test_overfitting(rows, columns)
        x = inout[0]
        y = inout[1]

        ntX = HomogenNumericTable(x)
        ntY = HomogenNumericTable(y)

        lr_train = linear_training.Batch()
        lr_train.input.set(linear_training.data, ntX)
        lr_train.input.set(linear_training.dependentVariables, ntY)
        result = lr_train.compute()
        model = result.get(linear_training.model)
        beta_coeff = model.getBeta()
        np_beta = getNumpyArray(beta_coeff)
        daal_intercept = np_beta[0,0]

        from sklearn.linear_model.base import LinearRegression as ScikitLinearRegression
        regression = ScikitLinearRegression()
        regression.fit(x, y)

        scikit_intercept = regression.intercept_
        assert_array_almost_equal(scikit_intercept, [daal_intercept]) 
Developer: h2oai, Project: h2o4gpu, Lines of code: 26, Source file: test_daal_regression.py

Example 6: test_linear_regression_sample_weights

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_linear_regression_sample_weights():
    # TODO: loop over sparse data as well

    rng = np.random.RandomState(0)

    # It would not work with under-determined systems
    for n_samples, n_features in ((6, 5), ):

        y = rng.randn(n_samples)
        X = rng.randn(n_samples, n_features)
        sample_weight = 1.0 + rng.rand(n_samples)

        for intercept in (True, False):

            # LinearRegression with explicit sample_weight
            reg = LinearRegression(fit_intercept=intercept)
            reg.fit(X, y, sample_weight=sample_weight)
            coefs1 = reg.coef_
            inter1 = reg.intercept_

            assert_equal(reg.coef_.shape, (X.shape[1], ))  # sanity checks
            assert_greater(reg.score(X, y), 0.5)

            # Closed form of the weighted least square
            # theta = (X^T W X)^(-1) * X^T W y
            W = np.diag(sample_weight)
            if intercept is False:
                X_aug = X
            else:
                dummy_column = np.ones(shape=(n_samples, 1))
                X_aug = np.concatenate((dummy_column, X), axis=1)

            coefs2 = linalg.solve(X_aug.T.dot(W).dot(X_aug),
                                  X_aug.T.dot(W).dot(y))

            if intercept is False:
                assert_array_almost_equal(coefs1, coefs2)
            else:
                assert_array_almost_equal(coefs1, coefs2[1:])
                assert_almost_equal(inter1, coefs2[0]) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 42, Source file: test_base.py

Example 7: test_linear_regression_sparse

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_linear_regression_sparse(random_state=0):
    # Test that linear regression also works with sparse data
    random_state = check_random_state(random_state)
    for i in range(10):
        n = 100
        X = sparse.eye(n, n)
        beta = random_state.rand(n)
        y = X * beta[:, np.newaxis]

        ols = LinearRegression()
        ols.fit(X, y.ravel())
        assert_array_almost_equal(beta, ols.coef_ + ols.intercept_)

        assert_array_almost_equal(ols.predict(X) - y.ravel(), 0) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 16, Source file: test_base.py

Example 8: test_linear_regression_multiple_outcome

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_linear_regression_multiple_outcome(random_state=0):
    # Test multiple-outcome linear regressions
    X, y = make_regression(random_state=random_state)

    Y = np.vstack((y, y)).T
    n_features = X.shape[1]

    reg = LinearRegression(fit_intercept=True)
    reg.fit((X), Y)
    assert_equal(reg.coef_.shape, (2, n_features))
    Y_pred = reg.predict(X)
    reg.fit(X, y)
    y_pred = reg.predict(X)
    assert_array_almost_equal(np.vstack((y_pred, y_pred)).T, Y_pred, decimal=3) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 16, Source file: test_base.py

Example 9: test_linear_regression_sparse_multiple_outcome

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def test_linear_regression_sparse_multiple_outcome(random_state=0):
    # Test multiple-outcome linear regressions with sparse data
    random_state = check_random_state(random_state)
    X, y = make_sparse_uncorrelated(random_state=random_state)
    X = sparse.coo_matrix(X)
    Y = np.vstack((y, y)).T
    n_features = X.shape[1]

    ols = LinearRegression()
    ols.fit(X, Y)
    assert_equal(ols.coef_.shape, (2, n_features))
    Y_pred = ols.predict(X)
    ols.fit(X, y.ravel())
    y_pred = ols.predict(X)
    assert_array_almost_equal(np.vstack((y_pred, y_pred)).T, Y_pred, decimal=3) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines of code: 17, Source file: test_base.py

Example 10: __init__

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def __init__(self,
                 base_estimator: RegressorMixin = None,
                 **kwargs):

        # Clone a user-supplied estimator so fitting never mutates the caller's
        # object; fall back to ordinary least squares when none is given.
        if base_estimator is not None:
            self.base_estimator = clone(base_estimator)
        else:
            self.base_estimator = LinearRegression()
        super().__init__(**kwargs)
Developer: JakeColtman, Project: bartpy, Lines of code: 12, Source file: baseestimator.py

Example 11: __init__

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def __init__(self, fit_intercept=True, normalize=False, copy_X=True, n_jobs=None):
        self._hyperparams = {
            'fit_intercept': fit_intercept,
            'normalize': normalize,
            'copy_X': copy_X,
            'n_jobs': n_jobs}
        self._wrapped_model = Op(**self._hyperparams) 
Developer: IBM, Project: lale, Lines of code: 9, Source file: linear_regression.py
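
For context, a hedged sketch of the estimator this wrapper presumably delegates to. The assumptions here are that Op in the excerpt is bound to scikit-learn's LinearRegression, and that an older scikit-learn release is in use where normalize is still a constructor argument:

import numpy as np
from sklearn.linear_model import LinearRegression

# Same default hyperparameters as the wrapper, passed straight to scikit-learn
# (older releases only: normalize was later removed from LinearRegression).
model = LinearRegression(fit_intercept=True, normalize=False,
                         copy_X=True, n_jobs=None)
model.fit(np.array([[0.0], [1.0]]), np.array([0.0, 1.0]))
print(model.coef_)  # approximately [1.0]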

Example 12: _checkLM

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def _checkLM(lm):
	if isinstance(lm, (LinearModel, LinearRegression, SparseCoefMixin)):
		return lm
	raise ValueError("LM class " + _class_name(lm) + " is not supported") 
Developer: jpmml, Project: sklearn2pmml, Lines of code: 6, Source file: __init__.py
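
A small standalone sketch of the same acceptance rule, for illustration only. This is not sklearn2pmml's own code: check_lm below is a hypothetical stand-in for _checkLM, and the import again assumes an older scikit-learn where sklearn.linear_model.base is public.

from sklearn.linear_model.base import LinearModel, LinearRegression, SparseCoefMixin
from sklearn.tree import DecisionTreeRegressor

def check_lm(lm):
    # Accept only linear models (dense or sparse-coefficient), as _checkLM above does.
    if isinstance(lm, (LinearModel, LinearRegression, SparseCoefMixin)):
        return lm
    raise ValueError("LM class " + type(lm).__name__ + " is not supported")

check_lm(LinearRegression())           # returned unchanged
try:
    check_lm(DecisionTreeRegressor())
except ValueError as e:
    print(e)                           # LM class DecisionTreeRegressor is not supported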

Example 13: get_scikit_prediction

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def get_scikit_prediction(x=np.array([1,2,3]), y=np.array([1,2,3])):
        # Note: scikit-learn's fit expects a 2-D x of shape (n_samples, n_features);
        # real callers should pass 2-D arrays rather than these 1-D defaults.
        from sklearn.linear_model.base import LinearRegression as ScikitLinearRegression

        regression = ScikitLinearRegression()
        regression.fit(x, y)

        return regression.predict(x) 
Developer: h2oai, Project: h2o4gpu, Lines of code: 10, Source file: test_daal_regression.py

Example 14: to_scikit

# Required module: from sklearn.linear_model import base [as alias]
# Or: from sklearn.linear_model.base import LinearRegression [as alias]
def to_scikit(self):
        return self._to_scikit(LinearRegression) 
Developer: lensacom, Project: sparkit-learn, Lines of code: 4, Source file: base.py


Note: The sklearn.linear_model.base.LinearRegression examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by various developers; copyright remains with the original authors, and distribution and use should follow each project's license. Do not republish without permission.