

Python linear_model.LinearRegression Method Code Examples

This article collects typical usage examples of the Python method sklearn.linear_model.LinearRegression. If you are wondering how linear_model.LinearRegression is used in practice, the curated code examples below may help. You can also explore further usage examples from the containing module, sklearn.linear_model.


The following presents 15 code examples of the linear_model.LinearRegression method, sorted by popularity by default.
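Before the project examples, here is a minimal standalone sketch of the typical fit/predict workflow they all build on. The toy data below is invented purely for illustration and does not come from any of the projects listed:

import numpy as np
from sklearn.linear_model import LinearRegression

# Toy data: y is roughly 2*x + 1 plus noise (illustrative only)
rng = np.random.default_rng(0)
X = rng.uniform(0, 10, size=(50, 1))
y = 2 * X.ravel() + 1 + rng.normal(0, 0.5, size=50)

model = LinearRegression()            # ordinary least squares
model.fit(X, y)                       # estimates coef_ and intercept_
print(model.coef_, model.intercept_)
print(model.predict([[5.0]]))         # prediction for a new input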

Example 1: trainModel

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def trainModel(trainData, features, labels):
    """
    利用訓練數據,估計模型參數

    參數
    ----
    trainData : DataFrame,訓練數據集,包含特征和標簽

    features : 特征名列表

    labels : 標簽名列表

    返回
    ----
    model : LinearRegression, 訓練好的線性模型
    """
    # 創建一個線性回歸模型
    model = linear_model.LinearRegression()
    # 訓練模型,估計模型參數
    model.fit(trainData[features], trainData[labels])
    return model 
Developer: wdxtub, Project: deep-learning-note, Lines: 23, Source: 1_linear_basic.py
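A hedged usage sketch of trainModel, assuming it is defined as above in the same script; the DataFrame and column names are invented for illustration and are not part of the deep-learning-note project:

import pandas as pd
from sklearn import linear_model

# Illustrative toy data only
df = pd.DataFrame({"x": [1.0, 2.0, 3.0, 4.0], "y": [2.1, 3.9, 6.2, 8.1]})
model = trainModel(df, features=["x"], labels=["y"])
print(model.coef_, model.intercept_)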

Example 2: test_lin_reg

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_lin_reg(self):
        # python -m unittest tests_regression.Tests_Regression.test_lin_reg
        from sklearn import linear_model
        from discomll.regression import linear_regression

        x_train, y_train, x_test, y_test = datasets.ex3()
        train_data, test_data = datasets.ex3_discomll()

        lin_reg = linear_model.LinearRegression()  # Create linear regression object
        lin_reg.fit(x_train, y_train)  # Train the model using the training sets
        thetas1 = [lin_reg.intercept_] + lin_reg.coef_[1:].tolist()
        prediction1 = lin_reg.predict(x_test)

        thetas_url = linear_regression.fit(train_data)
        thetas2 = [v for k, v in result_iterator(thetas_url["linreg_fitmodel"])]
        results = linear_regression.predict(test_data, thetas_url)
        prediction2 = [v[0] for k, v in result_iterator(results)]

        self.assertTrue(np.allclose(thetas1, thetas2))
        self.assertTrue(np.allclose(prediction1, prediction2)) 
Developer: romanorac, Project: discomll, Lines: 22, Source: tests_regression.py

Example 3: test_same_results

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_same_results(self):
        from sklearn import datasets
        from sklearn.model_selection import train_test_split
        from sklearn import linear_model

        dataset = datasets.load_iris()
        X_train, X_test, y_train, y_test = train_test_split(dataset.data, dataset.target, test_size=0.2)

        clf = LinearRegression(data_norm=12, epsilon=float("inf"),
                               bounds_X=([4.3, 2.0, 1.0, 0.1], [7.9, 4.4, 6.9, 2.5]), bounds_y=(0, 2))
        clf.fit(X_train, y_train)

        predict1 = clf.predict(X_test)

        clf = linear_model.LinearRegression(normalize=False)
        clf.fit(X_train, y_train)

        predict2 = clf.predict(X_test)

        self.assertTrue(np.allclose(predict1, predict2)) 
Developer: IBM, Project: differential-privacy-library, Lines: 22, Source: test_LinearRegression.py

Example 4: test_accountant

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_accountant(self):
        from diffprivlib.accountant import BudgetAccountant

        acc = BudgetAccountant()
        X = np.linspace(-1, 1, 1000)
        y = X.copy()
        X = X[:, np.newaxis]

        clf = LinearRegression(epsilon=2, data_norm=1, fit_intercept=False, accountant=acc)
        clf.fit(X, y)
        self.assertEqual((2, 0), acc.total())

        with BudgetAccountant(3, 0) as acc2:
            clf = LinearRegression(epsilon=2, data_norm=1, fit_intercept=False)
            clf.fit(X, y)
            self.assertEqual((2, 0), acc2.total())

            with self.assertRaises(BudgetError):
                clf.fit(X, y) 
Developer: IBM, Project: differential-privacy-library, Lines: 21, Source: test_LinearRegression.py

Example 5: test_parameter_estimation_resampling_low_memory

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_parameter_estimation_resampling_low_memory(self):
        X = np.random.uniform(0, 4, 1000)
        y = X + np.random.normal(0, 1, 1000)
        m = BayesianBootstrapBagging(LinearRegression(), 10000, 1000, low_mem=True)
        m.fit(X.reshape(-1, 1), y)
        coef_samples = [b.coef_ for b in m.base_models_]
        intercept_samples = [b.intercept_ for b in m.base_models_]
        self.assertAlmostEqual(np.mean(coef_samples), 1, delta=0.3)
        l, r = central_credible_interval(coef_samples, alpha=0.05)
        self.assertLess(l, 1)
        self.assertGreater(r, 1)
        l, r = highest_density_interval(coef_samples, alpha=0.05)
        self.assertLess(l, 1)
        self.assertGreater(r, 1)
        self.assertAlmostEqual(np.mean(intercept_samples), 0, delta=0.3)
        l, r = central_credible_interval(intercept_samples, alpha=0.05)
        self.assertLess(l, 0)
        self.assertGreater(r, 0)
        self.assertAlmostEqual(np.mean(intercept_samples), 0, delta=0.3)
        l, r = highest_density_interval(intercept_samples, alpha=0.05)
        self.assertLess(l, 0)
        self.assertGreater(r, 0) 
Developer: lmc2179, Project: bayesian_bootstrap, Lines: 24, Source: test_bootstrap.py

Example 6: test_parameter_estimation_resampling

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_parameter_estimation_resampling(self):
        X = np.random.uniform(0, 4, 1000)
        y = X + np.random.normal(0, 1, 1000)
        m = BayesianBootstrapBagging(LinearRegression(), 10000, 1000, low_mem=False)
        m.fit(X.reshape(-1, 1), y)
        coef_samples = [b.coef_ for b in m.base_models_]
        intercept_samples = [b.intercept_ for b in m.base_models_]
        self.assertAlmostEqual(np.mean(coef_samples), 1, delta=0.3)
        l, r = central_credible_interval(coef_samples, alpha=0.05)
        self.assertLess(l, 1)
        self.assertGreater(r, 1)
        l, r = highest_density_interval(coef_samples, alpha=0.05)
        self.assertLess(l, 1)
        self.assertGreater(r, 1)
        self.assertAlmostEqual(np.mean(intercept_samples), 0, delta=0.3)
        l, r = central_credible_interval(intercept_samples, alpha=0.05)
        self.assertLess(l, 0)
        self.assertGreater(r, 0)
        self.assertAlmostEqual(np.mean(intercept_samples), 0, delta=0.3)
        l, r = highest_density_interval(intercept_samples, alpha=0.05)
        self.assertLess(l, 0)
        self.assertGreater(r, 0) 
Developer: lmc2179, Project: bayesian_bootstrap, Lines: 24, Source: test_bootstrap.py

Example 7: test_parameter_estimation_bayes_low_memory

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_parameter_estimation_bayes_low_memory(self):
        X = np.random.uniform(0, 4, 1000)
        y = X + np.random.normal(0, 1, 1000)
        m = BayesianBootstrapBagging(LinearRegression(), 10000, low_mem=True)
        m.fit(X.reshape(-1, 1), y)
        coef_samples = [b.coef_ for b in m.base_models_]
        intercept_samples = [b.intercept_ for b in m.base_models_]
        self.assertAlmostEqual(np.mean(coef_samples), 1, delta=0.3)
        l, r = central_credible_interval(coef_samples, alpha=0.05)
        self.assertLess(l, 1)
        self.assertGreater(r, 1)
        l, r = highest_density_interval(coef_samples, alpha=0.05)
        self.assertLess(l, 1)
        self.assertGreater(r, 1)
        self.assertAlmostEqual(np.mean(intercept_samples), 0, delta=0.3)
        l, r = central_credible_interval(intercept_samples, alpha=0.05)
        self.assertLess(l, 0)
        self.assertGreater(r, 0)
        self.assertAlmostEqual(np.mean(intercept_samples), 0, delta=0.3)
        l, r = highest_density_interval(intercept_samples, alpha=0.05)
        self.assertLess(l, 0)
        self.assertGreater(r, 0) 
Developer: lmc2179, Project: bayesian_bootstrap, Lines: 24, Source: test_bootstrap.py

Example 8: test_refit_nochange_reg

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_refit_nochange_reg(sim_nochange):
    """ Test refit ``keep_regularized=False`` (i.e., not ignoring coef == 0)
    """
    from sklearn.linear_model import LinearRegression as OLS
    estimator = OLS()

    refit = refit_record(sim_nochange, 'ols', estimator,
                         keep_regularized=False)
    assert 'ols_coef' in refit.dtype.names
    assert 'ols_rmse' in refit.dtype.names

    coef = np.array([[-3.83016528e+03, -3.83016528e+03],
                     [5.24635240e-03, 5.24635240e-03]])
    rmse = np.array([0.96794599, 0.96794599])
    np.testing.assert_allclose(refit[0]['ols_coef'], coef)
    np.testing.assert_allclose(refit[0]['ols_rmse'], rmse) 
Developer: ceholden, Project: yatsm, Lines: 18, Source: test_postprocess.py

Example 9: prepare_fit_model_for_factors

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def prepare_fit_model_for_factors(model_type, x_train, y_train):
    """
    Given a model type and training data, fit and return the appropriate model.

    Args:
        model_type (str): 'classification' or 'regression'
        x_train: training features
        y_train: training labels

    Returns:
        (sklearn.base.BaseEstimator): A fit model.
    """

    if model_type == 'classification':
        algorithm = LogisticRegression()
    elif model_type == 'regression':
        algorithm = LinearRegression()
    else:
        algorithm = None

    if algorithm is not None:
        algorithm.fit(x_train, y_train)

    return algorithm 
Developer: HealthCatalyst, Project: healthcareai-py, Lines: 26, Source: top_factors.py
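A hedged usage sketch of the helper above, assuming it is defined in the same script together with the sklearn imports (LinearRegression, LogisticRegression) that the original module relies on; the toy arrays are invented for illustration:

import numpy as np
from sklearn.linear_model import LinearRegression, LogisticRegression

# Illustrative toy regression data only
x_train = np.array([[1.0], [2.0], [3.0], [4.0]])
y_train = np.array([1.1, 2.0, 2.9, 4.2])

model = prepare_fit_model_for_factors('regression', x_train, y_train)
print(model.coef_)   # fitted slope(s)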

Example 10: retrieve_from_scan

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def retrieve_from_scan(self, df_scan):
        from sklearn.linear_model import LinearRegression

        bpm_x = [self.bpm2x_name(bpm) for bpm in self.bpm_names]
        bpm_y = [self.bpm2y_name(bpm) for bpm in self.bpm_names]
        bpm_names_xy = bpm_x + bpm_y

        x = df_scan.loc[:, self.cor_names].values
        y = df_scan.loc[:, bpm_names_xy].values

        reg = LinearRegression().fit(x, y)
        #x_test = np.eye(np.shape(x)[1])
        rm = reg.coef_
        #df_rm = pd.DataFrame(rm.T, columns=self.cor_names, index=bpm_x+bpm_y)
        self.df = self.data2df(matrix=rm, bpm_names=self.bpm_names, cor_names=self.cor_names)
        return self.df 
Developer: ocelot-collab, Project: ocelot, Lines: 18, Source: response_matrix.py
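The key point in the snippet above is that fitting LinearRegression with a multi-output y yields coef_ of shape (n_outputs, n_features), i.e. the linear map from corrector settings to BPM readings. A minimal standalone sketch of that relationship, using made-up matrices that are not part of the ocelot project:

import numpy as np
from sklearn.linear_model import LinearRegression

rng = np.random.default_rng(1)
true_rm = np.array([[1.0, 0.5], [0.2, 2.0], [0.0, 1.5]])   # 3 BPM readings x 2 correctors
x = rng.normal(size=(100, 2))                              # corrector scan settings
y = x @ true_rm.T + rng.normal(0, 1e-3, size=(100, 3))     # simulated BPM responses

reg = LinearRegression().fit(x, y)
print(np.round(reg.coef_, 3))   # approximately recovers true_rm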

Example 11: test_stacking

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_stacking():
    model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset)
    ds = model.stack(10)

    assert ds.X_train.shape[0] == model.dataset.X_train.shape[0]
    assert ds.X_test.shape[0] == model.dataset.X_test.shape[0]
    assert ds.y_train.shape[0] == model.dataset.y_train.shape[0]

    model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset)
    ds = model.stack(10, full_test=False)
    assert np.isnan(ds.X_train).sum() == 0
    assert ds.X_train.shape[0] == model.dataset.X_train.shape[0]
    assert ds.X_test.shape[0] == model.dataset.X_test.shape[0]
    assert ds.y_train.shape[0] == model.dataset.y_train.shape[0]

    model = Regressor(estimator=LinearRegression, parameters={}, dataset=RealDataset)
    model.dataset.load()
    ds = model.stack(10, full_test=False)
    # Check cache
    assert np.isnan(ds.X_train).sum() == 0
    assert ds.X_train.shape[0] == model.dataset.X_train.shape[0]
    assert ds.X_test.shape[0] == model.dataset.X_test.shape[0]
    assert ds.y_train.shape[0] == model.dataset.y_train.shape[0] 
Developer: rushter, Project: heamy, Lines: 25, Source: test_estimator.py

Example 12: test_fixed_effect_contrast_nonzero_effect

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_fixed_effect_contrast_nonzero_effect():
    X, y = make_regression(n_features=5, n_samples=20, random_state=0)
    y = y[:, None]
    labels, results = run_glm(y, X, 'ols')
    coef = LinearRegression(fit_intercept=False).fit(X, y).coef_
    for i in range(X.shape[1]):
        contrast = np.zeros(X.shape[1])
        contrast[i] = 1.
        fixed_effect = _compute_fixed_effect_contrast([labels],
                                                      [results],
                                                      [contrast],
                                                      )
        assert_almost_equal(fixed_effect.effect_size(), coef.ravel()[i])
        fixed_effect = _compute_fixed_effect_contrast(
            [labels] * 3, [results] * 3, [contrast] * 3)
        assert_almost_equal(fixed_effect.effect_size(), coef.ravel()[i]) 
Developer: nilearn, Project: nistats, Lines: 18, Source: test_contrasts.py

Example 13: CalculateVIF2

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def CalculateVIF2(self, df):
        # initialize dictionaries
        vif_dict, tolerance_dict = {}, {}

        # form input data for each exogenous variable
        for exog in df.columns:
            not_exog = [i for i in df.columns if i != exog]
            X, y = df[not_exog], df[exog]

            # extract r-squared from the fit
            r_squared = LinearRegression().fit(X, y).score(X, y)

            # calculate VIF
            vif = 1/(1 - r_squared)
            vif_dict[exog] = vif

            # calculate tolerance
            tolerance = 1 - r_squared
            tolerance_dict[exog] = tolerance

        # return VIF DataFrame
        df_vif = pd.DataFrame({'VIF': vif_dict, 'Tolerance': tolerance_dict})

        return df_vif 
Developer: salan668, Project: FAE, Lines: 26, Source: DimensionReduction.py
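For context, the loop above computes the standard variance inflation factor VIF_j = 1 / (1 - R_j^2), where R_j^2 comes from regressing column j on the remaining columns. A minimal standalone sketch of the same computation; the toy DataFrame is made up for illustration and is not part of the FAE project:

import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression

rng = np.random.default_rng(2)
a = rng.normal(size=200)
df = pd.DataFrame({"a": a, "b": 0.9 * a + rng.normal(0, 0.1, 200), "c": rng.normal(size=200)})

for exog in df.columns:
    others = [c for c in df.columns if c != exog]
    r_squared = LinearRegression().fit(df[others], df[exog]).score(df[others], df[exog])
    print(exog, "VIF =", round(1 / (1 - r_squared), 2))   # "a" and "b" should show inflated VIF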

Example 14: test_diff_detector_cross_validate

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_diff_detector_cross_validate(return_estimator: bool):
    """
    DiffBasedAnomalyDetector.cross_validate implementation should be the
    same as sklearn.model_selection.cross_validate if called the same.

    And it always will update `return_estimator` to True, as it requires
    the intermediate models to calculate the thresholds
    """
    X = np.random.random((100, 10))
    y = np.random.random((100, 1))

    model = DiffBasedAnomalyDetector(base_estimator=LinearRegression())

    cv = TimeSeriesSplit(n_splits=3)
    cv_results_da = model.cross_validate(
        X=X, y=y, cv=cv, return_estimator=return_estimator
    )
    cv_results_sk = cross_validate(model, X=X, y=y, cv=cv, return_estimator=True)

    assert cv_results_da.keys() == cv_results_sk.keys() 
Developer: equinor, Project: gordo, Lines: 22, Source: test_anomaly_detectors.py

Example 15: test_diff_detector_require_thresholds

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LinearRegression [as alias]
def test_diff_detector_require_thresholds(require_threshold: bool):
    """
    Should fail if requiring thresholds, but not calling cross_validate
    """
    X = pd.DataFrame(np.random.random((100, 5)))
    y = pd.DataFrame(np.random.random((100, 2)))

    model = DiffBasedAnomalyDetector(
        base_estimator=MultiOutputRegressor(LinearRegression()),
        require_thresholds=require_threshold,
    )

    model.fit(X, y)

    if require_threshold:
        # FAIL: Forgot to call .cross_validate to calculate thresholds.
        with pytest.raises(AttributeError):
            model.anomaly(X, y)

        model.cross_validate(X=X, y=y)
        model.anomaly(X, y)
    else:
        # thresholds not required
        model.anomaly(X, y) 
Developer: equinor, Project: gordo, Lines: 26, Source: test_anomaly_detectors.py


Note: The sklearn.linear_model.LinearRegression method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers, and the copyright of the source code remains with the original authors. Please refer to the corresponding project's License before distributing or using the code; do not reproduce this article without permission.