

Python OLS.cov_params method: code examples

This article collects typical usage examples of the Python method statsmodels.regression.linear_model.OLS.cov_params. If you are unsure what OLS.cov_params does or how to use it, the hand-picked code examples below should help. You can also explore further usage examples of statsmodels.regression.linear_model.OLS, the class this method belongs to.


Six code examples of OLS.cov_params are shown below, ordered by popularity by default.
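
Before the examples, here is a minimal usage sketch. The data and variable names are illustrative and are not taken from the examples below: cov_params() returns the estimated covariance matrix of the fitted coefficients, and the square root of its diagonal reproduces the standard errors reported as res.bse.

import numpy as np
from statsmodels.regression.linear_model import OLS

# Illustrative data; variable names here are not from the examples below.
rng = np.random.default_rng(123)
exog = np.column_stack([np.ones(50), rng.normal(size=50)])
endog = exog @ [2.0, 1.5] + rng.normal(size=50)

res = OLS(endog, exog).fit()
cov = res.cov_params()        # (k x k) covariance matrix of the coefficient estimates
bse = np.sqrt(np.diag(cov))   # square roots of the diagonal
np.testing.assert_allclose(bse, res.bse)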

Example 1: test_compatibility

# Required import: from statsmodels.regression.linear_model import OLS [as alias]
# Or: from statsmodels.regression.linear_model.OLS import cov_params [as alias]
    def test_compatibility(self):
        """Hypothesis test for the compatibility of prior mean with data

        """
        # TODO: should we store the OLS results ?  not needed so far, but maybe cache
        #params_ols = np.linalg.pinv(self.model.exog).dot(self.model.endog)
        #res = self.wald_test(self.model.r_matrix, q_matrix=self.model.q_matrix, use_f=False)
        #from scratch
        res_ols = OLS(self.model.endog, self.model.exog).fit()
        r_mat = self.model.r_matrix
        r_diff = self.model.q_matrix - r_mat.dot(res_ols.params)[:,None]
        ols_cov_r = res_ols.cov_params(r_matrix=r_mat)
        statistic = r_diff.T.dot(np.linalg.solve(ols_cov_r + self.model.sigma_prior, r_diff))
        from scipy import stats
        df = np.linalg.matrix_rank(self.model.sigma_prior)   # same as r_mat.shape[0]
        pvalue = stats.chi2.sf(statistic, df)
        # TODO: return results class
        return statistic, pvalue, df
Contributor: N-Wouda | Project: statsmodels | Lines: 20 | Source: penalized.py
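
Example 1 passes r_matrix to cov_params. For statsmodels results objects, cov_params(r_matrix=R) is documented to return the covariance of the linear combination R·params, i.e. R·cov_params()·Rᵀ, which is what the Wald-type statistic above is built from. The sketch below, with purely illustrative data, checks that relationship.

import numpy as np
from statsmodels.regression.linear_model import OLS

# Illustrative data; r_mat here simply selects the two slope coefficients.
rng = np.random.default_rng(0)
exog = np.column_stack([np.ones(100), rng.normal(size=(100, 2))])
endog = exog @ [1.0, 0.5, -0.3] + rng.normal(size=100)
res = OLS(endog, exog).fit()

r_mat = np.array([[0.0, 1.0, 0.0],
                  [0.0, 0.0, 1.0]])
cov_full = res.cov_params()                      # full covariance matrix
cov_restricted = res.cov_params(r_matrix=r_mat)  # covariance of r_mat @ params
np.testing.assert_allclose(cov_restricted, r_mat @ cov_full @ r_mat.T)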

Example 2: test_combine_subset_regression

# Required import: from statsmodels.regression.linear_model import OLS [as alias]
# Or: from statsmodels.regression.linear_model.OLS import cov_params [as alias]
    def test_combine_subset_regression(self):
        # split sample into two, use first sample as prior for second
        endog = self.endog
        exog = self.exog
        nobs = len(endog)

        n05 = nobs // 2
        np.random.seed(987125)
        # shuffle to get random subsamples
        shuffle_idx = np.random.permutation(np.arange(nobs))
        ys = endog[shuffle_idx]
        xs = exog[shuffle_idx]
        k = 10
        res_ols0 = OLS(ys[:n05], xs[:n05, :k]).fit()
        res_ols1 = OLS(ys[n05:], xs[n05:, :k]).fit()

        w = res_ols1.scale / res_ols0.scale   #1.01
        mod_1 = TheilGLS(ys[n05:], xs[n05:, :k], r_matrix=np.eye(k),
                         q_matrix=res_ols0.params,
                         sigma_prior=w * res_ols0.cov_params())
        res_1p = mod_1.fit(cov_type='data-prior')
        res_1s = mod_1.fit(cov_type='sandwich')
        res_olsf = OLS(ys, xs[:, :k]).fit()

        assert_allclose(res_1p.params, res_olsf.params, rtol=1e-9)
        corr_fact = np.sqrt(res_1p.scale / res_olsf.scale)
        # correct for differences in scale computation
        assert_allclose(res_1p.bse, res_olsf.bse * corr_fact, rtol=1e-3)

        # regression test, does not verify numbers
        # especially why are these smaller than OLS on full sample
        # in larger sample, nobs=600, those were close to full OLS
        bse1 = np.array([
            0.26589869,  0.15224812,  0.38407399,  0.75679949,  0.66084200,
            0.54174080,  0.53697607,  0.66006377,  0.38228551,  0.53920485])
        assert_allclose(res_1s.bse, bse1, rtol=1e-7)
Contributor: N-Wouda | Project: statsmodels | Lines: 38 | Source: test_theil.py

Example 3: test_predict_se

# Required import: from statsmodels.regression.linear_model import OLS [as alias]
# Or: from statsmodels.regression.linear_model.OLS import cov_params [as alias]
def test_predict_se():
    # this test doesn't use reference values
    # checks consistency across options, and compares to a direct calculation

    # generate dataset
    nsample = 50
    x1 = np.linspace(0, 20, nsample)
    x = np.c_[x1, (x1 - 5)**2, np.ones(nsample)]
    np.random.seed(0)
    beta = [0.5, -0.01, 5.]
    y_true2 = np.dot(x, beta)
    w = np.ones(nsample)
    w[int(nsample * 6. / 10):] = 3
    sig = 0.5
    y2 = y_true2 + sig * w * np.random.normal(size=nsample)
    x2 = x[:,[0,2]]

    # estimate OLS
    res2 = OLS(y2, x2).fit()

    #direct calculation
    covb = res2.cov_params()
    predvar = res2.mse_resid + (x2 * np.dot(covb, x2.T).T).sum(1)
    predstd = np.sqrt(predvar)

    prstd, iv_l, iv_u = wls_prediction_std(res2)
    np.testing.assert_almost_equal(prstd, predstd, 15)

    #stats.t.isf(0.05/2., 50 - 2)
    q = 2.0106347546964458
    ci_half = q * predstd
    np.testing.assert_allclose(iv_u, res2.fittedvalues + ci_half, rtol=1e-12)
    np.testing.assert_allclose(iv_l, res2.fittedvalues - ci_half, rtol=1e-12)

    prstd, iv_l, iv_u = wls_prediction_std(res2, x2[:3,:])
    np.testing.assert_equal(prstd, prstd[:3])
    np.testing.assert_allclose(iv_u, res2.fittedvalues[:3] + ci_half[:3],
                               rtol=1e-12)
    np.testing.assert_allclose(iv_l, res2.fittedvalues[:3] - ci_half[:3],
                               rtol=1e-12)


    # check WLS
    res3 = WLS(y2, x2, 1. / w).fit()

    #direct calculation
    covb = res3.cov_params()
    predvar = res3.mse_resid * w + (x2 * np.dot(covb, x2.T).T).sum(1)
    predstd = np.sqrt(predvar)

    prstd, iv_l, iv_u = wls_prediction_std(res3)
    np.testing.assert_almost_equal(prstd, predstd, 15)

    #stats.t.isf(0.05/2., 50 - 2)
    q = 2.0106347546964458
    ci_half = q * predstd
    np.testing.assert_allclose(iv_u, res3.fittedvalues + ci_half, rtol=1e-12)
    np.testing.assert_allclose(iv_l, res3.fittedvalues - ci_half, rtol=1e-12)

    # testing shapes of exog
    prstd, iv_l, iv_u = wls_prediction_std(res3, x2[-1:,:], weights=3.)
    np.testing.assert_equal(prstd, prstd[-1])
    prstd, iv_l, iv_u = wls_prediction_std(res3, x2[-1,:], weights=3.)
    np.testing.assert_equal(prstd, prstd[-1])

    prstd, iv_l, iv_u = wls_prediction_std(res3, x2[-2:,:], weights=3.)
    np.testing.assert_equal(prstd, prstd[-2:])

    prstd, iv_l, iv_u = wls_prediction_std(res3, x2[-2:,:], weights=[3, 3])
    np.testing.assert_equal(prstd, prstd[-2:])

    prstd, iv_l, iv_u = wls_prediction_std(res3, x2[:3,:])
    np.testing.assert_equal(prstd, prstd[:3])
    np.testing.assert_allclose(iv_u, res3.fittedvalues[:3] + ci_half[:3],
                               rtol=1e-12)
    np.testing.assert_allclose(iv_l, res3.fittedvalues[:3] - ci_half[:3],
                               rtol=1e-12)


    #use wrong size for exog
    #prstd, iv_l, iv_u = wls_prediction_std(res3, x2[-1,0], weights=3.)
    np.testing.assert_raises(ValueError, wls_prediction_std, res3, x2[-1,0],
                             weights=3.)

    # check some weight values
    sew1 = wls_prediction_std(res3, x2[-3:,:])[0]**2
    for wv in np.linspace(0.5, 3, 5):

        sew = wls_prediction_std(res3, x2[-3:,:], weights=1. / wv)[0]**2
        np.testing.assert_allclose(sew, sew1 + res3.scale * (wv - 1))
Contributor: 0ceangypsy | Project: statsmodels | Lines: 92 | Source: tests_predict.py

Example 4: TransformRestriction

# Required import: from statsmodels.regression.linear_model import OLS [as alias]
# Or: from statsmodels.regression.linear_model.OLS import cov_params [as alias]
    #transf = TransformRestriction(np.eye(exog.shape[1])[:2], res2.params[:2] / 2)
    transf3 = TransformRestriction([[0, 0, 0, 1, 0],[0, 0, 0, 0, 1]], [0, 1])
    exog3_st = transf3.reduce(exog)
    res3 = OLS(endog, exog3_st).fit()
    # need to correct for constant/offset in the optimization
    res3 = OLS(endog - exog.dot(transf3.constant.squeeze()), exog3_st).fit()
    params = transf3.expand(res3.params).squeeze()
    assert_allclose(params[:-2], res3_ols.params, rtol=1e-13)
    print(res3.params)
    print(params)
    print(res3_ols.params)
    print(res3_ols.bse)
    # the following raises `ValueError: can't test a constant constraint`
    #tt = res3.t_test(transf3.transf_mat, transf3.constant.squeeze())
    #print tt.sd
    cov_params3 = transf3.transf_mat.dot(res3.cov_params()).dot(transf3.transf_mat.T)
    bse3 = np.sqrt(np.diag(cov_params3))
    print(bse3)

    tp = transform_params_constraint(res2.params, res2.normalized_cov_params,
                                     transf3.R, transf3.q)
    tp = transform_params_constraint(res2.params, res2.cov_params(), transf3.R, transf3.q)


    from statsmodels.discrete.discrete_model import Poisson
    import statsmodels.api as sm
    rand_data = sm.datasets.randhie.load(as_pandas=False)
    rand_exog = rand_data.exog.view(float).reshape(len(rand_data.exog), -1)
    rand_exog = sm.add_constant(rand_exog, prepend=False)

Contributor: ChadFulton | Project: statsmodels | Lines: 31 | Source: try_fit_constrained.py

Example 5: test_combine_subset_regression

# Required import: from statsmodels.regression.linear_model import OLS [as alias]
# Or: from statsmodels.regression.linear_model.OLS import cov_params [as alias]
    def test_combine_subset_regression(self):
        # split sample into two, use first sample as prior for second
        endog = self.endog
        exog = self.exog
        nobs = len(endog)

        n05 = nobs // 2
        np.random.seed(987125)
        # shuffle to get random subsamples
        shuffle_idx = np.random.permutation(np.arange(nobs))
        ys = endog[shuffle_idx]
        xs = exog[shuffle_idx]
        k = 10
        res_ols0 = OLS(ys[:n05], xs[:n05, :k]).fit()
        res_ols1 = OLS(ys[n05:], xs[n05:, :k]).fit()

        w = res_ols1.scale / res_ols0.scale   #1.01
        mod_1 = TheilGLS(ys[n05:], xs[n05:, :k], r_matrix=np.eye(k),
                         q_matrix=res_ols0.params,
                         sigma_prior=w * res_ols0.cov_params())
        res_1p = mod_1.fit(cov_type='data-prior')
        res_1s = mod_1.fit(cov_type='sandwich')
        res_olsf = OLS(ys, xs[:, :k]).fit()

        assert_allclose(res_1p.params, res_olsf.params, rtol=1e-9)
        corr_fact = 0.96156318  # correct for differences in scale computation
        assert_allclose(res_1p.bse, res_olsf.bse * corr_fact, rtol=1e-3)

        # regression test, does not verify numbers
        # especially why are these smaller than OLS on full sample
        # in larger sample, nobs=600, those were close to full OLS
        bse1 = np.array([
            0.27609914,  0.15808869,  0.39880789,  0.78583194,  0.68619331,
            0.56252314,  0.55757562,  0.68538523,  0.39695081,  0.55988991])
        assert_allclose(res_1s.bse, bse1, rtol=1e-7)
Contributor: cong1989 | Project: statsmodels | Lines: 35 | Source: test_theil.py

Example 6: print

# Required import: from statsmodels.regression.linear_model import OLS [as alias]
# Or: from statsmodels.regression.linear_model.OLS import cov_params [as alias]
    x = np.ones((nobs,2))
    x[:,1] = np.arange(nobs)/20.
    y = x.sum(1) + 1.01*(1+1.5*(x[:,1]>10))*np.random.rand(nobs)
    print(het_goldfeldquandt(y,x, 1))

    y = x.sum(1) + 1.01*(1+0.5*(x[:,1]>10))*np.random.rand(nobs)
    print(het_goldfeldquandt(y,x, 1))

    y = x.sum(1) + 1.01*(1-0.5*(x[:,1]>10))*np.random.rand(nobs)
    print(het_goldfeldquandt(y,x, 1))

    print(het_breuschpagan(y,x))
    print(het_white(y,x))

    f, fp, fo = het_goldfeldquandt(y,x, 1)
    print(f, fp)
    resgq = het_goldfeldquandt(y,x, 1, retres=True)
    print(resgq)

    #this is just a syntax check:
    print(_neweywestcov(y, x))

    resols1 = OLS(y, x).fit()
    print(_neweywestcov(resols1.resid, x))
    print(resols1.cov_params())
    print(resols1.HC0_se)
    print(resols1.cov_HC0)

    y = x.sum(1) + 10.*(1-0.5*(x[:,1]>10))*np.random.rand(nobs)
    print(HetGoldfeldQuandt().run(y,x, 1, alternative='dec'))
Contributor: bashtage | Project: statsmodels | Lines: 32 | Source: diagnostic.py
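
For context on the last few print calls in Example 6: cov_params() is the classical (nonrobust) covariance of the estimates, while cov_HC0 and HC0_se are the White (HC0) heteroskedasticity-robust covariance and the corresponding standard errors. A minimal sketch with illustrative data (names are not from the snippet above):

import numpy as np
from statsmodels.regression.linear_model import OLS

# Illustrative heteroscedastic data
rng = np.random.default_rng(7)
x = np.column_stack([np.ones(200), rng.normal(size=200)])
y = x @ [1.0, 2.0] + np.abs(x[:, 1]) * rng.normal(size=200)

res = OLS(y, x).fit()
cov_nonrobust = res.cov_params()   # classical covariance (homoskedasticity assumed)
cov_robust = res.cov_HC0           # White (HC0) robust covariance
np.testing.assert_allclose(res.HC0_se, np.sqrt(np.diag(cov_robust)))
print(np.sqrt(np.diag(cov_nonrobust)))   # nonrobust standard errors (== res.bse)
print(res.HC0_se)                        # robust standard errors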


Note: The statsmodels.regression.linear_model.OLS.cov_params examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors; please follow the corresponding project's license when distributing or using them, and do not republish without permission.