本文整理汇总了Java中org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression.estimateResiduals方法的典型用法代码示例。如果您正苦于以下问题:Java OLSMultipleLinearRegression.estimateResiduals方法的具体用法?Java OLSMultipleLinearRegression.estimateResiduals怎么用?Java OLSMultipleLinearRegression.estimateResiduals使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression
的用法示例。
在下文中一共展示了OLSMultipleLinearRegression.estimateResiduals方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: mutipleLineRegress_OLS
import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression; //导入方法依赖的package包/类
/**
 * Implements ordinary least squares (OLS) to estimate the parameters of a
 * multiple linear regression model.
 * @param y Y sample data - one dimension array
 * @param x X sample data - two dimension array
 * @return Estimated regression parameters and residuals
 */
public static Array[] mutipleLineRegress_OLS(Array y, Array x) {
    OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
    double[] yy = (double[]) ArrayUtil.copyToNDJavaArray(y);
    double[][] xx = (double[][]) ArrayUtil.copyToNDJavaArray(x);
    regression.newSampleData(yy, xx);
    double[] para = regression.estimateRegressionParameters();
    double[] residuals = regression.estimateResiduals();
    int k = para.length;      // number of estimated parameters (intercept + slopes)
    int n = residuals.length; // number of observations / residuals
    Array aPara = Array.factory(DataType.DOUBLE, new int[]{k});
    Array aResiduals = Array.factory(DataType.DOUBLE, new int[]{n});
    for (int i = 0; i < k; i++) {
        aPara.setDouble(i, para[i]);
    }
    // BUG FIX: this loop was previously bounded by k (the parameter count),
    // which truncated the copied residuals — the bound must be n, the residual count.
    for (int i = 0; i < n; i++) {
        aResiduals.setDouble(i, residuals[i]);
    }
    return new Array[]{aPara, aResiduals};
}
示例2: updateLinearParametersGivenRegression
import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression; //导入方法依赖的package包/类
@Override
protected void updateLinearParametersGivenRegression(
    int i, OLSMultipleLinearRegression regression, double[][] x) {
    // Estimate a variance model alongside the mean model: regress the
    // log of the squared residuals on the same regressors x.
    final double[] logSquaredResiduals = regression.estimateResiduals();
    for (int j = 0; j < logSquaredResiduals.length; j++) {
        final double r = logSquaredResiduals[j];
        // NOTE(review): a residual of exactly 0 yields log(0) = -Infinity here —
        // presumably the inputs rule that out; confirm with callers.
        logSquaredResiduals[j] = Math.log(r * r);
    }
    final OLSMultipleLinearRegression varianceRegression = new OLSMultipleLinearRegression();
    varianceRegression.setNoIntercept(true);
    varianceRegression.newSampleData(logSquaredResiduals, x);
    temporaryVarianceCoefficients[i] = varianceRegression.estimateRegressionParameters();
    // Delegate to the parent implementation for the mean (linear) parameters.
    super.updateLinearParametersGivenRegression(i, regression, x);
}
示例3: calculateOlsRegression
import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression; //导入方法依赖的package包/类
public void calculateOlsRegression(double[][] x, double[] y){
    // Fit an OLS model to (x, y) and compute the standard diagnostics.
    final OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression();
    ols.newSampleData(y, x);
    final double[] coefficients = ols.estimateRegressionParameters();
    final double[] fitResiduals = ols.estimateResiduals();
    final double[][] coefficientCovariance = ols.estimateRegressionParametersVariance();
    final double regressandVar = ols.estimateRegressandVariance();
    final double rSquaredValue = ols.calculateRSquared();
    final double residualStdError = ols.estimateRegressionStandardError();
    // NOTE(review): the computed values are neither stored nor returned —
    // this reads like demonstration code; confirm whether results should be exposed.
}
示例4: assertResultsMatch
import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression; //导入方法依赖的package包/类
/**
 * Checks that the Morpheus OLS model yields the same results as Apache Math.
 * Compares sums of squares, R^2, std error, slope parameters (and their std
 * errors), the intercept, and the per-observation residuals.
 * @param actual the Morpheus results
 * @param expected the Apache results
 */
private <R,C> void assertResultsMatch(DataFrameLeastSquares<R,C> actual, OLSMultipleLinearRegression expected) {
    Assert.assertEquals(actual.getResidualSumOfSquares(), expected.calculateResidualSumOfSquares(), 0.0000001, "Residual sum of squares matches");
    Assert.assertEquals(actual.getTotalSumOfSquares(), expected.calculateTotalSumOfSquares(), actual.getTotalSumOfSquares() * 0.000001, "Total sum of squares matches");
    Assert.assertEquals(actual.getRSquared(), expected.calculateRSquared(), 0.0000001, "R^2 values match");
    Assert.assertEquals(actual.getStdError(), expected.estimateRegressionStandardError(), 0.0000001, "Std error matches");
    // Apache's parameter array carries the intercept at index 0, so Morpheus
    // betas align with params2[i+1].
    final DataFrame<C,Field> params1 = actual.getBetas();
    final double[] params2 = expected.estimateRegressionParameters();
    Assert.assertEquals(params1.rows().count(), params2.length-1, "Same number of parameters");
    for (int i=0; i<params1.rows().count(); ++i) {
        final double actualParam = params1.data().getDouble(i, Field.PARAMETER);
        final double expectedParam = params2[i+1];
        Assert.assertEquals(actualParam, expectedParam, 0.000000001, "Parameters match at index " + i);
    }
    final double intercept = expected.estimateRegressionParameters()[0];
    final double interceptStdError = expected.estimateRegressionParametersStandardErrors()[0];
    Assert.assertEquals(actual.getInterceptValue(Field.PARAMETER), intercept, 0.0000001, "The intercepts match");
    Assert.assertEquals(actual.getInterceptValue(Field.STD_ERROR), interceptStdError, 0.000000001, "The intercept std errors match");
    final DataFrame<R,String> residuals1 = actual.getResiduals();
    final double[] residuals2 = expected.estimateResiduals();
    Assert.assertEquals(residuals1.rows().count(), residuals2.length, "Same number of residuals");
    for (int i=0; i<residuals1.rows().count(); ++i) {
        Assert.assertEquals(residuals1.data().getDouble(i, 0), residuals2[i], 0.00000001, "Residuals match at index " + i);
    }
    final DataFrame<C,Field> stdErrs1 = actual.getBetas().cols().select(c -> c.key() == Field.STD_ERROR);
    final double[] stdErrs2 = expected.estimateRegressionParametersStandardErrors();
    Assert.assertEquals(stdErrs1.rows().count(), stdErrs2.length-1, "Same number of parameter standard errors");
    // BUG FIX: this loop previously iterated stdErrs1.cols().count() (always 1 after
    // selecting the single STD_ERROR column) and read getDouble(0, i), so only the
    // first standard error was ever compared. Iterate the rows, as the count
    // assertion above and the residuals loop do.
    for (int i=0; i<stdErrs1.rows().count(); ++i) {
        Assert.assertEquals(stdErrs1.data().getDouble(i, 0), stdErrs2[i+1], 0.00000001, "Standard errors match at index " + i);
    }
}
示例5: performGranger
import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression; //导入方法依赖的package包/类
/**
* Suppose y is the target to be checked, x is the impact to be tested, u is the universe
* of all variables (including y, x and all other time series) and L is the given lag. The
* question is then:<br>
* <b>Does x Granger-cause y in the given universe u with the given lag L?</b><br>
* <br>
* The H0-test is then the following equation, which'll be solved via OLS:
* <pre>
y_t = (y_t-1 ... y_t-L u0_t-1 ... u0_t-L ... un_t-1 ... un_t-L) * (beta_1 ... beta_(n*L))^T + (epsilon_1 ... epsilon_(n*L))^T
...
y_t-d = (y_t-d-1 ... y_t-d-L u0_t-d-1 ... u0_t-d-L ... un_t-d-1 ... un_t-d-L) * (beta_1 ... beta_(n*L))^T + (epsilon_1 ... epsilon_(n*L))^T
* which translates to
* Y = U(without x) * BETA + EPSILON
* </pre>
* The H1-test is then the exact same as H0 but U contains also x.
*
* This translates to the following variable names:
* <ul>
* <li>Y => strippedY</li>
* <li>X(without x) => laggedH0</li>
* <li>X => laggedH1</li>
* </ul>
*
* @param laggedH0Rows The number of rows in U without x (the laggedH0 matrix). This is the
* d index in the above defined equation (which can also be called "number of observations").
* @param strippedY The Y vector that is cut off with the lag size.
* @param laggedH0 The universe matrix without the x variable whose influence is to be tested.
* The Matrix is also lagged with the given lag L.
* @param variablesH0 The number of variables in the universe without the variable x.
* @param laggedH1 The universe matrix with the x variable. The Matrix is also lagged with the
* given lag L.
* @param variablesH1 The number of variables in the universe with the variable x (naturally,
* this should be variablesH0+1).
* @return The computed GrangerCausalIndicator object, in case of success.
* Null - If anything went wrong (e.g., the OLS can't compute the parameters due to a
* SingularMatrixException)
*/
/**
 * Runs the Granger-causality F-test comparing the restricted model (H0, the
 * lagged universe without x) against the unrestricted model (H1, with x).
 * Both models are solved via OLS on the same target vector strippedY.
 *
 * @return the GrangerCausalIndicator on success, or null if the OLS fit
 *         fails (e.g. a SingularMatrixException on a degenerate design matrix).
 */
protected GrangerCausalIndicator performGranger(int laggedH0Rows, double[] strippedY,
		double[][] laggedH0, int variablesH0, double[][] laggedH1, int variablesH1) {
	try {
		// Fit restricted (without x) and unrestricted (with x) models on the same Y.
		OLSMultipleLinearRegression h0 = new OLSMultipleLinearRegression();
		OLSMultipleLinearRegression h1 = new OLSMultipleLinearRegression();
		h0.newSampleData(strippedY, laggedH0);
		h1.newSampleData(strippedY, laggedH1);
		double[] rs0 = h0.estimateResiduals();
		double[] rs1 = h1.estimateResiduals();
		// Residual sums of squares of both models.
		double RSS0 = sqrSum(rs0);
		double RSS1 = sqrSum(rs1);
		int nbrParametersModel0 = this.lagSize * variablesH0;
		int nbrParametersModel1 = this.lagSize * variablesH1;
		// Nested-model F-test:
		//       (RSS0 - RSS1) / (p1 - p0)
		//   f = -------------------------
		//            RSS1 / (n - p1)
		int dfNumerator = nbrParametersModel1 - nbrParametersModel0;
		int dfDenominator = laggedH0Rows - nbrParametersModel1;
		double ftest = ((RSS0 - RSS1) / dfNumerator) / (RSS1 / dfDenominator);
		// BUG FIX: the F distribution was previously built with
		// (lagSize, laggedH0Rows - 2*lagSize - 1), the degrees of freedom of an
		// older statistic formula that had been replaced above. The distribution's
		// degrees of freedom must match the statistic actually computed.
		FDistribution fDist = new FDistribution(dfNumerator, dfDenominator);
		double pValue = 1.0 - fDist.cumulativeProbability(ftest);
		return new GrangerCausalIndicator(pValue, CRITICAL_VALUE, this.lagSize);
	} catch(SingularMatrixException smex) {
		smex.printStackTrace();
		return null;
	}
}