本文整理汇总了Java中org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction类的典型用法代码示例。如果您正苦于以下问题:Java MultivariateDifferentiableVectorFunction类的具体用法?Java MultivariateDifferentiableVectorFunction怎么用?Java MultivariateDifferentiableVectorFunction使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
MultivariateDifferentiableVectorFunction类属于org.apache.commons.math3.analysis.differentiation包,在下文中一共展示了MultivariateDifferentiableVectorFunction类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: doTestStRD
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Checks the optimizer against a NIST StRD reference dataset: runs a
 * least-squares fit from the dataset's first starting point and verifies
 * each fitted parameter against the certified value.
 *
 * @param dataset     reference dataset providing data, start point and
 *                    certified parameter values
 * @param errParams   relative tolerance on the fitted parameters
 * @param errParamsSd relative tolerance on parameter standard deviations
 *                    (accepted for signature compatibility; unused here)
 */
public void doTestStRD(final StatisticalReferenceDataset dataset,
                       final double errParams, final double errParamsSd) {
    final AbstractLeastSquaresOptimizer optimizer = createOptimizer();

    // Unit weights: every observation counts equally.
    final double[] unitWeights = new double[dataset.getNumObservations()];
    Arrays.fill(unitWeights, 1.0);

    final double[][] data = dataset.getData();
    final double[] start = dataset.getStartingPoint(0);
    final MultivariateDifferentiableVectorFunction problem =
        dataset.getLeastSquaresProblem();

    // data[1] holds the observed values the model is fitted against.
    final PointVectorValuePair optimum =
        optimizer.optimize(100, problem, data[1], unitWeights, start);

    final double[] fitted = optimum.getPoint();
    for (int i = 0; i < fitted.length; ++i) {
        final double certified = dataset.getParameter(i);
        final double tol = FastMath.abs(errParams * certified);
        Assert.assertEquals(dataset.getName() + ", param #" + i,
                            certified, fitted[i], tol);
    }
}
示例2: optimize
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Optimize an objective function.
 * Optimization is considered to be a weighted least-squares minimization.
 * The cost function to be minimized is
 * <code>&sum;weight<sub>i</sub>(objective<sub>i</sub> - target<sub>i</sub>)<sup>2</sup></code>
 *
 * @param maxEval Maximum number of function evaluations.
 * @param f Objective function.
 * @param target Target value for the objective functions at optimum.
 * @param weights Weights for the least squares cost computation.
 * @param startPoint Start point for optimization.
 * @return the point/value pair giving the optimal value for the objective
 * function.
 * @throws org.apache.commons.math3.exception.DimensionMismatchException
 * if the start point dimension is wrong.
 * @throws org.apache.commons.math3.exception.TooManyEvaluationsException
 * if the maximal number of evaluations is exceeded.
 * @throws org.apache.commons.math3.exception.NullArgumentException if
 * any argument is {@code null}.
 */
public PointVectorValuePair optimize(final int maxEval,
final MultivariateDifferentiableVectorFunction f,
final double[] target, final double[] weights,
final double[] startPoint) {
// Reset the Jacobian evaluation counter for this run.
jacobianEvaluations = 0;
// Wrap f so its Jacobian can be queried by the internal algorithm.
jF = new JacobianFunction(f);
// Arrays shared with the other private methods; sizes are fixed by the
// problem dimensions (rows = observations, cols = parameters).
point = startPoint.clone();
rows = target.length;
cols = point.length;
weightedResidualJacobian = new double[rows][cols];
this.weightedResiduals = new double[rows];
// Start from an infinite cost so any first evaluation improves it.
cost = Double.POSITIVE_INFINITY;
return optimizeInternal(maxEval, f, target, weights, startPoint);
}
示例3: doTestStRD
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Checks the optimizer against a NIST StRD reference dataset: fits the
 * model from the dataset's first starting point, then compares both the
 * fitted parameters and their estimated standard deviations against the
 * certified values.
 *
 * @param dataset     reference dataset providing data, start point and
 *                    certified values
 * @param errParams   relative tolerance on the fitted parameters
 * @param errParamsSd relative tolerance on the parameter standard deviations
 */
public void doTestStRD(final StatisticalReferenceDataset dataset,
                       final double errParams, final double errParamsSd) {
    final AbstractLeastSquaresOptimizer optimizer = createOptimizer();

    // Every observation gets the same unit weight.
    final double[] unitWeights = new double[dataset.getNumObservations()];
    Arrays.fill(unitWeights, 1.0);

    final double[][] data = dataset.getData();
    final double[] start = dataset.getStartingPoint(0);
    final MultivariateDifferentiableVectorFunction problem =
        dataset.getLeastSquaresProblem();

    // data[1] holds the observed values the model is fitted against.
    final PointVectorValuePair optimum =
        optimizer.optimize(100, problem, data[1], unitWeights, start);

    final double[] fitted = optimum.getPoint();
    final double[] fittedSd = optimizer.guessParametersErrors();
    for (int i = 0; i < fitted.length; ++i) {
        double certified = dataset.getParameter(i);
        double tol = FastMath.abs(errParams * certified);
        Assert.assertEquals(dataset.getName() + ", param #" + i,
                            certified, fitted[i], tol);
        certified = dataset.getParameterStandardDeviation(i);
        tol = FastMath.abs(errParamsSd * certified);
        Assert.assertEquals(dataset.getName() + ", sd of param #" + i,
                            certified, fittedSd[i], tol);
    }
}
示例4: solve
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Solves f(x) = 0 by casting root finding as a least-squares problem:
 * minimize the weighted squared residuals against an all-zero target
 * with unit weights.
 *
 * @param maxEval    maximum number of function evaluations
 * @param f          vector function whose root is sought
 * @param startValue initial guess (its length fixes the problem dimension)
 * @return the point minimizing the residual norm
 */
@Override
public double[] solve(int maxEval,
MultivariateDifferentiableVectorFunction f, double[] startValue) {
    final int dim = startValue.length;
    // Java zero-initializes the target array; only the weights need filling.
    final double[] zeroTarget = new double[dim];
    final double[] unitWeights = new double[dim];
    Arrays.fill(unitWeights, 1.0);
    return optim.optimize(new MaxEval(maxEval),
                          new InitialGuess(startValue),
                          new Target(zeroTarget),
                          new Weight(unitWeights),
                          new ModelFunction(f),
                          new ModelFunctionJacobian(new JacobianFunction(f)))
                .getPoint();
}
示例5: testTrivial
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Multi-start optimization of a trivial 1-D linear least-squares problem
 * (2x = 3): verifies that getOptima() fails before any optimization, that
 * every one of the 10 restarts converges to x = 1.5 (model value 3), and
 * that the evaluation counters fall in the expected range.
 */
@Test
public void testTrivial() {
    LinearProblem problem =
        new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
    // TODO: the wrapper around GaussNewtonOptimizer is a temporary hack for
    // version 3.1 of the library. It should be removed when GaussNewtonOptimizer
    // will officially be declared as implementing MultivariateDifferentiableVectorOptimizer
    MultivariateDifferentiableVectorOptimizer underlyingOptimizer =
        new MultivariateDifferentiableVectorOptimizer() {
            private GaussNewtonOptimizer gn =
                new GaussNewtonOptimizer(true,
                                         new SimpleVectorValueChecker(1.0e-6, 1.0e-6));
            public PointVectorValuePair optimize(int maxEval,
                                                 MultivariateDifferentiableVectorFunction f,
                                                 double[] target,
                                                 double[] weight,
                                                 double[] startPoint) {
                return gn.optimize(maxEval, f, target, weight, startPoint);
            }
            public int getMaxEvaluations() {
                return gn.getMaxEvaluations();
            }
            public int getEvaluations() {
                return gn.getEvaluations();
            }
            public ConvergenceChecker<PointVectorValuePair> getConvergenceChecker() {
                return gn.getConvergenceChecker();
            }
        };
    JDKRandomGenerator g = new JDKRandomGenerator();
    // Fixed seed keeps the 10 random restarts reproducible.
    // Uppercase 'L' suffix: lowercase 'l' is easily confused with digit '1'.
    g.setSeed(16069223052L);
    RandomVectorGenerator generator =
        new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
    MultivariateDifferentiableVectorMultiStartOptimizer optimizer =
        new MultivariateDifferentiableVectorMultiStartOptimizer(underlyingOptimizer,
                                                                10, generator);
    // No optima are available before the first optimization attempt.
    try {
        optimizer.getOptima();
        Assert.fail("an exception should have been thrown");
    } catch (MathIllegalStateException ise) {
        // expected
    }
    PointVectorValuePair optimum =
        optimizer.optimize(100, problem, problem.target, new double[] { 1 }, new double[] { 0 });
    // Solution of 2x = 3 is x = 1.5, giving a model value of 3.
    Assert.assertEquals(1.5, optimum.getPoint()[0], 1.0e-10);
    Assert.assertEquals(3.0, optimum.getValue()[0], 1.0e-10);
    PointVectorValuePair[] optima = optimizer.getOptima();
    Assert.assertEquals(10, optima.length);
    for (int i = 0; i < optima.length; ++i) {
        Assert.assertEquals(1.5, optima[i].getPoint()[0], 1.0e-10);
        Assert.assertEquals(3.0, optima[i].getValue()[0], 1.0e-10);
    }
    // All 10 starts together should use a plausible number of evaluations.
    Assert.assertTrue(optimizer.getEvaluations() > 20);
    Assert.assertTrue(optimizer.getEvaluations() < 50);
    Assert.assertEquals(100, optimizer.getMaxEvaluations());
}
开发者ID:Quanticol,项目名称:CARMA,代码行数:63,代码来源:MultivariateDifferentiableVectorMultiStartOptimizerTest.java
示例6: optimize
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
* Optimize an objective function.
* Optimization is considered to be a weighted least-squares minimization.
* The cost function to be minimized is
* <code>∑weight<sub>i</sub>(objective<sub>i</sub> - target<sub>i</sub>)<sup>2</sup></code>
*
* @param f Objective function.
* @param target Target value for the objective functions at optimum.
* @param weights Weights for the least squares cost computation.
* @param startPoint Start point for optimization.
* @return the point/value pair giving the optimal value for objective
* function.
* @param maxEval Maximum number of function evaluations.
* @throws org.apache.commons.math3.exception.DimensionMismatchException
* if the start point dimension is wrong.
* @throws org.apache.commons.math3.exception.TooManyEvaluationsException
* if the maximal number of evaluations is exceeded.
* @throws org.apache.commons.math3.exception.NullArgumentException if
* any argument is {@code null}.
* @deprecated As of 3.1. Please use
* {@link BaseAbstractMultivariateVectorOptimizer#optimize(int,
* org.apache.commons.math3.analysis.MultivariateVectorFunction,OptimizationData[])
* optimize(int,MultivariateDifferentiableVectorFunction,OptimizationData...)}
* instead.
*/
@Deprecated
public PointVectorValuePair optimize(final int maxEval,
final MultivariateDifferentiableVectorFunction f,
final double[] target, final double[] weights,
final double[] startPoint) {
return optimizeInternal(maxEval, f,
new Target(target),
new Weight(weights),
new InitialGuess(startPoint));
}
示例7: optimizeInternal
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Optimize an objective function.
 * Optimization is considered to be a weighted least-squares minimization.
 * The cost function to be minimized is
 * <code>&sum;weight<sub>i</sub>(objective<sub>i</sub> - target<sub>i</sub>)<sup>2</sup></code>
 *
 * @param maxEval Allowed number of evaluations of the objective function.
 * @param f Objective function.
 * @param optData Optimization data. The following data will be looked for:
 * <ul>
 * <li>{@link Target}</li>
 * <li>{@link Weight}</li>
 * <li>{@link InitialGuess}</li>
 * </ul>
 * @return the point/value pair giving the optimal value of the objective
 * function.
 * @throws org.apache.commons.math3.exception.TooManyEvaluationsException if
 * the maximal number of evaluations is exceeded.
 * @throws DimensionMismatchException if the target and weight arguments
 * have inconsistent dimensions.
 * @see BaseAbstractMultivariateVectorOptimizer#optimizeInternal(int,
 * org.apache.commons.math3.analysis.MultivariateVectorFunction,OptimizationData[])
 * @since 3.1
 * @deprecated As of 3.1. Override is necessary only until this class's generic
 * argument is changed to {@code MultivariateDifferentiableVectorFunction}.
 */
@Deprecated
protected PointVectorValuePair optimizeInternal(final int maxEval,
final MultivariateDifferentiableVectorFunction f,
OptimizationData... optData) {
// XXX Conversion will be removed when the generic argument of the
// base class becomes "MultivariateDifferentiableVectorFunction".
// Until then, adapt f to the legacy interface the base class expects.
return super.optimizeInternal(maxEval, FunctionUtils.toDifferentiableMultivariateVectorFunction(f), optData);
}
示例8: optimize
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Optimize an objective function.
 * Optimization is considered to be a weighted least-squares minimization.
 * The cost function to be minimized is
 * <code>&sum;weight<sub>i</sub>(objective<sub>i</sub> - target<sub>i</sub>)<sup>2</sup></code>
 *
 * @param maxEval Maximum number of function evaluations.
 * @param f Objective function.
 * @param target Target value for the objective functions at optimum.
 * @param weights Weights for the least squares cost computation.
 * @param startPoint Start point for optimization.
 * @return the point/value pair giving the optimal value for the objective
 * function.
 * @throws org.apache.commons.math3.exception.DimensionMismatchException
 * if the start point dimension is wrong.
 * @throws org.apache.commons.math3.exception.TooManyEvaluationsException
 * if the maximal number of evaluations is exceeded.
 * @throws org.apache.commons.math3.exception.NullArgumentException if
 * any argument is {@code null}.
 * @deprecated As of 3.1. Please use
 * {@link BaseAbstractMultivariateVectorOptimizer#optimize(int,MultivariateVectorFunction,OptimizationData[])
 * optimize(int,MultivariateDifferentiableVectorFunction,OptimizationData...)}
 * instead.
 */
@Deprecated
public PointVectorValuePair optimize(final int maxEval,
final MultivariateDifferentiableVectorFunction f,
final double[] target, final double[] weights,
final double[] startPoint) {
// Wrap the legacy array arguments as OptimizationData and delegate.
return optimizeInternal(maxEval, f,
new Target(target),
new Weight(weights),
new InitialGuess(startPoint));
}
示例9: optimizeInternal
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Optimize an objective function.
 * Optimization is considered to be a weighted least-squares minimization.
 * The cost function to be minimized is
 * <code>&sum;weight<sub>i</sub>(objective<sub>i</sub> - target<sub>i</sub>)<sup>2</sup></code>
 *
 * @param maxEval Allowed number of evaluations of the objective function.
 * @param f Objective function.
 * @param optData Optimization data. The following data will be looked for:
 * <ul>
 * <li>{@link Target}</li>
 * <li>{@link Weight}</li>
 * <li>{@link InitialGuess}</li>
 * </ul>
 * @return the point/value pair giving the optimal value of the objective
 * function.
 * @throws org.apache.commons.math3.exception.TooManyEvaluationsException if
 * the maximal number of evaluations is exceeded.
 * @throws DimensionMismatchException if the target and weight arguments
 * have inconsistent dimensions.
 * @see BaseAbstractMultivariateVectorOptimizer#optimizeInternal(int,MultivariateVectorFunction,OptimizationData[])
 * @since 3.1
 * @deprecated As of 3.1. Override is necessary only until this class's generic
 * argument is changed to {@code MultivariateDifferentiableVectorFunction}.
 */
@Deprecated
protected PointVectorValuePair optimizeInternal(final int maxEval,
final MultivariateDifferentiableVectorFunction f,
OptimizationData... optData) {
// XXX Conversion will be removed when the generic argument of the
// base class becomes "MultivariateDifferentiableVectorFunction".
// Until then, adapt f to the legacy interface the base class expects.
return super.optimizeInternal(maxEval, FunctionUtils.toDifferentiableMultivariateVectorFunction(f), optData);
}
示例10: getLeastSquaresProblem
import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; //导入依赖的package包/类
/**
 * Returns the least-squares problem corresponding to fitting the model to
 * the specified data.
 *
 * <p>Note: the same stored {@code problem} instance is returned on every
 * call; it is not copied.</p>
 *
 * @return the least-squares problem, as a differentiable vector function
 */
public MultivariateDifferentiableVectorFunction getLeastSquaresProblem() {
return problem;
}