This article collects typical usage examples of the C# class Accord.Math.Optimization.NonlinearObjectiveFunction. If you are wondering what the NonlinearObjectiveFunction class does, how to use it, or are looking for concrete examples, the curated code samples below should help.
The NonlinearObjectiveFunction class belongs to the Accord.Math.Optimization namespace. Fifteen code examples of the class are presented below, sorted by popularity.
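Before going through the examples, the following minimal sketch (illustrative code, not taken from the samples below) shows the construction pattern most of them share: the constructor receives the number of variables plus function and gradient delegates, and the object exposes the function through its Function property (a matching Gradient property is assumed here for the gradient delegate).
// Minimal usage sketch (illustrative): a two-variable objective f(x) = x0² + x1²
// together with its analytic gradient.
var objective = new NonlinearObjectiveFunction(2,
    function: x => x[0] * x[0] + x[1] * x[1],
    gradient: x => new[] { 2 * x[0], 2 * x[1] });

// Evaluate the stored delegates directly.
double value = objective.Function(new double[] { 1.0, 2.0 });   // 5
double[] slope = objective.Gradient(new double[] { 1.0, 2.0 }); // { 2, 4 }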
Example 1: QuadraticConstraintConstructorTest
public void QuadraticConstraintConstructorTest()
{
IObjectiveFunction objective = null;
double[,] quadraticTerms =
{
{ 1, 2, 3 },
{ 4, 5, 6 },
{ 7, 8, 9 },
};
double[] linearTerms = { 1, 2, 3 };
objective = new NonlinearObjectiveFunction(3, f => f[0] + f[1] + f[2]);
QuadraticConstraint target = new QuadraticConstraint(objective,
quadraticTerms, linearTerms,
ConstraintType.LesserThanOrEqualTo, 0);
var function = target.Function;
var gradient = target.Gradient;
FiniteDifferences fd = new FiniteDifferences(3, function);
double[][] x =
{
new double[] { 1, 2, 3 },
new double[] { 3, 1, 4 },
new double[] { -6 , 5, 9 },
new double[] { 31, 25, 246 },
new double[] { -0.102, 0, 10 },
};
{ // Function test
for (int i = 0; i < x.Length; i++)
{
double expected =
(x[i].Multiply(quadraticTerms)).InnerProduct(x[i])
+ linearTerms.InnerProduct(x[i]);
double actual = function(x[i]);
Assert.AreEqual(expected, actual, 1e-8);
}
}
{ // Gradient test
for (int i = 0; i < x.Length; i++)
{
double[] expected = fd.Compute(x[i]);
double[] actual = gradient(x[i]);
for (int j = 0; j < actual.Length; j++)
Assert.AreEqual(expected[j], actual[j], 1e-8);
}
}
}
Example 2: ConstructorTest2
public void ConstructorTest2()
{
var function = new NonlinearObjectiveFunction(2, x => x[0] * x[1]);
NonlinearConstraint[] constraints =
{
new NonlinearConstraint(function, x => 1.0 - x[0] * x[0] - x[1] * x[1])
};
Cobyla cobyla = new Cobyla(function, constraints);
for (int i = 0; i < cobyla.Solution.Length; i++)
cobyla.Solution[i] = 1;
Assert.IsTrue(cobyla.Minimize());
double minimum = cobyla.Value;
double[] solution = cobyla.Solution;
double sqrthalf = Math.Sqrt(0.5);
Assert.AreEqual(-0.5, minimum, 1e-10);
Assert.AreEqual(sqrthalf, solution[0], 1e-5);
Assert.AreEqual(-sqrthalf, solution[1], 1e-5);
double expectedMinimum = function.Function(cobyla.Solution);
Assert.AreEqual(expectedMinimum, minimum);
}
Example 3: BaseOptimizationMethod
/// <summary>
/// Initializes a new instance of the <see cref="BaseOptimizationMethod"/> class.
/// </summary>
///
/// <param name="function">The objective function whose optimum values should be found.</param>
///
protected BaseOptimizationMethod(NonlinearObjectiveFunction function)
{
if (function == null)
throw new ArgumentNullException("function");
init(function.NumberOfVariables);
this.Function = function.Function;
}
Example 4: AugmentedLagrangianSolverConstructorTest1
public void AugmentedLagrangianSolverConstructorTest1()
{
Accord.Math.Tools.SetupGenerator(0);
// min 100(y-x*x)²+(1-x)²
//
// s.t. x <= 0
// y <= 0
//
var f = new NonlinearObjectiveFunction(2,
function: (x) => 100 * Math.Pow(x[1] - x[0] * x[0], 2) + Math.Pow(1 - x[0], 2),
gradient: (x) => new[]
{
2.0 * (200.0 * x[0]*x[0]*x[0] - 200.0 * x[0] * x[1] + x[0] - 1), // df/dx
200 * (x[1] - x[0]*x[0]) // df/dy
}
);
var constraints = new List<NonlinearConstraint>();
constraints.Add(new NonlinearConstraint(f,
function: (x) => x[0],
gradient: (x) => new[] { 1.0, 0.0 },
shouldBe: ConstraintType.LesserThanOrEqualTo, value: 0
));
constraints.Add(new NonlinearConstraint(f,
function: (x) => x[1],
gradient: (x) => new[] { 0.0, 1.0 },
shouldBe: ConstraintType.LesserThanOrEqualTo, value: 0
));
var solver = new AugmentedLagrangian(f, constraints);
Assert.IsTrue(solver.Minimize());
double minValue = solver.Value;
Assert.IsFalse(Double.IsNaN(minValue));
Assert.AreEqual(1, minValue, 1e-5);
Assert.AreEqual(0, solver.Solution[0], 1e-5);
Assert.AreEqual(0, solver.Solution[1], 1e-5);
}
Example 5: ConstructorTest4
public void ConstructorTest4()
{
var function = new NonlinearObjectiveFunction(2, x =>
Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));
NelderMead solver = new NelderMead(function);
Assert.IsTrue(solver.Minimize());
double minimum = solver.Value;
double[] solution = solver.Solution;
Assert.AreEqual(0, minimum, 1e-10);
Assert.AreEqual(-1, solution[0], 1e-5);
Assert.AreEqual(1, solution[1], 1e-4);
double expectedMinimum = function.Function(solver.Solution);
Assert.AreEqual(expectedMinimum, minimum);
}
Example 6: ConstructorTest4
public void ConstructorTest4()
{
// Weak version of Rosenbrock's problem.
var function = new NonlinearObjectiveFunction(2, x =>
Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));
Subplex solver = new Subplex(function);
Assert.IsTrue(solver.Minimize());
double minimum = solver.Value;
double[] solution = solver.Solution;
Assert.AreEqual(2, solution.Length);
Assert.AreEqual(0, minimum, 1e-10);
Assert.AreEqual(-1, solution[0], 1e-5);
Assert.AreEqual(1, solution[1], 1e-4);
double expectedMinimum = function.Function(solver.Solution);
Assert.AreEqual(expectedMinimum, minimum);
}
Example 7: CreateConstraints
// We don't use this at the moment
static List<NonlinearConstraint> CreateConstraints (Parameter[] x, NonlinearObjectiveFunction f)
{
// Now we can start stating the constraints
var nlConstraints = x.SelectMany ((p, i) => {
Func<double[], double> cfn = args => x [i].Value;
return new[] {
new NonlinearConstraint
( f
, function: cfn
, shouldBe: ConstraintType.GreaterThanOrEqualTo
, value: p.Min
, gradient: Grad (x.Length, cfn)),
new NonlinearConstraint
( f
, function: cfn
, shouldBe: ConstraintType.LesserThanOrEqualTo
, value: p.Max
, gradient: Grad (x.Length, cfn)),
};
}).ToList ();
return nlConstraints;
}
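The Grad helper used above is not included in this snippet. As an assumption about what it might look like (not the original author's code), it could wrap the FiniteDifferences class already used in Example 1 to turn any scalar function into a numerical gradient:
// Hypothetical helper: numerical gradient of an n-variable scalar function,
// built on Accord's FiniteDifferences (the same API appears in Example 1).
static Func<double[], double[]> Grad(int n, Func<double[], double> fn)
{
    var fd = new FiniteDifferences(n, fn);
    return args => fd.Compute(args);
}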
Example 8: ConstructorTest2
public void ConstructorTest2()
{
Accord.Math.Tools.SetupGenerator(0);
var function = new NonlinearObjectiveFunction(2,
function: x => x[0] * x[1],
gradient: x => new[] { x[1], x[0] });
NonlinearConstraint[] constraints =
{
new NonlinearConstraint(function,
function: x => 1.0 - x[0] * x[0] - x[1] * x[1],
gradient: x => new [] { -2 * x[0], -2 * x[1]}),
new NonlinearConstraint(function,
function: x => x[0],
gradient: x => new [] { 1.0, 0.0}),
};
var target = new ConjugateGradient(2);
AugmentedLagrangian solver = new AugmentedLagrangian(target, function, constraints);
Assert.IsTrue(solver.Minimize());
double minimum = solver.Value;
double[] solution = solver.Solution;
double sqrthalf = Math.Sqrt(0.5);
Assert.AreEqual(-0.5, minimum, 1e-5);
Assert.AreEqual(sqrthalf, solution[0], 1e-5);
Assert.AreEqual(-sqrthalf, solution[1], 1e-5);
double expectedMinimum = function.Function(solver.Solution);
Assert.AreEqual(expectedMinimum, minimum);
}
Example 9: ConstructorTest3
public void ConstructorTest3()
{
// minimize f(x) = x*y*z,
// s.t.
//
// 1 - x² - 2y² - 3z² >= 0
// x >= 0,
// y >= 0,
// -z >= 0
//
// Easy three-dimensional minimization inside an ellipsoid.
var function = new NonlinearObjectiveFunction(3,
function: x => x[0] * x[1] * x[2],
gradient: x => new[] { x[1] * x[2], x[0] * x[2], x[0] * x[1] });
NonlinearConstraint[] constraints =
{
new NonlinearConstraint(3,
function: x => 1.0 - x[0] * x[0] - 2.0 * x[1] * x[1] - 3.0 * x[2] * x[2],
gradient: x => new[] { -2.0 * x[0], -4.0 * x[1], -6.0 * x[2] }),
new NonlinearConstraint(3,
function: x => x[0],
gradient: x => new[] { 1.0, 0, 0 }),
new NonlinearConstraint(3,
function: x => x[1],
gradient: x => new[] { 0, 1.0, 0 }),
new NonlinearConstraint(3,
function: x => -x[2],
gradient: x => new[] { 0, 0, -1.0 }),
};
for (int i = 0; i < constraints.Length; i++)
{
Assert.AreEqual(ConstraintType.GreaterThanOrEqualTo, constraints[i].ShouldBe);
Assert.AreEqual(0, constraints[i].Value);
}
var inner = new BroydenFletcherGoldfarbShanno(3);
inner.LineSearch = LineSearch.BacktrackingArmijo;
inner.Corrections = 10;
var solver = new AugmentedLagrangian(inner, function, constraints);
Assert.AreEqual(inner, solver.Optimizer);
Assert.IsTrue(solver.Minimize());
double minimum = solver.Value;
double[] solution = solver.Solution;
double[] expected =
{
1.0 / Math.Sqrt(3.0), 1.0 / Math.Sqrt(6.0), -1.0 / 3.0
};
for (int i = 0; i < expected.Length; i++)
Assert.AreEqual(expected[i], solver.Solution[i], 1e-3);
Assert.AreEqual(-0.078567420132031968, minimum, 1e-4);
double expectedMinimum = function.Function(solver.Solution);
Assert.AreEqual(expectedMinimum, minimum);
}
Example 10: test1
private static void test1(IGradientOptimizationMethod inner, double tol)
{
// maximize 2x + 3y, s.t. 2x² + 2y² <= 50 and x + y = 1
//
// Max  x' * c
//  x
// s.t. x' * A * x <= k
//      x' * i = 1
double[] c = { 2, 3 };
double[,] A = { { 2, 0 }, { 0, 2 } };
double k = 50;
// Create the objective function
var objective = new NonlinearObjectiveFunction(2,
function: (x) => x.InnerProduct(c),
gradient: (x) => c
);
// Test objective
for (int i = 0; i < 10; i++)
{
for (int j = 0; j < 10; j++)
{
double expected = i * 2 + j * 3;
double actual = objective.Function(new double[] { i, j });
Assert.AreEqual(expected, actual);
}
}
// Create the optimization constraints
var constraints = new List<NonlinearConstraint>();
constraints.Add(new QuadraticConstraint(objective,
quadraticTerms: A,
shouldBe: ConstraintType.LesserThanOrEqualTo, value: k
));
constraints.Add(new NonlinearConstraint(objective,
function: (x) => x.Sum(),
gradient: (x) => new[] { 1.0, 1.0 },
shouldBe: ConstraintType.EqualTo, value: 1,
withinTolerance: 1e-10
));
// Test first constraint
for (int i = 0; i < 10; i++)
{
for (int j = 0; j < 10; j++)
{
double expected = i * (2 * i + 0 * j) + j * (0 * i + 2 * j);
double actual = constraints[0].Function(new double[] { i, j });
Assert.AreEqual(expected, actual);
}
}
// Test second constraint
for (int i = 0; i < 10; i++)
{
for (int j = 0; j < 10; j++)
{
double expected = i + j;
double actual = constraints[1].Function(new double[] { i, j });
Assert.AreEqual(expected, actual);
}
}
AugmentedLagrangian solver =
new AugmentedLagrangian(inner, objective, constraints);
Assert.AreEqual(inner, solver.Optimizer);
Assert.IsTrue(solver.Maximize());
double maxValue = solver.Value;
Assert.AreEqual(6, maxValue, tol);
Assert.AreEqual(-3, solver.Solution[0], tol);
Assert.AreEqual(4, solver.Solution[1], tol);
}
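The expected values in the asserts can be verified by hand: with x + y = 1 the objective 2x + 3y equals 2 + y, and the quadratic constraint 2x² + 2y² <= 50 with x = 1 - y restricts y to the interval [-3, 4], so the maximum value 6 is attained at (x, y) = (-3, 4). A small illustrative check (not part of the test) that confirms those numbers:
// Illustrative check of the asserted optimum (-3, 4) for test1.
double[] xStar = { -3, 4 };
double objectiveAtStar = 2 * xStar[0] + 3 * xStar[1];                       // 6, matches the assert
double quadraticAtStar = 2 * xStar[0] * xStar[0] + 2 * xStar[1] * xStar[1]; // 50, on the boundary of <= 50
double equalityAtStar = xStar[0] + xStar[1];                                // 1, satisfies x + y = 1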
Example 11: test2
private static void test2(IGradientOptimizationMethod inner)
{
// maximize 2x + 3y, s.t. 2x² + 2y² <= 50
//
// http://www.wolframalpha.com/input/?i=max+2x+%2B+3y%2C+s.t.+2x%C2%B2+%2B+2y%C2%B2+%3C%3D+50
//
// Max  x' * c
//  x
// s.t. x' * A * x <= k
double[] c = { 2, 3 };
double[,] A = { { 2, 0 }, { 0, 2 } };
double k = 50;
// Create the objective function
var objective = new NonlinearObjectiveFunction(2,
function: (x) => x.InnerProduct(c),
gradient: (x) => c
);
// Test objective
for (int i = 0; i < 10; i++)
{
for (int j = 0; j < 10; j++)
{
double expected = i * 2 + j * 3;
double actual = objective.Function(new double[] { i, j });
Assert.AreEqual(expected, actual);
}
}
// Create the optimization constraints
var constraints = new List<NonlinearConstraint>();
constraints.Add(new QuadraticConstraint(objective,
quadraticTerms: A,
shouldBe: ConstraintType.LesserThanOrEqualTo, value: k
));
// Test first constraint
for (int i = 0; i < 10; i++)
{
for (int j = 0; j < 10; j++)
{
var input = new double[] { i, j };
double expected = i * (2 * i + 0 * j) + j * (0 * i + 2 * j);
double actual = constraints[0].Function(input);
Assert.AreEqual(expected, actual);
}
}
// Create the solver algorithm
AugmentedLagrangian solver =
new AugmentedLagrangian(inner, objective, constraints);
Assert.AreEqual(inner, solver.Optimizer);
Assert.IsTrue(solver.Maximize());
double maxValue = solver.Value;
Assert.AreEqual(18.02, maxValue, 1e-2);
Assert.AreEqual(2.77, solver.Solution[0], 1e-2);
Assert.AreEqual(4.16, solver.Solution[1], 1e-2);
}
Example 12: AugmentedLagrangianSolverConstructorTest4
public void AugmentedLagrangianSolverConstructorTest4()
{
// min x*y + y*z
//
// s.t. x^2 - y^2 + z^2 - 2 >= 0
// x^2 + y^2 + z^2 - 10 <= 0
// x + y = 1
//
double x = 0, y = 0, z = 0;
var f = new NonlinearObjectiveFunction(
function: () => x * y + y * z,
gradient: () => new[]
{
y, // df/dx
x + z, // df/dy
y, // df/dz
}
);
var constraints = new List<NonlinearConstraint>();
constraints.Add(new NonlinearConstraint(f,
function: () => x * x - y * y + z * z,
gradient: () => new[] { 2 * x, -2 * y, 2 * z },
shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 2
));
constraints.Add(new NonlinearConstraint(f,
function: () => x * x + y * y + z * z,
gradient: () => new[] { 2 * x, 2 * y, 2 * z },
shouldBe: ConstraintType.LesserThanOrEqualTo, value: 10
));
constraints.Add(new NonlinearConstraint(f,
function: () => x + y,
gradient: () => new[] { 1.0, 1.0, 0.0 },
shouldBe: ConstraintType.EqualTo, value: 1
)
{
Tolerance = 1e-5
});
var solver = new AugmentedLagrangian(f, constraints);
solver.Solution[0] = 1;
solver.Solution[1] = 1;
solver.Solution[2] = 1;
Assert.IsTrue(solver.Minimize());
double minValue = solver.Value;
Assert.AreEqual(1, solver.Solution[0] + solver.Solution[1], 1e-4);
Assert.IsFalse(Double.IsNaN(minValue));
Assert.IsFalse(Double.IsNaN(solver.Solution[0]));
Assert.IsFalse(Double.IsNaN(solver.Solution[1]));
Assert.IsFalse(Double.IsNaN(solver.Solution[2]));
}
Example 13: AugmentedLagrangianSolverConstructorTest5
public void AugmentedLagrangianSolverConstructorTest5()
{
// Suppose we would like to minimize the following function:
//
// min f(x,y) = 100(y - x²)² + (1 - x)²
//
// Subject to the constraints
//
// x >= 0 (x must be positive)
// y >= 0 (y must be positive)
//
double x = 0, y = 0;
// First, we create our objective function
var f = new NonlinearObjectiveFunction(
// This is the objective function: f(x,y) = 100(y - x²)² + (1 - x)²
function: () => 100 * Math.Pow(y - x * x, 2) + Math.Pow(1 - x, 2),
// The gradient vector:
gradient: () => new[]
{
2 * (200 * Math.Pow(x, 3) - 200 * x * y + x - 1), // df/dx = 2(200x³-200xy+x-1)
200 * (y - x*x) // df/dy = 200(y-x²)
}
);
// Now we can start stating the constraints
var constraints = new List<NonlinearConstraint>();
// Add the non-negativity constraint for x
constraints.Add(new NonlinearConstraint(f,
// 1st constraint: x should be greater than or equal to 0
function: () => x, shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 0,
gradient: () => new[] { 1.0, 0.0 }
));
// Add the non-negativity constraint for y
constraints.Add(new NonlinearConstraint(f,
// 2nd constraint: y should be greater than or equal to 0
function: () => y, shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 0,
gradient: () => new[] { 0.0, 1.0 }
));
// Finally, we create the non-linear programming solver
var solver = new AugmentedLagrangian(f, constraints);
// And attempt to solve the problem
Assert.IsTrue(solver.Minimize());
double minValue = solver.Value;
Assert.AreEqual(0, minValue, 1e-10);
Assert.AreEqual(1, solver.Solution[0], 1e-6);
Assert.AreEqual(1, solver.Solution[1], 1e-6);
Assert.IsFalse(Double.IsNaN(minValue));
Assert.IsFalse(Double.IsNaN(solver.Solution[0]));
Assert.IsFalse(Double.IsNaN(solver.Solution[1]));
}
Example 14: ConstructorTest7
public void ConstructorTest7()
{
// This problem is taken from Fletcher's book Practical Methods of
// Optimization and has the equation number (14.4.2).
var function = new NonlinearObjectiveFunction(3, x => x[2]);
NonlinearConstraint[] constraints =
{
new NonlinearConstraint(3, x=> 5.0 * x[0] - x[1] + x[2]),
new NonlinearConstraint(3, x => x[2] - x[0] * x[0] - x[1] * x[1] - 4.0 * x[1]),
new NonlinearConstraint(3, x => x[2] - 5.0 * x[0] - x[1]),
};
Cobyla cobyla = new Cobyla(function, constraints);
Assert.IsTrue(cobyla.Minimize());
double minimum = cobyla.Value;
double[] solution = cobyla.Solution;
Assert.AreEqual(-3, minimum, 1e-5);
Assert.AreEqual(0.0, solution[0], 1e-5);
Assert.AreEqual(-3.0, solution[1], 1e-5);
Assert.AreEqual(-3.0, solution[2], 1e-5);
double expectedMinimum = function.Function(cobyla.Solution);
Assert.AreEqual(expectedMinimum, minimum);
}
Example 15: ConstructorTest6_3
public void ConstructorTest6_3()
{
bool thrown = false;
try
{
var function = new NonlinearObjectiveFunction(2, x => -x[0] - x[1]);
NonlinearConstraint[] constraints =
{
new NonlinearConstraint(2, x => x[1] - x[0] * x[0]),
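// Note: the next constraint is declared over 4 variables while the objective
// has only 2; this mismatch is expected to trigger the exception asserted below.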
new NonlinearConstraint(4, x => 1.0 - x[0] * x[0] - x[1] * x[1]),
};
Cobyla cobyla = new Cobyla(function, constraints);
Assert.IsTrue(cobyla.Minimize());
double minimum = cobyla.Value;
}
catch (Exception)
{
thrown = true;
}
Assert.IsTrue(thrown);
}