37 changes: 37 additions & 0 deletions src/Numerics.Tests/OptimizationTests/NewtonMinimizerTests.cs
@@ -84,6 +84,32 @@ protected override void Evaluate()
}
}

public class LazySixHumpCamelObjectiveFunction : LazyObjectiveFunctionBase
{
public LazySixHumpCamelObjectiveFunction() : base(true, true) { }

public override IObjectiveFunction CreateNew()
{
return new LazySixHumpCamelObjectiveFunction();
}

protected override void EvaluateValue()
{
Value = SixHumpCamelFunction.Value(Point);
}

protected override void EvaluateGradient()
{
Gradient = SixHumpCamelFunction.Gradient(Point);
}

protected override void EvaluateHessian()
{
Hessian = SixHumpCamelFunction.Hessian(Point);
}
}

[TestFixture]
public class NewtonMinimizerTests
{
@@ -153,6 +179,17 @@ public void FindMinimum_Linesearch_Rosenbrock_Overton()
Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(1e-3));
}

[Test]
public void FindMinimum_SixHumpCamel_IndefiniteHessian()
{
var obj = new LazySixHumpCamelObjectiveFunction();
var solver = new NewtonMinimizer(1e-5, 1000, true, HessianModifiers.ReverseNegativeEigenValues);
var result = solver.FindMinimum(obj, new DenseVector(new double[] { 1.0, -0.6 }));

Assert.That(result.MinimizingPoint[0], Is.EqualTo(0.0898).Within(1e-3));
Assert.That(result.MinimizingPoint[1], Is.EqualTo(-0.7126).Within(1e-3));
}
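The test name refers to the fact that the Hessian of the six-hump camel function is indefinite at the chosen starting point. As a quick check against the Hessian implemented in SixHumpCamelFunction.cs below:

$$
\nabla^2 f(1.0,\ -0.6) = \begin{pmatrix} 8 - 25.2(1)^2 + 10(1)^4 & 1 \\ 1 & 48(-0.6)^2 - 8 \end{pmatrix} = \begin{pmatrix} -7.2 & 1 \\ 1 & 9.28 \end{pmatrix},
$$

whose determinant is negative, so it has one negative and one positive eigenvalue. The unmodified Newton step is therefore not guaranteed to be a descent direction, which is exactly the case the ReverseNegativeEigenValues modifier is meant to handle.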

private class MghTestCaseEnumerator : IEnumerable<ITestCaseData>
{
private static readonly string[] _ignore_list =
95 changes: 95 additions & 0 deletions src/Numerics.Tests/OptimizationTests/TestFunctions/SixHumpCamelFunction.cs
@@ -0,0 +1,95 @@
// <copyright file="SixHumpCamelFunction.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// https://numerics.mathdotnet.com
// https://github.com/mathnet/mathnet-numerics
//
// Copyright (c) 2009-$CURRENT_YEAR$ Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>

using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Double;

namespace MathNet.Numerics.Tests.OptimizationTests.TestFunctions
{
/// <summary>
/// Six-Hump Camel Function; see http://www.sfu.ca/~ssurjano/camel6.html for the formula and global minimum locations
/// </summary>
public static class SixHumpCamelFunction
{
public static double Value(Vector<double> input)
{
double x = input[0];
double y = input[1];

double x2 = x * x;
double x4 = x * x * x * x;
double y2 = y * y;

return x2 * (4 - 2.1 * x2 + x4 / 3) + x * y + 4 * y2 * (y2 - 1);
}

public static Vector<double> Gradient(Vector<double> input)
{
double x = input[0];
double y = input[1];

double x3 = x * x * x;
double x5 = x * x * x * x * x;
double y2 = y * y;
double y3 = y * y * y;

Vector<double> output = new DenseVector(2);

output[0] = 2 * (4 * x - 4.2 * x3 + x5 + 0.5 * y);
output[1] = x - 8 * y + 16 * y3;
return output;
}

public static Matrix<double> Hessian(Vector<double> input)
{
double x = input[0];
double y = input[1];

double x2 = x * x;
double x4 = x * x * x * x;
double y2 = y * y;

Matrix<double> output = new DenseMatrix(2, 2);
output[0, 0] = 10 * (0.8 - 2.52 * x2 + x4);
output[1, 1] = 48 * y2 - 8;
output[0, 1] = 1;
output[1, 0] = output[0, 1];
return output;
}

public static Vector<double> Minimum
{
get
{
return new DenseVector(new double[] { 0.0898, -0.7126 });
}
}
}
}
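For reference, the implementation above follows the standard six-hump camel function; the Value, Gradient, and Hessian methods correspond to

$$
f(x,y) = \left(4 - 2.1x^2 + \tfrac{x^4}{3}\right)x^2 + xy + \left(4y^2 - 4\right)y^2,
$$
$$
\nabla f(x,y) = \begin{pmatrix} 8x - 8.4x^3 + 2x^5 + y \\ x - 8y + 16y^3 \end{pmatrix},
\qquad
\nabla^2 f(x,y) = \begin{pmatrix} 8 - 25.2x^2 + 10x^4 & 1 \\ 1 & 48y^2 - 8 \end{pmatrix},
$$

with two global minima at approximately $(0.0898, -0.7126)$ and $(-0.0898, 0.7126)$, where $f \approx -1.0316$.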
43 changes: 43 additions & 0 deletions src/Numerics/Optimization/HessianModifiers.cs
@@ -0,0 +1,43 @@
// <copyright file="HessianModifiers.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// https://numerics.mathdotnet.com
// https://github.com/mathnet/mathnet-numerics
//
// Copyright (c) 2009-$CURRENT_YEAR$ Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>

namespace MathNet.Numerics.Optimization
{
/// <summary>
/// Strategies for modifying the Hessian before computing the Newton search direction.
/// </summary>
public enum HessianModifiers
{
/// <summary>Use the Hessian as supplied by the objective function.</summary>
None,
/// <summary>Replace negative eigenvalues of the Hessian with their absolute values so the modified Hessian is positive definite.</summary>
ReverseNegativeEigenValues
}
}
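A minimal usage sketch of the new option, mirroring the test added above. It assumes the snippet sits alongside the LazySixHumpCamelObjectiveFunction test helper from this PR; the tolerance and iteration counts are simply the test's settings.

using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.Optimization;

// Request the eigenvalue-reversal safeguard so each Newton step is computed
// from a positive definite modification of the Hessian.
var objective = new LazySixHumpCamelObjectiveFunction();
var solver = new NewtonMinimizer(
    gradientTolerance: 1e-5,
    maximumIterations: 1000,
    useLineSearch: true,
    modifier: HessianModifiers.ReverseNegativeEigenValues);
var result = solver.FindMinimum(objective, new DenseVector(new[] { 1.0, -0.6 }));
// result.MinimizingPoint should land near the global minimum (0.0898, -0.7126).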
51 changes: 47 additions & 4 deletions src/Numerics/Optimization/NewtonMinimizer.cs
@@ -30,6 +30,7 @@
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.Optimization.LineSearch;
using MathNet.Numerics.LinearAlgebra.Factorization;

namespace MathNet.Numerics.Optimization
{
@@ -38,20 +39,22 @@ public sealed class NewtonMinimizer : IUnconstrainedMinimizer
public double GradientTolerance { get; set; }
public int MaximumIterations { get; set; }
public bool UseLineSearch { get; set; }
public HessianModifiers Modifier { get; set; }

public NewtonMinimizer(double gradientTolerance, int maximumIterations, bool useLineSearch = false)
public NewtonMinimizer(double gradientTolerance, int maximumIterations, bool useLineSearch = false, HessianModifiers modifier = HessianModifiers.None)
{
GradientTolerance = gradientTolerance;
MaximumIterations = maximumIterations;
UseLineSearch = useLineSearch;
Modifier = modifier;
}

public MinimizationResult FindMinimum(IObjectiveFunction objective, Vector<double> initialGuess)
{
return Minimum(objective, initialGuess, GradientTolerance, MaximumIterations, UseLineSearch);
return Minimum(objective, initialGuess, GradientTolerance, MaximumIterations, UseLineSearch, Modifier);
}

public static MinimizationResult Minimum(IObjectiveFunction objective, Vector<double> initialGuess, double gradientTolerance=1e-8, int maxIterations=1000, bool useLineSearch=false)
public static MinimizationResult Minimum(IObjectiveFunction objective, Vector<double> initialGuess, double gradientTolerance=1e-8, int maxIterations=1000, bool useLineSearch=false, HessianModifiers modifier=HessianModifiers.None)
{
if (!objective.IsGradientSupported)
{
@@ -83,7 +86,7 @@ public static MinimizationResult Minimum(IObjectiveFunction objective, Vector<do
{
ValidateHessian(objective);

var searchDirection = objective.Hessian.LU().Solve(-objective.Gradient);
var searchDirection = CalculateSearchDirection(objective, modifier);
if (searchDirection * objective.Gradient >= 0)
{
searchDirection = -objective.Gradient;
@@ -125,6 +128,46 @@ public static MinimizationResult Minimum(IObjectiveFunction objective, Vector<do
return new MinimizationWithLineSearchResult(objective, iterations, ExitCondition.AbsoluteGradient, totalLineSearchSteps, iterationsWithNontrivialLineSearch);
}

static Vector<double> CalculateSearchDirection(IObjectiveFunction objective, HessianModifiers modifier)
{
Vector<double> searchDirection = null;
switch (modifier)
{
case HessianModifiers.None:
searchDirection = SolveLU(objective);
break;
case HessianModifiers.ReverseNegativeEigenValues:
searchDirection = ReverseNegativeEigenValuesAndSolve(objective);
break;
}

return searchDirection;
}

static Vector<double> SolveLU(IObjectiveFunction objective)
{
return objective.Hessian.LU().Solve(-objective.Gradient);
}

/// <summary>
/// Forces the Hessian to be positive definite by reversing its negative eigenvalues, then uses the eigenvalue decomposition (EVD) to solve for the search direction.
/// Implementation based on Philip E. Gill, Walter Murray, and Margaret H. Wright, Practical Optimization, 1981, pp. 107–108.
/// </summary>
/// <param name="objective">Objective function providing the current Hessian and gradient.</param>
/// <returns>The Newton search direction computed from the modified Hessian.</returns>
static Vector<double> ReverseNegativeEigenValuesAndSolve(IObjectiveFunction objective)
{
Evd<double> evd = objective.Hessian.Evd(Symmetricity.Symmetric);
for (int i = 0; i < evd.EigenValues.Count; i++)
{
// Flip negative eigenvalues, and clamp eigenvalues below double.Epsilon, so the modified Hessian is positive definite
if (evd.EigenValues[i].Real < double.Epsilon)
{
evd.EigenValues[i] = Math.Max(-evd.EigenValues[i].Real, double.Epsilon);
}
}
return evd.Solve(-objective.Gradient);
}

static void ValidateGradient(IObjectiveFunctionEvaluation eval)
{
foreach (var x in eval.Gradient)
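In the notation of the Gill, Murray and Wright reference cited above, the eigenvalue clamp in ReverseNegativeEigenValuesAndSolve amounts to the following modified Newton step (assuming Evd.Solve uses the mutated eigenvalues together with the original eigenvectors):

$$
\nabla^2 f = Q \Lambda Q^{\mathsf T},
\qquad
\tilde{\Lambda}_{ii} = \max(|\lambda_i|,\ \varepsilon),
\qquad
p = -Q \tilde{\Lambda}^{-1} Q^{\mathsf T} \nabla f,
$$

so $p^{\mathsf T} \nabla f = -\nabla f^{\mathsf T} Q \tilde{\Lambda}^{-1} Q^{\mathsf T} \nabla f < 0$ whenever the gradient is nonzero, i.e. the modified step is always a descent direction. The existing steepest-descent fallback in Minimum remains in place as a further safeguard.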