diff --git a/src/Numerics.Tests/OptimizationTests/BasinHoppingTests.cs b/src/Numerics.Tests/OptimizationTests/BasinHoppingTests.cs
new file mode 100644
index 000000000..94ee6bca6
--- /dev/null
+++ b/src/Numerics.Tests/OptimizationTests/BasinHoppingTests.cs
@@ -0,0 +1,691 @@
+//
+// Math.NET Numerics, part of the Math.NET Project
+// https://numerics.mathdotnet.com
+// https://github.com/mathnet/mathnet-numerics
+//
+// Copyright (c) 2009-$CURRENT_YEAR$ Math.NET
+//
+// Permission is hereby granted, free of charge, to any person
+// obtaining a copy of this software and associated documentation
+// files (the "Software"), to deal in the Software without
+// restriction, including without limitation the rights to use,
+// copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the
+// Software is furnished to do so, subject to the following
+// conditions:
+//
+// The above copyright notice and this permission notice shall be
+// included in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+// OTHER DEALINGS IN THE SOFTWARE.
+//
+
+using MathNet.Numerics.LinearAlgebra;
+using MathNet.Numerics.Optimization;
+using MathNet.Numerics.Optimization.ObjectiveFunctions;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace MathNet.Numerics.Tests.OptimizationTests
+{
+ using Random = System.Random;
+
+ [TestFixture]
+ public class BasinHoppingTests
+ {
+ #region Test Objective Functions
+
+ /// <summary>
+ /// Base class for test functions implementing IObjectiveFunction
+ /// </summary>
+ private abstract class TestFunctionBase : IObjectiveFunction
+ {
+ protected int dimensions;
+ protected Vector<double> currentPoint;
+ protected double functionValue;
+ protected Vector<double> gradient;
+ protected Matrix<double> hessian;
+
+ protected TestFunctionBase(int dimensions)
+ {
+ this.dimensions = dimensions;
+ currentPoint = Vector<double>.Build.Dense(dimensions);
+ gradient = Vector<double>.Build.Dense(dimensions);
+ hessian = Matrix<double>.Build.Dense(dimensions, dimensions);
+ }
+
+ // Abstract method to be implemented by derived classes
+ protected abstract void CalculateValues();
+
+ public void EvaluateAt(Vector<double> point)
+ {
+ currentPoint = point;
+ CalculateValues();
+ }
+
+ public abstract IObjectiveFunction Fork();
+ public abstract IObjectiveFunction CreateNew();
+
+ public Vector<double> Point => currentPoint;
+ public double Value => functionValue;
+ public Vector<double> Gradient => gradient;
+ public Matrix<double> Hessian => hessian;
+
+ // Support flags
+ public bool IsGradientSupported => true;
+ public bool IsHessianSupported => false; // Set to false as we don't implement full Hessian in all cases
+ }
+
+ /// <summary>
+ /// Rastrigin function implementation
+ /// f(x) = 10n + Σ[x_i² - 10cos(2πx_i)]
+ /// Global minimum at (0,0,...,0) with f(x) = 0
+ /// </summary>
+ private class RastriginFunction : TestFunctionBase
+ {
+ public RastriginFunction(int dimensions) : base(dimensions) { }
+
+ protected override void CalculateValues()
+ {
+ // Calculate function value
+ functionValue = 10 * dimensions;
+ for (var i = 0; i < dimensions; i++)
+ {
+ functionValue += Math.Pow(currentPoint[i], 2) - 10 * Math.Cos(2 * Math.PI * currentPoint[i]);
+ }
+
+ // Calculate gradient
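+ // ∂f/∂x_i = 2·x_i + 20·π·sin(2·π·x_i)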
+ for (var i = 0; i < dimensions; i++)
+ {
+ gradient[i] = 2 * currentPoint[i] + 20 * Math.PI * Math.Sin(2 * Math.PI * currentPoint[i]);
+ }
+ }
+
+ public override IObjectiveFunction Fork()
+ {
+ var fork = new RastriginFunction(dimensions);
+ fork.EvaluateAt(currentPoint);
+ return fork;
+ }
+
+ public override IObjectiveFunction CreateNew()
+ {
+ return new RastriginFunction(dimensions);
+ }
+ }
+
+ /// <summary>
+ /// Rosenbrock function implementation
+ /// f(x) = Σ[100(x_{i+1} - x_i²)² + (x_i - 1)²]
+ /// Global minimum at (1,1,...,1) with f(x) = 0
+ /// </summary>
+ private class RosenbrockFunction : TestFunctionBase
+ {
+ public RosenbrockFunction(int dimensions) : base(dimensions)
+ {
+ if (dimensions < 2)
+ throw new ArgumentException("Rosenbrock function requires at least 2 dimensions");
+ }
+
+ protected override void CalculateValues()
+ {
+ // Calculate function value
+ functionValue = 0;
+ for (var i = 0; i < dimensions - 1; i++)
+ {
+ var a = currentPoint[i + 1] - Math.Pow(currentPoint[i], 2);
+ var b = currentPoint[i] - 1;
+ functionValue += 100 * Math.Pow(a, 2) + Math.Pow(b, 2);
+ }
+
+ // Calculate gradient
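+ // ∂f/∂x_i = 200(x_i - x_{i-1}²) - 400·x_i·(x_{i+1} - x_i²) - 2(1 - x_i),
+ // where the first term drops for i = 0 and the last two drop for i = n - 1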
+ for (var i = 0; i < dimensions; i++)
+ {
+ if (i == 0)
+ {
+ gradient[i] = -400 * currentPoint[i] * (currentPoint[i + 1] - Math.Pow(currentPoint[i], 2)) - 2 * (1 - currentPoint[i]);
+ }
+ else if (i == dimensions - 1)
+ {
+ gradient[i] = 200 * (currentPoint[i] - Math.Pow(currentPoint[i - 1], 2));
+ }
+ else
+ {
+ gradient[i] = 200 * (currentPoint[i] - Math.Pow(currentPoint[i - 1], 2))
+ - 400 * currentPoint[i] * (currentPoint[i + 1] - Math.Pow(currentPoint[i], 2))
+ - 2 * (1 - currentPoint[i]);
+ }
+ }
+ }
+
+ public override IObjectiveFunction Fork()
+ {
+ var fork = new RosenbrockFunction(dimensions);
+ fork.EvaluateAt(currentPoint);
+ return fork;
+ }
+
+ public override IObjectiveFunction CreateNew()
+ {
+ return new RosenbrockFunction(dimensions);
+ }
+ }
+
+ /// <summary>
+ /// Himmelblau function implementation
+ /// f(x,y) = (x² + y - 11)² + (x + y² - 7)²
+ /// Has 4 local minima
+ /// </summary>
+ private class HimmelblauFunction : TestFunctionBase
+ {
+ public HimmelblauFunction() : base(2) { }
+
+ protected override void CalculateValues()
+ {
+ var x = currentPoint[0];
+ var y = currentPoint[1];
+
+ // Calculate function value
+ var term1 = Math.Pow(x, 2) + y - 11;
+ var term2 = x + Math.Pow(y, 2) - 7;
+ functionValue = Math.Pow(term1, 2) + Math.Pow(term2, 2);
+
+ // Calculate gradient
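+ // ∂f/∂x = 4x(x² + y - 11) + 2(x + y² - 7),  ∂f/∂y = 2(x² + y - 11) + 4y(x + y² - 7)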
+ gradient[0] = 4 * x * term1 + 2 * term2;
+ gradient[1] = 2 * term1 + 4 * y * term2;
+ }
+
+ public override IObjectiveFunction Fork()
+ {
+ var fork = new HimmelblauFunction();
+ fork.EvaluateAt(currentPoint);
+ return fork;
+ }
+
+ public override IObjectiveFunction CreateNew()
+ {
+ return new HimmelblauFunction();
+ }
+ }
+
+ /// <summary>
+ /// Ackley function implementation
+ /// f(x) = -20*exp(-0.2*sqrt(0.5*(x₁² + x₂²))) - exp(0.5*(cos(2πx₁) + cos(2πx₂))) + 20 + e
+ /// Global minimum at (0,0) with f(x) = 0
+ /// </summary>
+ private class AckleyFunction : TestFunctionBase
+ {
+ private const double A = 20;
+ private const double B = 0.2;
+ private const double C = 2 * Math.PI;
+
+ public AckleyFunction(int dimensions) : base(dimensions) { }
+
+ protected override void CalculateValues()
+ {
+ double sumSquares = 0;
+ double sumCos = 0;
+
+ for (var i = 0; i < dimensions; i++)
+ {
+ sumSquares += Math.Pow(currentPoint[i], 2);
+ sumCos += Math.Cos(C * currentPoint[i]);
+ }
+
+ var term1 = -A * Math.Exp(-B * Math.Sqrt(sumSquares / dimensions));
+ var term2 = -Math.Exp(sumCos / dimensions);
+
+ functionValue = term1 + term2 + A + Math.E;
+
+ // Calculate gradient
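+ // ∂f/∂x_i = A·B·exp(-B·r)·x_i/(n·r) + (C/n)·exp(Σcos(C·x_j)/n)·sin(C·x_i), where r = sqrt(Σx_j²/n)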
+ var sqrtTerm = Math.Sqrt(sumSquares / dimensions);
+ var expTerm1 = Math.Exp(-B * sqrtTerm);
+ var expTerm2 = Math.Exp(sumCos / dimensions);
+
+ for (var i = 0; i < dimensions; i++)
+ {
+ var gradTerm1 = A * B * expTerm1 * currentPoint[i] / (dimensions * sqrtTerm);
+ var gradTerm2 = C * expTerm2 * Math.Sin(C * currentPoint[i]) / dimensions;
+
+ gradient[i] = gradTerm1 + gradTerm2;
+ }
+ }
+
+ public override IObjectiveFunction Fork()
+ {
+ var fork = new AckleyFunction(dimensions);
+ fork.EvaluateAt(currentPoint);
+ return fork;
+ }
+
+ public override IObjectiveFunction CreateNew()
+ {
+ return new AckleyFunction(dimensions);
+ }
+ }
+
+ #endregion
+
+ // Note: NelderMeadSimplex does not work reliably with BasinHopping in this implementation;
+ // it is only exercised in the local-minimizer comparison test below.
+
+ [Test]
+ public void BasinHopping_RastriginFunction_FindsGlobalMinimum()
+ {
+ var dimensions = 2;
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 3.0);
+ var objective = new RastriginFunction(dimensions);
+ objective.EvaluateAt(initialGuess); // Initial evaluation
+
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 500);
+ var basinHopping = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 1.0,
+ stepSize: 1.0,
+ maxIterations: 50);
+
+ // Track progress
+ var functionValues = new List<double>();
+ var acceptedSteps = new List<bool>();
+
+ basinHopping.SetCallback((x, f, accepted) =>
+ {
+ functionValues.Add(f);
+ acceptedSteps.Add(accepted);
+ Debug.WriteLine($"Iteration: {functionValues.Count}, Value: {f}, Accepted: {accepted}");
+ return false; // Continue optimization
+ });
+
+ var result = basinHopping.FindMinimum(objective, initialGuess);
+
+ Debug.WriteLine($"Final solution: [{string.Join(", ", result.MinimizingPoint)}]");
+
+ // Evaluate objective at final point to get function value
+ var finalObjective = objective.CreateNew();
+ finalObjective.EvaluateAt(result.MinimizingPoint);
+ var finalValue = finalObjective.Value;
+
+ Debug.WriteLine($"Function value: {finalValue}");
+ Debug.WriteLine($"Iterations: {result.Iterations}");
+ Debug.WriteLine($"Acceptance rate: {acceptedSteps.Count(x => x) / (double)acceptedSteps.Count:P2}");
+
+ // Global minimum should be close to (0,0)
+ for (var i = 0; i < dimensions; i++)
+ {
+ Assert.IsTrue(Math.Abs(result.MinimizingPoint[i]) < 0.1,
+ $"Parameter {i} should be close to 0, got {result.MinimizingPoint[i]}");
+ }
+
+ Assert.IsTrue(finalValue < 0.1,
+ $"Function value should be close to 0, got {finalValue}");
+ }
+
+ [Test]
+ public void BasinHopping_RosenbrockFunction_FindsGlobalMinimum()
+ {
+ var dimensions = 4;
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 0.0); // Start at origin
+ var objective = new RosenbrockFunction(dimensions);
+ objective.EvaluateAt(initialGuess);
+
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 500);
+ var basinHopping = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 1.0,
+ stepSize: 0.5,
+ maxIterations: 100);
+
+ var result = basinHopping.FindMinimum(objective, initialGuess);
+
+ Debug.WriteLine($"Final solution: [{string.Join(", ", result.MinimizingPoint)}]");
+
+ // Evaluate at final point
+ var finalObjective = objective.CreateNew();
+ finalObjective.EvaluateAt(result.MinimizingPoint);
+ var finalValue = finalObjective.Value;
+
+ Debug.WriteLine($"Function value: {finalValue}");
+
+ // Global minimum is at (1,1,...,1)
+ for (var i = 0; i < dimensions; i++)
+ {
+ Assert.IsTrue(Math.Abs(result.MinimizingPoint[i] - 1.0) < 1E-5,
+ $"Parameter {i} should be close to 1, got {result.MinimizingPoint[i]}");
+ }
+
+ Assert.IsTrue(finalValue < 1.0,
+ $"Function value should be close to 0, got {finalValue}");
+ }
+
+ [Test]
+ public void BasinHopping_HimmelblauFunction_FindsLocalMinimum()
+ {
+ var initialGuess = Vector<double>.Build.DenseOfArray(new double[] { 0.0, 0.0 });
+ var objective = new HimmelblauFunction();
+ objective.EvaluateAt(initialGuess);
+
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 500);
+ var basinHopping = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 2.0,
+ stepSize: 2.0,
+ maxIterations: 30);
+
+ var result = basinHopping.FindMinimum(objective, initialGuess);
+
+ Debug.WriteLine($"Found minimum at ({result.MinimizingPoint[0]}, {result.MinimizingPoint[1]})");
+
+ // Evaluate at final point
+ var finalObjective = objective.CreateNew();
+ finalObjective.EvaluateAt(result.MinimizingPoint);
+ var finalValue = finalObjective.Value;
+
+ Debug.WriteLine($"Function value: {finalValue}");
+
+ // Known local minima for Himmelblau function
+ var knownMinima = new[]
+ {
+ new[] { 3.0, 2.0 },
+ new[] { -2.805118, 3.131312 },
+ new[] { -3.779310, -3.283186 },
+ new[] { 3.584428, -1.848126 }
+ };
+
+ // Check if result is close to one of the known minima
+ var closeToMinimum = false;
+ foreach (var minima in knownMinima)
+ {
+ var distance = Math.Sqrt(
+ Math.Pow(result.MinimizingPoint[0] - minima[0], 2) +
+ Math.Pow(result.MinimizingPoint[1] - minima[1], 2));
+
+ if (distance < 0.1)
+ {
+ closeToMinimum = true;
+ Debug.WriteLine($"Result is close to known minimum ({minima[0]}, {minima[1]})");
+ break;
+ }
+ }
+
+ Assert.IsTrue(closeToMinimum, "Result should be close to one of the known minima");
+ Assert.IsTrue(finalValue < 0.1,
+ $"Function value should be close to 0, got {finalValue}");
+ }
+
+ [Test]
+ public void BasinHopping_AckleyFunction_FindsGlobalMinimum()
+ {
+ var dimensions = 2;
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 2.0);
+ var objective = new AckleyFunction(dimensions);
+ objective.EvaluateAt(initialGuess);
+
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 200);
+ var basinHopping = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 5.0, // Higher temperature for better exploration
+ stepSize: 2.0, // Larger step size to jump between basins
+ maxIterations: 50);
+
+ var result = basinHopping.FindMinimum(objective, initialGuess);
+
+ // Global minimum is at (0,0,...,0)
+ var distanceToOrigin = result.MinimizingPoint.L2Norm();
+ Assert.IsTrue(distanceToOrigin < 0.5,
+ $"Solution should be close to origin, distance was {distanceToOrigin}");
+ }
+
+ [Test]
+ public void BasinHopping_CompareTemperatures_RastriginFunction()
+ {
+ var dimensions = 3;
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 2.0);
+ var objective = new RastriginFunction(dimensions);
+ objective.EvaluateAt(initialGuess);
+
+ // Create two optimizers with different temperatures
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 200);
+
+ var lowTempOptimizer = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 0.1,
+ stepSize: 1.0,
+ maxIterations: 30);
+
+ var highTempOptimizer = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 3.0,
+ stepSize: 1.0,
+ maxIterations: 30);
+
+ // Track acceptance rates
+ int lowTempAccepted = 0, lowTempTotal = 0;
+ int highTempAccepted = 0, highTempTotal = 0;
+
+ lowTempOptimizer.SetCallback((x, f, accepted) =>
+ {
+ lowTempTotal++;
+ if (accepted) lowTempAccepted++;
+ return false;
+ });
+
+ highTempOptimizer.SetCallback((x, f, accepted) =>
+ {
+ highTempTotal++;
+ if (accepted) highTempAccepted++;
+ return false;
+ });
+
+ var lowTempResult = lowTempOptimizer.FindMinimum(objective.CreateNew(), initialGuess);
+ var highTempResult = highTempOptimizer.FindMinimum(objective.CreateNew(), initialGuess);
+
+ var lowTempRate = lowTempTotal > 0 ? (double)lowTempAccepted / lowTempTotal : 0;
+ var highTempRate = highTempTotal > 0 ? (double)highTempAccepted / highTempTotal : 0;
+
+ Debug.WriteLine($"Low temperature acceptance rate: {lowTempRate:P2}");
+ Debug.WriteLine($"High temperature acceptance rate: {highTempRate:P2}");
+
+ // Higher temperature should lead to higher acceptance rate
+ Assert.IsTrue(highTempRate > lowTempRate,
+ "Higher temperature should result in higher acceptance rate");
+
+ // Evaluate both results
+ var lowTempObjective = objective.CreateNew();
+ lowTempObjective.EvaluateAt(lowTempResult.MinimizingPoint);
+
+ var highTempObjective = objective.CreateNew();
+ highTempObjective.EvaluateAt(highTempResult.MinimizingPoint);
+
+ Debug.WriteLine($"Low temperature result value: {lowTempObjective.Value}");
+ Debug.WriteLine($"High temperature result value: {highTempObjective.Value}");
+ }
+
+ [Test]
+ public void BasinHopping_CustomTakeStep_RastriginFunction()
+ {
+ var dimensions = 2;
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 3.0);
+ var objective = new RastriginFunction(dimensions);
+ objective.EvaluateAt(initialGuess);
+
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 200);
+ var basinHopping = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 1.0,
+ maxIterations: 50);
+
+ // Custom step-taking function with decreasing step size
+ var random = new Random(42); // Fixed seed for reproducibility
+ var currentStepSize = 2.0;
+
+ basinHopping.SetTakeStep(currentPoint =>
+ {
+ var newPoint = currentPoint.Clone();
+
+ // Take random step with decreasing step size
+ for (var i = 0; i < dimensions; i++)
+ {
+ newPoint[i] += (random.NextDouble() * 2 - 1) * currentStepSize;
+ }
+
+ // Decrease step size for next iteration
+ currentStepSize *= 0.95;
+
+ return newPoint;
+ });
+
+ var result = basinHopping.FindMinimum(objective, initialGuess);
+
+ Debug.WriteLine($"Custom step result: [{string.Join(", ", result.MinimizingPoint)}]");
+
+ // Evaluate at final point
+ var finalObjective = objective.CreateNew();
+ finalObjective.EvaluateAt(result.MinimizingPoint);
+
+ Debug.WriteLine($"Function value: {finalObjective.Value}");
+ Debug.WriteLine($"Final step size: {currentStepSize}");
+
+ // Should find global minimum
+ Assert.IsTrue(finalObjective.Value < 1.0,
+ $"Function value should be close to 0, got {finalObjective.Value}");
+ }
+
+ [Test]
+ public void BasinHopping_CustomAcceptTest_RastriginFunction()
+ {
+ var dimensions = 2;
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 3.0);
+ var objective = new RastriginFunction(dimensions);
+ objective.EvaluateAt(initialGuess);
+
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 200);
+ var basinHopping = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 1.0,
+ stepSize: 1.0,
+ maxIterations: 50);
+
+ // Custom acceptance test with simulated annealing cooling schedule
+ var random = new Random(42);
+ var temperature = 5.0; // Starting temperature
+ var coolingRate = 0.95; // Cooling rate
+
+ basinHopping.SetAcceptTest((newValue, oldValue) =>
+ {
+ // Always accept if new value is better
+ if (newValue <= oldValue)
+ {
+ return AcceptanceStatus.Accept;
+ }
+
+ // Metropolis criterion with decreasing temperature
+ var deltaE = newValue - oldValue;
+ var acceptanceProbability = Math.Exp(-deltaE / temperature);
+
+ var accepted = random.NextDouble() < acceptanceProbability;
+
+ // Cool the temperature
+ temperature *= coolingRate;
+
+ return accepted ? AcceptanceStatus.Accept : AcceptanceStatus.Reject;
+ });
+
+ var result = basinHopping.FindMinimum(objective, initialGuess);
+
+ Debug.WriteLine($"Custom accept test result: [{string.Join(", ", result.MinimizingPoint)}]");
+
+ // Evaluate at final point
+ var finalObjective = objective.CreateNew();
+ finalObjective.EvaluateAt(result.MinimizingPoint);
+
+ Debug.WriteLine($"Function value: {finalObjective.Value}");
+ Debug.WriteLine($"Final temperature: {temperature}");
+
+ // Should find global minimum
+ Assert.IsTrue(finalObjective.Value < 1.0,
+ $"Function value should be close to 0, got {finalObjective.Value}");
+ }
+
+ [Test]
+ public void BasinHopping_CompareLocalMinimizers_RastriginFunction()
+ {
+ var dimensions = 2;
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 3.0);
+ var objective = new RastriginFunction(dimensions);
+ objective.EvaluateAt(initialGuess);
+
+ // Compare different local minimizers
+ var nelderMead = new NelderMeadSimplex(1e-8, 100);
+ var bfgs = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 100);
+ var lbfgs = new LimitedMemoryBfgsMinimizer(1e-8, 1e-8, 1e-8, 100);
+
+ var basinHoppingNM = new BasinHopping(nelderMead, temperature: 1.0, maxIterations: 30);
+ var basinHoppingBFGS = new BasinHopping(bfgs, temperature: 1.0, maxIterations: 30);
+ var basinHoppingLBFGS = new BasinHopping(lbfgs, temperature: 1.0, maxIterations: 30);
+
+ var resultNM = basinHoppingNM.FindMinimum(objective.CreateNew(), initialGuess);
+ var resultBFGS = basinHoppingBFGS.FindMinimum(objective.CreateNew(), initialGuess);
+ var resultLBFGS = basinHoppingLBFGS.FindMinimum(objective.CreateNew(), initialGuess);
+
+ // Evaluate all results
+ var evalNM = objective.CreateNew();
+ evalNM.EvaluateAt(resultNM.MinimizingPoint);
+
+ var evalBFGS = objective.CreateNew();
+ evalBFGS.EvaluateAt(resultBFGS.MinimizingPoint);
+
+ var evalLBFGS = objective.CreateNew();
+ evalLBFGS.EvaluateAt(resultLBFGS.MinimizingPoint);
+
+ Debug.WriteLine($"Nelder-Mead result: {evalNM.Value}");
+ Debug.WriteLine($"BFGS result: {evalBFGS.Value}");
+ Debug.WriteLine($"L-BFGS result: {evalLBFGS.Value}");
+
+ // At least one minimizer should find a good solution
+ Assert.IsTrue(evalNM.Value < 1.0 || evalBFGS.Value < 1.0 || evalLBFGS.Value < 1.0,
+ "At least one minimizer should find a good solution");
+ }
+
+ [Test]
+ public void BasinHopping_HighDimensional_RastriginFunction()
+ {
+ var dimensions = 10; // Higher dimensional problem
+ var initialGuess = Vector<double>.Build.Dense(dimensions, 2.0);
+ var objective = new RastriginFunction(dimensions);
+ objective.EvaluateAt(initialGuess);
+
+ var localMinimizer = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 200);
+ var basinHopping = new BasinHopping(
+ localMinimizer: localMinimizer,
+ temperature: 2.0,
+ stepSize: 1.0,
+ maxIterations: 100);
+
+ var result = basinHopping.FindMinimum(objective, initialGuess);
+
+ // Evaluate at final point
+ var finalObjective = objective.CreateNew();
+ finalObjective.EvaluateAt(result.MinimizingPoint);
+ var functionValue = finalObjective.Value;
+ var distanceToOrigin = result.MinimizingPoint.L2Norm();
+
+ Debug.WriteLine($"High-dimensional result function value: {functionValue}");
+ Debug.WriteLine($"Distance to origin: {distanceToOrigin}");
+
+ // For high dimensions, we use more relaxed tolerances
+ Assert.IsTrue(functionValue < dimensions * 2,
+ $"Function value should be relatively low, got {functionValue}");
+ }
+ }
+}
diff --git a/src/Numerics.Tests/OptimizationTests/NonLinearCurveFittingTests.cs b/src/Numerics.Tests/OptimizationTests/NonLinearCurveFittingTests.cs
index cdaad350f..0de36d88b 100644
--- a/src/Numerics.Tests/OptimizationTests/NonLinearCurveFittingTests.cs
+++ b/src/Numerics.Tests/OptimizationTests/NonLinearCurveFittingTests.cs
@@ -201,6 +201,29 @@ public void Rat43_TRNCG_Dif()
}
}
+ [Test]
+ public void Rat43_BasinHopping()
+ {
+ // Local Minimizer: LevenbergMarquardtMinimizer
+ var obj = ObjectiveFunction.NonlinearModel(Rat43Model, Rat43X, Rat43Y, accuracyOrder: 6);
+ var solver = new BasinHopping(localMinimizer: new LevenbergMarquardtMinimizer());
+ var result = solver.FindMinimum(obj, Rat43Start2);
+
+ for (var i = 0; i < result.MinimizingPoint.Count; i++)
+ {
+ AssertHelpers.AlmostEqualRelative(Rat43Pbest[i], result.MinimizingPoint[i], 2);
+ }
+
+ // Local Minimizer: TrustRegionNewtonCGMinimizer
+ solver = new BasinHopping(localMinimizer: new TrustRegionNewtonCGMinimizer());
+ result = solver.FindMinimum(obj.Fork(), Rat43Start2);
+
+ for (var i = 0; i < result.MinimizingPoint.Count; i++)
+ {
+ AssertHelpers.AlmostEqualRelative(Rat43Pbest[i], result.MinimizingPoint[i], 2);
+ }
+ }
+
[Test]
public void Rat43_Bfgs_Dif()
{
@@ -411,6 +434,42 @@ public void BoxBod_TRNCG_Dif()
}
}
+ [Test]
+ public void BoxBod_BasinHopping()
+ {
+ // Local minimizer: LevenbergMarquardtMinimizer
+
+ var obj = ObjectiveFunction.NonlinearModel(BoxBodModel, BoxBodX, BoxBodY, accuracyOrder: 2);
+ var solver = new BasinHopping(localMinimizer: new LevenbergMarquardtMinimizer());
+ var result = solver.FindMinimum(obj, BoxBodStart2);
+
+ for (var i = 0; i < result.MinimizingPoint.Count; i++)
+ {
+ AssertHelpers.AlmostEqualRelative(BoxBodPbest[i], result.MinimizingPoint[i], 3);
+ AssertHelpers.AlmostEqualRelative(BoxBodPstd[i], result.StandardErrors[i], 3);
+ }
+
+ // Local Minimizer: TrustRegionDogLegMinimizer
+ solver = new BasinHopping(localMinimizer: new TrustRegionDogLegMinimizer());
+ result = solver.FindMinimum(obj.Fork(), BoxBodStart2);
+
+ for (var i = 0; i < result.MinimizingPoint.Count; i++)
+ {
+ AssertHelpers.AlmostEqualRelative(BoxBodPbest[i], result.MinimizingPoint[i], 3);
+ AssertHelpers.AlmostEqualRelative(BoxBodPstd[i], result.StandardErrors[i], 3);
+ }
+
+ // Local minimizer: TrustRegionNewtonCGMinimizer
+ solver = new BasinHopping(localMinimizer: new TrustRegionNewtonCGMinimizer());
+ result = solver.FindMinimum(obj.Fork(), BoxBodStart2);
+
+ for (var i = 0; i < result.MinimizingPoint.Count; i++)
+ {
+ AssertHelpers.AlmostEqualRelative(BoxBodPbest[i], result.MinimizingPoint[i], 3);
+ AssertHelpers.AlmostEqualRelative(BoxBodPstd[i], result.StandardErrors[i], 3);
+ }
+ }
+
[Test]
public void BoxBod_Bfgs_Der()
{
diff --git a/src/Numerics/Optimization/BasinHopping.cs b/src/Numerics/Optimization/BasinHopping.cs
new file mode 100644
index 000000000..1ba8dfeb5
--- /dev/null
+++ b/src/Numerics/Optimization/BasinHopping.cs
@@ -0,0 +1,919 @@
+//
+// Math.NET Numerics, part of the Math.NET Project
+// https://numerics.mathdotnet.com
+// https://github.com/mathnet/mathnet-numerics
+//
+// Copyright (c) 2009-$CURRENT_YEAR$ Math.NET
+//
+// Permission is hereby granted, free of charge, to any person
+// obtaining a copy of this software and associated documentation
+// files (the "Software"), to deal in the Software without
+// restriction, including without limitation the rights to use,
+// copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the
+// Software is furnished to do so, subject to the following
+// conditions:
+//
+// The above copyright notice and this permission notice shall be
+// included in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+// OTHER DEALINGS IN THE SOFTWARE.
+//
+
+using MathNet.Numerics.LinearAlgebra;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace MathNet.Numerics.Optimization
+{
+ using Random = System.Random;
+
+ /// <summary>
+ /// Indicates the result of an acceptance test in the basin-hopping algorithm.
+ /// </summary>
+ public enum AcceptanceStatus
+ {
+ /// <summary>
+ /// The trial point is accepted based on regular acceptance criteria.
+ /// </summary>
+ Accept,
+
+ /// <summary>
+ /// The trial point is rejected.
+ /// </summary>
+ Reject,
+
+ /// <summary>
+ /// The trial point is accepted unconditionally, bypassing regular acceptance criteria.
+ /// </summary>
+ ForceAccept
+ }
+
+ /// <summary>
+ /// Convergence criteria for the optimization algorithm
+ /// </summary>
+ internal class GeneralConvergence
+ {
+ /// <summary>
+ /// Initializes a new instance of the <see cref="GeneralConvergence"/> class.
+ /// </summary>
+ /// <param name="numberOfVariables">The number of variables in the optimization problem.</param>
+ public GeneralConvergence(int numberOfVariables)
+ {
+ AbsoluteParameterTolerance = new double[numberOfVariables];
+ for (var i = 0; i < numberOfVariables; i++)
+ {
+ AbsoluteParameterTolerance[i] = 1e-8;
+ }
+
+ MaximumEvaluations = 0;
+ MaximumTime = TimeSpan.FromMinutes(10);
+ StartTime = DateTime.Now;
+ }
+
+ /// <summary>
+ /// Gets or sets a value indicating whether to cancel the optimization.
+ /// </summary>
+ public bool Cancel { get; set; }
+
+ /// <summary>
+ /// Gets or sets the absolute parameter tolerance for each dimension.
+ /// </summary>
+ public double[] AbsoluteParameterTolerance { get; set; }
+
+ /// <summary>
+ /// Gets or sets the maximum number of function evaluations.
+ /// </summary>
+ public int MaximumEvaluations { get; set; } = 1000;
+
+ /// <summary>
+ /// Gets or sets the maximum time allowed for optimization.
+ /// </summary>
+ public TimeSpan MaximumTime { get; set; } = TimeSpan.FromMinutes(5);
+
+ /// <summary>
+ /// Gets or sets the start time of the optimization.
+ /// </summary>
+ public DateTime StartTime { get; set; }
+
+ /// <summary>
+ /// Gets or sets the number of function evaluations performed.
+ /// </summary>
+ public int Evaluations { get; set; }
+ }
+
+ /// <summary>
+ /// Implements the basin-hopping algorithm to find the global minimum of a function
+ /// </summary>
+ public sealed class BasinHopping : NonlinearMinimizerBase, ILeastSquaresMinimizer, IUnconstrainedMinimizer
+ {
+ /// <summary>
+ /// Delegate for taking a random step from the current position
+ /// </summary>
+ /// <param name="x">Current position</param>
+ /// <returns>New position after random displacement</returns>
+ public delegate Vector<double> TakeStepDelegate(Vector<double> x);
+
+ /// <summary>
+ /// Delegate for testing whether to accept a step
+ /// </summary>
+ /// <param name="newValue">Function value after step</param>
+ /// <param name="oldValue">Function value before step</param>
+ /// <returns>Status indicating whether to accept the step</returns>
+ public delegate AcceptanceStatus AcceptTestDelegate(double newValue, double oldValue);
+
+ /// <summary>
+ /// Delegate for callback function called for each minimum found
+ /// </summary>
+ /// <param name="x">Coordinates of the trial minimum</param>
+ /// <param name="f">Function value at the trial minimum</param>
+ /// <param name="accepted">Whether the minimum was accepted</param>
+ /// <returns>True to stop the algorithm, false to continue</returns>
+ public delegate bool CallbackDelegate(Vector<double> x, double f, bool accepted);
+
+ // Internal state variables
+ private Vector<double> solution;
+ private double functionValue;
+ private int iterations;
+ private int evaluations;
+ private BasinHoppingStatus status;
+ private GeneralConvergence convergence;
+ private double temperature;
+ private double stepSize;
+ private int interval;
+ private double targetAcceptRate;
+ private double stepwiseFactor;
+ private int maxIterations;
+ private int? successIterations;
+ private TakeStepDelegate takeStep;
+ private AcceptTestDelegate acceptTest;
+ private CallbackDelegate callback;
+ private Random random;
+
+ // local minimizer
+ private ILeastSquaresMinimizer leastSquaresMinimizer;
+ private IUnconstrainedMinimizer unconstrainedMinimizer;
+
+ private List<bool> isFixed;
+
+ // Adaptive step size variables
+ private int stepCount;
+ private int acceptCount;
+
+ /// <summary>
+ /// Gets or sets the "temperature" parameter used in the Metropolis acceptance criterion.
+ /// </summary>
+ public double Temperature
+ {
+ get => temperature;
+ set => temperature = value;
+ }
+
+ /// <summary>
+ /// Gets or sets the maximum step size for the random displacement.
+ /// </summary>
+ public double StepSize
+ {
+ get => stepSize;
+ set => stepSize = value;
+ }
+
+ /// <summary>
+ /// Gets or sets the interval for step size updates.
+ /// </summary>
+ public int Interval
+ {
+ get => interval;
+ set => interval = value;
+ }
+
+ /// <summary>
+ /// Gets or sets the target acceptance rate for step size adjustment.
+ /// </summary>
+ public double TargetAcceptRate
+ {
+ get => targetAcceptRate;
+ set => targetAcceptRate = value;
+ }
+
+ /// <summary>
+ /// Gets or sets the factor for step size adjustment.
+ /// </summary>
+ public double StepwiseFactor
+ {
+ get => stepwiseFactor;
+ set => stepwiseFactor = value;
+ }
+
+ #region Using a least squares minimizer
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="BasinHopping"/> class using a least squares minimizer.
+ /// </summary>
+ /// <param name="localMinimizer">Local least squares minimizer to use at each step.</param>
+ /// <param name="temperature">The "temperature" parameter for the acceptance criterion.</param>
+ /// <param name="stepSize">Maximum step size for the random displacement.</param>
+ /// <param name="maxIterations">Maximum number of basin-hopping iterations.</param>
+ /// <param name="successIterations">Stop if the global minimum candidate remains the same for this many iterations.</param>
+ /// <param name="interval">Interval for step size updates.</param>
+ /// <param name="targetAcceptRate">Target acceptance rate for step size adjustment.</param>
+ /// <param name="stepwiseFactor">Factor for step size adjustment.</param>
+ public BasinHopping(
+ ILeastSquaresMinimizer localMinimizer,
+ double temperature = 1.0,
+ double stepSize = 0.5,
+ int maxIterations = 100,
+ int? successIterations = null,
+ int interval = 50,
+ double targetAcceptRate = 0.5,
+ double stepwiseFactor = 0.9)
+ : base(gradientTolerance: 1E-8, stepTolerance: 1E-8, functionTolerance: 1E-8)
+ {
+ this.temperature = temperature;
+ this.stepSize = stepSize;
+ this.leastSquaresMinimizer = localMinimizer;
+ this.maxIterations = maxIterations;
+ this.successIterations = successIterations;
+ this.interval = interval;
+ this.targetAcceptRate = targetAcceptRate;
+ this.stepwiseFactor = stepwiseFactor;
+ random = new Random();
+ convergence = new GeneralConvergence(1);
+
+ // Create default step-taking function
+ takeStep = RandomDisplacement;
+ }
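+
+ // Illustrative usage sketch (objectiveModel and initialGuess are hypothetical; see the unit tests for complete examples):
+ //
+ //   var solver = new BasinHopping(new LevenbergMarquardtMinimizer(), temperature: 1.0, stepSize: 0.5, maxIterations: 100);
+ //   solver.SetCallback((x, f, accepted) => { Console.WriteLine($"f = {f}, accepted = {accepted}"); return false; });
+ //   NonlinearMinimizationResult result = solver.FindMinimum(objectiveModel, initialGuess);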
+
+ /// <inheritdoc/>
+ public NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, Vector<double> initialGuess,
+ Vector<double> lowerBound = null, Vector<double> upperBound = null, Vector<double> scales = null, List<bool> isFixed = null)
+ {
+ if (objective == null)
+ {
+ throw new ArgumentNullException(nameof(objective));
+ }
+
+ if (initialGuess == null)
+ {
+ throw new ArgumentNullException(nameof(initialGuess));
+ }
+
+ if (leastSquaresMinimizer == null)
+ {
+ throw new ArgumentNullException(nameof(leastSquaresMinimizer),
+ "A local minimizer must be provided for basin-hopping optimization.");
+ }
+
+ // Use default step function and acceptance test if not set
+ if (takeStep == null)
+ {
+ takeStep = RandomDisplacement;
+ }
+
+ if (acceptTest == null)
+ {
+ acceptTest = MetropolisAcceptance;
+ }
+
+ // Proceed with the basin-hopping optimization, forwarding bounds and scaling to the local minimizer.
+ return Optimize(objective, initialGuess, lowerBound, upperBound, scales, isFixed);
+ }
+
+ /// <inheritdoc/>
+ public NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, double[] initialGuess,
+ double[] lowerBound = null, double[] upperBound = null, double[] scales = null, bool[] isFixed = null)
+ {
+ if (objective == null)
+ {
+ throw new ArgumentNullException(nameof(objective));
+ }
+
+ if (initialGuess == null)
+ {
+ throw new ArgumentNullException(nameof(initialGuess));
+ }
+
+ var vecInitial = Vector<double>.Build.DenseOfArray(initialGuess);
+ var vecLower = lowerBound != null ? Vector<double>.Build.DenseOfArray(lowerBound) : null;
+ var vecUpper = upperBound != null ? Vector<double>.Build.DenseOfArray(upperBound) : null;
+ var vecScales = scales != null ? Vector<double>.Build.DenseOfArray(scales) : null;
+ var listIsFixed = isFixed != null ? isFixed.ToList() : null;
+
+ // Convert to vectors and call the vector-based overload.
+ return FindMinimum(objective, vecInitial, vecLower, vecUpper, vecScales, listIsFixed);
+ }
+
+ /// <summary>
+ /// Core optimization logic for the basin-hopping algorithm.
+ /// </summary>
+ private NonlinearMinimizationResult Optimize(IObjectiveModel objectiveModel, Vector<double> initialGuess,
+ Vector<double> lowerBound = null, Vector<double> upperBound = null, Vector<double> scales = null, List<bool> isFixed = null)
+ {
+ // Validate bounds first
+ ValidateBounds(initialGuess, lowerBound, upperBound, scales);
+
+ this.isFixed = isFixed;
+
+ // Set up tracking variables
+ convergence.StartTime = DateTime.Now;
+ convergence.Evaluations = 0;
+ evaluations = 0;
+ iterations = 0;
+ stepCount = 0;
+ acceptCount = 0;
+ status = BasinHoppingStatus.IterationsCompleted;
+
+ // Perform initial minimization
+ var currentResult = leastSquaresMinimizer.FindMinimum(objectiveModel.Fork(), initialGuess, lowerBound, upperBound, scales, isFixed);
+ var currentPoint = currentResult.MinimizingPoint;
+ var currentValue = currentResult.ModelInfoAtMinimum.Value;
+
+ // Store best result
+ var storage = new LeastSquaresStorage(currentResult);
+
+ // Call callback if provided
+ if (callback != null)
+ {
+ var stop = callback(currentPoint, currentValue, true);
+ if (stop)
+ {
+ status = BasinHoppingStatus.CallbackRequestedStop;
+ return currentResult;
+ }
+ }
+
+ // Initialize success counter
+ var successCount = 0;
+ var maxSuccessCount = successIterations ?? (maxIterations + 2);
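+ // When successIterations is null, maxSuccessCount exceeds maxIterations, so the stagnation-based stop below cannot trigger before the iteration limit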
+
+ // Main iteration loop
+ for (iterations = 0; iterations < maxIterations; iterations++)
+ {
+ // Check stopping conditions
+ if (convergence.Cancel)
+ {
+ status = BasinHoppingStatus.ForcedStop;
+ break;
+ }
+
+ if (convergence.MaximumEvaluations > 0 && evaluations >= convergence.MaximumEvaluations)
+ {
+ status = BasinHoppingStatus.MaximumEvaluationsReached;
+ break;
+ }
+
+ if (convergence.MaximumTime > TimeSpan.Zero &&
+ DateTime.Now - convergence.StartTime >= convergence.MaximumTime)
+ {
+ status = BasinHoppingStatus.MaximumTimeReached;
+ break;
+ }
+
+ // Perform Monte Carlo step
+ stepCount++;
+ var newGlobalMin = false;
+ var accepted = false;
+
+ // Take random step
+ var trialPoint = takeStep(currentPoint.Clone());
+
+ // Perform local minimization
+ var trialFunc = objectiveModel.Fork();
+ var trialResult = leastSquaresMinimizer.FindMinimum(trialFunc, trialPoint, lowerBound, upperBound, scales, isFixed);
+
+ evaluations += trialResult.Iterations;
+ convergence.Evaluations += trialResult.Iterations;
+
+ // Perform acceptance test
+ var testResult = acceptTest(trialResult.ModelInfoAtMinimum.Value, currentResult.ModelInfoAtMinimum.Value);
+ accepted = testResult == AcceptanceStatus.Accept || testResult == AcceptanceStatus.ForceAccept;
+
+ // Update current position if step is accepted
+ if (accepted)
+ {
+ acceptCount++;
+ currentResult = trialResult;
+ currentPoint = trialResult.MinimizingPoint;
+ currentValue = trialResult.ModelInfoAtMinimum.Value;
+
+ // Check if we found a new global minimum
+ newGlobalMin = storage.Update(trialResult);
+ }
+
+ // Adjust step size if needed
+ if (stepCount >= interval)
+ {
+ AdjustStepSize();
+ }
+
+ // Call callback if provided
+ if (callback != null)
+ {
+ var stop = callback(trialResult.MinimizingPoint, trialResult.ModelInfoAtMinimum.Value, accepted);
+ if (stop)
+ {
+ status = BasinHoppingStatus.CallbackRequestedStop;
+ break;
+ }
+ }
+
+ // Check success condition
+ if (newGlobalMin)
+ {
+ successCount = 0;
+ }
+ else
+ {
+ successCount++;
+ if (successCount > maxSuccessCount)
+ {
+ status = BasinHoppingStatus.SuccessConditionSatisfied;
+ break;
+ }
+ }
+ }
+
+ // Set final result
+ var bestResult = storage.BestResult;
+ solution = bestResult.MinimizingPoint;
+ functionValue = bestResult.ModelInfoAtMinimum.Value;
+
+ // Create a new result with our exit condition
+ var exitCondition = ConvertToExitCondition(status);
+
+ // If using the exact same object is important, you could potentially modify the reasonForExit field
+ // via reflection, but creating a new result is cleaner
+ return new NonlinearMinimizationResult(
+ bestResult.ModelInfoAtMinimum,
+ iterations + 1,
+ exitCondition);
+ }
+
+ /// <summary>
+ /// Storage class to keep track of the best result found when using ILeastSquaresMinimizer.
+ /// </summary>
+ private class LeastSquaresStorage
+ {
+ /// <summary>
+ /// Gets the best minimization result found so far.
+ /// </summary>
+ public NonlinearMinimizationResult BestResult { get; private set; }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="LeastSquaresStorage"/> class.
+ /// </summary>
+ /// <param name="initialResult">The initial minimization result.</param>
+ public LeastSquaresStorage(NonlinearMinimizationResult initialResult)
+ {
+ BestResult = initialResult;
+ }
+
+ /// <summary>
+ /// Updates the best result if the new result is better.
+ /// </summary>
+ /// <param name="newResult">The new minimization result to consider.</param>
+ /// <returns>True if the best result was updated, false otherwise.</returns>
+ public bool Update(NonlinearMinimizationResult newResult)
+ {
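+ // Only converged local runs are candidates: replace the stored best when the new run converged
+ // and either has a lower value or the stored best did not converge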
+ if (newResult.ReasonForExit == ExitCondition.Converged &&
+ (newResult.ModelInfoAtMinimum.Value < BestResult.ModelInfoAtMinimum.Value ||
+ BestResult.ReasonForExit != ExitCondition.Converged))
+ {
+ BestResult = newResult;
+ return true;
+ }
+ return false;
+ }
+ }
+
+ #endregion
+
+ #region using an unconstrained minimizer
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="BasinHopping"/> class using an unconstrained minimizer.
+ /// </summary>
+ /// <param name="localMinimizer">Local unconstrained minimizer to use at each step.</param>
+ /// <param name="temperature">The "temperature" parameter for the acceptance criterion.</param>
+ /// <param name="stepSize">Maximum step size for the random displacement.</param>
+ /// <param name="maxIterations">Maximum number of basin-hopping iterations.</param>
+ /// <param name="successIterations">Stop if the global minimum candidate remains the same for this many iterations.</param>
+ /// <param name="interval">Interval for step size updates.</param>
+ /// <param name="targetAcceptRate">Target acceptance rate for step size adjustment.</param>
+ /// <param name="stepwiseFactor">Factor for step size adjustment.</param>
+ public BasinHopping(
+ IUnconstrainedMinimizer localMinimizer,
+ double temperature = 1.0,
+ double stepSize = 0.5,
+ int maxIterations = 100,
+ int? successIterations = null,
+ int interval = 50,
+ double targetAcceptRate = 0.5,
+ double stepwiseFactor = 0.9)
+ : base(gradientTolerance: 1E-8, stepTolerance: 1E-8, functionTolerance: 1E-8)
+ {
+ this.temperature = temperature;
+ this.stepSize = stepSize;
+ this.unconstrainedMinimizer = localMinimizer;
+ this.maxIterations = maxIterations;
+ this.successIterations = successIterations;
+ this.interval = interval;
+ this.targetAcceptRate = targetAcceptRate;
+ this.stepwiseFactor = stepwiseFactor;
+ random = new Random();
+ convergence = new GeneralConvergence(1);
+
+ // Create default step-taking function
+ takeStep = RandomDisplacement;
+ }
+
+ /// <inheritdoc/>
+ public MinimizationResult FindMinimum(IObjectiveFunction objective, Vector<double> initialGuess)
+ {
+ if (objective == null)
+ {
+ throw new ArgumentNullException(nameof(objective));
+ }
+
+ if (initialGuess == null)
+ {
+ throw new ArgumentNullException(nameof(initialGuess));
+ }
+
+ if (unconstrainedMinimizer == null)
+ {
+ throw new ArgumentNullException(nameof(unconstrainedMinimizer),
+ "An unconstrained minimizer must be provided for this optimization.");
+ }
+
+ // Use default step function and acceptance test if not set
+ if (takeStep == null)
+ {
+ takeStep = RandomDisplacement;
+ }
+
+ if (acceptTest == null)
+ {
+ acceptTest = MetropolisAcceptance;
+ }
+
+ // Proceed with the basin-hopping optimization
+ return Optimize(objective, initialGuess);
+ }
+
+ /// <summary>
+ /// Core optimization logic for the basin-hopping algorithm.
+ /// </summary>
+ private MinimizationResult Optimize(IObjectiveFunction objective, Vector<double> initialGuess)
+ {
+ // Validate bounds first
+ ValidateBounds(initialGuess, null, null, null);
+
+ // Set up tracking variables
+ convergence.StartTime = DateTime.Now;
+ convergence.Evaluations = 0;
+ evaluations = 0;
+ iterations = 0;
+ stepCount = 0;
+ acceptCount = 0;
+ status = BasinHoppingStatus.IterationsCompleted;
+
+ // Perform initial minimization
+ var currentResult = unconstrainedMinimizer.FindMinimum(objective, initialGuess);
+ var currentPoint = currentResult.MinimizingPoint;
+ var currentValue = currentResult.FunctionInfoAtMinimum.Value;
+ evaluations += currentResult.Iterations;
+
+ // Store best result
+ var storage = new UnconstrainedStorage(currentResult);
+
+ // Call callback if provided
+ if (callback != null)
+ {
+ var stop = callback(currentPoint, currentValue, true);
+ if (stop)
+ {
+ status = BasinHoppingStatus.CallbackRequestedStop;
+ return currentResult;
+ }
+ }
+
+ // Initialize success counter
+ var successCount = 0;
+ var maxSuccessCount = successIterations ?? (maxIterations + 2);
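+ // When successIterations is null, maxSuccessCount exceeds maxIterations, so the stagnation-based stop below cannot trigger before the iteration limit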
+
+ // Main iteration loop
+ for (iterations = 0; iterations < maxIterations; iterations++)
+ {
+ // Check stopping conditions
+ if (convergence.Cancel)
+ {
+ status = BasinHoppingStatus.ForcedStop;
+ break;
+ }
+
+ if (convergence.MaximumEvaluations > 0 && evaluations >= convergence.MaximumEvaluations)
+ {
+ status = BasinHoppingStatus.MaximumEvaluationsReached;
+ break;
+ }
+
+ if (convergence.MaximumTime > TimeSpan.Zero &&
+ DateTime.Now - convergence.StartTime >= convergence.MaximumTime)
+ {
+ status = BasinHoppingStatus.MaximumTimeReached;
+ break;
+ }
+
+ // Perform Monte Carlo step
+ stepCount++;
+ var newGlobalMin = false;
+ var accepted = false;
+
+ // Take random step
+ var trialPoint = takeStep(currentPoint.Clone());
+
+ // Perform local minimization
+ var trialFunc = objective.Fork();
+ var trialResult = unconstrainedMinimizer.FindMinimum(trialFunc, trialPoint);
+
+ evaluations += trialResult.Iterations;
+ convergence.Evaluations += trialResult.Iterations;
+
+ // Perform acceptance test
+ var testResult = acceptTest(trialResult.FunctionInfoAtMinimum.Value, currentResult.FunctionInfoAtMinimum.Value);
+ accepted = testResult == AcceptanceStatus.Accept || testResult == AcceptanceStatus.ForceAccept;
+
+ // Update current position if step is accepted
+ if (accepted)
+ {
+ acceptCount++;
+ currentResult = trialResult;
+ currentPoint = trialResult.MinimizingPoint;
+ currentValue = trialResult.FunctionInfoAtMinimum.Value;
+
+ // Check if we found a new global minimum
+ newGlobalMin = storage.Update(trialResult);
+ }
+
+ // Adjust step size if needed
+ if (stepCount >= interval)
+ {
+ AdjustStepSize();
+ }
+
+ // Call callback if provided
+ if (callback != null)
+ {
+ var stop = callback(trialResult.MinimizingPoint, trialResult.FunctionInfoAtMinimum.Value, accepted);
+ if (stop)
+ {
+ status = BasinHoppingStatus.CallbackRequestedStop;
+ break;
+ }
+ }
+
+ // Check success condition
+ if (newGlobalMin)
+ {
+ successCount = 0;
+ }
+ else
+ {
+ successCount++;
+ if (successCount > maxSuccessCount)
+ {
+ status = BasinHoppingStatus.SuccessConditionSatisfied;
+ break;
+ }
+ }
+ }
+
+ // Set final result
+ var bestResult = storage.BestResult;
+ solution = bestResult.MinimizingPoint;
+ functionValue = bestResult.FunctionInfoAtMinimum.Value;
+
+ // Create a new result with our exit condition
+ return new MinimizationResult(
+ bestResult.FunctionInfoAtMinimum,
+ iterations + 1,
+ ConvertToExitCondition(status));
+ }
+
+ /// <summary>
+ /// Storage class to keep track of the best result found when using IUnconstrainedMinimizer.
+ /// </summary>
+ private class UnconstrainedStorage
+ {
+ /// <summary>
+ /// Gets the best minimization result found so far.
+ /// </summary>
+ public MinimizationResult BestResult { get; private set; }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="UnconstrainedStorage"/> class.
+ /// </summary>
+ /// <param name="initialResult">The initial minimization result.</param>
+ public UnconstrainedStorage(MinimizationResult initialResult)
+ {
+ BestResult = initialResult;
+ }
+
+ /// <summary>
+ /// Updates the best result if the new result is better.
+ /// </summary>
+ /// <param name="newResult">The new minimization result to consider.</param>
+ /// <returns>True if the best result was updated, false otherwise.</returns>
+ public bool Update(MinimizationResult newResult)
+ {
+ var isNewSuccessful = newResult.ReasonForExit == ExitCondition.Converged
+ || newResult.ReasonForExit == ExitCondition.RelativePoints
+ || newResult.ReasonForExit == ExitCondition.RelativeGradient
+ || newResult.ReasonForExit == ExitCondition.AbsoluteGradient;
+
+ var isBestSuccessful = BestResult.ReasonForExit == ExitCondition.Converged
+ || BestResult.ReasonForExit == ExitCondition.RelativePoints
+ || BestResult.ReasonForExit == ExitCondition.RelativeGradient
+ || BestResult.ReasonForExit == ExitCondition.AbsoluteGradient;
+
+ if (isNewSuccessful
+ && (newResult.FunctionInfoAtMinimum.Value < BestResult.FunctionInfoAtMinimum.Value || !isBestSuccessful))
+ {
+ BestResult = newResult;
+ return true;
+ }
+ return false;
+ }
+ }
+
+ #endregion
+
+ /// <summary>
+ /// Sets the step-taking function for the basin-hopping algorithm.
+ /// </summary>
+ /// <param name="stepFunction">The function that performs random displacements.</param>
+ public void SetTakeStep(TakeStepDelegate stepFunction)
+ {
+ takeStep = stepFunction ?? throw new ArgumentNullException(nameof(stepFunction));
+ }
+
+ /// <summary>
+ /// Sets the acceptance test function for the basin-hopping algorithm.
+ /// </summary>
+ /// <param name="testFunction">The function that tests whether to accept steps.</param>
+ public void SetAcceptTest(AcceptTestDelegate testFunction)
+ {
+ acceptTest = testFunction ?? throw new ArgumentNullException(nameof(testFunction));
+ }
+
+ /// <summary>
+ /// Sets the callback function for monitoring optimization progress.
+ /// </summary>
+ /// <param name="callbackFunction">The callback function to call for each minimum found.</param>
+ public void SetCallback(CallbackDelegate callbackFunction)
+ {
+ callback = callbackFunction;
+ }
+
+ /// <summary>
+ /// Default random displacement function.
+ /// </summary>
+ /// <param name="x">Current position.</param>
+ /// <returns>New position after random displacement.</returns>
+ private Vector<double> RandomDisplacement(Vector<double> x)
+ {
+ // Convert external parameters to internal
+ var internalParams = ProjectToInternalParameters(x);
+
+ for (var i = 0; i < internalParams.Count; i++)
+ {
+ if (isFixed != null && i < isFixed.Count && isFixed[i])
+ {
+ continue;
+ }
+
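+ // Uniform random displacement in [-stepSize, +stepSize] for each free coordinate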
+ var displacement = (random.NextDouble() * 2 - 1) * stepSize;
+ internalParams[i] += displacement;
+ }
+
+ // Convert back to external parameters
+ return ProjectToExternalParameters(internalParams);
+ }
+
+ /// <summary>
+ /// Metropolis acceptance criterion.
+ /// </summary>
+ /// <param name="newValue">New function value.</param>
+ /// <param name="oldValue">Previous function value.</param>
+ /// <returns>An <see cref="AcceptanceStatus"/> indicating whether to accept the step.</returns>
+ private AcceptanceStatus MetropolisAcceptance(double newValue, double oldValue)
+ {
+ // Always accept if new value is lower
+ if (newValue < oldValue)
+ {
+ return AcceptanceStatus.Accept;
+ }
+
+ // Reject all steps that increase energy if T = 0
+ if (temperature == 0)
+ {
+ return AcceptanceStatus.Reject;
+ }
+
+ // Accept with probability based on temperature
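+ // e.g. with newValue - oldValue = 1 and temperature = 1, w = exp(-1) ≈ 0.37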
+ var w = Math.Exp(-(newValue - oldValue) / temperature);
+ return random.NextDouble() < w ? AcceptanceStatus.Accept : AcceptanceStatus.Reject;
+ }
+
+ /// <summary>
+ /// Adjusts the step size based on the acceptance rate.
+ /// </summary>
+ private void AdjustStepSize()
+ {
+ var acceptRate = (double)acceptCount / stepCount;
+
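+ // e.g. with targetAcceptRate = 0.5 and stepwiseFactor = 0.9: acceptance above 50% grows the step (÷0.9), below 50% shrinks it (×0.9)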
+ if (acceptRate > targetAcceptRate)
+ {
+ // Accepting too many steps - increase step size
+ stepSize /= stepwiseFactor;
+ }
+ else
+ {
+ // Not accepting enough steps - decrease step size
+ stepSize *= stepwiseFactor;
+ }
+
+ // Reset counters
+ stepCount = 0;
+ acceptCount = 0;
+ }
+
+ /// <summary>
+ /// Converts BasinHoppingStatus to ExitCondition.
+ /// </summary>
+ /// <param name="status">The status to convert.</param>
+ /// <returns>The corresponding ExitCondition.</returns>
+ private static ExitCondition ConvertToExitCondition(BasinHoppingStatus status)
+ {
+ switch (status)
+ {
+ case BasinHoppingStatus.IterationsCompleted:
+ case BasinHoppingStatus.SuccessConditionSatisfied:
+ return ExitCondition.Converged;
+ case BasinHoppingStatus.CallbackRequestedStop:
+ case BasinHoppingStatus.ForcedStop:
+ return ExitCondition.ManuallyStopped;
+ case BasinHoppingStatus.MaximumTimeReached:
+ case BasinHoppingStatus.MaximumEvaluationsReached:
+ return ExitCondition.ExceedIterations;
+ default:
+ return ExitCondition.None;
+ }
+ }
+
+ /// <summary>
+ /// Indicates the status of a basin-hopping optimization.
+ /// </summary>
+ private enum BasinHoppingStatus
+ {
+ /// <summary>
+ /// Completed the requested number of iterations.
+ /// </summary>
+ IterationsCompleted,
+
+ /// <summary>
+ /// Success condition was satisfied.
+ /// </summary>
+ SuccessConditionSatisfied,
+
+ /// <summary>
+ /// The callback function requested early termination.
+ /// </summary>
+ CallbackRequestedStop,
+
+ /// <summary>
+ /// The optimization was forcibly stopped.
+ /// </summary>
+ ForcedStop,
+
+ /// <summary>
+ /// The maximum time limit was reached.
+ /// </summary>
+ MaximumTimeReached,
+
+ /// <summary>
+ /// The maximum number of function evaluations was reached.
+ /// </summary>
+ MaximumEvaluationsReached
+ }
+ }
+}
diff --git a/src/Numerics/Optimization/ILeastSquaresMinimizer.cs b/src/Numerics/Optimization/ILeastSquaresMinimizer.cs
new file mode 100644
index 000000000..4d321f4e3
--- /dev/null
+++ b/src/Numerics/Optimization/ILeastSquaresMinimizer.cs
@@ -0,0 +1,67 @@
+//
+// Math.NET Numerics, part of the Math.NET Project
+// https://numerics.mathdotnet.com
+// https://github.com/mathnet/mathnet-numerics
+//
+// Copyright (c) 2009-$CURRENT_YEAR$ Math.NET
+//
+// Permission is hereby granted, free of charge, to any person
+// obtaining a copy of this software and associated documentation
+// files (the "Software"), to deal in the Software without
+// restriction, including without limitation the rights to use,
+// copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the
+// Software is furnished to do so, subject to the following
+// conditions:
+//
+// The above copyright notice and this permission notice shall be
+// included in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+// OTHER DEALINGS IN THE SOFTWARE.
+//
+
+using MathNet.Numerics.LinearAlgebra;
+using System.Collections.Generic;
+
+namespace MathNet.Numerics.Optimization
+{
+ /// <summary>
+ /// Interface for solving nonlinear least squares problems.
+ /// This interface unifies the Levenberg-Marquardt and Trust Region minimization algorithms.
+ /// </summary>
+ public interface ILeastSquaresMinimizer
+ {
+ /// <summary>
+ /// Finds the minimum of a nonlinear least squares problem using the specified objective model and initial guess vector.
+ /// </summary>
+ /// <param name="objective">The objective function model to be minimized.</param>
+ /// <param name="initialGuess">The initial guess vector.</param>
+ /// <param name="lowerBound">Optional lower bound for the parameters.</param>
+ /// <param name="upperBound">Optional upper bound for the parameters.</param>
+ /// <param name="scales">Optional scales for the parameters.</param>
+ /// <param name="isFixed">Optional list indicating which parameters are fixed.</param>
+ /// <returns>A <see cref="NonlinearMinimizationResult"/> containing the results of the minimization.</returns>
+ NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, Vector<double> initialGuess,
+ Vector<double> lowerBound = null, Vector<double> upperBound = null, Vector<double> scales = null, List<bool> isFixed = null);
+
+ /// <summary>
+ /// Finds the minimum of a nonlinear least squares problem using the specified objective model and initial guess array.
+ /// </summary>
+ /// <param name="objective">The objective function model to be minimized.</param>
+ /// <param name="initialGuess">The initial guess array.</param>
+ /// <param name="lowerBound">Optional lower bound array for the parameters.</param>
+ /// <param name="upperBound">Optional upper bound array for the parameters.</param>
+ /// <param name="scales">Optional scales array for the parameters.</param>
+ /// <param name="isFixed">Optional array indicating which parameters are fixed.</param>
+ /// <returns>A <see cref="NonlinearMinimizationResult"/> containing the results of the minimization.</returns>
+ NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, double[] initialGuess,
+ double[] lowerBound = null, double[] upperBound = null, double[] scales = null, bool[] isFixed = null);
+ }
+}
diff --git a/src/Numerics/Optimization/LevenbergMarquardtMinimizer.cs b/src/Numerics/Optimization/LevenbergMarquardtMinimizer.cs
index 30b40a93e..87a8f9bc2 100644
--- a/src/Numerics/Optimization/LevenbergMarquardtMinimizer.cs
+++ b/src/Numerics/Optimization/LevenbergMarquardtMinimizer.cs
@@ -5,25 +5,39 @@
namespace MathNet.Numerics.Optimization
{
- public class LevenbergMarquardtMinimizer : NonlinearMinimizerBase
+ /// <summary>
+ /// Implements the Levenberg-Marquardt algorithm for solving nonlinear least squares problems.
+ /// This class inherits from <see cref="NonlinearMinimizerBase"/> and implements <see cref="ILeastSquaresMinimizer"/>.
+ /// </summary>
+ public class LevenbergMarquardtMinimizer : NonlinearMinimizerBase, ILeastSquaresMinimizer
{
/// <summary>
/// The scale factor for initial mu
/// </summary>
public double InitialMu { get; set; }
+ /// <summary>
+ /// Initializes a new instance of the <see cref="LevenbergMarquardtMinimizer"/> class using the Levenberg-Marquardt algorithm.
+ /// </summary>
+ /// <param name="initialMu">The initial damping parameter (mu) for the algorithm. Default is 1E-3.</param>
+ /// <param name="gradientTolerance">The tolerance for the infinity norm of the gradient. Default is 1E-15.</param>
+ /// <param name="stepTolerance">The tolerance for the parameter update step size. Default is 1E-15.</param>
+ /// <param name="functionTolerance">The tolerance for the function value (residual sum of squares). Default is 1E-15.</param>
+ /// <param name="maximumIterations">The maximum number of iterations. Default is -1 (unlimited).</param>
public LevenbergMarquardtMinimizer(double initialMu = 1E-3, double gradientTolerance = 1E-15, double stepTolerance = 1E-15, double functionTolerance = 1E-15, int maximumIterations = -1)
: base(gradientTolerance, stepTolerance, functionTolerance, maximumIterations)
{
InitialMu = initialMu;
}
+ /// <inheritdoc/>
public NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, Vector<double> initialGuess,
Vector<double> lowerBound = null, Vector<double> upperBound = null, Vector<double> scales = null, List<bool> isFixed = null)
{
return Minimum(objective, initialGuess, lowerBound, upperBound, scales, isFixed, InitialMu, GradientTolerance, StepTolerance, FunctionTolerance, MaximumIterations);
}
+ /// <inheritdoc/>
public NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, double[] initialGuess,
double[] lowerBound = null, double[] upperBound = null, double[] scales = null, bool[] isFixed = null)
{
diff --git a/src/Numerics/Optimization/TrustRegion/TrustRegionMinimizerBase.cs b/src/Numerics/Optimization/TrustRegion/TrustRegionMinimizerBase.cs
index 222b3ccf3..2314d5e81 100644
--- a/src/Numerics/Optimization/TrustRegion/TrustRegionMinimizerBase.cs
+++ b/src/Numerics/Optimization/TrustRegion/TrustRegionMinimizerBase.cs
@@ -5,7 +5,11 @@
namespace MathNet.Numerics.Optimization.TrustRegion
{
- public abstract class TrustRegionMinimizerBase : NonlinearMinimizerBase
+ /// <summary>
+ /// Abstract base class for trust region minimizers that solve nonlinear least squares problems.
+ /// This class inherits from <see cref="NonlinearMinimizerBase"/> and implements <see cref="ILeastSquaresMinimizer"/>.
+ /// </summary>
+ public abstract class TrustRegionMinimizerBase : NonlinearMinimizerBase, ILeastSquaresMinimizer
{
/// <summary>
/// The trust region subproblem.
@@ -17,6 +21,15 @@ public abstract class TrustRegionMinimizerBase : NonlinearMinimizerBase
/// </summary>
public double RadiusTolerance { get; set; }
+ /// <summary>
+ /// Initializes a new instance of the <see cref="TrustRegionMinimizerBase"/> class using the specified trust region subproblem.
+ /// </summary>
+ /// <param name="subproblem">The trust region subproblem to be solved at each iteration.</param>
+ /// <param name="gradientTolerance">The tolerance for the infinity norm of the gradient. Default is 1E-8.</param>
+ /// <param name="stepTolerance">The tolerance for the parameter update step size. Default is 1E-8.</param>
+ /// <param name="functionTolerance">The tolerance for the function value (residual sum of squares). Default is 1E-8.</param>
+ /// <param name="radiusTolerance">The tolerance for the trust region radius. Default is 1E-8.</param>
+ /// <param name="maximumIterations">The maximum number of iterations. Default is -1 (unlimited).</param>
public TrustRegionMinimizerBase(ITrustRegionSubproblem subproblem,
double gradientTolerance = 1E-8, double stepTolerance = 1E-8, double functionTolerance = 1E-8, double radiusTolerance = 1E-8, int maximumIterations = -1)
: base(gradientTolerance, stepTolerance, functionTolerance, maximumIterations)
@@ -25,6 +38,7 @@ public TrustRegionMinimizerBase(ITrustRegionSubproblem subproblem,
RadiusTolerance = radiusTolerance;
}
+ /// <inheritdoc/>
public NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, Vector<double> initialGuess,
Vector<double> lowerBound = null, Vector<double> upperBound = null, Vector<double> scales = null, List<bool> isFixed = null)
{
@@ -32,6 +46,7 @@ public NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, Vector
GradientTolerance, StepTolerance, FunctionTolerance, RadiusTolerance, MaximumIterations);
}
+ /// <inheritdoc/>
public NonlinearMinimizationResult FindMinimum(IObjectiveModel objective, double[] initialGuess,
double[] lowerBound = null, double[] upperBound = null, double[] scales = null, bool[] isFixed = null)
{