Cross Entropy |
This topic contains the following sections:
This section describes the Cross Entropy optimization method implementation, based on the paper [Thiery, Scherrer 2009].
The algorithm aims to minimize an objective function through stochastic search. It works iteratively until a stopping criterion is met. At each iteration, it samples a number of vectors from a Gaussian distribution and evaluates each of these against the supplied objective function. Based on the return value from the objective function, a subset of the best-ranked vectors is chosen to update the search parameters of the next generation.
The mean of the Gaussian distribution is set to the centroid of the best ranked vectors, and the variance is set to the variance of the best ranked vectors in each individual dimension.
The most important methods and properties of the CrossEntropy class are listed below:
Algorithm parameters:
Property | Description |
---|---|
The population size (the number of objective function evaluations per step). | |
The selection size (the number of best-ranked points selected from the population to form the next step). | |
The initial variance estimate at the start point. |
Multithreading:
Property | Description |
---|---|
Use multiple threads to find the objective function minimum. |
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using FinMath.LinearAlgebra;
using FinMath.Statistics;
using FinMath.MachineLearning.EvolutionaryAlgorithms;

namespace FinMath.Samples
{
    /// <summary>
    /// Sample that runs several evolutionary optimizers (CrossEntropy, CMSA, VDCMA,
    /// SimplexDownhill) against classic optimization test objectives (Sphere,
    /// Rastrigin, Rosenbrock) and prints the solution found by each combination.
    /// </summary>
    class EvolutionaryOptimizationSample
    {
        // Number of objective function calls in the current optimizer run.
        // Updated via Interlocked because an optimizer may evaluate the
        // objective from multiple threads (see the Multithreading property
        // in the library docs); this replaces the original lock-per-increment.
        private static int iterationsCounter = 0;

        /// <summary>
        /// Sphere function: f(x) = sum(x_i^2). Global minimum 0 at the origin.
        /// </summary>
        private static double ObjectiveSphere(Vector xArray)
        {
            Interlocked.Increment(ref iterationsCounter);

            return xArray.Sum(x => x * x);
        }

        /// <summary>
        /// Rastrigin function: f(x) = 10*n + sum(x_i^2 - 10*cos(2*pi*x_i)).
        /// Highly multimodal; global minimum 0 at the origin.
        /// </summary>
        private static double ObjectiveRastrigin(Vector xArray)
        {
            Interlocked.Increment(ref iterationsCounter);

            double ret = 0;
            foreach (var x in xArray)
                ret += x * x - 10.0 * Math.Cos(2 * Math.PI * x);
            return 10 * xArray.Count + ret;
        }

        /// <summary>
        /// Rosenbrock function: f(x) = sum(100*(x_{i+1} - x_i^2)^2 + (1 - x_i)^2).
        /// Narrow curved valley; global minimum 0 at (1, ..., 1).
        /// </summary>
        private static double ObjectiveRosenbrock(Vector x)
        {
            Interlocked.Increment(ref iterationsCounter);

            double ret = 0;
            double xi, xi1;

            // Walk adjacent pairs (x_i, x_{i+1}), reading each element once.
            xi1 = x[0];
            for (int i = 0, ie = x.Count; i + 1 < ie; ++i)
            {
                xi = xi1;
                xi1 = x[i + 1];

                double t1 = xi1 - xi * xi;
                double t2 = 1 - xi;
                ret += 100 * t1 * t1 + t2 * t2;
            }
            return ret;
        }

        /// <summary>
        /// Runs one optimizer against one objective from the given start point and
        /// prints the solution value, solution point, step counts, and the number
        /// of objective function calls made during the run.
        /// </summary>
        /// <param name="startPoint">Initial point of the search.</param>
        /// <param name="optimizer">Optimizer instance to exercise.</param>
        /// <param name="objective">Objective function to minimize.</param>
        private static void TestOptimizer(Vector startPoint, BaseOptimizer optimizer, BaseOptimizer.ObjectiveDelegateType objective)
        {
            Console.WriteLine();
            Console.WriteLine($"{optimizer.GetType().Name} with {objective.GetMethodInfo().Name} solution:");

            // Reset the call counter for this run.
            Interlocked.Exchange(ref iterationsCounter, 0);

            // Terminate on iteration count only: disable the timeout and the
            // objective-change criteria so all runs are comparable.
            optimizer.TerminationIterations = 300;
            optimizer.TerminationTimeout = TimeSpan.MaxValue;
            optimizer.TerminationObjectiveChange = null;

            optimizer.Optimize(objective, startPoint);
            Console.WriteLine($" Value: {optimizer.SolutionValue}");
            Console.WriteLine($" Point: [{string.Join(" ", optimizer.SolutionPoint)}]");
            Console.WriteLine($" Step: {optimizer.SolutionStep}/{optimizer.MinimizationSteps}");
            // Fixed: the original split this literal across a physical line break,
            // which is not valid in a regular (non-verbatim) C# string literal.
            Console.WriteLine($" Calls: {iterationsCounter}");
        }

        /// <summary>
        /// Entry point: runs every optimizer against every objective, all starting
        /// from the same random 4-dimensional point for comparability.
        /// </summary>
        public static void Main(string[] args)
        {
            var startPoint = Vector.Random(4);
            var optimizers = new BaseOptimizer[] { new CrossEntropy(), new CMSA(), new VDCMA(), new SimplexDownhill() };
            var objectives = new BaseOptimizer.ObjectiveDelegateType[] { ObjectiveSphere, ObjectiveRastrigin, ObjectiveRosenbrock };

            foreach (var opt in optimizers)
                foreach (var obj in objectives)
                    TestOptimizer(startPoint, opt, obj);
        }
    }
}