Click or drag to resize

Covariance Matrix Adaptation for High Dimension

This topic contains the following sections:

This section describes the VD-CMA-ES algorithm implementation, based on the paper [Akimoto, Auger, Hansen 2014].

The VD-CMA-ES implements a restricted form of the CMA-ES where the covariance matrix is constrained to the form C = D(I + vv^T)D, where D is a diagonal matrix and v is a single vector. Because this representation requires only O(n) parameters instead of the O(n^2) of a full covariance matrix, this variant is capable of large-scale (high-dimensional) optimization.

Implementation

The most important methods and properties of the VDCMA class are listed below:

Algorithm parameters:

Property

Description

PropertyLambda

Population size, offspring number. At least two, generally > 4.

PropertyMu

Number of parents/points for recombination.

PropertyInitialSigma

Initial coordinate wise standard deviation (step size).

Multithreading:

Property

Description

PropertyParallelize

Use multiple threads to find objective function minimum.

Code sample

C#
 1using System;
 2using System.Collections.Generic;
 3using System.Reflection;
 4using System.Threading;
 5using FinMath.LinearAlgebra;
 6using FinMath.Statistics;
 7using FinMath.MachineLearning.EvolutionaryAlgorithms;
 8
 9namespace FinMath.Samples
10{
11    class EvolutionaryOptimizationSample
12    {
13        private static Object iterationsLock = new Object();
14        private static int iterationsCounter = 0;
15
16        private static double ObjectiveSphere(Vector xArray)
17        {
18            lock (iterationsLock)
19            {
20                ++iterationsCounter;
21            }
22
23            return xArray.Sum(x => x * x);
24        }
25
26        private static double ObjectiveRastrigin(Vector xArray)
27        {
28            lock (iterationsLock)
29            {
30                ++iterationsCounter;
31            }
32
33            double ret = 0;
34            foreach (var x in xArray)
35                ret += x * x - 10.0 * Math.Cos(2 * Math.PI * x);
36            return 10 * xArray.Count + ret;
37        }
38
39        private static double ObjectiveRosenbrock(Vector x)
40        {
41            lock (iterationsLock)
42            {
43                ++iterationsCounter;
44            }
45
46            double ret = 0;
47            double xi, xi1;
48
49            xi1 = x[0];
50            for (int i = 0, ie = x.Count; i + 1 < ie; ++i)
51            {
52                xi = xi1;
53                xi1 = x[i + 1];
54
55                double t1 = xi1 - xi * xi;
56                double t2 = 1 - xi;
57                ret += 100 * t1 * t1 + t2 * t2;
58            }
59            return ret;
60        }
61
62        private static void TestOptimizer(Vector startPoint, BaseOptimizer optimizer, BaseOptimizer.ObjectiveDelegateType objective)
63        {
64            Console.WriteLine($"");
65            Console.WriteLine($"{optimizer.GetType().Name} with {objective.GetMethodInfo().Name} solution:");
66
67            lock (iterationsLock)
68            {
69                iterationsCounter = 0;
70            }
71
72            optimizer.TerminationIterations = 300;
73            optimizer.TerminationTimeout = TimeSpan.MaxValue;
74            optimizer.TerminationObjectiveChange = null;
75
76            optimizer.Optimize(objective, startPoint);
77            Console.WriteLine($"    Value: {optimizer.SolutionValue}");
78            Console.WriteLine($"    Point: [{String.Join(" ", optimizer.SolutionPoint)}]");
79            Console.WriteLine($"    Step: {optimizer.SolutionStep}/{optimizer.MinimizationSteps}");
80            Console.WriteLine($"    Calls: {iterationsCounter}");
81        }
82
83        public static void Main(string[] args)
84        {
85            var startPoint = Vector.Random(4);
86            var optimizers = new BaseOptimizer[] { new CrossEntropy(), new CMSA(), new VDCMA(), new SimplexDownhill() };
87            var objectives = new BaseOptimizer.ObjectiveDelegateType[] { ObjectiveSphere, ObjectiveRastrigin, ObjectiveRosenbrock };
88
89            foreach (var opt in optimizers)
90                foreach (var obj in objectives)
91                    TestOptimizer(startPoint, opt, obj);
92        }
93    }
94}

See Also