diff options
Diffstat (limited to 'examples/minimize.hs')
-rw-r--r-- | examples/minimize.hs | 43 |
1 file changed, 43 insertions, 0 deletions
diff --git a/examples/minimize.hs b/examples/minimize.hs new file mode 100644 index 0000000..0429a24 --- /dev/null +++ b/examples/minimize.hs | |||
@@ -0,0 +1,43 @@ | |||
1 | -- the multidimensional minimization example in the GSL manual | ||
2 | import GSL | ||
3 | import LinearAlgebra | ||
4 | import Graphics.Plot | ||
5 | |||
-- | The function to be minimized: an elliptic paraboloid whose
-- minimum value is 30, attained at (1, 2).
f [x,y] = a + b + c
  where
    a = 10 * (x - 1) ^ 2
    b = 20 * (y - 2) ^ 2
    c = 30
8 | |||
-- | Analytic gradient of 'f': partial derivatives with respect to
-- x and y, in that order.
df [x,y] = [dx, dy]
  where
    dx = 20 * (x - 1)
    dy = 40 * (y - 2)
11 | |||
-- | Conjugate-gradient minimizer preconfigured with the tolerances
-- used in the GSL manual example.
-- NOTE(review): the four constants are presumably initial step size,
-- line-minimization tolerance, gradient stopping tolerance, and the
-- iteration limit — confirm against the GSL multimin documentation.
minimizeCG = minimizeConjugateGradient istep tol gtol maxIter
  where
    istep   = 1E-2
    tol     = 1E-4
    gtol    = 1E-3
    maxIter = 30
14 | |||
-- | Gradient-free minimization via the Nelder-Mead simplex method.
-- The initial simplex extends one unit along every coordinate.
minimizeS fun start = minimizeNMSimplex fun start sizes 1E-2 100
  where
    sizes = replicate (length start) 1
17 | |||
-- | Numerical estimate of the gradient of @f@ at @v@: one
-- central-difference partial derivative per coordinate.
gradient f v = map (\k -> partialDerivative k f v) [0 .. n - 1]
  where
    n = length v
20 | |||
-- | Central-difference estimate of the n-th partial derivative of @f@
-- at @v@, built by perturbing only the n-th coordinate.
partialDerivative n f v = fst (derivCentral 0.01 g (v !! n))
  where
    (before, _ : after) = splitAt n v
    -- f restricted to the n-th coordinate, with the others held fixed
    g x = f (before ++ x : after)
24 | |||
-- | Runs the three minimization variants of the GSL manual example
-- and plots the path each algorithm takes to the minimum.
main = do
    -- conjugate gradient with the analytic gradient
    let (sol1, path1) = minimizeCG f df [5,7]
    print sol1                          -- solution found
    dispR 2 path1                       -- evolution of the algorithm
    let [xs, ys] = drop 2 (toColumns path1)
    mplot [xs, ys]                      -- path from the start to the solution

    -- conjugate gradient with a numerically estimated gradient
    let (sol2, path2) = minimizeCG f (gradient f) [5,7]
    print sol2
    dispR 2 path2
    mplot $ drop 2 (toColumns path2)

    -- gradient-free Nelder-Mead simplex method
    let (sol3, path3) = minimizeS f [5,7]
    print sol3
    dispR 2 path3
    mplot $ drop 3 (toColumns path3)