path: root/examples/minimize.hs
author    Alberto Ruiz <aruiz@um.es>    2009-06-08 09:45:14 +0000
committer Alberto Ruiz <aruiz@um.es>    2009-06-08 09:45:14 +0000
commit    d9efdd9334da1a63f739d6e2e68c4ff78f52e505 (patch)
tree      4c4c4c798fd1e67ec4565a441e1357d5b75f37da /examples/minimize.hs
parent    34de6154086224a0e9f774bd8a2ab804d78e8a10 (diff)
auxiliary functions moved to Numeric.GSL.Internal
Diffstat (limited to 'examples/minimize.hs')
-rw-r--r--  examples/minimize.hs | 60
1 file changed, 19 insertions(+), 41 deletions(-)
diff --git a/examples/minimize.hs b/examples/minimize.hs
index 11643c9..19b2cb3 100644
--- a/examples/minimize.hs
+++ b/examples/minimize.hs
@@ -4,20 +4,14 @@ import Numeric.LinearAlgebra
 import Graphics.Plot
 import Text.Printf(printf)
 
 -- the function to be minimized
 f [x,y] = 10*(x-1)^2 + 20*(y-2)^2 + 30
 
--- its gradient
+-- exact gradient
 df [x,y] = [20*(x-1), 40*(y-2)]
 
--- the conjugate gradient method
-minimizeCG = minimizeConjugateGradient 1E-2 1E-4 1E-3 30
-
--- the BFGS2 method
-minimizeBFGS2 = minimizeVectorBFGS2 1E-2 1E-2 1E-3 30
-
 -- a minimization algorithm which does not require the gradient
-minimizeS f xi = minimizeNMSimplex f xi (replicate (length xi) 1) 1E-2 100
+minimizeS f xi = minimize NMSimplex2 1E-2 100 (replicate (length xi) 1) f xi
 
 -- Numerical estimation of the gradient
 gradient f v = [partialDerivative k f v | k <- [0 .. length v -1]]
@@ -26,47 +20,31 @@ partialDerivative n f v = fst (derivCentral 0.01 g (v!!n)) where
     g x = f (concat [a,x:b])
     (a,_:b) = splitAt n v
 
-main = do
-    putStrLn "BFGS2 with true gradient"
-    let (s,p) = minimizeBFGS2 f df [5,7]
-    print s -- solution
-    disp p -- evolution of the algorithm
-    let [x,y] = drop 2 (toColumns p)
-    mplot [x,y] -- path from the starting point to the solution
-
-    putStrLn "conjugate gradient with true gradient"
-    let (s,p) = minimizeCG f df [5,7]
-    print s
-    disp p
-    let [x,y] = drop 2 (toColumns p)
-    mplot [x,y]
-
-    putStrLn "conjugate gradient with estimated gradient"
-    let (s,p) = minimizeCG f (gradient f) [5,7]
-    print s
-    disp p
-    mplot $ drop 2 (toColumns p)
-
-    putStrLn "without gradient, using the NM Simplex method"
-    let (s,p) = minimizeS f [5,7]
-    print s
-    disp p
-    mplot $ drop 3 (toColumns p)
-
-    putStrLn "-------------------------"
-    mapM_ test [NMSimplex,NMSimplex2]
-    mapM_ testd [ConjugateFR .. SteepestDescent]
-
 disp = putStrLn . format " " (printf "%.3f")
 
+allMethods :: (Enum a, Bounded a) => [a]
+allMethods = [minBound .. maxBound]
+
 test method = do
     print method
     let (s,p) = minimize method 1E-2 30 [1,1] f [5,7]
     print s
     disp p
 
-testd method = do
+testD method = do
     print method
     let (s,p) = minimizeD method 1E-3 30 1E-2 1E-4 f df [5,7]
     print s
     disp p
+
+testD' method = do
+    putStrLn $ show method ++ " with estimated gradient"
+    let (s,p) = minimizeD method 1E-3 30 1E-2 1E-4 f (gradient f) [5,7]
+    print s
+    disp p
+
+main = do
+    mapM_ test [NMSimplex, NMSimplex2]
+    mapM_ testD allMethods
+    testD' ConjugateFR
+    mplot $ drop 3 . toColumns . snd $ minimizeS f [5,7]
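
The calls in the new version pin down the post-patch entry points: `minimize` for derivative-free methods and `minimizeD` for gradient-based ones. Below is a minimal standalone sketch of both, assuming `Numeric.GSL.Minimization` exports these functions and the method constructors exactly as the patched example uses them; the parameter-role comments are inferred from the calls above, not taken from documentation.

import Numeric.GSL.Minimization

-- the test function and its gradient, as in the patched example
f :: [Double] -> Double
f [x,y] = 10*(x-1)^2 + 20*(y-2)^2 + 30
f _     = error "f expects two coordinates"

df :: [Double] -> [Double]
df [x,y] = [20*(x-1), 40*(y-2)]
df _     = error "df expects two coordinates"

main :: IO ()
main = do
    -- derivative-free simplex search: tolerance, iteration cap,
    -- initial simplex sizes, objective, starting point (roles inferred)
    let (s1, _path1) = minimize NMSimplex2 1E-2 100 [1,1] f [5,7]
    print s1
    -- gradient-based search: tolerance, iteration cap, first step size,
    -- line-search tolerance, objective, gradient, starting point
    let (s2, _path2) = minimizeD ConjugateFR 1E-3 30 1E-2 1E-4 f df [5,7]
    print s2

Both calls return the solution paired with a matrix recording the evolution of the search, which is what `disp` and `mplot` consume in the example.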
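The estimated-gradient path (`gradient` / `partialDerivative`) leans on GSL's `derivCentral`, but the underlying idea is just a central difference per coordinate. A dependency-free sketch of the same estimate follows; `numGradient` and `bump` are hypothetical helpers, and the fixed step `h` is an ad-hoc choice rather than derivCentral's adaptive scheme.

-- Central-difference gradient estimate: perturb one coordinate at a time.
numGradient :: Double -> ([Double] -> Double) -> [Double] -> [Double]
numGradient h f v = [ (f (bump k h) - f (bump k (-h))) / (2*h)
                    | k <- [0 .. length v - 1] ]
  where
    -- add d to the k-th coordinate, leaving the rest unchanged
    bump k d = [ if i == k then x + d else x | (i,x) <- zip [0..] v ]

With the step 0.01 used with `derivCentral` above, `numGradient 0.01 f [5,7]` should match the exact `df [5,7] = [80,200]` up to rounding, since a central difference is exact on quadratics.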