summaryrefslogtreecommitdiff
path: root/examples/minimize.hs
diff options
context:
space:
mode:
authorAlberto Ruiz <aruiz@um.es>2007-09-21 18:28:08 +0000
committerAlberto Ruiz <aruiz@um.es>2007-09-21 18:28:08 +0000
commit0198366bba7a5f2d67338633f9eb90889ffc31b2 (patch)
tree4897d90233b333ee2092e63a4b74c7bcb2d22577 /examples/minimize.hs
parentd4cb2692f9dae748da23371057a983deca4b2f80 (diff)
add examples
Diffstat (limited to 'examples/minimize.hs')
-rw-r--r--examples/minimize.hs43
1 file changed, 43 insertions, 0 deletions
diff --git a/examples/minimize.hs b/examples/minimize.hs
new file mode 100644
index 0000000..0429a24
--- /dev/null
+++ b/examples/minimize.hs
@@ -0,0 +1,43 @@
1-- the multidimensional minimization example in the GSL manual
2import GSL
3import LinearAlgebra
4import Graphics.Plot
5
-- | Objective function to minimize: an elliptic paraboloid whose
-- minimum value 30 is attained at the point (1,2).
-- Expects exactly a two-element list [x,y], as supplied by the minimizers.
f [u,v] = 10*(u-1)^2 + 20*(v-2)^2 + 30
8
-- | Analytic gradient of 'f': partial derivatives with respect to
-- each coordinate of the two-element argument list.
df [u,v] = [20*(u-1), 40*(v-2)]
11
-- | Conjugate-gradient minimizer preconfigured with the tuning
-- constants from the GSL manual example.
-- NOTE(review): parameter meanings (step size / tolerances / max
-- iterations) follow the GSL multimin docs — confirm against the
-- GSL binding's signature.
minimizeCG = minimizeConjugateGradient 1e-2 1e-4 1e-3 30
14
-- | Derivative-free minimization via the Nelder-Mead simplex method:
-- starts with a unit-sized simplex in every coordinate, tolerance 1e-2,
-- at most 100 iterations.
minimizeS fun start = minimizeNMSimplex fun start sizes 1e-2 100
  where
    sizes = replicate (length start) 1
17
-- | Numerical estimate of the gradient of @fun@ at point @v@,
-- one central-difference partial derivative per coordinate.
gradient fun v = map (\k -> partialDerivative k fun v) [0 .. length v - 1]
20
-- | Central-difference estimate (step 0.01) of the n-th partial
-- derivative of @fun@ at @v@: the function is restricted to a single
-- coordinate by freezing all the others at their values in @v@.
partialDerivative n fun v = fst (derivCentral 0.01 restricted (v !! n))
  where
    (before, _:after) = splitAt n v
    restricted x = fun (before ++ x : after)
24
-- | Run the GSL manual's multidimensional minimization example three
-- ways, printing each solution, dumping the iteration trace, and
-- plotting the path taken from the starting point [5,7].
main = do
    -- 1. Conjugate gradient using the analytic gradient 'df'.
    let (sol1, path1) = minimizeCG f df [5,7]
    print sol1                        -- solution found
    dispR 2 path1                     -- evolution of the algorithm
    let [xs, ys] = drop 2 (toColumns path1)
    mplot [xs, ys]                    -- path from start to solution

    -- 2. Conjugate gradient with a numerically estimated gradient.
    let (sol2, path2) = minimizeCG f (gradient f) [5,7]
    print sol2
    dispR 2 path2
    mplot (drop 2 (toColumns path2))

    -- 3. Nelder-Mead simplex: no gradient required.
    -- NOTE(review): columns dropped is 3 here vs 2 above — presumably
    -- the simplex trace carries an extra column (simplex size); confirm.
    let (sol3, path3) = minimizeS f [5,7]
    print sol3
    dispR 2 path3
    mplot (drop 3 (toColumns path3))