path: root/packages/hmatrix/examples/minimize.hs
-- the multidimensional minimization example in the GSL manual
import Numeric.GSL
import Numeric.LinearAlgebra
import Graphics.Plot
import Text.Printf(printf)

-- the function to be minimized
f [x,y] = 10*(x-1)^2 + 20*(y-2)^2 + 30
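-- (Both squared terms vanish at x=1, y=2, so the minimum value is 30,
--  attained at [1,2]; the searches below should converge towards that point.)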

-- exact gradient
df [x,y] = [20*(x-1), 40*(y-2)]

-- a minimization algorithm which does not require the gradient
minimizeS f xi = minimize NMSimplex2 1E-2 100 (replicate (length xi) 1) f xi
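
-- A small sanity check added for illustration (not part of the original
-- example; 'simplexSolution' is a name introduced here): starting from
-- [5,7], the derivative-free search should return a point close to the
-- true minimizer [1,2].
simplexSolution :: [Double]
simplexSolution = fst (minimizeS f [5,7])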

-- Numerical estimation of the gradient
gradient f v = [partialDerivative k f v | k <- [0 .. length v - 1]]

-- partial derivative of f with respect to the n-th component, estimated
-- with GSL's central differences (derivCentral) at the point v
partialDerivative n f v = fst (derivCentral 0.01 g (v!!n)) where
    g x = f (concat [a,x:b])
    (a,_:b) = splitAt n v
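
-- Another sanity check added for illustration ('gradientError' is a name
-- introduced here, not part of the original example): for this quadratic
-- the central-difference estimate should essentially agree with the exact
-- gradient, e.g. both 'gradient f [5,7]' and 'df [5,7]' are roughly [80,200].
gradientError :: [Double]
gradientError = zipWith (\a b -> abs (a - b)) (gradient f [5,7]) (df [5,7])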

-- display a matrix with three decimals per element
disp = putStrLn . format "  " (printf "%.3f")

-- every value of a bounded enumeration (used below to try all gradient-based methods)
allMethods :: (Enum a, Bounded a) => [a]
allMethods = [minBound .. maxBound]

-- run a derivative-free method from [5,7]: print the solution and show the search path
test method = do
    print method
    let (s,p) = minimize method 1E-2 30 [1,1] f [5,7]
    print s
    disp p

-- run a gradient-based method using the exact gradient df
testD method = do
    print method
    let (s,p) = minimizeD method 1E-3 30 1E-2 1E-4 f df [5,7]
    print s
    disp p

-- like testD, but with the numerically estimated gradient
testD' method = do
    putStrLn $ show method ++ " with estimated gradient"
    let (s,p) = minimizeD method 1E-3 30 1E-2 1E-4 f (gradient f) [5,7]
    print s
    disp p

main = do
    mapM_ test [NMSimplex, NMSimplex2]
    mapM_ testD allMethods
    testD' ConjugateFR
    -- plot the coordinates along the simplex search path (the leading
    -- columns of the path matrix hold iteration bookkeeping)
    mplot $ drop 3 . toColumns . snd $ minimizeS f [5,7]