diff options
author | Alberto Ruiz <aruiz@um.es> | 2014-05-08 08:48:12 +0200 |
---|---|---|
committer | Alberto Ruiz <aruiz@um.es> | 2014-05-08 08:48:12 +0200 |
commit | 1925c123d7d8184a1d2ddc0a413e0fd2776e1083 (patch) | |
tree | fad79f909d9c3be53d68e6ebd67202650536d387 /examples/minimize.hs | |
parent | eb3f702d065a4a967bb754977233e6eec408fd1f (diff) |
empty hmatrix-base
Diffstat (limited to 'examples/minimize.hs')
-rw-r--r-- | examples/minimize.hs | 50 |
1 file changed, 0 insertions, 50 deletions
diff --git a/examples/minimize.hs b/examples/minimize.hs deleted file mode 100644 index 19b2cb3..0000000 --- a/examples/minimize.hs +++ /dev/null | |||
@@ -1,50 +0,0 @@ | |||
1 | -- the multidimensional minimization example in the GSL manual | ||
2 | import Numeric.GSL | ||
3 | import Numeric.LinearAlgebra | ||
4 | import Graphics.Plot | ||
5 | import Text.Printf(printf) | ||
6 | |||
-- | Objective function from the GSL manual's multidimensional
-- minimization example: a paraboloid whose minimum value is 30,
-- attained at the point (1,2).
-- NOTE(review): partial pattern — defined only for two-element
-- lists, which is the shape the minimizers supply here.
f [x,y] = 10*(x-1)^2 + 20*(y-2)^2 + 30

-- | Exact (analytic) gradient of 'f', component by component.
df [x,y] = zipWith (*) [20, 40] [x-1, y-2]
12 | |||
-- | Gradient-free minimization of @fun@ from the start point
-- @start@, using the Nelder-Mead simplex method (NMSimplex2) with a
-- unit-sized initial step in every coordinate.
minimizeS fun start =
    minimize NMSimplex2 1E-2 100 initialSteps fun start
  where
    -- one unit step per coordinate of the start point
    initialSteps = replicate (length start) 1
15 | |||
-- | Numerical estimate of the gradient of @fun@ at @v@: one
-- central-difference partial derivative per coordinate.
gradient fun v = map (\k -> partialDerivative k fun v) [0 .. length v - 1]

-- | Partial derivative of @fun@ along coordinate @n@ at the point
-- @v@, via GSL's central-difference rule with step 0.01.
partialDerivative n fun v = fst (derivCentral 0.01 slice (v !! n))
  where
    (before, _ : after) = splitAt n v
    -- vary only the n-th coordinate, keeping the others fixed
    slice x = fun (before ++ x : after)
22 | |||
23 | disp = putStrLn . format " " (printf "%.3f") | ||
24 | |||
-- | Every value of a bounded enumeration, in declaration order —
-- used below to try each available minimization method.
allMethods :: (Enum a, Bounded a) => [a]
allMethods = enumFromTo minBound maxBound
27 | |||
-- | Run one gradient-free minimizer on 'f' from (5,7) and report
-- both the solution found and the search path taken.
test method = do
    print method
    let (solution, path) = minimize method 1E-2 30 [1,1] f [5,7]
    print solution
    disp path
33 | |||
-- | Run one gradient-based minimizer on 'f', supplying the exact
-- analytic gradient 'df', and report the solution and search path.
testD method = do
    print method
    let (solution, path) = minimizeD method 1E-3 30 1E-2 1E-4 f df [5,7]
    print solution
    disp path
39 | |||
-- | Like 'testD', but with the gradient estimated numerically by
-- 'gradient' instead of supplied analytically.
testD' method = do
    putStrLn (show method ++ " with estimated gradient")
    let (solution, path) = minimizeD method 1E-3 30 1E-2 1E-4 f (gradient f) [5,7]
    print solution
    disp path
45 | |||
-- | Exercise every minimizer on the example problem, then plot the
-- simplex search path (skipping its three leading bookkeeping columns).
main = do
    mapM_ test [NMSimplex, NMSimplex2]
    mapM_ testD allMethods
    testD' ConjugateFR
    let (_, path) = minimizeS f [5,7]
    mplot (drop 3 (toColumns path))