
Contents of /trunk/downunder/py_src/costfunctions.py



Revision 3990 - Tue Sep 25 05:03:20 2012 UTC by caltinay
File MIME type: text/x-python
File size: 7584 byte(s)
First set of assorted epydoc fixes/additions.

##############################################################################
#
# Copyright (c) 2003-2012 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development since 2012 by School of Earth Sciences
#
##############################################################################

__copyright__="""Copyright (c) 2003-2012 by University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"

try:
    # only needed for getDirectionalDerivative(), so ignore import errors
    from esys.escript import grad
except ImportError:
    pass

class CostFunction(object):
    """
    A function *f(x)* that can be minimized (base class).

    Example of usage::

        cf=DerivedCostFunction()
        # ... calculate x ...
        args=cf.getArguments(x) # this could be potentially expensive!
        f=cf.getValue(x, *args)
        # ... x may need to be updated without using the gradient ...
        # ... but then ...
        gf=cf.getGradient(x, *args)

    Calls to these functions update statistical counters. The actual work is
    done by the methods of the same name with a leading underscore, which
    need to be overwritten for a particular cost function implementation.
    """

    def __init__(self):
        """
        The base constructor initializes the counters, so subclasses should
        ensure this constructor is called.
        """
        self.resetCounters()

    def resetCounters(self):
        """
        resets all statistical counters
        """
        self.Inner_calls=0
        self.Value_calls=0
        self.Gradient_calls=0
        self.DirectionalDerivative_calls=0
        self.Arguments_calls=0

    def getInner(self, f0, f1):
        """
        returns the inner product of ``f0`` and ``f1``
        """
        self.Inner_calls+=1
        return self._getInner(f0, f1)

    def getValue(self, x, *args):
        """
        returns the value *f(x)* using the precalculated values for *x*.
        """
        self.Value_calls+=1
        return self._getValue(x, *args)

    def __call__(self, x, *args):
        """
        short for ``getValue(x, *args)``.
        """
        return self.getValue(x, *args)

    def getGradient(self, x, *args):
        """
        returns the gradient of *f* at *x* using the precalculated values
        for *x*.
        """
        self.Gradient_calls+=1
        return self._getGradient(x, *args)

    def getDirectionalDerivative(self, x, d, *args):
        """
        returns ``inner(grad f(x), d)`` using the precalculated values for *x*.
        """
        self.DirectionalDerivative_calls+=1
        return self._getDirectionalDerivative(x, d, *args)

    def getArguments(self, x):
        """
        returns precalculated values that are shared in the calculation of
        *f(x)* and *grad f(x)*.
        """
        self.Arguments_calls+=1
        return self._getArguments(x)

    def _getInner(self, f0, f1):
        """
        Worker for ``getInner()``, needs to be overwritten.
        """
        raise NotImplementedError

    def _getValue(self, x, *args):
        """
        Worker for ``getValue()``, needs to be overwritten.
        """
        raise NotImplementedError

    def _getGradient(self, x, *args):
        """
        Worker for ``getGradient()``, needs to be overwritten.
        """
        raise NotImplementedError

    def _getDirectionalDerivative(self, x, d, *args):
        """
        returns ``getInner(grad f(x), d)`` using the precalculated values
        for *x*.

        This function may be overwritten as there might be more efficient
        ways of calculating the return value than using a
        ``self.getGradient()`` call.
        """
        return self.getInner(self.getGradient(x, *args), d)

    def _getArguments(self, x):
        """
        can be overwritten to return precalculated values that are shared in
        the calculation of *f(x)* and *grad f(x)*. By default returns an
        empty tuple.
        """
        return ()
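
# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original file): a minimal concrete
# subclass showing how the worker methods above are meant to be overridden.
# The quadratic cost f(x) = 0.5*(x-a)**2 on scalars and the name
# 'QuadraticCostFunction' are made up for this example.

class QuadraticCostFunction(CostFunction):
    """
    Example cost function f(x) = 0.5*(x-a)**2 for a scalar x.
    """
    def __init__(self, a=0.):
        super(QuadraticCostFunction, self).__init__()
        self.a=a

    def _getInner(self, f0, f1):
        # a plain product serves as the inner product for scalars
        return f0*f1

    def _getValue(self, x, *args):
        return 0.5*(x-self.a)**2

    def _getGradient(self, x, *args):
        return x-self.a

# usage: cf=QuadraticCostFunction(a=3.); cf(1.) returns 2.0,
# cf.getGradient(1.) returns -2.0 and cf.getDirectionalDerivative(1., 0.5)
# returns -1.0 through the default _getDirectionalDerivative() worker above.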

class SimpleCostFunction(CostFunction):
    """
    This is a simple cost function with a single continuous (mapped)
    variable. It is the sum of two weighted terms, a single forward model
    and a single regularization term. This cost function is used in the
    gravity inversion.
    """
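    # Written out (this restates the docstring in the notation of the
    # methods below and adds no functionality): with rho=mapping(m),
    #
    #     f(m) = mu_model * J_forward(rho) + mu_reg * J_reg(m)
    #
    # where J_forward is the forward model's getValue() and J_reg is the
    # regularization's getValue().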

    def __init__(self, regularization, mapping, forwardmodel):
        """
        initializes the cost function from its three components and sets
        the default weights.
        """
        super(SimpleCostFunction, self).__init__()
        self.forwardmodel=forwardmodel
        self.regularization=regularization
        self.mapping=mapping
        self.setWeights()

    def setWeights(self, mu_model=1., mu_reg=1.):
        """
        sets the weighting factors for the forward model and regularization
        terms.
        """
        if mu_model<0. or mu_reg<0.:
            raise ValueError("weighting factors must be non-negative.")
        self.mu_model=mu_model
        self.mu_reg=mu_reg

    def _getInner(self, f0, f1):
        """
        returns ``regularization.getInner(f0,f1)``
        """
        # if there were more than one regularization involved, their
        # contributions would need to be added up
        return self.regularization.getInner(f0, f1)

    def _getArguments(self, m):
        """
        returns precalculated values that are shared in the calculation of
        *f(x)* and *grad f(x)*. In this implementation returns a tuple with
        the mapped value of ``m``, the arguments from the forward model and
        the arguments from the regularization.
        """
        rho=self.mapping(m)
        return rho, self.forwardmodel.getArguments(rho), self.regularization.getArguments(m)

    def _getValue(self, m, *args):
        """
        returns the function value at *m*.
        If the precalculated values are not supplied ``getArguments()`` is
        called.
        """
        # if there were more than one forward model and/or regularization,
        # their contributions would need to be added up. This implementation
        # allows only one of each.
        if len(args)==0:
            args=self.getArguments(m)
        return self.mu_model * self.forwardmodel.getValue(args[0], *args[1]) \
               + self.mu_reg * self.regularization.getValue(m)

    def _getGradient(self, m, *args):
        """
        returns the gradient of *f* at *m*.
        If the precalculated values are not supplied ``getArguments()`` is
        called.
        """
        drhodm = self.mapping.getDerivative(m)
        if len(args)==0:
            args = self.getArguments(m)
        Y0 = self.forwardmodel.getGradient(args[0], *args[1])
        Y1, X1 = self.regularization.getGradient(m)
        return self.regularization.project(Y=self.mu_reg*Y1 + self.mu_model*Y0*drhodm, X=self.mu_reg*X1)
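    # Note on _getGradient() above: by the chain rule, with rho=mapping(m),
    #
    #     df/dm = mu_model * dJ_forward/drho * drho/dm + mu_reg * dJ_reg/dm
    #
    # Y0*drhodm is the forward model part; the regularization part is split
    # into a direct component Y1 and a component X1 that acts on the
    # gradient of the argument (this reading of Y1/X1 is inferred from the
    # code, not stated in the original docstrings).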

    def _getDirectionalDerivative(self, m, d, *args):
        """
        returns the directional derivative at *m* in direction *d*.
        If the precalculated values are not supplied ``getArguments()`` is
        called.
        """
        if len(args)==0:
            args = self.getArguments(m)
        drhodm = self.mapping.getDerivative(m)
        Y0 = self.forwardmodel.getGradient(args[0], *args[1])
        Y1, X1 = self.regularization.getGradient(m)
        return self.regularization.getInner(d, self.mu_reg*Y1 + self.mu_model*Y0*drhodm) \
               + self.mu_reg*self.regularization.getInner(grad(d), X1)
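
# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original file): duck-typed scalar
# stand-ins wired into SimpleCostFunction to show the calling protocol. All
# names and formulas below are made up for this example; real inversions use
# the escript regularization, mapping and forward model classes instead.
# getDirectionalDerivative() is not exercised here as it needs
# esys.escript's grad().

class _ScalarMapping(object):
    def __call__(self, m): return 2.*m       # rho = 2*m
    def getDerivative(self, m): return 2.    # drho/dm

class _ScalarForwardModel(object):
    def getArguments(self, rho): return ()
    def getValue(self, rho): return 0.5*rho**2
    def getGradient(self, rho): return rho

class _ScalarRegularization(object):
    def getArguments(self, m): return ()
    def getValue(self, m): return 0.5*m**2
    def getGradient(self, m): return m, 0.   # (Y, X): no gradient-acting part
    def getInner(self, f0, f1): return f0*f1
    def project(self, Y, X): return Y        # X plays no role for scalars

if __name__=="__main__":
    cf=SimpleCostFunction(_ScalarRegularization(), _ScalarMapping(),
                          _ScalarForwardModel())
    cf.setWeights(mu_model=1., mu_reg=0.1)
    args=cf.getArguments(1.)          # shared precalculated values for m=1
    print(cf.getValue(1., *args))     # 1.*0.5*(2.*1.)**2 + 0.1*0.5*1.**2 = 2.05
    print(cf.getGradient(1., *args))  # 1.*2.*2. + 0.1*1. = 4.1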
