tlinear.py - pism - [fork] customized build of PISM, the parallel ice sheet model (tillflux branch)
(HTM) git clone git://src.adamsgaard.dk/pism
(DIR) Log
(DIR) Files
(DIR) Refs
(DIR) LICENSE
---
tlinear.py (10355B)
---
1 ############################################################################
2 #
3 # This file is a part of siple.
4 #
5 # Copyright 2010, 2014 David Maxwell
6 #
7 # siple is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 2 of the License, or
10 # (at your option) any later version.
11 #
12 ############################################################################
13
14 from siple import Parameters
15 from siple.reporting import msg
16 from siple.exceptions import IterationCountFailure
17 from math import sqrt
18
19 class KrylovSolver:
20 """
21 Base class for solving a linear ill-posed problem
22
23 T(x) = y
24
25 via an iterative Krylov method.
26 """
27
28 @staticmethod
29 def defaultParameters():
30 return Parameters('KrylovSolver', ITER_MAX=200, mu=1.1, cg_reset=0, steepest_descent=False, verbose=False)
31
32 def __init__(self, params=None):
33 self.params = self.defaultParameters()
34 if params is not None: self.params.update(params)
35
36 self.iteration_listeners = []
37
38 def forwardProblem(self):
39 """
40 Returns the LinearForwardProblem that defines the inverse problem.
41 """
42 raise NotImplementedError()
43
44 def solve(self,x0,y,*args):
45 """
46 Run the iterative method starting from the initial point x0.
47 """
48 raise NotImplementedError()
49
50 def addIterationListener(self,listener):
51 """
52 Add an object to be called after each iteration.
53 """
54 self.iteration_listeners.append(listener)
55
56 def iterationHook(self,count,x,y,d,r,*args):
57 """
58 Called during each iteration with the pertinent computations. Handy for debugging and visualization.
59 """
60 for listener in self.iteration_listeners:
61 listener(self,count,x,y,d,r,*args)
62
63 def initialize(self,x0,y,*args):
64 """
65 This method is a hook called at the beginning of a run. It gives the class an opportunity to
66 set up any information needed by the final stopping criterion.
67 
68 It may also be that the initial data 'x0' expresses the initial data for the problem T(x)=y
69 indirectly, or that x0 and y are given as concrete vectors rather than
70 invtools.AbstractVector instances.
71 So initialize returns a pair of AbstractVectors (x0,y) which are possibly modified and/or wrapped
72 versions of the input data.
73 
74 The arguments \*args are passed directly from 'solve'.
75 """
76 return (x0,y)
77
78 def finalize(self,x,y):
79 """
80 This method is a hook called at the end of a run, and gives the class a way to make adjustments to x and y before
81 finishing the run.
82 """
83 return (x,y)
84
85 def stopConditionMet(self,iter,x,y,r):
86 """
87 Given the current iteration number, the current value of x, the desired value y of T(x), and the
88 current residual, returns whether the stopping condition has been met.
89 """
90 raise NotImplementedError()
91
92
93 class KrylovCG(KrylovSolver):
94 """
95 Base class for solving an ill-posed linear problem
96
97 T(x) = y
98
99 using a conjugate gradient method. Necessarily, T:X->X must be self-adjoint.
100 """
101
102
103 def solve(self,x0,y,*args):
104 (x,y) = self.initialize(x0,y,*args)
105
106 cg_reset = x.size()
107 if (self.params.cg_reset != 0): cg_reset = self.params.cg_reset
108
109 forward_problem = self.forwardProblem()
110 r=y.copy()
111 Tx = forward_problem.T(x)
112 r -= Tx
113
114 normsq_r = forward_problem.domainIP(r,r)
115
116 # d = r
117 d = r.copy()
118
119 # Eventually will contain T(d)
120 Td = None
121
122 count = 0
123 while True:
124 if count > self.params.ITER_MAX:
125 raise IterationCountFailure(self.params.ITER_MAX)
126 count += 1
127
128 if self.stopConditionMet(count,x,y,r):
129 msg('done at iteration %d', count)
130 break
131
132 if self.params.verbose:
133 msg('solving linear problem')
134 Td = forward_problem.T(d,out=Td)
135
136 self.iterationHook(count, x, y, d, r, Td)
137
138 alpha = normsq_r/forward_problem.domainIP(d,Td)
140 if ((count+1) % cg_reset == 0):
140 msg('resetting cg via steepest descent')
141 alpha = 1
142
143 # x = x + alpha*d
144 x.axpy(alpha,d)
145
146 # r = r - alpha*Td
147 r.axpy(-alpha,Td)
148
149 prev_normsq_r = normsq_r
150 normsq_r = forward_problem.domainIP(r,r)
151 beta = normsq_r / prev_normsq_r
152 if ((count+1) % cg_reset == 0): beta = 0
153
154 if (self.params.steepest_descent):
155 beta = 0
156 # d = r + beta*d
157 d *= beta
158 d += r
159
160
161 y = forward_problem.T(x)
162 return self.finalize(x,y)
163
164
165 class KrylovCGNE(KrylovSolver):
166 """
167 Base class for solving an ill-posed linear problem
168
169 T(x) = y
170
171 using a conjugate gradient method applied to the normal equation
172
173 T^*T x = T^* y
174 """
175
176 def solve(self,x0,y,*args):
177 (x,y) = self.initialize(x0,y,*args)
178
179 forward_problem = self.forwardProblem()
180 Tx = forward_problem.T(x)
181 r = y.copy()
182 r -= Tx
183
184 cg_reset = x.size()
185 if (self.params.cg_reset != 0): cg_reset = self.params.cg_reset
186
187 TStarR = forward_problem.TStar(r)
188 normsq_TStarR = forward_problem.domainIP(TStarR,TStarR)
189
190 # d = T^* r
191 d = TStarR.copy()
192
193 # Eventual storage for T(d)
194 Td = None
195
196 count = 0
197 while True:
198 if count > self.params.ITER_MAX:
199 raise IterationCountFailure(self.params.ITER_MAX)
200 
201 count += 1
202
203 if self.stopConditionMet(count,x,y,r):
204 msg('done at iteration %d', count)
205 break
206
207 Td = forward_problem.T(d,out=Td)
208
209 self.iterationHook(count, x, y, d, r, Td, TStarR)
210
211 alpha = normsq_TStarR/forward_problem.rangeIP(Td,Td)
212 if ((count+1) % cg_reset == 0):
213 msg('resetting cg via steepest descent')
214 alpha = 1
215
216 # x = x + alpha*d
217 x.axpy(alpha,d)
218
219 # r = r - alpha*Td
220 r.axpy(-alpha,Td)
221
222 # beta = ||T^* r_{k+1}||^2 / ||T^* r_k||^2
223 prev_normsq_TStarR = normsq_TStarR
224 TStarR = forward_problem.TStar(r,out=TStarR)
225 normsq_TStarR = forward_problem.domainIP(TStarR,TStarR)
226 beta = normsq_TStarR/prev_normsq_TStarR
227
228 if ((count+1) % cg_reset == 0): beta = 0
229
230 if (self.params.steepest_descent):
231 beta = 0
232
233 # d = T^* r + beta*d
234 d *= beta
235 d += TStarR
236
237
238 Tx = forward_problem.T(x, out=Tx)
239 return self.finalize(x,Tx)
240
241 class BasicKrylovCG(KrylovCG):
242 """
243 Implements the CG regularization method for solving the linear ill-posed problem
244
245 T(x) = y
246
247 using the Morozov discrepancy principle. The discrepancy of 'x' is
248
249 ||y-T(x)||_Y
250
251 and the algorithm is run until a target discrepancy (specified as an argument to solve)
252 is reached.
253
254 The specific problem to solve is specified as an argument to the constructor.
255 """
256 def __init__(self,forward_problem,params=None):
257 KrylovCG.__init__(self,params)
258 self.forward_problem = forward_problem
259
260 def forwardProblem(self):
261 """
262 Returns the LinearForwardProblem that defines the inverse problem.
263 """
264 return self.forward_problem
265
266 def solve(self,x0,y,targetDiscrepancy):
267 """
268 Run the iterative method starting from the initial point x0.
269
270 The third argument is the desired value of ||y-T(x)||_Y
271 """
272 return KrylovCG.solve(self,x0,y,targetDiscrepancy)
273
274 def initialize(self,x0,y,targetDiscrepancy):
275 """
276 This method is a hook called at the beginning of a run. It gives the class an opportunity to
277 set up any information needed by the final stopping criterion.
278 
279 It may be that the initial data 'x0' expresses the initial data for the problem T(x)=y
280 indirectly, or that x0 and y are expressed as dolfin.Function objects rather than dolfin.GenericVector objects.
281 So initialize returns a pair of vectors (x0,y) which are possibly modified versions of the input data.
282 
283 The argument 'targetDiscrepancy' is passed directly from 'solve'.
284 """
285 self.targetDiscrepancy = targetDiscrepancy
286 return (x0,y)
287
288 def stopConditionMet(self,iter,x,y,r):
289 """
290 Given the current iteration number, the current value of x, the desired value y of T(x), and the
291 current residual, returns whether the stopping condition has been met.
292 """
293 return sqrt(self.forward_problem.rangeIP(r,r)) <= self.params.mu*self.targetDiscrepancy
294
295 class BasicKrylovCGNE(KrylovCGNE):
296 """
297 Implements the CGNE regularization method for solving the linear ill-posed problem
298
299 T(x) = y
300
301 using the Morozov discrepancy principle. The discrepancy of 'x' is
302
303 ||y-T(x)||_Y
304
305 and the algorithm is run until a target discrepancy (specified as an argument to solve)
306 is reached.
307
308 The specific problem to solve is specified as an argument to the constructor.
309 """
310 def __init__(self,forward_problem,params=None):
311 KrylovCGNE.__init__(self,params)
312 self.forward_problem = forward_problem
313
314 def forwardProblem(self):
315 """
316 Returns the LinearForwardProblem that defines the inverse problem.
317 """
318 return self.forward_problem
319
320 def solve(self,x0,y,targetDiscrepancy):
321 """
322 Run the iterative method starting from the initial point x0.
323
324 The third argument is the desired value of ||y-T(x)||_Y
325 """
326 return KrylovCGNE.solve(self,x0,y,targetDiscrepancy)
327
328 def initialize(self,x0,y,targetDiscrepancy):
329 """
330 This method is a hook called at the beginning of a run. It gives the class an opportunity to
331 set up any information needed by the final stopping criterion.
332 
333 It may be that the initial data 'x0' expresses the initial data for the problem T(x)=y
334 indirectly, or that x0 and y are expressed as dolfin.Function objects rather than dolfin.GenericVector objects.
335 So initialize returns a pair of vectors (x0,y) which are possibly modified versions of the input data.
336 
337 The argument 'targetDiscrepancy' is passed directly from 'solve'.
338 """
339 self.targetDiscrepancy = targetDiscrepancy
340 return (x0,y)
341
342 def stopConditionMet(self,iter,x,y,r):
343 """
344 Given the current iteration number, the current value of x, the desired value y of T(x), and the
345 current residual, returns whether the stopping condition has been met.
346 """
347 disc = sqrt(self.forward_problem.rangeIP(r,r))
348 target = self.params.mu*self.targetDiscrepancy
349 if self.params.verbose:
350 msg('Iteration %d: discrepancy %g target %g',iter,disc,target)
351 return disc <= target
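
Usage sketch (editor's addition, not part of the siple or PISM sources). The snippet below shows one way the discrepancy-principle solvers above might be driven. It is a minimal sketch under stated assumptions: the ToyVector and ToyForwardProblem classes are hypothetical stand-ins written against the operations the solvers actually call (copy, axpy, size, in-place +=, -=, *=, and T, TStar, domainIP, rangeIP), numpy is assumed available, and the import assumes this file can be imported as a module named `tlinear`; adjust the import to the real package layout.

import numpy as np
from tlinear import BasicKrylovCG, BasicKrylovCGNE  # hypothetical module path

class ToyVector:
    """Dense vector exposing the operations the Krylov solvers above rely on."""
    def __init__(self, data):
        self.data = np.array(data, dtype=float)
    def copy(self):
        return ToyVector(self.data)
    def axpy(self, alpha, other):
        # self <- self + alpha*other
        self.data += alpha * other.data
    def size(self):
        return self.data.size
    def __imul__(self, s):
        self.data *= s
        return self
    def __iadd__(self, other):
        self.data += other.data
        return self
    def __isub__(self, other):
        self.data -= other.data
        return self

class ToyForwardProblem:
    """T(x) = A x with the Euclidean inner product on domain and range."""
    def __init__(self, A):
        self.A = np.array(A, dtype=float)
    def T(self, x, out=None):
        return self._apply(self.A, x, out)
    def TStar(self, r, out=None):
        return self._apply(self.A.T, r, out)
    def domainIP(self, a, b):
        return float(np.dot(a.data, b.data))
    def rangeIP(self, a, b):
        return float(np.dot(a.data, b.data))
    def _apply(self, M, v, out):
        if out is None:
            return ToyVector(M.dot(v.data))
        out.data[:] = M.dot(v.data)
        return out

if __name__ == '__main__':
    n = 20
    x_true = ToyVector(np.ones(n))

    # Self-adjoint case: plain CG on T(x) = y.
    A_sym = np.diag(np.linspace(0.8, 1.2, n))
    problem = ToyForwardProblem(A_sym)
    y = problem.T(x_true)
    # Stop once ||y - T(x)|| falls to 1% of ||y|| (scaled by the safety factor mu).
    target = 0.01 * np.sqrt(problem.rangeIP(y, y))
    x, Tx = BasicKrylovCG(problem).solve(ToyVector(np.zeros(n)), y, target)
    print('CG   discrepancy:', np.linalg.norm(y.data - Tx.data))

    # Non-self-adjoint case: CG on the normal equation T^*T x = T^* y.
    A_gen = A_sym + 0.01 * np.triu(np.ones((n, n)), k=1)
    problem = ToyForwardProblem(A_gen)
    y = problem.T(x_true)
    target = 0.01 * np.sqrt(problem.rangeIP(y, y))
    x, Tx = BasicKrylovCGNE(problem).solve(ToyVector(np.zeros(n)), y, target)
    print('CGNE discrepancy:', np.linalg.norm(y.data - Tx.data))

Both runs stop via the Morozov criterion in stopConditionMet once the discrepancy drops below mu times the requested target, well before the ITER_MAX or cg_reset thresholds matter for these small, well-conditioned toy operators.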