Coverage for /usr/share/miniconda3/envs/dolfin/lib/python3.8/site-packages/block/iterative/lgmres.py: 87% (83 statements)

from __future__ import division
from builtins import zip
from builtins import range
from .common import *
import numpy

#####
# Adapted from SciPy (BSD license)
# Copyright (C) 2009, Pauli Virtanen <pav@iki.fi>
#####

def lgmres(B, A, x, b, tolerance, maxiter, progress, relativeconv=False,
           inner_m=30, outer_k=3, outer_v=None, store_outer_Av=True, callback=None):
14 """ 

15 Solve a matrix equation using the LGMRES algorithm. 

16 

17 The LGMRES algorithm [BJM]_ [BPh]_ is designed to avoid some problems 

18 in the convergence in restarted GMRES, and often converges in fewer 

19 iterations. 

20 

21 Additional parameters 

22 --------------------- 

23 inner_m : int, optional 

24 Number of inner GMRES iterations per each outer iteration. 

25 outer_k : int, optional 

26 Number of vectors to carry between inner GMRES iterations. 

27 According to [BJM]_, good values are in the range of 1...3. 

28 However, note that if you want to use the additional vectors to 

29 accelerate solving multiple similar problems, larger values may 

30 be beneficial. 

31 outer_v : list of tuples, optional 

32 List containing tuples ``(v, Av)`` of vectors and corresponding 

33 matrix-vector products, used to augment the Krylov subspace, and 

34 carried between inner GMRES iterations. The element ``Av`` can 

35 be `None` if the matrix-vector product should be re-evaluated. 

36 This parameter is modified in-place by `lgmres`, and can be used 

37 to pass "guess" vectors in and out of the algorithm when solving 

38 similar problems. 

39 store_outer_Av : bool, optional 

40 Whether LGMRES should store also A*v in addition to vectors `v` 

41 in the `outer_v` list. Default is True. 

42 

43 Notes 

44 ----- 

45 The LGMRES algorithm [BJM]_ [BPh]_ is designed to avoid the 

46 slowing of convergence in restarted GMRES, due to alternating 

47 residual vectors. Typically, it often outperforms GMRES(m) of 

48 comparable memory requirements by some measure, or at least is not 

49 much worse. 

50 

51 Another advantage in this algorithm is that you can supply it with 

52 'guess' vectors in the `outer_v` argument that augment the Krylov 

53 subspace. If the solution lies close to the span of these vectors, 

54 the algorithm converges faster. This can be useful if several very 

55 similar matrices need to be inverted one after another, such as in 

56 Newton-Krylov iteration where the Jacobian matrix often changes 

57 little in the nonlinear steps. 

58 

59 References 

60 ---------- 

61 .. [BJM] A.H. Baker and E.R. Jessup and T. Manteuffel, 

62 SIAM J. Matrix Anal. Appl. 26, 962 (2005). 

63 .. [BPh] A.H. Baker, PhD thesis, University of Colorado (2003). 

64 http://amath.colorado.edu/activities/thesis/allisonb/Thesis.ps 

65 
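
    Examples
    --------
    A minimal sketch (not part of the original module), assuming the
    operators only need to support the ``A*x`` matrix-vector syntax used
    below, and that ``norm`` and ``inner`` from ``.common`` accept plain
    NumPy vectors. The ``Op`` wrapper is a hypothetical stand-in for the
    block operators used elsewhere in this package, and ``progress`` is
    passed as a plain int rather than a progress object::

        import numpy

        class Op:
            # Hypothetical wrapper supplying the A*x syntax lgmres expects.
            def __init__(self, M):
                self.M = M
            def __mul__(self, v):
                return self.M.dot(v)

        A = Op(numpy.array([[4.0, 1.0], [1.0, 3.0]]))
        B = Op(numpy.eye(2))     # identity preconditioner
        b = numpy.array([1.0, 2.0])
        x = numpy.zeros(2)
        outer_v = []             # collects (v, Av) pairs; may be reused to
                                 # warm-start a later, similar solve
        x, residuals, _, _ = lgmres(B, A, x, b, tolerance=1e-10,
                                    maxiter=50, progress=0, outer_v=outer_v)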

66 """ 

    from scipy.linalg import lstsq

    if outer_v is None:
        outer_v = []

    r_outer = A*x - b
    r_norm = norm(r_outer)

    if relativeconv:
        tolerance *= r_norm
    residuals = [r_norm]

    for k_outer in range(maxiter):
        progress += 1

        # -- check stopping condition
        if r_norm < tolerance:
            break

        # -- inner LGMRES iteration
        vs0 = -1.*(B*r_outer)
        inner_res_0 = norm(vs0)

        if inner_res_0 == 0:
            rnorm = norm(r_outer)
            raise RuntimeError("Preconditioner returned a zero vector; "
                               "|v| ~ %.1g, |M v| = 0" % rnorm)

        vs0 *= 1.0/inner_res_0
        hs = []
        vs = [vs0]
        ws = []
        y = None

        for j in range(1, 1 + inner_m + len(outer_v)):
            # -- Arnoldi process:
            #
            # Build an orthonormal basis V and matrices W and H such that
            #     A W = V H
            # Columns of W, V, and H are stored in `ws`, `vs` and `hs`.
            #
            # The first column of V is always the residual vector, `vs0`;
            # V has *one more column* than the other two matrices.
            #
            # The other columns in V are built by feeding in, one
            # by one, some vectors `z` and orthonormalizing them
            # against the basis so far. The trick here is to
            # feed in first some augmentation vectors, before
            # starting to construct the Krylov basis on `vs0`.
            #
            # It was shown in [BJM]_ that a good choice (the LGMRES choice)
            # for these augmentation vectors is the set of `dx` vectors
            # obtained from a couple of the previous restart cycles.
            #
            # Note especially that while `vs0` is always the first
            # column in V, there is no reason why it should also be
            # the first column in W. (In fact, below `vs0` comes in
            # W only after the augmentation vectors.)
            #
            # The rest of the algorithm then goes as in GMRES: one
            # solves a minimization problem in the smaller subspace
            # spanned by W (range) and V (image).
            #
            # XXX: Below, I'm lazy and use `lstsq` to solve the
            # small least squares problem. Performance-wise this is
            # acceptable in practice, but it would be nicer to do it
            # on the fly with Givens rotations.
            #
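            # Added illustration (not in the SciPy original): after j steps,
            # `hs` holds the columns of the (j+1) x j Hessenberg matrix H,
            # and the "GMRES optimization problem" below solves
            #     y = argmin_y || H y - inner_res_0 * e1 ||_2,
            # from which the correction is dx = W y.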

            # ++ evaluate
            v_new = None
            if j < len(outer_v) + 1:
                z, v_new = outer_v[j-1]
            elif j == len(outer_v) + 1:
                z = vs0
            else:
                z = vs[-1]

            if v_new is None:
                v_new = B*(A*z)
            else:
                # Note: v_new is modified in-place below. Must make a
                # copy to ensure that the outer_v vectors are not
                # clobbered.
                v_new = v_new.copy()

            # ++ orthogonalize
            hcur = []
            for v in vs:
                alpha = inner(v, v_new)
                hcur.append(alpha)
                v_new -= alpha*v
            hcur.append(norm(v_new))

            if hcur[-1] == 0:
                # Exact solution found; bail out.
                # A zero basis vector (v_new) in the least-squares problem
                # does no harm, so we can just use the same code as usual;
                # it will give a zero (inner) residual as a result.
                bailout = True
            else:
                bailout = False
                v_new *= 1.0/hcur[-1]

            vs.append(v_new)
            hs.append(hcur)
            ws.append(z)

            # XXX: Ugly: should implement the GMRES iteration properly,
            # with Givens rotations and not using lstsq. Instead, we
            # spare some work by solving the LSQ problem only every 5
            # iterations.
            if not bailout and j % 5 != 1 and j < inner_m + len(outer_v) - 1:
                continue

            # -- GMRES optimization problem
            hess = numpy.zeros((j+1, j))
            e1 = numpy.zeros((j+1,))
            e1[0] = inner_res_0
            for q in range(j):
                hess[:(q+2), q] = hs[q]

            y, resids, rank, s = lstsq(hess, e1)
            inner_res = numpy.dot(hess, y) - e1
            inner_res = sqrt(numpy.inner(inner_res, inner_res))

            # -- check for termination
            if inner_res < tolerance * inner_res_0:
                break

        # -- GMRES terminated: eval solution
        dx = ws[0]*y[0]
        for w, yc in zip(ws[1:], y[1:]):
            dx += w*yc

        # -- Store LGMRES augmentation vectors
        nxi = 1/norm(dx)
        if store_outer_Av:
            q = numpy.dot(hess, y)
            ax = vs[0]*q[0]
            for v, qc in zip(vs[1:], q[1:]):
                ax += v*qc
            outer_v.append((nxi*dx, nxi*ax))
        else:
            outer_v.append((nxi*dx, None))

        # -- Retain only a finite number of augmentation vectors. Delete
        #    in-place so the caller's `outer_v` list stays updated, as the
        #    docstring promises; rebinding with a slice would break that.
        while len(outer_v) > outer_k:
            del outer_v[0]

        # -- Apply step
        x += dx
        r_outer = A*x - b
        r_norm = norm(r_outer)

        residuals.append(r_norm)

        # Call the user-provided callback with the current solution
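        # (e.g. callback=lambda k, x, r: print(k, r); the keyword arguments
        # are the outer iteration number, the current iterate and the
        # residual norm, matching the call below)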

        if callable(callback):
            callback(k=k_outer, x=x, r=r_norm)

    return x, residuals, [], []