from .block_base import block_container
import numpy as np


class block_vec(block_container):
    """Class defining a block vector suitable for multiplication with a
    block_mat of the right dimension. Many of the methods of dolfin.Vector are
    made available by calling the equivalent method on the individual
    vectors."""

    def __init__(self, m, blocks=None):
        if hasattr(m, '__iter__'):
            blocks = list(m)
            m = len(blocks)
        block_container.__init__(self, m, blocks)
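
    # Construction sketch (hypothetical names: v0 and v1 stand for any two
    # assembled dolfin vectors):
    #
    #   bb = block_vec([v0, v1])   # wrap an iterable of blocks
    #   bb = block_vec(2)          # or two empty blocks, to allocate() later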

    def allocated(self):
        """Check whether all blocks are proper vectors."""
        from dolfin import GenericVector
        return all(isinstance(block, GenericVector) for block in self)

    def allocate(self, template, dim=None, alternative_templates=()):
        """Make sure all blocks are proper vectors. Any non-vector blocks are
        replaced with appropriately sized vectors (where the sizes are taken
        from the template, which should be a block_mat or a list of
        DirichletBCs or FunctionSpaces). If dim==0, newly allocated vectors
        use the layout appropriate for b (in Ax=b); if dim==1, the layout for
        x is used."""
        from dolfin import GenericVector
        from .block_mat import block_mat
        from .block_util import create_vec_from
        for i in range(len(self)):
            if isinstance(self[i], GenericVector):
                continue
            val = self[i]
            try:
                self[i] = create_vec_from(template[:,i] if dim==1 else template[i], dim)
            except Exception:
                for tmpl in alternative_templates:
                    try:
                        self[i] = create_vec_from(tmpl[:,i] if dim==1 else tmpl[i], dim)
                        break
                    except Exception:
                        pass
            if not isinstance(self[i], GenericVector):
                raise ValueError(
                    f"Can't allocate vector - no usable template for block {i}.\n"
                    "Consider calling something like bb.allocate([V, Q]) to initialise the block_vec."
                )
            self[i].zero()
            if val:
                self[i] += val
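
    # Allocation sketch (V and Q are hypothetical FunctionSpaces; the scalar
    # placeholder blocks are replaced by zero vectors of matching layout):
    #
    #   bb = block_vec([0, 0])
    #   bb.allocate([V, Q])        # or bb.allocate(A, dim=0) for b in Ax=b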

    def norm(self, ntype='l2'):
        if ntype == 'linf':
            return max(x.norm(ntype) for x in self)
        else:
            try:
                assert ntype[0] == 'l'
                p = int(ntype[1:])
            except Exception:
                raise TypeError(f"Unknown norm '{ntype}'")
            unpack = lambda x: pow(x, p)
            pack = lambda x: pow(x, 1/p)
            return pack(sum(unpack(x.norm(ntype)) for x in self))
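
    # In other words, the blockwise p-norm computed above is
    #   ||x||_p = (sum_i ||x_i||_p**p)**(1/p),
    # so e.g. bb.norm('l2') equals sqrt(sum of the squared block l2-norms).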

    def randomize(self):
        """Fill the block_vec with random data (with zero bias)."""
        from dolfin import MPI
        # FIXME: deal with dolfin MPI api changes
        for i in range(len(self)):
            if hasattr(self[i], 'local_size'):
                ran = np.random.random(self[i].local_size())
                ran -= MPI.sum(self[i].mpi_comm(), sum(ran))/self[i].size()
                self[i].set_local(ran)
                self[i].apply('')
            elif hasattr(self[i], '__len__'):
                ran = np.random.random(len(self[i]))
                # A plain array-like block has no mpi_comm(); assume the world
                # communicator when removing the bias (the original code
                # called MPI.sum(len(ran)) without a communicator argument).
                comm = MPI.comm_world
                ran -= MPI.sum(comm, sum(ran))/MPI.sum(comm, float(len(ran)))
                self[i][:] = ran
            else:
                raise ValueError(
                    f'block {i} in block_vec has no size -- use a proper vector or call allocate(A, dim=d)')
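
    # Note: "zero bias" means the entries are shifted so their global mean is
    # zero; presumably useful e.g. for start vectors that should not favour
    # the constant mode.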

    #
    # Map an operator on the block_vec to operators on the individual blocks.
    #

    def _map_operator(self, operator, inplace=False, args_fn=lambda i: (), filter_fn=None):
        y = self if inplace else block_vec(len(self))
        for i in range(len(self)):
            try:
                args = args_fn(i)
                if filter_fn is None or filter_fn(args):
                    v = getattr(self[i], operator)(*args)
                    if v is NotImplemented:
                        raise NotImplementedError()
                    if not inplace:
                        y[i] = v
            except Exception as e:
                if i == 0 or not inplace:
                    raise
                else:
                    raise RuntimeError(
                        "operator partially applied, block %d does not support '%s' (err=%s)" % (i, operator, str(e)))
        return y
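
    # Design note: for in-place application, a failure on block i>0 means the
    # earlier blocks were already modified, hence the explicit RuntimeError
    # above instead of simply re-raising the original exception.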

    def _map_scalar_operator(self, operator, x, inplace=False, filter_fn=None):
        return self._map_operator(operator, args_fn=lambda i: (float(x),), inplace=inplace, filter_fn=filter_fn)

    def _map_vector_operator(self, operator, x, inplace=False, filter_fn=None):
        return self._map_operator(operator, args_fn=lambda i: (x[i],), inplace=inplace, filter_fn=filter_fn)

    def _map_any_operator(self, operator, x, inplace=False):
        if hasattr(x, '__iter__'):
            return self._map_vector_operator(operator, x, inplace)
        else:
            return self._map_scalar_operator(operator, x, inplace)

    def copy(self):
        from . import block_util
        m = len(self)
        y = block_vec(m)
        for i in range(m):
            y[i] = block_util.copy(self[i])
        return y

    def zero(self): return self._map_operator('zero', inplace=True)

    def __add__ (self, x): return self._map_vector_operator('__add__', x)
    def __radd__(self, x): return self._map_vector_operator('__radd__', x)
    def __iadd__(self, x): return self._map_vector_operator('__iadd__', x, inplace=True)

    def __sub__ (self, x): return self._map_any_operator('__sub__', x)
    def __rsub__(self, x): return self._map_vector_operator('__rsub__', x)
    def __isub__(self, x): return self._map_vector_operator('__isub__', x, inplace=True)

    def __mul__ (self, x): return self._map_scalar_operator('__mul__', x)
    def __rmul__(self, x): return self._map_scalar_operator('__rmul__', x)
    def __imul__(self, x): return self._map_scalar_operator('__imul__', x, inplace=True)

    def inner(self, x): return sum(self._map_vector_operator('inner', x))
    def scale_by(self, x): return self._map_vector_operator('__imul__', x, inplace=True,
                                                            filter_fn=lambda args: args != (1,))

    def get_local(self): return self._map_operator('get_local')
    def set_local(self, x): return self._map_vector_operator('set_local', x, inplace=True)
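
    # Blockwise arithmetic in practice (sketch; u and v are assumed to be
    # allocated, compatible block_vecs):
    #
    #   w = u + v           # blockwise __add__
    #   u *= 2.0            # blockwise scalar __imul__
    #   a = u.inner(v)      # sum of the blockwise inner products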