Coverage for gpaw/eigensolvers/diagonalizerbackend.py: 98%

61 statements  

coverage.py v7.7.1, created at 2025-07-12 00:18 +0000

1"""Module for Numpy array diagonalization with Scipy/Scalapack.""" 

2from abc import ABC, abstractmethod 

3from functools import cached_property 

4 

5import numpy as np 

6from scipy.linalg import eigh 

7 

8from gpaw.blacs import BlacsGrid, Redistributor 

9from gpaw.mpi import broadcast_exception, MPIComm 

10from gpaw.utilities.elpa import LibElpa 

11from gpaw.utilities.tools import tri2full 

12 

13 

14class ScipyDiagonalizer: 

15 """Diagonalizer class that uses scipy.linalg.eigh. 

16 

17 The ScipyDiagonalizer wraps scipy.linalg.eigh to solve a 

18 (generalized) eigenproblem on one core. 

19 """ 

    def __init__(self, comm: MPIComm):
        self.comm = comm

    def diagonalize(self, A, B, eps, debug):
        """Solve the eigenproblem A @ x = eps [B] @ x.

        The problem is solved in place: when done, A has the eigenvectors
        as columns and eps has the eigenvalues.  B is overwritten for a
        potential increase in performance.  Since scipy.linalg.eigh is not
        MPI-parallelized, the diagonalization runs on rank 0 of the
        communicator, which must be the rank that holds the arrays; any
        exception raised there is broadcast to the other ranks.

        Parameters
        ----------
        A : Numpy array
            Left-hand matrix of the eigenproblem.  After running, the
            eigenvectors are the column vectors of this array.
        B : Numpy array
            Right-hand overlap matrix of the eigenproblem.
        eps : Numpy array
            1D vector containing the eigenvalues of the solved eigenproblem.
        debug : bool
            Flag to check for finiteness when running in debug mode.
        """
        with broadcast_exception(self.comm):
            if self.comm.rank == 0:
                eps[:], A[:] = eigh(
                    A, B, lower=True, check_finite=debug, overwrite_b=True)
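

# A minimal usage sketch (an illustration added here, not part of the
# GPAW module): solve a small generalized eigenproblem with the serial
# backend.  The matrix values and the use of gpaw.mpi.serial_comm are
# assumptions made for the example.
def _example_scipy_usage():
    from gpaw.mpi import serial_comm

    rng = np.random.default_rng(seed=0)
    A = rng.standard_normal((4, 4))
    A = A + A.T  # symmetrize the left-hand matrix
    B = np.eye(4)  # trivial overlap matrix
    eps = np.empty(4)

    ScipyDiagonalizer(serial_comm).diagonalize(A, B, eps, debug=True)
    # eps now holds the eigenvalues in ascending order and the columns
    # of A the corresponding eigenvectors.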


class DistributedBlacsDiagonalizer(ABC):
    """Base class for diagonalizers that distribute a (generalized)
    eigenproblem over a BLACS grid."""

    do_tri2full = False

    def __init__(
            self,
            arraysize,
            grid_nrows,
            grid_ncols,
            *,
            dtype,
            scalapack_communicator,
            blocksize=64):
        """Initialize grids, communicators, redistributors.

        Parameters
        ----------
        arraysize : int
            The side length of the square matrix to diagonalize.
        grid_nrows : int
            Number of rows in the BLACS grid.
        grid_ncols : int
            Number of columns in the BLACS grid.
        dtype : type
            `float` or `complex`, the datatype of the eigenproblem.
        scalapack_communicator : MPIComm
            The communicator object over which Scalapack diagonalizes.
        blocksize : int, optional
            The block size in the 2D block-cyclic data distribution.
            The default value of 64 is a good all-round choice.
        """
        self.arraysize = arraysize
        self.blocksize = blocksize
        self.dtype = dtype
        self.scalapack_communicator = scalapack_communicator

        self.blacsgrid = BlacsGrid(
            scalapack_communicator, grid_nrows, grid_ncols)
        self.distributed_descriptor = self.blacsgrid.new_descriptor(
            arraysize, arraysize, blocksize, blocksize)
        self.head_rank_descriptor = self.blacsgrid.new_descriptor(
            arraysize, arraysize, arraysize, arraysize)

        self.head_to_all_redistributor = Redistributor(
            self.blacsgrid.comm,
            self.head_rank_descriptor,
            self.distributed_descriptor)

        self.all_to_head_redistributor = Redistributor(
            self.blacsgrid.comm,
            self.distributed_descriptor,
            self.head_rank_descriptor)
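
        # Layout note: head_rank_descriptor holds the whole
        # arraysize x arraysize matrix as a single block on the grid's
        # rank 0, while distributed_descriptor splits it into
        # blocksize x blocksize blocks cycled over the process grid.
        # The two Redistributors copy between these layouts in the two
        # directions.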

    def diagonalize(self, A, B, eps, debug):
        """Solve the eigenproblem A @ x = eps [B] @ x.

        The problem is solved in place: when done, A has the eigenvectors
        as columns and eps has the eigenvalues.  The full arrays live on
        rank 0 of the ScaLAPACK communicator; they are redistributed over
        the BLACS grid, diagonalized in parallel, and the result is
        collected back on rank 0.

        Parameters
        ----------
        A : Numpy array
            Left-hand matrix of the eigenproblem.  After running, the
            eigenvectors are the column vectors of this array.
        B : Numpy array
            Right-hand overlap matrix of the eigenproblem.
        eps : Numpy array
            1D vector containing the eigenvalues of the solved eigenproblem.
        debug : bool
            Flag to check for finiteness when running in debug mode.
        """
        Asc_MM = self.head_rank_descriptor.zeros(dtype=self.dtype)
        Bsc_MM = self.head_rank_descriptor.zeros(dtype=self.dtype)
        vec_MM = self.head_rank_descriptor.zeros(dtype=self.dtype)

        Asc_mm = self.distributed_descriptor.zeros(dtype=self.dtype)
        Bsc_mm = self.distributed_descriptor.zeros(dtype=self.dtype)
        vec_mm = self.distributed_descriptor.zeros(dtype=self.dtype)

        temporary_eps = np.zeros([self.arraysize])
        if self.scalapack_communicator.rank == 0:
            assert self.blacsgrid.comm.rank == 0
            if self.do_tri2full:
                tri2full(A)  # ELPA requires the full matrix
            Asc_MM[:, :] = A
            Bsc_MM[:, :] = B

        self.head_to_all_redistributor.redistribute(Asc_MM, Asc_mm)
        self.head_to_all_redistributor.redistribute(Bsc_MM, Bsc_mm)

        self._eigh(Asc_mm, Bsc_mm, vec_mm, temporary_eps)

        # vec_mm contains the eigenvectors in 'Fortran form'.  They need to
        # be transpose-conjugated before they are consistent with Scipy
        # behaviour.
        self.all_to_head_redistributor.redistribute(vec_mm, vec_MM, uplo="G")

        if self.scalapack_communicator.rank == 0:
            # Conjugate-transpose here, since the distributed solvers give
            # us Fortran-convention eigenvectors.
            A[:, :] = vec_MM.conj().T
            eps[:] = temporary_eps

    @abstractmethod
    def _eigh(self, Asc_mm, Bsc_mm, vec_mm, eps):
        raise NotImplementedError()


class ScalapackDiagonalizer(DistributedBlacsDiagonalizer):
    """Diagonalizer class that uses general_diagonalize_dc.

    The ScalapackDiagonalizer wraps Scalapack's general_diagonalize_dc
    to solve a (generalized) eigenproblem in parallel over the ranks of
    the BLACS grid.
    """

    def _eigh(self, Asc_mm, Bsc_mm, vec_mm, eps):
        self.distributed_descriptor.general_diagonalize_dc(
            Asc_mm, Bsc_mm, vec_mm, eps)


class ElpaDiagonalizer(DistributedBlacsDiagonalizer):
    """Diagonalizer class that uses LibElpa general_diagonalize."""

    do_tri2full = True

    @cached_property
    def elpa(self):
        return LibElpa(self.distributed_descriptor)

    def _eigh(self, Asc_mm, Bsc_mm, vec_mm, eps):
        self.elpa.general_diagonalize(
            Asc_mm, Bsc_mm, vec_mm, eps)
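

# A hedged end-to-end sketch (an illustration added here, not part of the
# GPAW module): diagonalize a matrix with a known spectrum using the
# ScaLAPACK backend.  The 2 x 2 grid shape and the use of gpaw.mpi.world
# are assumptions; running it needs 4 MPI ranks and a ScaLAPACK-enabled
# GPAW build.
def _example_scalapack_usage():
    from gpaw.mpi import world

    n = 128
    diagonalizer = ScalapackDiagonalizer(
        n, 2, 2, dtype=float, scalapack_communicator=world)

    A = np.zeros((n, n))
    B = np.eye(n)
    eps = np.zeros(n)
    if world.rank == 0:
        A[:] = np.diag(np.arange(n, dtype=float))  # known eigenvalues
    diagonalizer.diagonalize(A, B, eps, debug=False)
    # On rank 0, eps now holds 0, 1, ..., n - 1 and the columns of A
    # the corresponding eigenvectors.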