Coverage for gpaw/test/fd_ops/test_gradient.py: 94%

62 statements  

import numpy as np
import pytest

from gpaw.core import UGDesc
from gpaw.fd_operators import FDOperator, Gradient
from gpaw.grid_descriptor import GridDescriptor
from gpaw.mpi import world


@pytest.mark.parametrize('cell, size',
                         [([8.7, 5.7, 7.7, 28, 120, 95], [60, 40, 54]),
                          ([2, 2, 2, 120.1, 120.1, 90], [20, 20, 20])])
def test_strange_cell(cell, size):
    """Make sure x-derivative is correct even in a strange cell.

    See issue #1102.
    """
    grid = UGDesc(cell=cell, size=size)
    grad = Gradient(grid._gd, v=0)
    a, b = grid.empty(2)
    a.data[:] = grid.xyz()[:, :, :, 0]
    grad.apply(a.data, b.data)
    n1, n2, n3 = grid.size_c // 2
    assert b.data[n1, n2, n3] == pytest.approx(1.0)


def test_fd_ops_gradient():
    if world.size > 4:
        # Grid is so small that domain decomposition cannot exceed 4 domains
        assert world.size % 4 == 0
        group, other = divmod(world.rank, 4)
        ranks = np.arange(4 * group, 4 * (group + 1))
        domain_comm = world.new_communicator(ranks)
    else:
        domain_comm = world

    gd = GridDescriptor((8, 1, 1), (8.0, 1.0, 1.0), comm=domain_comm)
    a = gd.zeros()
    dadx = gd.zeros()
    a[:, 0, 0] = np.arange(gd.beg_c[0], gd.end_c[0])
    gradx = Gradient(gd, v=0)
    print(a.itemsize, a.dtype, a.shape)
    print(dadx.itemsize, dadx.dtype, dadx.shape)
    gradx.apply(a, dadx)

    # a = [ 0. 1. 2. 3. 4. 5. 6. 7.]

    dAdx = gd.collect(dadx, broadcast=True)
    assert not (dAdx[:, 0, 0] - [-3, 1, 1, 1, 1, 1, 1, -3]).any()

    # Backwards FD operator:
    gradx2 = FDOperator([-1, 1], [[-1, 0, 0], [0, 0, 0]], gd)
    gradx2.apply(a, dadx)
    dAdx = gd.collect(dadx, broadcast=True)
    assert not (dAdx[:, 0, 0] - [-7, 1, 1, 1, 1, 1, 1, 1]).any()
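    # (Hedged aside, not part of the original test.)  The two expected arrays
    # above can be re-derived with plain NumPy: on this periodic 8-point grid
    # the spacing is 8.0 / 8 = 1.0, so a central difference of the wrapped
    # ramp 0..7 gives -3 at both ends, and a backwards difference gives -7 at
    # the first point.  This is only a sanity sketch, not GPAW functionality.
    ramp = np.arange(8.0)
    central = (np.roll(ramp, -1) - np.roll(ramp, 1)) / 2.0
    backward = ramp - np.roll(ramp, 1)
    assert not (central - [-3, 1, 1, 1, 1, 1, 1, -3]).any()
    assert not (backward - [-7, 1, 1, 1, 1, 1, 1, 1]).any()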

    gd = GridDescriptor((1, 8, 1), (1.0, 8.0, 1.0),
                        (1, 0, 1), comm=domain_comm)
    dady = gd.zeros()
    a = gd.zeros()
    grady = Gradient(gd, v=1)
    a[0, :, 0] = np.arange(gd.beg_c[1], gd.end_c[1]) - 1
    grady.apply(a, dady)

    # da
    # -- = [0.5 1. 1. 1. 1. 1. -2.5]
    # dy

    dady = gd.collect(dady, broadcast=True)
    assert dady[0, 0, 0] == 0.5 and np.sum(dady[0, :, 0]) == 3.0
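    # (Hedged aside, not part of the original test.)  The expected 0.5 and
    # -2.5 edge values are consistent with a central difference that treats
    # values outside the non-periodic y boundary as zero (spacing 8.0 / 8):
    ramp = np.arange(7.0)                          # the stored ramp 0..6
    padded = np.concatenate(([0.0], ramp, [0.0]))  # zeros outside the box
    central = (padded[2:] - padded[:-2]) / 2.0     # [0.5, 1, ..., 1, -2.5]
    assert central[0] == 0.5 and central[-1] == -2.5 and central.sum() == 3.0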

    # a GUC (general, non-orthogonal unit cell)
    gd = GridDescriptor((1, 7, 1),
                        ((1.0, 0.0, 0.0),
                         (5.0, 5.0, 0.0),
                         (0.0, 0.0, 0.7)), comm=domain_comm)
    dady = gd.zeros()
    grady = Gradient(gd, v=1)
    a = gd.zeros()
    a[0, :, 0] = np.arange(gd.beg_c[1], gd.end_c[1]) - 1
    grady.apply(a, dady)

    # da
    # -- = [-3.5 1.4 1.4 1.4 1.4 1.4 -3.5]
    # dy

    dady = gd.collect(dady, broadcast=True)
    assert abs(dady[0, 0, 0] + 3.5) < 1e-12
    assert abs(np.sum(dady[0, :, 0])) < 1e-12
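    # (Hedged aside, not part of the original test.)  The interior value 1.4
    # follows from the chain rule: the data grow by 1 per grid step along the
    # second cell vector (7 points, so da/ds1 = 7), and the derivative of that
    # scaled coordinate with respect to Cartesian y is an element of the
    # inverse cell matrix:
    cell_cv = np.array([[1.0, 0.0, 0.0],
                        [5.0, 5.0, 0.0],
                        [0.0, 0.0, 0.7]])
    dsdy = np.linalg.inv(cell_cv)[1, 1]  # = 0.2
    assert 7 * dsdy == pytest.approx(1.4)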

    # Check continuity of weights as the cell shear is varied:
    weights = []
    for x in np.linspace(-0.6, 0.6, 130, True):
        gd = GridDescriptor((3, 3, 3),
                            ((3.0, 0.0, 0.0),
                             (3 * x, 1.5 * 3**0.5, 0.0),
                             (0.0, 0.0, 3)),
                            comm=domain_comm)
        g = Gradient(gd, v=0)
        for c, o in zip(g.coef_p, g.offset_pc):
            if (o == (-1, 1, 0)).all():
                break
        else:
            # Offset not present in the stencil for this cell shape.
            c = 0.0
        weights.append(c)

    if 0:
        # Debug plotting of the recorded weights (disabled by default):
        import matplotlib.pyplot as plt
        plt.plot(weights)
        plt.show()
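    # (Hedged aside, not part of the original file.)  A numeric alternative to
    # the disabled plot above would be to inspect the largest jump between
    # neighbouring weight samples; no threshold is asserted here because the
    # expected magnitude is not documented in this test.
    print('largest weight jump:', np.abs(np.diff(weights)).max())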