eT-program / eT / build 22395

07 Mar 2025 10:29PM UTC. Coverage: 88.587% (unchanged from the previous build).

Push build, run by gitlab-ci.

Merge branch 'write_xyz_trj' into 'development'

Write geometry optimization trajectory to .xyz file

See merge request eT-program/eT!1544

18 of 18 new or added lines in 5 files covered (100.0%).
38 existing lines in 4 files are now uncovered.
53858 of 60797 relevant lines covered (88.59%).
3059275.34 hits per line.

Source File: /src/solver_tools/objective_functions/objective_function_class.F90
File coverage: 96.36%
! eT - a coupled cluster program
! Copyright (C) 2016-2024 the authors of eT
!
! eT is free software: you can redistribute it and/or modify
! it under the terms of the GNU General Public License as published by
! the Free Software Foundation, either version 3 of the License, or
! (at your option) any later version.
!
! eT is distributed in the hope that it will be useful,
! but WITHOUT ANY WARRANTY; without even the implied warranty of
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
! GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License
! along with this program. If not, see <https://www.gnu.org/licenses/>.


module objective_function_class

   !!
   !! Objective Function
   !! Written by Eirik F. Kjønstad, 2021
   !!
   !! Class that represents a function whose value (and the values of its
   !! derivatives) can be evaluated for a given set of specifiable parameters.
   !!

   use kinds,                only: dp
   use global_out,           only: output
   use memory_manager_class, only: mem
   use output_file_class,    only: output_file

   implicit none

   type, abstract :: objective_function

      real(dp) :: step_size
      character(len=:), allocatable :: gradient_method

      type(output_file) :: optimization_iterations_file

      integer :: n_parameters

   contains

      procedure(get_parameters),      deferred, public :: get_parameters
      procedure(set_parameters),      deferred, public :: set_parameters
      procedure(get),                 deferred, public :: get
      procedure(analytical_gradient), deferred, public :: analytical_gradient
      procedure(initialize),          deferred, public :: initialize
      procedure,                                public :: get_gradient

      procedure, private :: numerical_gradient_forward_difference
      procedure, private :: numerical_gradient_central_difference

   end type objective_function


   abstract interface

      function get_parameters(this) result(x)

         import :: objective_function, dp

         implicit none

         class(objective_function), intent(in) :: this

         real(dp), dimension(this%n_parameters) :: x

      end function get_parameters

      subroutine set_parameters(this, x)

         import :: objective_function, dp

         implicit none

         class(objective_function), intent(inout) :: this

         real(dp), dimension(this%n_parameters), intent(in) :: x

      end subroutine set_parameters

      function get(this) result(energy)

         import :: objective_function, dp

         implicit none

         class(objective_function), intent(in) :: this

         real(dp) :: energy

      end function get

      subroutine analytical_gradient(this, g)

         import :: objective_function, dp

         implicit none

         class(objective_function), intent(inout) :: this

         real(dp), dimension(this%n_parameters), intent(out) :: g

      end subroutine analytical_gradient

      subroutine initialize(this)

         import :: objective_function

         implicit none

         class(objective_function), intent(inout) :: this

      end subroutine initialize

   end interface

contains


   subroutine numerical_gradient_forward_difference(this, g, h)
      !!
      !! Written by Jan Haakon M. Trabski, Feb 2022
      !!
      !! g(i) = (f(x + h*e_i) - f(x)) / h,  where e_i is the i-th unit vector
      !!
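      !! A forward difference has O(h) truncation error and costs
      !! n_parameters + 1 energy evaluations; the central difference
      !! below is O(h**2) accurate but costs 2*n_parameters evaluations.
      !!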
      use array_initialization, only: zero_array

      implicit none

      class(objective_function),              intent(inout) :: this
      real(dp), dimension(this%n_parameters), intent(out)   :: g
      real(dp), intent(in) :: h

      real(dp), dimension(:), allocatable :: x, x0

      real(dp) :: E0, E
      integer :: i

      call mem%alloc(x, this%n_parameters)
      call mem%alloc(x0, this%n_parameters)

      E0 = this%get()

      ! Get the parameters (current geometry); x0 was updated in the last iteration.

      x0 = this%get_parameters()

      ! Calculate the numerical gradient for each parameter, muting output
      ! so that the extra energy evaluations are not logged

      call output%mute()
      call this%optimization_iterations_file%mute()

      do i = 1, this%n_parameters

         call dcopy(this%n_parameters, x0, 1, x, 1)
         x(i) = x(i) + h
         call this%set_parameters(x)
         E = this%get()
         g(i) = (E - E0) / h

      enddo

      call output%unmute()
      call this%optimization_iterations_file%unmute()

      call this%set_parameters(x0)

      call mem%dealloc(x)
      call mem%dealloc(x0)

   end subroutine numerical_gradient_forward_difference


   subroutine numerical_gradient_central_difference(this, g, h)
      !!
      !! Written by Jan Haakon M. Trabski, March 2022
      !!
      !! g(i) = (f(x + h*e_i) - f(x - h*e_i)) / (2h)
      !!
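      !! The O(h) error term of the forward difference cancels here, leaving
      !! an O(h**2) error at the cost of two energy evaluations per parameter.
      !!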
      use array_initialization, only: zero_array

      implicit none

      class(objective_function),              intent(inout) :: this
      real(dp), dimension(this%n_parameters), intent(out)   :: g
      real(dp), intent(in) :: h

      real(dp), dimension(:), allocatable :: x_p, x_n, x0

      real(dp) :: E_p, E_n
      integer :: i

      call mem%alloc(x_n, this%n_parameters)
      call mem%alloc(x_p, this%n_parameters)
      call mem%alloc(x0, this%n_parameters)

      x0 = this%get_parameters()

      ! Calculate the numerical gradient for each parameter, muting output
      ! so that the extra energy evaluations are not logged

      call output%mute()
      call this%optimization_iterations_file%mute()

      do i = 1, this%n_parameters

         ! Displace parameter i by +h (x_p) and -h (x_n) from x0

         call dcopy(this%n_parameters, x0, 1, x_p, 1)
         x_p(i) = x_p(i) + h
         call dcopy(this%n_parameters, x0, 1, x_n, 1)
         x_n(i) = x_n(i) - h

         call this%set_parameters(x_n)
         E_n = this%get()

         call this%set_parameters(x_p)
         E_p = this%get()

         g(i) = (E_p - E_n) / (2*h)

      enddo

      call output%unmute()
      call this%optimization_iterations_file%unmute()

      call this%set_parameters(x0)

      call mem%dealloc(x_n)
      call mem%dealloc(x_p)
      call mem%dealloc(x0)

   end subroutine numerical_gradient_central_difference


   function get_gradient(this) result(g)
      !!
      !! Written by Eirik F. Kjønstad, 2021 and 2023
      !!
      !! Dispatches to the gradient routine selected by gradient_method:
      !! "analytical", "forward difference", or "central difference"
      !!
      implicit none

      class(objective_function), intent(inout) :: this

      real(dp), dimension(this%n_parameters) :: g

      if (trim(this%gradient_method) == "analytical") then

         call this%analytical_gradient(g)

      else if (trim(this%gradient_method) == "forward difference") then

         call this%numerical_gradient_forward_difference(g, this%step_size)

      else if (trim(this%gradient_method) == "central difference") then

         call this%numerical_gradient_central_difference(g, this%step_size)

      else

         call output%error_msg('Could not recognize gradient method in get_gradient')

      endif

   end function get_gradient


end module objective_function_class
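
To illustrate how the abstract type is meant to be used, here is a minimal hypothetical sketch of a concrete implementation: a quadratic objective f(x) = (1/2) x.x, whose analytical gradient is simply x. The module and type names (quadratic_objective_class, quadratic_objective) are invented for this sketch, plain allocate is used in place of eT's memory manager, and it is assumed that the default-initialized output and optimization_iterations_file handles tolerate the mute/unmute calls made by the numerical gradient routines.

module quadratic_objective_class

   !!
   !! Quadratic Objective (illustrative sketch, not part of eT)
   !!
   !! Implements f(x) = (1/2) * x.x, so that the analytical gradient is x.
   !!

   use kinds,                    only: dp
   use objective_function_class, only: objective_function

   implicit none

   type, extends(objective_function) :: quadratic_objective

      real(dp), dimension(:), allocatable :: x   ! current parameters

   contains

      procedure, public :: get_parameters
      procedure, public :: set_parameters
      procedure, public :: get
      procedure, public :: analytical_gradient
      procedure, public :: initialize

   end type quadratic_objective

contains

   subroutine initialize(this)
      class(quadratic_objective), intent(inout) :: this
      allocate(this%x(this%n_parameters))   ! plain allocate; eT would use mem%alloc
      this%x = 0.0_dp
   end subroutine initialize

   function get_parameters(this) result(x)
      class(quadratic_objective), intent(in) :: this
      real(dp), dimension(this%n_parameters) :: x
      x = this%x
   end function get_parameters

   subroutine set_parameters(this, x)
      class(quadratic_objective), intent(inout) :: this
      real(dp), dimension(this%n_parameters), intent(in) :: x
      this%x = x
   end subroutine set_parameters

   function get(this) result(energy)
      class(quadratic_objective), intent(in) :: this
      real(dp) :: energy
      energy = 0.5_dp * sum(this%x**2)   ! f(x) = (1/2) x.x
   end function get

   subroutine analytical_gradient(this, g)
      class(quadratic_objective), intent(inout) :: this
      real(dp), dimension(this%n_parameters), intent(out) :: g
      g = this%x                          ! grad f = x
   end subroutine analytical_gradient

end module quadratic_objective_class

A driver would then choose the gradient method and step size before calling get_gradient:

program test_gradient

   use kinds,                     only: dp
   use quadratic_objective_class, only: quadratic_objective

   implicit none

   type(quadratic_objective) :: f
   real(dp), dimension(3)    :: g

   f%n_parameters    = 3
   f%gradient_method = "central difference"   ! or "analytical" / "forward difference"
   f%step_size       = 1.0e-4_dp

   call f%initialize()
   call f%set_parameters([1.0_dp, 2.0_dp, 3.0_dp])

   g = f%get_gradient()   ! ~ (1.0, 2.0, 3.0); exact here up to roundoff,
                          ! since a quadratic f has no third derivative

end program test_gradient

For a quadratic objective the central difference is exact up to roundoff, because the O(h**2) error term involves the vanishing third derivative; this makes it a convenient consistency check between a concrete implementation's get and analytical_gradient.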