Skip to content

Commit ae48d6e

Browse files
Merge pull request #411 from SSCHAcode/improved_minimizer
Improved minimizer
2 parents: e070094 + 27c8853 · commit ae48d6e

5 files changed

Lines changed: 15 additions & 9 deletions

File tree

Modules/Minimizer.py

Lines changed: 8 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -224,6 +224,9 @@ def run_step(self, gradient, kl_new):
224224
# Enlarge the step
225225
if not self.fixed_step:
226226
self.step *= self.increment_step
227+
228+
# Perform the minimization step for new direction
229+
self.current_x = self.old_x - self.step * self.direction
227230
else:
228231
# Proceed with the line minimization
229232

@@ -243,14 +246,16 @@ def run_step(self, gradient, kl_new):
243246
print("Step too large (scalar = {} | kl_ratio = {}), reducing to {}".format(scalar, kl_ratio, self.step))
244247
#print("Direction: ", self.direction)
245248
#print("Gradient: ", gradient)
249+
250+
# Try again with reduced step
251+
self.current_x = self.old_x - self.step * self.direction
246252
else:
247253
# The step is good, therefore next step perform a new direction
248254
self.new_direction = True
249255
if self.verbose:
250256
print("Good step found with {}, try increment".format(self.step))
251-
252-
# Perform the minimiziation step
253-
self.current_x = self.old_x - self.step * self.direction
257+
# DO NOT update current_x - we accept the current position
258+
# (current_x was already updated in the previous step)
254259

255260

256261
def update_dyn(self, new_kl_ratio, dyn_gradient, structure_gradient = None):

Modules/SchaMinimizer.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -964,11 +964,11 @@ def print_info(self):
964964
print (" supercell size = ", " ".join([str(x) for x in self.ensemble.supercell]))
965965

966966
# Get the current frequencies
967-
w, pols = self.dyn.GenerateSupercellDyn(self.ensemble.supercell).DyagDinQ(0)
967+
w, pols = self.dyn.DiagonalizeSupercell()#self.dyn.GenerateSupercellDyn(self.ensemble.supercell).DyagDinQ(0)
968968
w *= __RyToCm__
969969

970970
# Get the starting frequencies
971-
w0, p0 = self.ensemble.dyn_0.GenerateSupercellDyn(self.ensemble.supercell).DyagDinQ(0)
971+
w0, p0 = self.ensemble.dyn_0.DiagonalizeSupercell()
972972
w0 *= __RyToCm__
973973

974974
print ()

Modules/fourier_gradient.jl

Lines changed: 3 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -78,7 +78,7 @@ function get_gradient_fourier!(Φ_grad :: Array{Complex{T}, 3},
7878
for i in 1:n_random
7979
tmp = v_tilde[i, j, jq] * conj(δf_tilde[i, k, jq])
8080
Φ_grad[j, k, jq] += tmp * weights[i]
81-
Φ_grad_err[j, k, jq] += tmp * conj(tmp) * weights[i]
81+
Φ_grad_err[j, k, jq] += abs2(tmp) * weights[i]
8282

8383
# @views mul!(tmp, v_tilde[:, jq, i], δf_tilde[:, jq, i]')
8484
# @. tmp2 = tmp * conj(tmp)
@@ -103,9 +103,10 @@ function get_gradient_fourier!(Φ_grad :: Array{Complex{T}, 3},
103103
begin
104104
tmp_grad = zeros(Complex{T}, (3*nat, 3*nat, nq))
105105
for iq in 1:nq
106-
@views Φ_grad[:, :, iq] .+= Φ_grad[:, :, iq]'
107106
@views tmp_grad[:, :, iq] .= Φ_grad[:, :, iq]
107+
@views tmp_grad[:, :, iq] .+= Φ_grad[:, :, iq]'
108108
end
109+
Φ_grad .= tmp_grad
109110
for iq in 1:nq
110111
@views tmp_grad[:, :, iq] .+= conj.(Φ_grad[:, :, minus_q_index[iq]]')
111112
end

meson.build

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
project('python-sscha',
22
['c','fortran'],
3-
version : '1.5.0',
3+
version : '1.6.0',
44
license: 'GPL',
55
meson_version: '>= 1.1.0', # <- set min version of meson.
66
default_options : [

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,7 @@ build-backend = "mesonpy"
44

55
[project]
66
name = "python-sscha"
7-
version = "1.5.1"
7+
version = "1.6.0"
88
description = "Python implementation of the sscha code"
99
authors = [{name = "Lorenzo Monacelli"}] # Put here email
1010
readme = "README.md"

0 commit comments

Comments (0)