From f12a1b1e75de6b4f342568c6b44d2508b3fce69a Mon Sep 17 00:00:00 2001
From: David Rotermund <54365609+davrot@users.noreply.github.com>
Date: Thu, 5 Jan 2023 13:24:47 +0100
Subject: [PATCH] Add files via upload

---
 network/Adam.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/network/Adam.py b/network/Adam.py
index 08009fe..45bde9b 100644
--- a/network/Adam.py
+++ b/network/Adam.py
@@ -60,6 +60,8 @@ class Adam(torch.optim.Optimizer):
         state_steps = []
         sbs_setting = []
 
+        assert len(self.param_groups) == 1
+
         for id, p in enumerate(self.params):
             if p.grad is not None:
                 params_with_grad.append(p)
@@ -95,7 +97,7 @@ class Adam(torch.optim.Optimizer):
             state_steps,
             beta1=self.beta1,
             beta2=self.beta2,
-            lr=self.lr,
+            lr=self.param_groups[0]["lr"],
             eps=self.eps,
             maximize=self.maximize,
         )
@@ -149,6 +151,6 @@ class Adam(torch.optim.Optimizer):
         else:
             delta = torch.exp(-step_size * (exp_avg / denom))
             print(
-                f"{float(delta.min()) - 1.0:.4e} {float(delta.max()) - 1.0:.4e}"
+                f"{float(delta.min()) - 1.0:.4e} {float(delta.max()) - 1.0:.4e} {lr:.4e}"
             )
             param *= delta
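
Note (not part of the patch): the change replaces the cached self.lr with a lookup of
self.param_groups[0]["lr"] at step time, and the added assert guards the assumption that
only a single parameter group exists. The sketch below is a minimal illustration of why
that lookup matters with standard PyTorch: LR schedulers write the new rate into
optimizer.param_groups[i]["lr"] in place, so an optimizer that reads a rate cached at
construction time never sees scheduler updates. The tensor, optimizer, and scheduler
choices here are placeholders, not taken from this repository.

# Illustrative sketch: observing the learning rate that a scheduler writes
# into param_groups, which is the value the patched step() now reads.
import torch

params = [torch.zeros(3, requires_grad=True)]  # hypothetical parameter
optimizer = torch.optim.Adam(params, lr=1e-3)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.1)

for epoch in range(3):
    # ... forward pass and loss.backward() would go here ...
    optimizer.step()
    scheduler.step()
    # The scheduler has updated param_groups in place; reading the rate here
    # (as the patch does via self.param_groups[0]["lr"]) picks up the change,
    # whereas a value cached in self.lr would stay at 1e-3.
    print(optimizer.param_groups[0]["lr"])  # prints 1e-4, then 1e-5, then 1e-6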