
Commit e34aef8

fix adamg instability
1 parent aee5fc4 commit e34aef8

File tree

1 file changed: +5 -5 lines changed

pytorch_optimizer/optimizer/adamg.py (+5 -5)
@@ -24,9 +24,9 @@ class AdamG(BaseOptimizer):
     def __init__(
         self,
         params: PARAMETERS,
-        lr: float = 1e-3,
+        lr: float = 1.0,
         betas: BETAS = (0.95, 0.999, 0.95),
-        p: float = 0.5,
+        p: float = 0.2,
         q: float = 0.24,
         weight_decay: float = 0.0,
         weight_decouple: bool = False,
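The first hunk only changes default hyper-parameters: lr moves from 1e-3 to 1.0 and p from 0.5 to 0.2. Since the step size is capped at 1.0 / math.sqrt(group['step']) later in this diff, lr=1.0 acts as an upper bound on the update rather than a conventional learning rate. A minimal usage sketch with the new defaults; the top-level import path and the toy model are assumptions for illustration, not part of this commit:

    import torch
    from pytorch_optimizer import AdamG  # assumed top-level export

    model = torch.nn.Linear(10, 1)

    # defaults after this commit: lr=1.0, betas=(0.95, 0.999, 0.95), p=0.2, q=0.24
    optimizer = AdamG(model.parameters())

    x, y = torch.randn(8, 10), torch.randn(8, 1)
    loss = torch.nn.functional.mse_loss(model(x), y)

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()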
@@ -88,8 +88,8 @@ def step(self, closure: CLOSURE = None) -> LOSS:

             beta1, beta2, beta3 = group['betas']

-            bias_correction1: float = 1.0 - self.debias(beta1, group['step'])
-            bias_correction2: float = 1.0 - self.debias(beta2, group['step'])
+            bias_correction1: float = self.debias(beta1, group['step'])
+            bias_correction2: float = self.debias(beta2, group['step'])
             step_size: float = min(group['lr'], 1.0 / math.sqrt(group['step']))

             for p in group['params']:
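The second hunk is the instability fix itself. Assuming BaseOptimizer.debias(beta, step) already returns the standard Adam correction factor 1 - beta**step (an assumption about the helper, which is not shown in this diff), the old code computed 1.0 - (1 - beta**step) = beta**step, a factor that decays toward zero as training proceeds, so anything divided by it blows up; dropping the extra "1.0 -" restores the usual correction. A small numeric sketch of the difference under that assumption:

    import math

    def debias(beta: float, step: int) -> float:
        # assumed behaviour of BaseOptimizer.debias: the standard Adam bias-correction factor
        return 1.0 - math.pow(beta, step)

    beta1, step = 0.95, 100

    old = 1.0 - debias(beta1, step)  # before the fix: beta1**step, roughly 0.006 here, tending to 0
    new = debias(beta1, step)        # after the fix: roughly 0.994 here, tending to 1 as in standard Adam

    # dividing a moment estimate by `old` inflates it by ~170x at step 100 and grows without bound,
    # whereas dividing by `new` is the usual, well-behaved Adam correction.
    print(old, new)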
@@ -125,4 +125,4 @@ def step(self, closure: CLOSURE = None) -> LOSS:

                 p.add_(update, alpha=-step_size)

-        return loss
+        return loss

0 commit comments
