Browse Source

fix(mge/optimizer): fix optimizer update step inplace add grad

GitOrigin-RevId: d677d1ca6b
release-1.4
Megvii Engine Team 4 years ago
parent
commit
e9d6361e33
4 changed files with 4 additions and 4 deletions
  1. +1
    -1
      imperative/python/megengine/optimizer/adadelta.py
  2. +1
    -1
      imperative/python/megengine/optimizer/adagrad.py
  3. +1
    -1
      imperative/python/megengine/optimizer/adam.py
  4. +1
    -1
      imperative/python/megengine/optimizer/sgd.py

+ 1
- 1
imperative/python/megengine/optimizer/adadelta.py View File

@@ -84,7 +84,7 @@ class Adadelta(Optimizer):
step += c1
grad = param.grad
if weight_decay != 0.0:
-            grad += param * _weight_decay
+            grad = grad + param * _weight_decay

square_avg = states["square_avg"]
acc_delta = states["acc_delta"]


+ 1
- 1
imperative/python/megengine/optimizer/adagrad.py View File

@@ -82,7 +82,7 @@ class Adagrad(Optimizer):
step += c1
grad = param.grad
if weight_decay != 0.0:
-            grad += param * _weight_decay
+            grad = grad + param * _weight_decay

square_avg = states["square_avg"]
square_avg += grad ** c2


+ 1
- 1
imperative/python/megengine/optimizer/adam.py View File

@@ -85,7 +85,7 @@ class Adam(Optimizer):

grad = param.grad
if weight_decay != 0.0:
-            grad += param * _weight_decay
+            grad = grad + param * _weight_decay

states = self._state[param]



+ 1
- 1
imperative/python/megengine/optimizer/sgd.py View File

@@ -72,7 +72,7 @@ class SGD(Optimizer):

grad = param.grad
if weight_decay != 0.0:
-            grad += param * _weight_decay
+            grad = grad + param * _weight_decay

if inplace_mode:
if momentum:


Loading…
Cancel
Save