lib/torch/optim/adamax.rb in torch-rb-0.4.1 vs lib/torch/optim/adamax.rb in torch-rb-0.4.2

- old
+ new

@@ -40,10 +40,10 @@
         eps = group[:eps]

         state[:step] += 1

         if group[:weight_decay] != 0
-          grad = grad.add(group[:weight_decay], p.data)
+          grad = grad.add(p.data, alpha: group[:weight_decay])
         end

         # Update biased first moment estimate.
         exp_avg.mul!(beta1).add!(grad, alpha: 1 - beta1)
         # Update the exponentially weighted infinity norm.