lib/torch/optim/rprop.rb in torch-rb-0.4.2 vs lib/torch/optim/rprop.rb in torch-rb-0.5.0

- old
+ new

@@ -50,10 +50,10 @@
         # for dir>=0 dfdx=dfdx
         grad = grad.clone
         grad[sign.eq(etaminus)] = 0

         # update parameters
-        p.data.addcmul!(-1, grad.sign, step_size)
+        p.data.addcmul!(grad.sign, step_size, value: -1)

         state[:prev].copy!(grad)
       end
     end