lib/torch/nn/functional.rb in torch-rb-0.15.0 vs lib/torch/nn/functional.rb in torch-rb-0.16.0
- old
+ new
@@ -172,9 +172,21 @@
end
end

# activation layers
+ def elu(input, alpha: 1, inplace: false)
+ if inplace
+ NN.elu!(input, alpha)
+ else
+ NN.elu(input, alpha)
+ end
+ end
+
+ def gelu(input, approximate: 'none')
+ NN.gelu(input, approximate: approximate)
+ end
+
def hardshrink(input, lambd = 0.5)
Torch.hardshrink(input, lambd)
end

def leaky_relu(input, negative_slope = 0.01, inplace: false)
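
The hunk above adds `elu` and `gelu` to the functional interface in 0.16.0. A minimal usage sketch, for illustration only (it assumes the `Torch::NN::F` shorthand for `Torch::NN::Functional` that torch-rb exposes, and torch-rb >= 0.16.0; the variable names are placeholders):

require "torch"

x = Torch.randn(3)

# ELU with the default alpha; inplace: true instead routes to the
# destructive NN.elu! and mutates x in place
y = Torch::NN::F.elu(x, alpha: 1)

# GELU; the default approximate: "none" is the exact formulation,
# while "tanh" selects the tanh-based approximation, mirroring PyTorch
z = Torch::NN::F.gelu(x, approximate: "tanh")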