lib/dnn/core/activations.rb in ruby-dnn-0.10.3 vs lib/dnn/core/activations.rb in ruby-dnn-0.10.4
- old
+ new
@@ -59,19 +59,19 @@
end


class ReLU < Layers::Layer
  def forward(x)
-   @x = x.clone
+   @x = x
    x[x < 0] = 0
    x
  end

  def backward(dy)
-   @x[@x > 0] = 1
-   @x[@x <= 0] = 0
-   dy * @x
+   dx = Xumo::SFloat.ones(@x.shape)
+   dx[@x <= 0] = 0
+   dy * dx
  end
end


class LeakyReLU < Layers::Layer
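In 0.10.4 ReLU#backward no longer rewrites the cached input in place to produce the gradient mask; it builds the mask from a fresh ones array. That is also why forward can drop the clone: the in-place x[x < 0] = 0 turns negative entries into 0, so the later @x <= 0 test in backward still selects exactly the positions that were clipped. A minimal standalone sketch of the masking idea, with Numo::SFloat (numo-narray gem) standing in for Xumo::SFloat and made-up example values:

# Standalone sketch of the mask-based ReLU gradient above (not ruby-dnn code;
# Numo::SFloat is assumed here in place of Xumo::SFloat).
require "numo/narray"

x  = Numo::SFloat[-2, -0.5, 0, 1, 3]   # cached forward input
dy = Numo::SFloat.ones(x.shape)        # upstream gradient

dx = Numo::SFloat.ones(x.shape)        # start from ones instead of mutating x
dx[x <= 0] = 0                         # zero the positions that were clipped
p dy * dx                              # gradient: [0, 0, 0, 1, 1]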
@@ -86,19 +86,19 @@
  def initialize(alpha = 0.3)
    @alpha = alpha
  end

  def forward(x)
-   @x = x.clone
+   @x = x
    a = Xumo::SFloat.ones(x.shape)
    a[x <= 0] = @alpha
    x * a
  end

  def backward(dy)
-   @x[@x > 0] = 1
-   @x[@x <= 0] = @alpha
-   dy * @x
+   dx = Xumo::SFloat.ones(@x.shape)
+   dx[@x <= 0] = @alpha
+   dy * dx
  end

  def to_hash
    {class: self.class.name, alpha: alpha}
  end
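LeakyReLU#backward gets the same treatment: the gradient is 1 for positive inputs and @alpha otherwise, computed as a fresh mask rather than by overwriting @x. Dropping the clone is safe here because forward returns a new array (x * a) instead of mutating its input. A hedged standalone sketch under the same Numo-for-Xumo assumption, with illustrative values:

# Standalone sketch of the mask-based LeakyReLU gradient (not ruby-dnn code).
require "numo/narray"

alpha = 0.3
x  = Numo::SFloat[-2, 0, 1, 3]          # cached forward input
dy = Numo::SFloat[0.5, 0.5, 0.5, 0.5]   # upstream gradient

a = Numo::SFloat.ones(x.shape)          # forward scaling, as in the diff
a[x <= 0] = alpha
p x * a                                 # forward output: [-0.6, 0.0, 1.0, 3.0]

dx = Numo::SFloat.ones(x.shape)         # backward: 1 for positive inputs,
dx[x <= 0] = alpha                      # alpha for the rest
p dy * dx                               # gradient: [0.15, 0.15, 0.5, 0.5]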