lib/dnn/core/activations.rb in ruby-dnn-0.13.4 vs lib/dnn/core/activations.rb in ruby-dnn-0.14.0
- old (ruby-dnn 0.13.4)
+ new (ruby-dnn 0.14.0)
@@ -1,79 +1,73 @@
module DNN
- module Activations
+ module Layers
- class Sigmoid < Layers::Layer
+ class Sigmoid < Layer
def forward(x)
@y = 1 / (1 + Xumo::NMath.exp(-x))
end
def backward(dy)
dy * (1 - @y) * @y
end
end
-
- class Tanh < Layers::Layer
+ class Tanh < Layer
def forward(x)
@y = Xumo::NMath.tanh(x)
end
def backward(dy)
- dy * (1 - @y ** 2)
+ dy * (1 - @y**2)
end
end
-
- class Softsign < Layers::Layer
+ class Softsign < Layer
def forward(x)
@x = x
x / (1 + x.abs)
end
def backward(dy)
- dy * (1 / (1 + @x.abs) ** 2)
+ dy * (1 / (1 + @x.abs)**2)
end
end
-
- class Softplus < Layers::Layer
+ class Softplus < Layer
def forward(x)
@x = x
Xumo::NMath.log(1 + Xumo::NMath.exp(x))
end
def backward(dy)
dy * (1 / (1 + Xumo::NMath.exp(-@x)))
end
end
-
- class Swish < Layers::Layer
+ class Swish < Layer
def forward(x)
@x = x
@y = x * (1 / (1 + Xumo::NMath.exp(-x)))
end
def backward(dy)
dy * (@y + (1 / (1 + Xumo::NMath.exp(-@x))) * (1 - @y))
end
end
-
- class ReLU < Layers::Layer
+ class ReLU < Layer
def forward(x)
@x = x
Xumo::SFloat.maximum(0, x)
end
def backward(dy)
dy * Xumo::SFloat.cast(@x > 0)
end
end
-
- class LeakyReLU < Layers::Layer
+ class LeakyReLU < Layer
attr_reader :alpha
# @param [Float] alpha The slope when the output value is negative.
def initialize(alpha = 0.3)
super()
@@ -100,11 +94,10 @@
def load_hash(hash)
initialize(hash[:alpha])
end
end
-
- class ELU < Layers::Layer
+ class ELU < Layer
attr_reader :alpha
# @param [Float] alpha The slope when the output value is negative.
def initialize(alpha = 1.0)
super()
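
Taken together, the hunks shown here amount to a namespace move plus whitespace cleanup: every activation that lived under DNN::Activations in 0.13.4 now sits directly inside DNN::Layers and inherits from Layer without the Layers:: prefix, while the forward/backward bodies visible in the diff are untouched. A minimal sketch of the constant paths implied by the two sides of the diff (it assumes the base Layer constructor takes no arguments, which this excerpt does not show):

# ruby-dnn 0.13.4 -- activations lived in their own module (the "-" side)
relu  = DNN::Activations::ReLU.new
leaky = DNN::Activations::LeakyReLU.new(0.3)

# ruby-dnn 0.14.0 -- the same classes are ordinary layers (the "+" side)
relu  = DNN::Layers::ReLU.new
leaky = DNN::Layers::LeakyReLU.new(0.3)   # alpha still defaults to 0.3

Because the math is unchanged, the gradients keep their meaning; Sigmoid#backward, for example, returns dy * (1 - y) * y, the usual derivative of y = 1 / (1 + e^-x). A standalone sanity check of that identity against a finite difference (a sketch only: it uses numo-narray directly in place of the gem's Xumo alias, and none of the code below is part of ruby-dnn):

require "numo/narray"

# Re-implementation of the Sigmoid forward pass from the diff, using Numo directly.
sigmoid = ->(x) { 1 / (1 + Numo::NMath.exp(-x)) }

x  = Numo::DFloat[-2.0, -0.5, 0.0, 0.5, 2.0]
y  = sigmoid.call(x)
dy = Numo::DFloat.ones(5)

analytic = dy * (1 - y) * y                       # same expression as Sigmoid#backward
eps      = 1e-4
numeric  = (sigmoid.call(x + eps) - sigmoid.call(x - eps)) / (2 * eps)

max_err = (analytic - numeric).abs.max
p max_err                                         # close to zero: the analytic form matches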