lib/dnn/core/activations.rb in ruby-dnn-0.2.2 vs lib/dnn/core/activations.rb in ruby-dnn-0.3.0
- old
+ new
@@ -1,29 +1,23 @@
module DNN
  module Activations
    Layer = Layers::Layer
    OutputLayer = Layers::OutputLayer
-
-   module SigmoidFunction
+   class Sigmoid < Layer
      def forward(x)
        @out = 1.0 / (1 + NMath.exp(-x))
      end
-   end
-
-
-   class Sigmoid < Layer
-     include SigmoidFunction
      def backward(dout)
        dout * (1.0 - @out) * @out
      end
    end
    class Tanh < Layer
-     include Numo
+     include Xumo
      def forward(x)
        @x = x
        NMath.tanh(x)
      end
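In 0.3.0 the SigmoidFunction mixin is removed and its forward is defined directly on the Sigmoid layer. A minimal standalone sketch of the same forward/backward math, assuming NMath and DFloat come from numo-narray as the 0.2.2 `include Numo` lines imply (illustration only, not gem code):

require "numo/narray"

x    = Numo::DFloat[-2.0, 0.0, 2.0]
out  = 1.0 / (1 + Numo::NMath.exp(-x))   # forward: sigmoid(x)
dout = Numo::DFloat.ones(3)              # upstream gradient
dx   = dout * (1.0 - out) * out          # backward: sigmoid'(x) = out * (1 - out)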
@@ -48,11 +42,11 @@
      end
    end
    class LeakyReLU < Layer
-     include Numo
+     include Xumo
      def initialize(alpha = 0.3)
        @alpha = alpha
      end
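The repeated switch from `include Numo` to `include Xumo` suggests 0.3.0 refers to its numeric backend through an alias so the backend can be swapped in one place. A hypothetical sketch of that idea (the gem's actual Xumo definition may differ, and Example is a made-up class, not one of the gem's layers):

require "numo/narray"

module DNN
  Xumo = Numo  # assumed alias: a Numo-compatible backend could be assigned here instead

  class Example
    include Xumo  # NMath below now resolves through the alias (Numo::NMath)
    def forward(x)
      NMath.tanh(x)
    end
  end
end

p DNN::Example.new.forward(Numo::DFloat[0.0, 1.0])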
@@ -110,11 +104,18 @@
      end
    end
    class SigmoidWithLoss < OutputLayer
-     include Numo
-     include SigmoidFunction
+     include Xumo
+
+     def initialize
+       @sigmoid = Sigmoid.new
+     end
+
+     def forward(x)
+       @out = @sigmoid.forward(x)
+     end
      def backward(y)
        @out - y
      end
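SigmoidWithLoss now composes a Sigmoid instance instead of mixing in SigmoidFunction, while its backward stays `@out - y`. That compact gradient comes from pairing sigmoid with binary cross-entropy: with o = sigmoid(x) and L = -(y*log(o) + (1-y)*log(1-o)), dL/do = (o-y)/(o(1-o)) and do/dx = o(1-o), so dL/dx = o - y. A standalone numerical check of that identity (assumes numo-narray, not gem code):

require "numo/narray"

x = Numo::DFloat[-1.0, 0.5, 2.0]
y = Numo::DFloat[0.0, 1.0, 1.0]
o = 1.0 / (1 + Numo::NMath.exp(-x))

analytic = o - y   # the gradient SigmoidWithLoss#backward returns

# finite-difference gradient of the element-wise cross-entropy loss
loss = ->(xx) {
  oo = 1.0 / (1 + Numo::NMath.exp(-xx))
  -(y * Numo::NMath.log(oo) + (1 - y) * Numo::NMath.log(1 - oo))
}
eps = 1e-6
numeric = (loss.(x + eps) - loss.(x - eps)) / (2 * eps)

p((analytic - numeric).abs.max)   # ~1e-9, confirming dL/dx = o - y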