lib/dnn/core/activations.rb: ruby-dnn-0.5.9 vs ruby-dnn-0.5.10
- old (0.5.9)
+ new (0.5.10)
@@ -2,29 +2,27 @@
module Activations
Layer = Layers::Layer
class Sigmoid < Layer
def forward(x)
- @out = 1 / (1 + NMath.exp(-x))
+ @out = 1 / (1 + Xumo::NMath.exp(-x))
end
def backward(dout)
dout * (1 - @out) * @out
end
end
class Tanh < Layer
- include Xumo
-
def forward(x)
@x = x
- NMath.tanh(x)
+ Xumo::NMath.tanh(x)
end
def backward(dout)
- dout * (1.0 / NMath.cosh(@x)**2)
+ dout * (1.0 / Xumo::NMath.cosh(@x)**2)
end
end
class ReLU < Layer
@@ -41,12 +39,10 @@
end
end
class LeakyReLU < Layer
- include Xumo
-
attr_reader :alpha
def initialize(alpha = 0.3)
@alpha = alpha
end
@@ -55,11 +51,11 @@
self.new(hash[:alpha])
end
def forward(x)
@x = x.clone
- a = SFloat.ones(x.shape)
+ a = Xumo::SFloat.ones(x.shape)
a[x <= 0] = @alpha
x * a
end
def backward(dout)
@@ -142,27 +138,25 @@
end
class SoftmaxWithLoss < Layers::OutputLayer
def forward(x)
- @out = NMath.exp(x) / NMath.exp(x).sum(1).reshape(x.shape[0], 1)
+ @out = Xumo::NMath.exp(x) / Xumo::NMath.exp(x).sum(1).reshape(x.shape[0], 1)
end
def backward(y)
@out - y
end
def loss(y)
batch_size = y.shape[0]
- -(y * NMath.log(@out + 1e-7)).sum / batch_size + ridge
+ -(y * Xumo::NMath.log(@out + 1e-7)).sum / batch_size + ridge
end
end
class SigmoidWithLoss < Layers::OutputLayer
- include Xumo
-
def initialize
@sigmoid = Sigmoid.new
end
def forward(x)
@@ -173,10 +167,10 @@
@out - y
end
def loss(y)
batch_size = y.shape[0]
- -(y * NMath.log(@out + 1e-7) + (1 - y) * NMath.log(1 - @out + 1e-7)).sum / batch_size + ridge
+ -(y * Xumo::NMath.log(@out + 1e-7) + (1 - y) * Xumo::NMath.log(1 - @out + 1e-7)).sum / batch_size + ridge
end
end
end
end
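
The only substantive change in this diff is dropping `include Xumo` from the layer classes and fully qualifying every constant as `Xumo::NMath` or `Xumo::SFloat`, so the numeric backend is referenced explicitly instead of being mixed into each class. Below is a minimal standalone sketch of that pattern. It assumes `Xumo` is ruby-dnn's alias for its numeric backend (Numo here); the alias name comes from the diff, but the `Xumo = Numo` assignment and the trimmed-down `Sigmoid` class are illustrative only, not the gem's actual wiring.

```ruby
# Minimal sketch of the 0.5.10 namespacing style, assuming Xumo aliases Numo.
require "numo/narray"

Xumo = Numo unless defined?(Xumo) # assumption for this sketch only

class Sigmoid
  # Fully qualified Xumo::NMath, as in 0.5.10; no `include Xumo` needed.
  def forward(x)
    @out = 1 / (1 + Xumo::NMath.exp(-x))
  end

  def backward(dout)
    dout * (1 - @out) * @out
  end
end

x = Xumo::SFloat[[0.0, 2.0], [-2.0, 0.5]]
layer = Sigmoid.new
p layer.forward(x)                           # elementwise sigmoid
p layer.backward(Xumo::SFloat.ones(*x.shape)) # gradient for an all-ones upstream signal
```

Compared with `include Xumo`, the explicit `Xumo::` paths keep the layer classes' ancestor chain free of an extra module and make the backend dependency visible at each call site, at the cost of slightly longer expressions.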