lib/dnn/core/activations.rb in ruby-dnn-0.5.2 vs lib/dnn/core/activations.rb in ruby-dnn-0.5.3
- line from the old version (ruby-dnn-0.5.2)
+ line from the new version (ruby-dnn-0.5.3)
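Net effect of this diff: the module-level alias `OutputLayer = Layers::OutputLayer` is dropped, and the four output-layer classes in this file now name their superclass with the fully qualified constant `Layers::OutputLayer`. No behavior changes, only constant resolution. A minimal sketch of the pattern (class names below are illustrative, not from the gem):

    module Layers
      class OutputLayer; end
    end

    module Activations
      # 0.5.2 style: a module-level alias lets subclasses use the bare constant.
      # OutputLayer = Layers::OutputLayer
      # class MyLoss < OutputLayer; end

      # 0.5.3 style: the alias is gone, so the superclass is spelled out.
      class MyLoss < Layers::OutputLayer; end
    end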
@@ -1,9 +1,8 @@
module DNN
  module Activations
    Layer = Layers::Layer
-   OutputLayer = Layers::OutputLayer

    class Sigmoid < Layer
      def forward(x)
        @out = 1 / (1 + NMath.exp(-x))
      end
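For context, `Sigmoid#forward` computes the logistic function 1 / (1 + e^(-x)) elementwise and caches the result in `@out`. A standalone check of the same arithmetic, assuming plain Numo::NArray in place of the gem's backend setup:

    require "numo/narray"

    x = Numo::DFloat[-2.0, 0.0, 2.0]
    out = 1 / (1 + Numo::NMath.exp(-x))
    # => Numo::DFloat[0.1192..., 0.5, 0.8808...], every entry in (0, 1)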
@@ -73,11 +72,11 @@
        {name: self.class.name, alpha: alpha}
      end
    end

-   class IdentityMSE < OutputLayer
+   class IdentityMSE < Layers::OutputLayer
      def forward(x)
        @out = x
      end

      def backward(y)
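`IdentityMSE` pairs an identity forward pass with the mean squared error whose return line opens the next hunk: `0.5 * ((@out - y)**2).sum / batch_size + ridge` (judging by the name, `ridge` is a weight-decay penalty accumulated elsewhere in the layer). The core arithmetic, ridge term omitted, sketched with Numo:

    require "numo/narray"

    out = Numo::DFloat[[1.0, 2.0], [3.0, 4.0]]  # network output, batch of 2
    y   = Numo::DFloat[[1.5, 2.0], [2.0, 4.0]]  # targets
    0.5 * ((out - y)**2).sum / y.shape[0]
    # => 0.3125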
@@ -89,11 +88,11 @@
        0.5 * ((@out - y)**2).sum / batch_size + ridge
      end
    end

-   class IdentityMAE < OutputLayer
+   class IdentityMAE < Layers::OutputLayer
      def forward(x)
        @out = x
      end

      def backward(y)
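`IdentityMAE` is the same identity layer paired with mean absolute error; its loss line opens the next hunk. The same sketch with the MAE formula:

    require "numo/narray"

    out = Numo::DFloat[[1.0, 2.0], [3.0, 4.0]]
    y   = Numo::DFloat[[1.5, 2.0], [2.0, 4.0]]
    (out - y).abs.sum / y.shape[0]
    # => 0.75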
@@ -108,11 +107,11 @@
        (@out - y).abs.sum / batch_size + ridge
      end
    end

-   class SoftmaxWithLoss < OutputLayer
+   class SoftmaxWithLoss < Layers::OutputLayer
      def forward(x)
        @out = NMath.exp(x) / NMath.exp(x).sum(1).reshape(x.shape[0], 1)
      end

      def backward(y)
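`SoftmaxWithLoss#forward` exponentiates the input and normalizes each row by its sum of exponentials, so every row of `@out` becomes a probability distribution; the cross-entropy line opening the next hunk adds `1e-7` inside the log to avoid log(0). Note that as written it evaluates `NMath.exp(x)` twice and subtracts no per-row maximum, so large inputs can overflow. A standalone check of the same arithmetic:

    require "numo/narray"

    x = Numo::DFloat[[1.0, 2.0, 3.0], [0.0, 0.0, 0.0]]
    e = Numo::NMath.exp(x)
    out = e / e.sum(1).reshape(x.shape[0], 1)
    out.sum(1)  # => Numo::DFloat[1.0, 1.0], each row sums to 1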
@@ -124,10 +123,10 @@
        -(y * NMath.log(@out + 1e-7)).sum / batch_size + ridge
      end
    end

-   class SigmoidWithLoss < OutputLayer
+   class SigmoidWithLoss < Layers::OutputLayer
      include Xumo

      def initialize
        @sigmoid = Sigmoid.new
      end
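`SigmoidWithLoss` gets the same superclass qualification; it composes a `Sigmoid` instance (presumably delegating the forward pass to it) and includes `Xumo`, the gem's numeric-backend namespace, so constants such as `NMath` resolve without a prefix. Its loss body lies outside this hunk; for reference only, the standard sigmoid plus binary cross-entropy pairing looks like this (illustrative, not copied from the gem):

    require "numo/narray"

    x = Numo::DFloat[[0.5, -1.0]]
    y = Numo::DFloat[[1.0, 0.0]]
    out = 1 / (1 + Numo::NMath.exp(-x))
    -(y * Numo::NMath.log(out + 1e-7) +
      (1 - y) * Numo::NMath.log(1 - out + 1e-7)).sum / y.shape[0]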