lib/dnn/core/layers.rb in ruby-dnn-0.8.0 vs lib/dnn/core/layers.rb in ruby-dnn-0.8.1
- old
+ new
@@ -18,15 +18,19 @@
@built
end
# Forward propagation.
# Classes that inherit from this class must implement this method.
- # def forward() end
+ def forward
+ raise NotImplementedError.new("Class '#{self.class.name}' has not implemented method 'forward'")
+ end
# Backward propagation.
# Classes that inherit from this class must implement this method.
- # def backward() end
+ def backward
+ raise NotImplementedError.new("Class '#{self.class.name}' has not implemented method 'backward'")
+ end
# Get the shape of the layer.
def shape
prev_layer.shape
end
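In 0.8.1 the abstract forward/backward stubs raise NotImplementedError instead of being commented out, so any user-defined layer must override both. A minimal sketch of such a subclass; the class name, the DNN::Layers namespace, and the (x)/(dout) argument convention are assumptions for illustration, not shown in this diff:

require "dnn"

# A pass-through layer used only to illustrate the new contract.
class IdentityLayer < DNN::Layers::Layer
  # Under 0.8.1 this override is mandatory; the base class now raises
  # NotImplementedError instead of silently doing nothing.
  def forward(x)
    x
  end

  # Same contract for the backward pass: return the gradient unchanged.
  def backward(dout)
    dout
  end
end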
@@ -71,11 +75,13 @@
private
# Initialize of the parameters.
# Classes that inherit from this class must implement this method.
- def init_params() end
+ def init_params
+ raise NotImplementedError.new("Class '#{self.class.name}' has not implemented method 'init_params'")
+ end
end
class InputLayer < Layer
attr_reader :shape
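With init_params getting the same fail-fast stub, all three hooks (forward, backward, init_params) now raise rather than silently return when a subclass forgets to define them. A quick check of that behavior, assuming the base class can be instantiated directly and lives under DNN::Layers:

require "dnn"

layer = DNN::Layers::Layer.new
begin
  layer.forward              # the 0.8.1 stub takes no arguments and raises immediately
rescue NotImplementedError => e
  puts e.message             # e.g. "Class 'DNN::Layers::Layer' has not implemented method 'forward'"
end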
@@ -117,12 +123,12 @@
super()
@weight_initializer = (weight_initializer || RandomNormal.new)
@bias_initializer = (bias_initializer || Zeros.new)
@l1_lambda = l1_lambda
@l2_lambda = l2_lambda
- @params[:weight] = @weight = LearningParam.new(self)
- @params[:bias] = @bias = LearningParam.new(self)
+ @params[:weight] = @weight = LearningParam.new
+ @params[:bias] = @bias = LearningParam.new
end
def lasso
if @l1_lambda > 0
@l1_lambda * @weight.data.abs.sum
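The Connection constructor is otherwise unchanged: RandomNormal weights, Zeros bias, and regularization off by default, with lasso contributing l1_lambda * |W|.sum to the loss when enabled. A configuration sketch for a subclass such as Dense; the positional node-count argument and the DNN::Layers / DNN::Initializers namespaces are assumptions not shown in this diff:

require "dnn"

dense = DNN::Layers::Dense.new(
  64,                                                 # number of output nodes (assumed positional arg)
  weight_initializer: DNN::Initializers::RandomNormal.new,
  bias_initializer: DNN::Initializers::Zeros.new,
  l1_lambda: 0.0005,                                  # lasso adds l1_lambda * |W|.sum to the loss
  l2_lambda: 0.0                                      # L2 penalty disabled
)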
@@ -157,12 +163,12 @@
end
private
def init_params
- @weight_initializer.init_param(@weight)
- @bias_initializer.init_param(@bias)
+ @weight_initializer.init_param(self, @weight)
+ @bias_initializer.init_param(self, @bias)
end
end
class Dense < Connection
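On the initializer side, init_param now receives the owning layer in addition to the parameter, so custom initializers written against 0.8.0 need an extra argument. A minimal sketch of an initializer updated for the new signature; the DNN::Initializers::Initializer base class and the assumption that param.data is already allocated by the layer's init_params are not shown in this diff:

require "dnn"

# Fills an already-allocated parameter with a constant; purely illustrative.
class ConstInitializer < DNN::Initializers::Initializer
  def initialize(value = 0.1)
    @value = value
  end

  # 0.8.0 signature was init_param(param); 0.8.1 passes the layer explicitly,
  # matching the init_param(self, @weight) call sites above.
  def init_param(layer, param)
    param.data = param.data.fill(@value)
  end
end

# Usage then mirrors the defaults in Connection#initialize, e.g.
# Dense.new(64, weight_initializer: ConstInitializer.new(0.01))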
@@ -322,11 +328,11 @@
end
def initialize(momentum: 0.9)
super()
@momentum = momentum
- @params[:gamma] = @gamma = LearningParam.new(self)
- @params[:beta] = @beta = LearningParam.new(self)
+ @params[:gamma] = @gamma = LearningParam.new
+ @params[:beta] = @beta = LearningParam.new
@params[:running_mean] = nil
@params[:running_var] = nil
end
def build(model)
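BatchNormalization gets the same treatment as Connection: gamma and beta become plain LearningParam containers with no layer back-reference, while running_mean and running_var are registered as nil and only take shape once build(model) runs. A short construction sketch; DNN::Model and the << API are assumptions based on the gem's examples, not this diff:

require "dnn"

model = DNN::Model.new
model << DNN::Layers::InputLayer.new(10)
model << DNN::Layers::Dense.new(32)
# momentum weights the running_mean / running_var moving averages;
# 0.9 is the default shown above.
model << DNN::Layers::BatchNormalization.new(momentum: 0.99)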