lib/dnn/core/layers.rb in ruby-dnn-0.13.4 vs lib/dnn/core/layers.rb in ruby-dnn-0.14.0
- lines only in ruby-dnn-0.13.4 (old)
+ lines only in ruby-dnn-0.14.0 (new)
@@ -5,35 +5,38 @@
class Layer
attr_accessor :name
attr_reader :input_shape
def self.call(x, *args)
- self.new(*args).(x)
+ new(*args).(x)
end
def self.from_hash(hash)
return nil unless hash
layer_class = DNN.const_get(hash[:class])
layer = layer_class.allocate
- raise DNN_Error.new("#{layer.class} is not an instance of #{self} class.") unless layer.is_a?(self)
+ raise DNN_Error, "#{layer.class} is not an instance of #{self} class." unless layer.is_a?(self)
layer.load_hash(hash)
+ layer.name = hash[:name]&.to_sym
layer
end
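# Example (not part of the diff): a minimal round-trip sketch of what the new
# `layer.name = hash[:name]&.to_sym` line restores. It assumes Layer#to_hash
# also stores @name under :name, which is not shown in this diff.
require "dnn"

dense = DNN::Layers::Dense.new(16)
dense.name = :hidden1
restored = DNN::Layers::Dense.from_hash(dense.to_hash)
restored.name  # => :hidden1, under the assumption above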
def initialize
@built = false
@name = nil
end
# Forward propagation and create a link.
- # @param [Array] input Array of the form [x_input_data, prev_link].
- def call(input)
- x, prev_link = *input
+ # @param [Tensor] input_tensor Input tensor.
+ # @return [Tensor] Output tensor.
+ def call(input_tensor)
+ x = input_tensor.data
+ prev_link = input_tensor.link
build(x.shape[1..-1]) unless built?
y = forward(x)
link = Link.new(prev_link, self)
- [y, link]
+ Tensor.new(y, link)
end
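# Example (not part of the diff): a minimal sketch of the new Tensor-based call
# flow. Assumptions: Tensor lives at DNN::Tensor, its constructor accepts a nil
# link, and Numo::SFloat stands in for the gem's Xumo alias.
require "dnn"

x = Numo::SFloat.new(4, 3).rand   # batch of 4 samples, 3 features each
input = DNN::Tensor.new(x, nil)   # wrap the raw data in a Tensor
dense = DNN::Layers::Dense.new(2)
out = dense.(input)               # builds the layer on first call, then forwards
out.data.shape                    # => [4, 2]
out.link                          # Link recorded for backpropagation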
# Build the layer.
# @param [Array] input_shape Setting the shape of the input data.
def build(input_shape)
@@ -47,17 +50,17 @@
end
# Forward propagation.
# @param [Numo::SFloat] x Input data.
def forward(x)
- raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'forward'")
+ raise NotImplementedError, "Class '#{self.class.name}' has implement method 'forward'"
end
# Backward propagation.
# @param [Numo::SFloat] dy Differential value of output data.
def backward(dy)
- raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'backward'")
+ raise NotImplementedError, "Class '#{self.class.name}' has implement method 'backward'"
end
# Please reimplement this method as needed.
# The default implementation return input_shape.
# @return [Array] Return the shape of the output data.
@@ -75,11 +78,10 @@
def load_hash(hash)
initialize
end
end
-
# This class is a superclass of all classes with learning parameters.
class HasParamLayer < Layer
# @return [Boolean] Setting false prevents learning of parameters.
attr_accessor :trainable
@@ -88,64 +90,72 @@
@trainable = true
end
# @return [Array] The parameters of the layer.
def get_params
- raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'get_params'")
+ raise NotImplementedError, "Class '#{self.class.name}' has implement method 'get_params'"
end
end
-
class InputLayer < Layer
- def self.call(input)
- shape = input.is_a?(Array) ? input[0].shape : input.shape
- self.new(shape[1..-1]).(input)
+ def self.call(input_tensor)
+ new(input_tensor.data.shape[1..-1]).(input_tensor)
end
# @param [Array] input_dim_or_shape Setting the shape or dimension of the input data.
def initialize(input_dim_or_shape)
super()
@input_shape = input_dim_or_shape.is_a?(Array) ? input_dim_or_shape : [input_dim_or_shape]
end
- def call(input)
+ def call(input_tensor)
build unless built?
- if input.is_a?(Array)
- x, prev_link = *input
- else
- x = input
- prev_link = nil
- end
- link = prev_link ? Link.new(prev_link, self) : Link.new(nil, self)
- [forward(x), link]
+ Tensor.new(forward(input_tensor.data), Link.new(input_tensor&.link, self))
end
def build
@built = true
end
def forward(x)
unless x.shape[1..-1] == @input_shape
- raise DNN_ShapeError.new("The shape of x does not match the input shape. input shape is #{@input_shape}, but x shape is #{x.shape[1..-1]}.")
+ raise DNN_ShapeError, "The shape of x does not match the input shape. input shape is #{@input_shape}, but x shape is #{x.shape[1..-1]}."
end
x
end
def backward(dy)
dy
end
+ def to_proc
+ method(:call).to_proc
+ end
+
+ def >>(layer)
+ if RUBY_VERSION < "2.6.0"
+ raise DNN_Error, "Function composition is not supported before ruby version 2.6.0."
+ end
+ to_proc >> layer
+ end
+
+ def <<(layer)
+ if RUBY_VERSION < "2.6.0"
+ raise DNN_Error, "Function composition is not supported before ruby version 2.6.0."
+ end
+ to_proc << layer
+ end
+
def to_hash
super(input_shape: @input_shape)
end
def load_hash(hash)
initialize(hash[:input_shape])
end
end
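# Example (not part of the diff): a minimal sketch of the new >> composition
# operator (requires Ruby >= 2.6). Assumes DNN::Tensor accepts a nil link and
# Numo::SFloat stands in for the gem's Xumo alias; each >> returns a plain
# Proc, so the pipeline is invoked like any other callable.
require "dnn"

pipeline = DNN::Layers::InputLayer.new(64) >>
           DNN::Layers::Dense.new(32) >>
           DNN::Layers::Dense.new(10)

x = Numo::SFloat.new(8, 64).rand              # batch of 8 samples, 64 features
out = pipeline.call(DNN::Tensor.new(x, nil))
out.data.shape                                # => [8, 10]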
-
# It is a superclass of all connection layers.
class Connection < HasParamLayer
attr_reader :weight
attr_reader :bias
attr_reader :weight_initializer
@@ -204,11 +214,10 @@
@bias_regularizer.param = @bias if @bias_regularizer
end
end
end
-
class Dense < Connection
attr_reader :num_nodes
# @param [Integer] num_nodes Number of nodes.
def initialize(num_nodes,
@@ -222,11 +231,11 @@
@num_nodes = num_nodes
end
def build(input_shape)
unless input_shape.length == 1
- raise DNN_ShapeError.new("Input shape is #{input_shape}. But input shape must be 1 dimensional.")
+ raise DNN_ShapeError, "Input shape is #{input_shape}. But input shape must be 1 dimensional."
end
super
num_prev_nodes = input_shape[0]
@weight.data = Xumo::SFloat.new(num_prev_nodes, @num_nodes)
@bias.data = Xumo::SFloat.new(@num_nodes) if @bias
@@ -264,11 +273,10 @@
bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
use_bias: hash[:use_bias])
end
end
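# Example (not part of the diff): a sketch of what Dense#build allocates,
# assuming the default initializers inherited from Connection are in place.
require "dnn"

dense = DNN::Layers::Dense.new(5)
dense.build([10])            # input shape must be 1-dimensional
dense.weight.data.shape      # => [10, 5]  (num_prev_nodes x num_nodes)
dense.bias.data.shape        # => [5]
dense.output_shape           # => [5]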
-
class Flatten < Layer
def forward(x)
x.reshape(x.shape[0], *output_shape)
end
@@ -279,12 +287,13 @@
def output_shape
[@input_shape.reduce(:*)]
end
end
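# Example (not part of the diff): Flatten collapses everything but the batch
# dimension, so output_shape is the product of the per-sample dimensions.
# Assumes DNN::Tensor accepts a nil link.
require "dnn"

x = Numo::SFloat.new(8, 4, 4, 3).rand
out = DNN::Layers::Flatten.(DNN::Tensor.new(x, nil))
out.data.shape   # => [8, 48]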
-
class Reshape < Layer
+ attr_reader :output_shape
+
def initialize(output_shape)
super()
@output_shape = output_shape
end
@@ -294,22 +303,17 @@
def backward(dy)
dy.reshape(dy.shape[0], *@input_shape)
end
- def output_shape
- @output_shape
- end
-
def to_hash
super(output_shape: @output_shape)
end
def load_hash(hash)
initialize(hash[:output_shape])
end
end
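# Example (not part of the diff): a sketch of Reshape with the new attr_reader,
# assuming forward reshapes to @output_shape (the counterpart of the backward
# shown above) and that DNN::Tensor accepts a nil link.
require "dnn"

reshape = DNN::Layers::Reshape.new([4, 4, 3])
x = Numo::SFloat.new(8, 48).rand
out = reshape.(DNN::Tensor.new(x, nil))
out.data.shape         # => [8, 4, 4, 3]
reshape.output_shape   # => [4, 4, 3]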
-
class Dropout < Layer
attr_accessor :dropout_ratio
attr_reader :use_scale