lib/dnn/core/layers.rb: ruby-dnn-0.13.1 vs ruby-dnn-0.13.2
- old
+ new
@@ -8,12 +8,22 @@
def self.call(x, *args)
self.new(*args).(x)
end
+ def self.from_hash(hash)
+ return nil unless hash
+ layer_class = DNN.const_get(hash[:class])
+ layer = layer_class.allocate
+ raise DNN_Error.new("#{layer.class} is not an instance of #{self} class.") unless layer.is_a?(self)
+ layer.load_hash(hash)
+ layer
+ end
+
def initialize
@built = false
+ @name = nil
end
# Forward propagation and create a link.
# @param [Array] input Array of the form [x_input_data, prev_link].
def call(input)
@@ -55,14 +65,18 @@
@input_shape
end
# Layer to a hash.
def to_hash(merge_hash = nil)
- hash = { class: self.class.name }
+ hash = { class: self.class.name, name: @name }
hash.merge!(merge_hash) if merge_hash
hash
end
+
+ def load_hash(hash)
+ initialize
+ end
end
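The hunk above replaces the scattered per-class self.from_hash constructors with a single Layer.from_hash that resolves the class named in the hash, allocates it, and hands the actual restoration to the new load_hash instance method. A minimal sketch of the intended round trip; it assumes the classes in this file sit under the DNN::Layers namespace and that the gem loads via require "dnn" (neither detail is visible in this diff), and it uses Flatten, which appears further down:
require "dnn"
# Flatten has no constructor arguments, so the base to_hash / load_hash pair
# is enough to serialize and restore it.
flatten = DNN::Layers::Flatten.new
hash = flatten.to_hash                  # e.g. { class: "DNN::Layers::Flatten", name: nil }
# Layer.from_hash looks up hash[:class] via DNN.const_get, allocates the
# class without calling new, verifies it is a kind of the receiver, and
# then lets the instance rebuild itself through load_hash.
restored = DNN::Layers::Layer.from_hash(hash)
restored.class                          # => DNN::Layers::Flatten
# A nil hash (an optional sub-object that was never saved) simply returns nil.
DNN::Layers::Layer.from_hash(nil)       # => nil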
# This class is a superclass of all classes with learning parameters.
class HasParamLayer < Layer
@@ -85,14 +99,10 @@
def self.call(input)
shape = input.is_a?(Array) ? input[0].shape : input.shape
self.new(shape[1..-1]).(input)
end
- def self.from_hash(hash)
- self.new(hash[:input_shape])
- end
-
# @param [Array] input_dim_or_shape Setting the shape or dimension of the input data.
def initialize(input_dim_or_shape)
super()
@input_shape = input_dim_or_shape.is_a?(Array) ? input_dim_or_shape : [input_dim_or_shape]
end
@@ -125,10 +135,14 @@
end
def to_hash
super(input_shape: @input_shape)
end
+
+ def load_hash(hash)
+ initialize(hash[:input_shape])
+ end
end
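InputLayer drops its own from_hash and now inherits Layer.from_hash; it only supplies load_hash, which re-runs initialize with the stored shape. A short sketch under the same namespace assumption as above:
# InputLayer.new accepts either a single dimension or a shape array.
input = DNN::Layers::InputLayer.new([32, 32, 3])
hash  = input.to_hash                   # includes input_shape: [32, 32, 3]
# from_hash is inherited from Layer; load_hash re-invokes initialize,
# so the restored layer serializes back to the same shape.
restored = DNN::Layers::Layer.from_hash(hash)
restored.to_hash[:input_shape]          # => [32, 32, 3]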
# It is a superclass of all connection layers.
class Connection < HasParamLayer
@@ -194,19 +208,10 @@
class Dense < Connection
attr_reader :num_nodes
- def self.from_hash(hash)
- self.new(hash[:num_nodes],
- weight_initializer: Utils.hash_to_obj(hash[:weight_initializer]),
- bias_initializer: Utils.hash_to_obj(hash[:bias_initializer]),
- weight_regularizer: Utils.hash_to_obj(hash[:weight_regularizer]),
- bias_regularizer: Utils.hash_to_obj(hash[:bias_regularizer]),
- use_bias: hash[:use_bias])
- end
-
# @param [Integer] num_nodes Number of nodes.
def initialize(num_nodes,
weight_initializer: Initializers::RandomNormal.new,
bias_initializer: Initializers::Zeros.new,
weight_regularizer: nil,
@@ -248,10 +253,19 @@
end
def to_hash
super(num_nodes: @num_nodes)
end
+
+ def load_hash(hash)
+ initialize(hash[:num_nodes],
+ weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
+ bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
+ weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
+ bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
+ use_bias: hash[:use_bias])
+ end
end
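Dense#load_hash rebuilds the nested initializer and regularizer objects through their own from_hash class methods instead of the old Utils.hash_to_obj helper. The sketch below is hedged: it assumes Connection#to_hash (not shown in this hunk) serializes those sub-objects under the keys that load_hash reads back:
dense = DNN::Layers::Dense.new(64, weight_initializer: DNN::Initializers::RandomNormal.new)
hash  = dense.to_hash                   # num_nodes plus the serialized sub-objects (assumed)
restored = DNN::Layers::Layer.from_hash(hash)
restored.class                          # => DNN::Layers::Dense
restored.num_nodes                      # => 64 (attr_reader declared in the hunk above)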
class Flatten < Layer
def forward(x)
@@ -267,14 +281,10 @@
end
end
class Reshape < Layer
- def self.from_hash(hash)
- self.new(hash[:output_shape])
- end
-
def initialize(output_shape)
super()
@output_shape = output_shape
end
@@ -291,21 +301,21 @@
end
def to_hash
super(output_shape: @output_shape)
end
+
+ def load_hash(hash)
+ initialize(hash[:output_shape])
+ end
end
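Reshape follows the same pattern, and it is a convenient place to see the type check added in Layer.from_hash: calling from_hash on a concrete subclass rejects a hash that describes a different class.
hash = DNN::Layers::Reshape.new([14, 14, 8]).to_hash
DNN::Layers::Reshape.from_hash(hash).class   # => DNN::Layers::Reshape
DNN::Layers::Dense.from_hash(hash)           # raises DNN_Error, since the restored layer is not a Dense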
class Dropout < Layer
attr_accessor :dropout_ratio
attr_reader :use_scale
- def self.from_hash(hash)
- self.new(hash[:dropout_ratio], seed: hash[:seed], use_scale: hash[:use_scale])
- end
-
# @param [Float] dropout_ratio Nodes dropout ratio.
# @param [Integer] seed Seed of random number used for masking.
# @param [Boolean] use_scale Set to true to scale the output according to the dropout ratio.
def initialize(dropout_ratio = 0.5, seed: rand(1 << 31), use_scale: true)
super()
@@ -332,9 +342,13 @@
dy
end
def to_hash
super(dropout_ratio: @dropout_ratio, seed: @seed, use_scale: @use_scale)
+ end
+
+ def load_hash(hash)
+ initialize(hash[:dropout_ratio], seed: hash[:seed], use_scale: hash[:use_scale])
end
end
end
end
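Finally, Dropout shows that keyword configuration, including the RNG seed, survives the round trip; the dropout_ratio and use_scale readers are declared at the top of the class.
dropout = DNN::Layers::Dropout.new(0.3, seed: 42, use_scale: false)
hash    = dropout.to_hash               # dropout_ratio: 0.3, seed: 42, use_scale: false (plus class and name)
restored = DNN::Layers::Layer.from_hash(hash)
restored.dropout_ratio                  # => 0.3
restored.use_scale                      # => false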