lib/dnn/core/layers.rb in ruby-dnn-0.14.0 vs lib/dnn/core/layers.rb in ruby-dnn-0.14.1

- old
+ new

@@ -95,23 +95,31 @@
       raise NotImplementedError, "Class '#{self.class.name}' has implement method 'get_params'"
     end
   end

   class InputLayer < Layer
-    def self.call(input_tensor)
-      new(input_tensor.data.shape[1..-1]).(input_tensor)
+    def self.call(input)
+      shape = input.is_a?(Tensor) ? input.data.shape : input.shape
+      new(shape[1..-1]).(input)
     end

     # @param [Array] input_dim_or_shape Setting the shape or dimension of the input data.
     def initialize(input_dim_or_shape)
       super()
       @input_shape = input_dim_or_shape.is_a?(Array) ? input_dim_or_shape : [input_dim_or_shape]
     end

-    def call(input_tensor)
+    def call(input)
       build unless built?
-      Tensor.new(forward(input_tensor.data), Link.new(input_tensor&.link, self))
+      if input.is_a?(Tensor)
+        x = input.data
+        prev_link = input&.link
+      else
+        x = input
+        prev_link = nil
+      end
+      Tensor.new(forward(x), Link.new(prev_link, self))
     end

     def build
       @built = true
     end

@@ -331,10 +339,10 @@
     end

     def forward(x)
       if DNN.learning_phase
         Xumo::SFloat.srand(@rnd.rand(1 << 31))
-        @mask = Xumo::SFloat.ones(*x.shape).rand < @dropout_ratio
+        @mask = Xumo::SFloat.new(*x.shape).rand < @dropout_ratio
         x[@mask] = 0
       elsif @use_scale
         x *= (1 - @dropout_ratio)
       end
       x