lib/dnn/core/model.rb in ruby-dnn-0.7.1 vs lib/dnn/core/model.rb in ruby-dnn-0.7.2

- old
+ new
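
In short: 0.7.2 adds an input_data_shape_check step that validates the shapes of x and y before train_on_batch, accurate, and predict run, guards get_prev_layer against a model that has no enclosing super model, and extends layers_shape_check to cover RNN layers.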

@@ -149,19 +149,21 @@
         epoch_proc.call(epoch) if epoch_proc
       end
     end
 
     def train_on_batch(x, y, &batch_proc)
+      input_data_shape_check(x, y)
       x, y = batch_proc.call(x, y) if batch_proc
       forward(x, true)
       loss_value = loss(y)
       backward(y)
       update
       loss_value
     end
 
     def accurate(x, y, batch_size = 1, &batch_proc)
+      input_data_shape_check(x, y)
       batch_size = batch_size >= x.shape[0] ? x.shape[0] : batch_size
       correct = 0
       (x.shape[0].to_f / batch_size).ceil.times do |i|
         x_batch = Xumo::SFloat.zeros(batch_size, *x.shape[1..-1])
         y_batch = Xumo::SFloat.zeros(batch_size, *y.shape[1..-1])
@@ -183,10 +185,11 @@
       end
       correct.to_f / x.shape[0]
     end
 
     def predict(x)
+      input_data_shape_check(x)
       forward(x, false)
     end
 
     def predict1(x)
       predict(Xumo::SFloat.cast([x]))[0, false]
@@ -237,11 +240,15 @@
     end
 
     def get_prev_layer(layer)
       layer_index = @layers.index(layer)
       prev_layer = if layer_index == 0
-        @super_model.layers[@super_model.layers.index(self) - 1]
+        if @super_model
+          @super_model.layers[@super_model.layers.index(self) - 1]
+        else
+          self
+        end
       else
         @layers[layer_index - 1]
       end
       if prev_layer.is_a?(Layers::Layer)
         prev_layer
@@ -259,20 +266,33 @@
       unless @layers.last.is_a?(Layers::OutputLayer)
         raise TypeError.new("The last layer is not an OutputLayer.")
       end
     end
+    def input_data_shape_check(x, y = nil)
+      unless @layers.first.shape == x.shape[1..-1]
+        raise DNN_ShapeError.new("The shape of x does not match the input shape. x shape is #{x.shape[1..-1]}, but input shape is #{@layers.first.shape}.")
+      end
+      if y && @layers.last.shape != y.shape[1..-1]
+        raise DNN_ShapeError.new("The shape of y does not match the output shape. y shape is #{y.shape[1..-1]}, but output shape is #{@layers.last.shape}.")
+      end
+    end
+
     def layers_shape_check
       @layers.each.with_index do |layer, i|
+        prev_shape = layer.prev_layer.shape
         if layer.is_a?(Layers::Dense)
-          prev_shape = layer.prev_layer.shape
           if prev_shape.length != 1
             raise DNN_ShapeError.new("layer index(#{i}) Dense: The shape of the previous layer is #{prev_shape}. The shape of the previous layer must be 1 dimensional.")
           end
         elsif layer.is_a?(Layers::Conv2D) || layer.is_a?(Layers::MaxPool2D)
-          prev_shape = layer.prev_layer.shape
           if prev_shape.length != 3
             raise DNN_ShapeError.new("layer index(#{i}) Conv2D: The shape of the previous layer is #{prev_shape}. The shape of the previous layer must be 3 dimensional.")
+          end
+        elsif layer.is_a?(Layers::RNN)
+          if prev_shape.length != 2
+            layer_name = layer.class.name.match("\:\:(.+)$")[1]
+            raise DNN_ShapeError.new("layer index(#{i}) #{layer_name}: The shape of the previous layer is #{prev_shape}. The shape of the previous layer must be 2 dimensional.")
           end
         end
       end
     end
   end
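
The effect of the new checks is that a batch whose per-sample shape (its shape minus the batch axis) disagrees with the model's declared input or output shape now fails fast with DNN_ShapeError, before forward runs. The following standalone sketch mirrors the comparison input_data_shape_check performs; it uses plain arrays and a local ShapeError so it runs without the gem, and everything except the compared shapes is illustrative:

  # Standalone sketch of the 0.7.2 check, not the gem's API.
  class ShapeError < StandardError; end

  # input_shape/output_shape play the role of @layers.first.shape and
  # @layers.last.shape; x_shape/y_shape are the full batch shapes.
  def input_data_shape_check(input_shape, output_shape, x_shape, y_shape = nil)
    unless input_shape == x_shape[1..-1]
      raise ShapeError, "x shape is #{x_shape[1..-1]}, but input shape is #{input_shape}."
    end
    if y_shape && output_shape != y_shape[1..-1]
      raise ShapeError, "y shape is #{y_shape[1..-1]}, but output shape is #{output_shape}."
    end
  end

  input_data_shape_check([784], [10], [32, 784], [32, 10]) # ok: shapes match
  input_data_shape_check([784], [10], [32, 600])           # raises ShapeError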
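
The get_prev_layer change is a nil guard: in 0.7.1, asking for the layer before a model's first layer always dereferenced @super_model, so a top-level model (one not nested inside another model) raised NoMethodError on nil. In 0.7.2 that path falls back to returning the model itself, and the subsequent prev_layer.is_a?(Layers::Layer) check then decides how to treat it instead of crashing.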
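
layers_shape_check now hoists prev_shape out of the branches and adds an RNN case: a recurrent layer requires a 2-dimensional previous shape (time steps, features), just as Dense requires 1-D and Conv2D/MaxPool2D require 3-D. The match("\:\:(.+)$") call only strips the namespace so the error names the concrete class (e.g. LSTM rather than DNN::Layers::LSTM). Below is a minimal sketch of the same dispatch with stand-in classes; the class names mirror the gem's, everything else is illustrative:

  # Stand-in layer classes; only the recorded prev_shape matters here.
  Layer  = Struct.new(:prev_shape)
  class Dense  < Layer; end
  class Conv2D < Layer; end
  class RNN    < Layer; end

  # Expected dimensionality of the previous layer's shape per layer kind.
  EXPECTED_NDIM = { Dense => 1, RNN => 2, Conv2D => 3 }.freeze

  def layers_shape_check(layers)
    layers.each_with_index do |layer, i|
      ndim = EXPECTED_NDIM[layer.class] or next
      next if layer.prev_shape.length == ndim
      raise "layer index(#{i}) #{layer.class.name}: previous shape " \
            "#{layer.prev_shape} must be #{ndim} dimensional."
    end
  end

  layers_shape_check([RNN.new([28, 28]), Dense.new([128])]) # passes
  layers_shape_check([RNN.new([784])])                      # raises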