lib/dnn/core/models.rb in ruby-dnn-0.14.0 vs lib/dnn/core/models.rb in ruby-dnn-0.14.1

- old
+ new

@@ -133,11 +133,11 @@
   # @param [Numo::SFloat] x Input training data.
   # @param [Numo::SFloat] y Output training data.
   # @return [Hash] Hash of contents to be output to log.
   private def train_step(x, y)
     loss_value = train_on_batch(x, y)
-    { loss: loss_value.mean }
+    { loss: loss_value }
   end

   # Implement the test process to be performed.
   # @param [Numo::SFloat] x Input training data.
   # @param [Numo::SFloat] y Output training data.
@@ -183,11 +183,11 @@
     sum_loss = Xumo::SFloat[0]
     max_steps = (num_test_datas.to_f / batch_size).ceil
     iter.foreach(batch_size) do |x_batch, y_batch|
       correct, loss_value = test_on_batch(x_batch, y_batch)
       total_correct += correct
-      sum_loss += loss_value.mean
+      sum_loss += loss_value
     end
     mean_loss = sum_loss / max_steps
     acc = total_correct.to_f / num_test_datas
     @last_log[:test_loss] = mean_loss
     @last_log[:test_accuracy] = acc
@@ -257,12 +257,13 @@
     @callbacks = []
   end

   # Save the model in marshal format.
   # @param [String] file_name Name to save model.
-  def save(file_name)
-    saver = Savers::MarshalSaver.new(self)
+  # @param [Boolean] include_optimizer Set true to save data included optimizer status.
+  def save(file_name, include_optimizer: true)
+    saver = Savers::MarshalSaver.new(self, include_optimizer: include_optimizer)
     saver.save(file_name)
   end

   # @return [DNN::Models::Model] Return the copy this model.
   def copy
@@ -310,10 +311,15 @@
   private def forward(x, learning_phase)
     DNN.learning_phase = learning_phase
     @layers_cache = nil
-    output_tensor = call(Tensor.new(x, nil))
+    inputs = if x.is_a?(Array)
+      x.map { |a| Tensor.new(a, nil) }
+    else
+      Tensor.new(x, nil)
+    end
+    output_tensor = call(inputs)
     @last_link = output_tensor.link
     unless @built
       @built = true
       naming
     end
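
Note on the first two hunks: dropping the .mean reductions in train_step and evaluate suggests that train_on_batch and test_on_batch now return an already averaged scalar loss per batch rather than a per-sample loss array (an inference from this diff alone, not from the changelog). A minimal usage sketch, assuming model, x_batch, and y_batch already exist:

    # In 0.14.1 the returned loss is logged as-is; no further .mean is
    # applied, so it is treated as a single batch-mean value.
    loss = model.train_on_batch(x_batch, y_batch)
    puts "batch loss: #{loss}"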
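
The third hunk adds an include_optimizer keyword to save, defaulting to true so existing callers keep the old behavior. A minimal sketch; model is assumed to be a built DNN::Models::Model and the file names are placeholders:

    # Default: marshal the model together with its optimizer status.
    model.save("trained_model.marshal")

    # New in 0.14.1: omit the optimizer status, e.g. for a smaller
    # inference-only snapshot.
    model.save("trained_model_slim.marshal", include_optimizer: false)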
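
The last hunk lets forward accept an Array of inputs, wrapping each element in its own Tensor before call is invoked, which enables multi-input models. A hedged sketch, assuming a model whose call method expects two inputs; the shapes and variable names here are illustrative only:

    # Each Numo::SFloat in the Array is wrapped in a separate Tensor,
    # so a multi-input model's call receives an Array of Tensors.
    x1 = Numo::SFloat.new(32, 10).rand
    x2 = Numo::SFloat.new(32, 5).rand
    out = model.predict([x1, x2])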