lib/dnn/core/model.rb in ruby-dnn-0.9.3 vs lib/dnn/core/model.rb in ruby-dnn-0.9.4
- old
+ new
@@ -211,12 +211,14 @@
num_train_datas = x.shape[0]
(1..epochs).each do |epoch|
puts "【 epoch #{epoch}/#{epochs} 】" if verbose
(num_train_datas.to_f / batch_size).ceil.times do |index|
x_batch, y_batch = dataset.get_batch(batch_size)
- loss = train_on_batch(x_batch, y_batch, &batch_proc)
- if loss.nan?
+ loss_value = train_on_batch(x_batch, y_batch, &batch_proc)
+ if loss_value.is_a?(Numo::SFloat)
+ loss_value = loss_value.mean
+ elsif loss_value.nan?
puts "\nloss is nan" if verbose
return
end
num_trained_datas = (index + 1) * batch_size
num_trained_datas = num_trained_datas > num_train_datas ? num_train_datas : num_trained_datas
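The change above handles loss layers that return a per-sample Numo::SFloat instead of a scalar Float. A minimal sketch of the reduction the new loop applies before the NaN check and the progress log; Numo is assumed to be available and the values are made up.

require "numo/narray"

# Stand-in for what train_on_batch may return: either a scalar Float
# or a per-sample Numo::SFloat vector.
loss_value = Numo::SFloat[0.3, 0.7, 0.5, 0.1]

# Same reduction the new fit loop performs before logging.
loss_value = loss_value.mean if loss_value.is_a?(Numo::SFloat)

if loss_value.nan?
  puts "loss is nan"
else
  puts sprintf("loss: %.8f", loss_value)   # prints a value close to 0.40000000
end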
@@ -228,11 +230,11 @@
log << ">"
else
log << "_"
end
end
- log << " #{num_trained_datas}/#{num_train_datas} loss: #{sprintf('%.8f', loss)}"
+ log << " #{num_trained_datas}/#{num_train_datas} loss: #{sprintf('%.8f', loss_value)}"
print log if verbose
end
if verbose && test
acc = accurate(test[0], test[1], batch_size, &batch_proc)
print " accurate: #{acc}"
@@ -244,10 +246,11 @@
# Train the model on a single batch.
# Compile the model before using this method.
# @param [Numo::SFloat] x Input training data.
# @param [Numo::SFloat] y Output training data.
+ # @return [Float | Numo::SFloat] Returns the loss value as a Float or a Numo::SFloat.
# @yield [x, y] batch_proc Set proc to process per batch.
def train_on_batch(x, y, &batch_proc)
raise DNN_Error.new("The model is not compiled.") unless compiled?
check_xy_type(x, y)
input_data_shape_check(x, y)
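Given the @return tag added above, code that drives training itself may get back either a Float or a per-sample Numo::SFloat. A hedged sketch of a caller-side loop; model, x_batches and y_batches are hypothetical names, and only the documented return contract is assumed.

x_batches.zip(y_batches).each do |xb, yb|
  loss = model.train_on_batch(xb, yb)
  loss = loss.mean if loss.is_a?(Numo::SFloat)  # collapse per-sample losses to a scalar
  break if loss.nan?                            # stop on divergence, as fit does
end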
@@ -302,11 +305,11 @@
# Predict a single piece of data.
# @param [Numo::SFloat] x Input data. Note that x is a single sample, not a batch.
def predict1(x)
check_xy_type(x)
- predict(Xumo::SFloat.cast([x]))[0, false]
+ predict(x.reshape(1, *x.shape))[0, false]
end
# @return [DNN::Model] Copy this model.
def copy
Marshal.load(Marshal.dump(self))
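The new predict1 body skips the Xumo::SFloat.cast copy and instead prepends a batch axis of size 1 with reshape; the [0, false] index then strips that axis from the prediction. A small sketch of the shape handling; Numo::SFloat stands in for Xumo::SFloat and the 28x28 sample shape is illustrative.

require "numo/narray"

x = Numo::SFloat.new(28, 28).rand   # one sample, shape [28, 28]
batch = x.reshape(1, *x.shape)      # shape [1, 28, 28]: a batch holding one sample
p batch.shape                       # => [1, 28, 28]
p batch[0, false].shape             # => [28, 28], the leading batch axis removed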
@@ -331,10 +334,10 @@
}.flatten
end
# TODO
# It is not good to write the Layer class name directly in the Model class. I will fix it later.
- def forward(x, learning_phase)
+ def forward(x, learning_phase)
@layers.each do |layer|
x = if layer.is_a?(Layers::Dropout) || layer.is_a?(Layers::BatchNormalization) || layer.is_a?(Model)
layer.forward(x, learning_phase)
else
layer.forward(x)