lib/dnn/core/model.rb in ruby-dnn-0.6.4 vs lib/dnn/core/model.rb in ruby-dnn-0.6.5

- old
+ new

@@ -145,10 +145,10 @@
       @layers.each { |layer| layer.update if @trainable && layer.is_a?(HasParamLayer) }
       loss
     end

     def accurate(x, y, batch_size = 1, &batch_proc)
-      batch_size = batch_size >= x.shape[0] ? batch_size : x.shape[0]
+      batch_size = batch_size >= x.shape[0] ? x.shape[0] : batch_size
       correct = 0
       (x.shape[0].to_f / batch_size).ceil.times do |i|
         x_batch = Xumo::SFloat.zeros(batch_size, *x.shape[1..-1])
         y_batch = Xumo::SFloat.zeros(batch_size, *y.shape[1..-1])
         batch_size.times do |j|