Diff of lib/dnn/core/model.rb between ruby-dnn 0.2.0 and ruby-dnn 0.2.1

- old
+ new

@@ -6,11 +6,10 @@
   include Numo

   class Model
     attr_accessor :layers
     attr_reader :optimizer
     attr_reader :batch_size
-    attr_reader :training

     def initialize
       @layers = []
       @optimizer = nil
       @batch_size = nil
@@ -84,10 +83,14 @@
     end

     def compiled?
       @compiled
     end
+
+    def training?
+      @training
+    end

     def train(x, y, epochs,
               batch_size: 1,
               test: nil,
               verbose: true,
@@ -134,10 +137,16 @@
       @layers.each { |layer| layer.update if layer.respond_to?(:update) }
       @layers[-1].loss(y)
     end

     def accurate(x, y, batch_size = nil, &batch_proc)
-      @batch_size = batch_size if batch_size
+      unless batch_size
+        if @batch_size
+          batch_size = @batch_size >= x.shape[0] ? @batch_size : x.shape[0]
+        else
+          batch_size = 1
+        end
+      end
       correct = 0
       (x.shape[0].to_f / @batch_size).ceil.times do |i|
         x_batch = SFloat.zeros(@batch_size, *x.shape[1..-1])
         y_batch = SFloat.zeros(@batch_size, *y.shape[1..-1])
         @batch_size.times do |j|