lib/dnn/core/model.rb in ruby-dnn-0.1.8 vs lib/dnn/core/model.rb in ruby-dnn-0.2.0
- old
+ new
@@ -12,10 +12,11 @@
  def initialize
    @layers = []
    @optimizer = nil
    @batch_size = nil
+   @compiled = false
  end

  def self.load(file_name)
    Marshal.load(File.binread(file_name))
  end
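Models are still persisted via Marshal, and 0.2.0 adds a `@compiled` flag to the initial state. A minimal round-trip sketch, assuming the class lives at `DNN::Model` and that a Marshal-based save counterpart exists (only `load` appears in this diff, so the `File.binwrite` line below stands in for it):

    require "dnn"

    model = DNN::Model.new
    # ... add layers, compile, train ...

    # Hypothetical save counterpart to Model.load; not shown in this diff.
    File.binwrite("model.dat", Marshal.dump(model))

    restored = DNN::Model.load("model.dat") # => Model with layers and weights intact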
@@ -71,22 +72,28 @@
  def compile(optimizer)
    unless optimizer.is_a?(Optimizers::Optimizer)
      raise DNN_TypeError.new("optimizer is not an instance of the DNN::Optimizers::Optimizer class.")
    end
+   @compiled = true
    layers_check
    @optimizer = optimizer
    @layers.each do |layer|
-     layer.init(self)
+     layer.build(self)
    end
    layers_shape_check
  end
+
+ def compiled?
+   @compiled
+ end

  def train(x, y, epochs,
            batch_size: 1,
-           batch_proc: nil,
+           test: nil,
            verbose: true,
+           batch_proc: nil,
            &epoch_proc)
    @batch_size = batch_size
    num_train_data = x.shape[0]
    (1..epochs).each do |epoch|
      puts "【 epoch #{epoch}/#{epochs} 】" if verbose
@@ -108,10 +115,14 @@
          end
        end
        log << " #{num_trained_data}/#{num_train_data} loss: #{loss}"
        print log if verbose
      end
+     if verbose && test
+       acc = accurate(test[0], test[1], batch_size, &batch_proc)
+       print " accurate: #{acc}"
+     end
      puts "" if verbose
      epoch_proc.call(epoch) if epoch_proc
    end
  end
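The new `test:` keyword takes over from the standalone `test` method (removed in the next hunk): pass a `[x_test, y_test]` pair and, when `verbose` is on, accuracy is printed after every epoch. Note also that `batch_proc:` moved after `verbose:` in the signature. A usage sketch, with the data arrays assumed to be prepared elsewhere:

    model.train(x_train, y_train, 10,
                batch_size: 32,
                test: [x_test, y_test], # prints " accurate: ..." per epoch when verbose
                verbose: true)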
@@ -122,17 +133,10 @@
    backward(y)
    @layers.each { |layer| layer.update if layer.respond_to?(:update) }
    @layers[-1].loss(y)
  end

- def test(x, y, batch_size = nil, &batch_proc)
-   @batch_size = batch_size if batch_size
-   acc = accurate(x, y, @batch_size, &batch_proc)
-   puts "accurate: #{acc}"
-   acc
- end
-
  def accurate(x, y, batch_size = nil, &batch_proc)
    @batch_size = batch_size if batch_size
    correct = 0
    (x.shape[0].to_f / @batch_size).ceil.times do |i|
      x_batch = SFloat.zeros(@batch_size, *x.shape[1..-1])
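With `test` gone, code that relied on it for a printed score should call `accurate` directly and print the result itself, for example:

    acc = model.accurate(x_test, y_test, 32)
    puts "accurate: #{acc}" # what the removed test method used to print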
@@ -154,12 +158,10 @@
  def predict(x)
    forward(x, false)
  end

- private
-
  def forward(x, training)
    @training = training
    @layers.each do |layer|
      x = layer.forward(x)
    end
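Dropping the `private` marker here makes `forward` (and `backward` below) part of the public surface, so `predict` is now just a convenience wrapper over `forward(x, false)`. The two calls below should be equivalent; the `training` flag is what mode-sensitive layers such as dropout would consult:

    out1 = model.predict(x_batch)        # inference mode (training = false)
    out2 = model.forward(x_batch, false) # same result, called directly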
@@ -169,10 +171,13 @@
  def backward(y)
    dout = y
    @layers[0..-1].reverse.each do |layer|
      dout = layer.backward(dout)
    end
+   dout
  end
+
+ private
  def layers_check
    unless @layers.first.is_a?(Layers::InputLayer)
      raise DNN_Error.new("The first layer is not an InputLayer.")
    end
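`backward` now returns the final gradient instead of nil, which is useful for inspecting what flows out of the first layer, and `layers_check` (now explicitly private alongside the other validations) still insists that the model start with an `InputLayer`. A sketch of a model that passes the check, assuming the `<<` layer-push operator and the `DNN::Layers` names used elsewhere in ruby-dnn:

    model = DNN::Model.new
    model << DNN::Layers::InputLayer.new(784) # must come first, or compile raises DNN_Error
    model << DNN::Layers::Dense.new(10)
    model.compile(DNN::Optimizers::SGD.new)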