lib/dnn/core/model.rb in ruby-dnn-0.5.9 vs lib/dnn/core/model.rb in ruby-dnn-0.5.10

- old: lines prefixed with "-" are from the previous version (ruby-dnn-0.5.9)
+ new: lines prefixed with "+" are from the updated version (ruby-dnn-0.5.10)

@@ -1,12 +1,10 @@ require "json" module DNN # This class deals with the model of the network. class Model - include Xumo - attr_accessor :layers attr_reader :optimizer attr_reader :batch_size def self.load(file_name) @@ -34,11 +32,11 @@ has_param_layers_index = 0 @layers.each do |layer| next unless layer.is_a?(HasParamLayer) hash_params = has_param_layers_params[has_param_layers_index] hash_params.each do |key, param| - layer.params[key] = SFloat.cast(param) + layer.params[key] = Xumo::SFloat.cast(param) end has_param_layers_index += 1 end end @@ -158,12 +156,12 @@ batch_size = 1 end end correct = 0 (x.shape[0].to_f / @batch_size).ceil.times do |i| - x_batch = SFloat.zeros(@batch_size, *x.shape[1..-1]) - y_batch = SFloat.zeros(@batch_size, *y.shape[1..-1]) + x_batch = Xumo::SFloat.zeros(@batch_size, *x.shape[1..-1]) + y_batch = Xumo::SFloat.zeros(@batch_size, *y.shape[1..-1]) @batch_size.times do |j| k = i * @batch_size + j break if k >= x.shape[0] x_batch[j, false] = x[k, false] y_batch[j, false] = y[k, false] @@ -184,10 +182,10 @@ def predict(x) forward(x, false) end def predict1(x) - predict(SFloat.cast([x]))[0, false] + predict(Xumo::SFloat.cast([x]))[0, false] end def forward(x, training) unless compiled? raise DNN_Error.new("The model is not compiled.")