lib/dnn/core/model.rb in ruby-dnn-0.7.3 vs lib/dnn/core/model.rb in ruby-dnn-0.8.0

- old
+ new

@@ -33,11 +33,16 @@
       @layers.each do |layer|
         next unless layer.is_a?(HasParamLayer)
         hash_params = has_param_layers_params[has_param_layers_index]
         hash_params.each do |key, (shape, base64_param)|
           bin = Base64.decode64(base64_param)
-          layer.params[key] = Xumo::SFloat.from_binary(bin).reshape(*shape)
+          data = Xumo::SFloat.from_binary(bin).reshape(*shape)
+          if layer.params[key].is_a?(LearningParam)
+            layer.params[key].data = data
+          else
+            layer.params[key] = data
+          end
         end
         has_param_layers_index += 1
       end
     end

@@ -53,17 +58,18 @@
     end

     def to_json
       hash_layers = @layers.map { |layer| layer.to_hash }
       hash = {version: VERSION, layers: hash_layers, optimizer: @optimizer.to_hash}
-      JSON.dump(hash)
+      JSON.pretty_generate(hash)
     end

     def params_to_json
-      has_param_layers = @layers.select { |layer| layer.is_a?(HasParamLayer) }
+      has_param_layers = @layers.select { |layer| layer.is_a?(Layers::HasParamLayer) }
       has_param_layers_params = has_param_layers.map do |layer|
         layer.params.map { |key, param|
+          param = param.data if param.is_a?(LearningParam)
           base64_param = Base64.encode64(param.to_binary)
           [key, [param.shape, base64_param]]
         }.to_h
       end
       JSON.dump(has_param_layers_params)

@@ -158,10 +164,10 @@
       backward(y)
       update
       loss_value
     end
-    def accurate(x, y, batch_size = 1, &batch_proc)
+    def accurate(x, y, batch_size = 100, &batch_proc)
       input_data_shape_check(x, y)
       batch_size = batch_size >= x.shape[0] ? x.shape[0] : batch_size
       correct = 0
       (x.shape[0].to_f / batch_size).ceil.times do |i|
         x_batch = Xumo::SFloat.zeros(batch_size, *x.shape[1..-1])