lib/nn.rb in nn-2.2.0 vs lib/nn.rb in nn-2.3.0
- old (nn-2.2.0)
+ new (nn-2.3.0)
@@ -1,10 +1,10 @@
require "numo/narray"
require "json"
class NN
- VERSION = "2.2"
+ VERSION = "2.3"
include Numo
attr_accessor :weights
attr_accessor :biases
@@ -122,21 +122,21 @@
end
correct.to_f / num_test_data
end
def learn(x_train, y_train, &block)
- x = SFloat.zeros(@batch_size, @num_nodes.first)
- y = SFloat.zeros(@batch_size, @num_nodes.last)
- @batch_size.times do |i|
- if x_train.is_a?(SFloat)
- r = rand(x_train.shape[0])
- x[i, true] = x_train[r, true]
- y[i, true] = y_train[r, true]
- else
- r = rand(x_train.length)
- x[i, true] = SFloat.cast(x_train[r])
- y[i, true] = SFloat.cast(y_train[r])
+ if x_train.is_a?(SFloat)
+ indexes = (0...x_train.shape[0]).to_a.sample(@batch_size)
+ x = x_train[indexes, true]
+ y = y_train[indexes, true]
+ else
+ indexes = (0...x_train.length).to_a.sample(@batch_size)
+ x = SFloat.zeros(@batch_size, @num_nodes.first)
+ y = SFloat.zeros(@batch_size, @num_nodes.last)
+ @batch_size.times do |i|
+ x[i, true] = SFloat.cast(x_train[indexes[i]])
+ y[i, true] = SFloat.cast(y_train[indexes[i]])
end
end
x, y = block.call(x, y) if block
forward(x)
backward(y)
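
The rewritten learn draws its mini-batch by sampling @batch_size distinct row indices up front with Array#sample and, when the training data is already a Numo::SFloat, slicing every row in a single fancy-indexing call; the 2.2.0 version drew each row independently with rand, so a batch could contain the same example more than once. A minimal standalone sketch of the two strategies, assuming a Numo::SFloat training set (x_train, batch_size and the 784-column shape are illustrative, not the gem's API):

    require "numo/narray"

    x_train    = Numo::SFloat.new(1000, 784).rand   # toy dataset: 1000 rows
    batch_size = 20

    # nn-2.2.0 style: each row drawn independently, so duplicates are possible
    # (sampling with replacement).
    x_old = Numo::SFloat.zeros(batch_size, 784)
    batch_size.times do |i|
      x_old[i, true] = x_train[rand(x_train.shape[0]), true]
    end

    # nn-2.3.0 style: sample batch_size distinct indices once, then slice all
    # rows in one fancy-indexing call (sampling without replacement).
    indexes = (0...x_train.shape[0]).to_a.sample(batch_size)
    x_new   = x_train[indexes, true]
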
@@ -413,18 +413,16 @@
@nn = nn
@index = index
end
def forward(x)
- @x = x
@mean = x.mean(0)
@xc = x - @mean
@var = (@xc ** 2).mean(0)
@std = NMath.sqrt(@var + 1e-7)
@xn = @xc / @std
- out = @nn.gammas[@index] * @xn + @nn.betas[@index]
- out.reshape(*@x.shape)
+ @nn.gammas[@index] * @xn + @nn.betas[@index]
end
def backward(dout)
@d_beta = dout.sum(0).mean
@d_gamma = (@xn * dout).sum(0).mean
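
In the batch-normalization layer, forward no longer caches the input as @x and no longer reshapes its result: @nn.gammas[@index] * @xn + @nn.betas[@index] already broadcasts to the shape of x, so the old reshape(*@x.shape) was a no-op. A standalone sketch of the same per-feature normalization, assuming a (batch x features) Numo::SFloat input; gamma and beta stand in for the gem's @nn.gammas[@index] and @nn.betas[@index]:

    require "numo/narray"
    include Numo

    x     = SFloat.new(4, 3).rand     # 4 samples, 3 features
    gamma = SFloat.ones(3)
    beta  = SFloat.zeros(3)

    mean = x.mean(0)                  # per-feature mean over the batch axis
    xc   = x - mean                   # centered input
    var  = (xc ** 2).mean(0)          # per-feature variance
    std  = NMath.sqrt(var + 1e-7)     # small epsilon avoids division by zero
    xn   = xc / std                   # normalized input
    out  = gamma * xn + beta          # scale and shift; already x's shape
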
@@ -432,9 +430,8 @@
dxc = dxn / @std
dstd = -((dxn * @xc) / (@std ** 2)).sum(0)
dvar = 0.5 * dstd / @std
dxc += (2.0 / @nn.batch_size) * @xc * dvar
dmean = dxc.sum(0)
- dx = dxc - dmean / @nn.batch_size
- dx.reshape(*@x.shape)
+ dxc - dmean / @nn.batch_size
end
end
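
The backward hunk drops the trailing reshape for the same reason: dxc - dmean / @nn.batch_size already has the batch shape, so @x no longer needs to be kept around. A standalone sketch of the gradient chain mirroring the hunk; it repeats a minimal forward setup so it runs on its own, and the initial dxn = gamma * dout step is an assumption (that line sits just outside the displayed hunk), matching a standard batch-norm backward pass:

    require "numo/narray"
    include Numo

    x     = SFloat.new(4, 3).rand            # 4 samples, 3 features
    gamma = SFloat.ones(3)
    xc    = x - x.mean(0)                    # centered input
    std   = NMath.sqrt((xc ** 2).mean(0) + 1e-7)

    batch_size = x.shape[0]
    dout  = SFloat.ones(*x.shape)            # upstream gradient, illustrative

    dxn   = gamma * dout                     # back through the scale (assumed step)
    dxc   = dxn / std                        # back through the normalization
    dstd  = -((dxn * xc) / (std ** 2)).sum(0)
    dvar  = 0.5 * dstd / std                 # d(std)/d(var) = 0.5 / std
    dxc  += (2.0 / batch_size) * xc * dvar   # variance term for each centered x
    dmean = dxc.sum(0)
    dx    = dxc - dmean / batch_size         # mean term; shape already matches x
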