examples/mnist_example.rb in ruby-dnn-0.8.8 vs examples/mnist_example.rb in ruby-dnn-0.9.0

- old
+ new

@@ -1,12 +1,13 @@
 require "dnn"
 require "dnn/lib/mnist"
-#require "numo/linalg/autoloader"
+# require "numo/linalg/autoloader"
 include DNN::Layers
 include DNN::Activations
 include DNN::Optimizers
+include DNN::Losses
 Model = DNN::Model
 MNIST = DNN::MNIST
 x_train, y_train = MNIST.load_train
 x_test, y_test = MNIST.load_test
@@ -15,12 +16,12 @@
 x_test = Numo::SFloat.cast(x_test).reshape(x_test.shape[0], 784)
 x_train /= 255
 x_test /= 255
-y_train = DNN::Util.to_categorical(y_train, 10, Numo::SFloat)
-y_test = DNN::Util.to_categorical(y_test, 10, Numo::SFloat)
+y_train = DNN::Utils.to_categorical(y_train, 10, Numo::SFloat)
+y_test = DNN::Utils.to_categorical(y_test, 10, Numo::SFloat)
 model = Model.new
 model << InputLayer.new(784)
@@ -31,10 +32,9 @@
 model << Dense.new(256)
 model << BatchNormalization.new
 model << ReLU.new
 model << Dense.new(10)
-model << SoftmaxWithLoss.new
-model.compile(RMSProp.new)
+model.compile(RMSProp.new, SoftmaxCrossEntropy.new)
 model.train(x_train, y_train, 10, batch_size: 100, test: [x_test, y_test])