From 502ab760335c5e436de627a89ec7eeeb65425397 Mon Sep 17 00:00:00 2001
From: Himanshu Pathak
Date: Mon, 15 Jul 2024 19:59:36 +0530
Subject: [PATCH] Adding Batch Norm layer to mnist example.

---
 .gitignore                                  | 2 +-
 cpp/neural_networks/mnist_cnn/mnist_cnn.cpp | 8 +++++++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index 41653f59..d5d51829 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,7 +12,7 @@ cmake-build-*
 *.so
 data
 plots
-mnist_*
+mnist_t*
 nn_*
 cifar_*
 covertype-rf
diff --git a/cpp/neural_networks/mnist_cnn/mnist_cnn.cpp b/cpp/neural_networks/mnist_cnn/mnist_cnn.cpp
index b5862158..44a371be 100644
--- a/cpp/neural_networks/mnist_cnn/mnist_cnn.cpp
+++ b/cpp/neural_networks/mnist_cnn/mnist_cnn.cpp
@@ -120,6 +120,9 @@ int main()
                         2,    // Stride along height.
                         true);
 
+  // Add BatchNorm.
+  model.Add<BatchNorm>();
+
   // Add the second convolution layer.
   model.Add<Convolution>(16,  // Number of output activation maps.
                          5,   // Filter width.
@@ -136,6 +139,9 @@ int main()
   // Add the second pooling layer.
   model.Add<MaxPooling>(2, 2, 2, 2, true);
 
+  // Add BatchNorm.
+  model.Add<BatchNorm>();
+
   // Add the final dense layer.
   model.Add<Linear>(10);
   model.Add<LogSoftMax>();
@@ -200,7 +206,7 @@ int main()
   // Get predictions on test data points.
   // The original file could be download from
   // https://www.kaggle.com/c/digit-recognizer/data
-  data::Load("../data/mnist_test.csv", dataset, true);
+  data::Load("../../../data/mnist_test.csv", dataset, true);
   const mat testX = dataset.submat(1, 0,
       dataset.n_rows - 1, dataset.n_cols - 1) / 256.0;
   const mat testY = dataset.row(0);
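
Note (not part of the patch): a minimal sketch of the layer stack this change produces, assuming the mlpack 4 FFN API used by the mnist_cnn example. The first convolution block's parameters (6 output maps, 5x5 filters) and the LeakyReLU activations are assumptions, since they fall outside the hunks shown above; only the pieces marked "from this patch" are taken from the diff.

    #include <mlpack.hpp>

    using namespace mlpack;

    int main()
    {
      // Sketch of the model definition after applying this patch.
      FFN<NegativeLogLikelihood, RandomInitialization> model;

      // First convolution block (assumed: 6 maps, 5x5 filters, LeakyReLU).
      model.Add<Convolution>(6, 5, 5);
      model.Add<LeakyReLU>();
      model.Add<MaxPooling>(2, 2, 2, 2, true);

      // BatchNorm after the first pooling layer (from this patch).
      model.Add<BatchNorm>();

      // Second convolution block (16 maps, 5x5 filters, per the patch context).
      model.Add<Convolution>(16, 5, 5);
      model.Add<LeakyReLU>();
      model.Add<MaxPooling>(2, 2, 2, 2, true);

      // BatchNorm after the second pooling layer (from this patch).
      model.Add<BatchNorm>();

      // Final dense layer and log-softmax over the 10 digit classes.
      model.Add<Linear>(10);
      model.Add<LogSoftMax>();

      // 28x28 single-channel MNIST input.
      model.InputDimensions() = std::vector<size_t>({28, 28});

      return 0;
    }

Placing BatchNorm directly after each pooling layer normalizes the pooled activation maps before the next convolution or dense layer consumes them, which is the only behavioral change this patch makes to the example.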