Skip to content

Commit

Permalink
Merge pull request #231 from geekypathak21/batchnorm-example
Browse files Browse the repository at this point in the history
Adding Batch Norm layer to mnist example.
  • Loading branch information
zoq authored Jul 23, 2024
2 parents 4d413b5 + 502ab76 commit 2beef0a
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 2 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ cmake-build-*
*.so
data
plots
mnist_*
mnist_t*
nn_*
cifar_*
covertype-rf
Expand Down
8 changes: 7 additions & 1 deletion cpp/neural_networks/mnist_cnn/mnist_cnn.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,9 @@ int main()
2, // Stride along height.
true);

// Add BatchNorm.
model.Add<BatchNorm>();

// Add the second convolution layer.
model.Add<Convolution>(16, // Number of output activation maps.
5, // Filter width.
Expand All @@ -136,6 +139,9 @@ int main()
// Add the second pooling layer.
model.Add<MaxPooling>(2, 2, 2, 2, true);

// Add BatchNorm.
model.Add<BatchNorm>();

// Add the final dense layer.
model.Add<Linear>(10);
model.Add<LogSoftMax>();
Expand Down Expand Up @@ -200,7 +206,7 @@ int main()
// Get predictions on test data points.
// The original file can be downloaded from
// https://www.kaggle.com/c/digit-recognizer/data
data::Load("../data/mnist_test.csv", dataset, true);
data::Load("../../../data/mnist_test.csv", dataset, true);
const mat testX = dataset.submat(1, 0, dataset.n_rows - 1, dataset.n_cols - 1)
/ 256.0;
const mat testY = dataset.row(0);
Expand Down

0 comments on commit 2beef0a

Please sign in to comment.