-
-
Notifications
You must be signed in to change notification settings - Fork 1.7k
Closed
Labels
Milestone
Description
Issue description
When I try to train the following cnn, it only runs when I include the line
arma::conv_to<arma::mat>::from(matrix).print("Print Image: \n");
However, if I comment out this line, the code no longer runs (both cases compile, but in the second case the program crashes with a "Segmentation Fault"). It doesn't make sense to me why this happens.
Your environment
- version of mlpack: 3.2.2
- operating system: ubuntu 18.04
- compiler: g++ 7.4.0
- version of dependencies (Boost/Armadillo): libboost-1.65.1, armadillo-9.800.3
Steps to reproduce
Here's the code to reproduce the issue:
#include <mlpack/core.hpp>
#include <mlpack/core/data/split_data.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>
#include <mlpack/methods/ann/ffn.hpp>
#include <ensmallen.hpp>
using namespace mlpack;
using namespace mlpack::ann;
using namespace arma;
using namespace std;
using namespace ens;
// Reproduction case for the reported mlpack segfault: training this small CNN
// crashes unless the conv_to(...).print(...) call below is present.
int main()
{
// Train/validation split ratio (declared but unused in this snippet).
double RATIO = 0.1;
// Number of outer training passes over the data.
int CYCLES = 10;
// Optimizer iterations performed per cycle.
const int ITERATIONS_PER_CYCLE = 1;
// NOTE(review): 10e-3 == 0.01; if a learning rate of 0.001 was intended,
// this should be 1e-3 — verify against the experiment's intent.
double STEP_SIZE = 10e-3;
int BATCH_SIZE = 1;
// Reading in image.
cout << "Reading data ..." << endl;
// Image pixels are loaded as unsigned bytes; mlpack's image Load flattens
// the image into a single column of this matrix.
arma::Mat<unsigned char> matrix;
data::ImageInfo info;
// Load "saitama.png" (reported to be 128 x 128); last argument requests
// a fatal error on failure rather than a silent return.
data::Load("saitama.png", matrix, info, false, true);
// Feed-forward network: Conv(5x5, 6 maps) -> ReLU -> MaxPool(2x2, stride 2)
// -> Linear -> LogSoftMax, trained with negative log-likelihood.
FFN<NegativeLogLikelihood<>, RandomInitialization> model;
model.Add<Convolution<> >(info.Channels(), 6, 5, 5, 1, 1, 0, 0, info.Width(), info.Height());
model.Add<ReLULayer<> >();
model.Add<MaxPooling<> >(2, 2, 2, 2, true);
// 128x128 input -> 124x124 after the 5x5 convolution -> 62x62 after 2x2
// pooling, times 6 feature maps = 62*62*6 inputs to the linear layer.
model.Add<Linear<> >(62*62*6, 2);
model.Add<LogSoftMax<> >();
cout << "Training ..." << endl;
// Setting parameters Stochastic Gradient Descent (SGD) optimizer.
SGD<AdamUpdate> optimizer(
STEP_SIZE,
BATCH_SIZE,
ITERATIONS_PER_CYCLE,
1e-8,
true,
AdamUpdate(1e-8, 0.9, 0.999));
// Single training label for the single training image.
// NOTE(review): NegativeLogLikelihood in mlpack 3.x expects class labels
// starting at 1, so a 1x1 matrix holding 1 — confirm against the mlpack
// version in use, as a mismatched label range can cause out-of-bounds access.
arma::mat trainY;
trainY << 1;// << 0;
// The line whose presence/absence changes whether training segfaults:
// it only converts the byte image to a double matrix and prints it.
arma::conv_to<arma::mat>::from(matrix).print("Print Image: \n");
trainY.print("Train label: \n");
cout << trainY.n_rows << " " << trainY.n_cols;
for (int i = 1; i <= CYCLES; i++)
{
// NOTE(review): the predictors are a temporary produced by conv_to each
// iteration; whether Train copies or references it depends on mlpack's
// Train overload — worth confirming when diagnosing the crash.
model.Train(arma::conv_to<arma::mat>::from(matrix), trainY, optimizer, PrintLoss(), ProgressBar());
}
return 0;
}
(The image size I used is 128 x 128).
compiled using:
g++ cnn.cpp -o cnn `pkg-config --cflags --libs mlpack armadillo` -fopenmp -DHAS_STB
Expected behavior
When the line is commented out, the network should still train; the code should run irrespective of whether the line in question is commented out.
Actual behavior
The code runs only when the line is NOT commented out.
Reactions are currently unavailable