Skip to content
Snippets Groups Projects
Commit ef7227bd authored by Cédric Traizet's avatar Cédric Traizet
Browse files

The learning curve file can now be set in the app.

parent 94ab0bf1
No related branches found
No related tags found
1 merge request!4Dimensionality reduction algorithms
......@@ -59,6 +59,12 @@ public:
// Sparsity regularization weights (one per layer).
itkGetMacro(Beta,itk::Array<double>);
itkSetMacro(Beta,itk::Array<double>);
// When true, per-iteration training error is appended to the learning-curve file.
itkGetMacro(WriteLearningCurve,bool);
itkSetMacro(WriteLearningCurve,bool);
// Path of the text file receiving the learning curve (used only if WriteLearningCurve is true).
itkGetMacro(LearningCurveFileName,std::string);
itkSetMacro(LearningCurveFileName,std::string);
// Model (de)serialization capability checks required by the MachineLearningModel interface.
bool CanReadFile(const std::string & filename);
bool CanWriteFile(const std::string & filename);
......@@ -98,6 +104,7 @@ private:
itk::Array<double> m_Beta; // Sparsity regularization parameter (one value per layer)
bool m_WriteLearningCurve; // Flag for writing the learning curve into a txt file
std::string m_LearningCurveFileName; // Name of the output learning curve file written during training
};
} // end namespace otb
......
......@@ -25,6 +25,7 @@ template <class TInputValue, class AutoencoderType>
// Default constructor: batch prediction is thread-safe for this model, and
// learning-curve output is disabled unless explicitly requested by the caller.
AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel()
{
this->m_IsDoPredictBatchMultiThreaded = true;
this->m_WriteLearningCurve = false;
}
......@@ -36,6 +37,7 @@ AutoencoderModel<TInputValue,AutoencoderType>::~AutoencoderModel()
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::Train()
{
// Convert the ITK ListSample into the flat vector representation Shark expects.
// (Removed a leftover debug print of m_WriteLearningCurve.)
std::vector<shark::RealVector> features;
std::cout << "converting the input ListSample to Shark vector" << std::endl;
Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
......@@ -43,9 +45,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
std::ofstream ofs;
// Open the learning-curve output file only when requested.
// Fixed: the condition used `=` (assignment) instead of a test, which made it
// always true and silently forced m_WriteLearningCurve to true.
if (this->m_WriteLearningCurve)
{
ofs.open(m_LearningCurveFileName);
ofs << "learning curve" << std::endl;
}
/*
std::ofstream ofs("/mnt/data/home/traizetc/computation/learning_curve.txt"); //learning curve
ofs << "learning curve" << std::endl;
*/
if (m_Epsilon > 0){
shark::TrainingProgress<> criterion(5,m_Epsilon);
......@@ -106,15 +115,19 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::Abstrac
optimizer.init(error);
std::cout<<"error before training : " << optimizer.solution().value<<std::endl;
File << "end layer" << std::endl;
// Fixed: `=` (assignment) replaced by a boolean test — the assignment made the
// branch unconditional and clobbered m_WriteLearningCurve.
if (this->m_WriteLearningCurve)
{
File << "end layer" << std::endl;
}
unsigned int i=0;
do{
i++;
optimizer.step(error);
// Log the current objective value once per optimizer step, only when the
// learning curve was requested. Fixed: condition used `=` instead of a test.
if (this->m_WriteLearningCurve)
{
File << optimizer.solution().value << std::endl;
}
} while( !criterion.stop( optimizer.solution() ) );
std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
......@@ -149,10 +162,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::A
do{
i++;
optimizer.step(error);
// Log the current objective value once per optimizer step, only when the
// learning curve was requested. Fixed: condition used `=` instead of a test.
if (this->m_WriteLearningCurve)
{
File << optimizer.solution().value << std::endl;
}
} while( !criterion.stop( optimizer.solution() ) );
std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
// Mark the end of this layer's curve in the output file. Fixed: `=` (assignment)
// replaced by a boolean test — the assignment forced the flag to true.
if (this->m_WriteLearningCurve)
{
File << "end layer" << std::endl;
}
net.setParameterVector(optimizer.solution().point);
m_net.push_back(net);
samples = net.encode(samples);
......
......@@ -80,6 +80,11 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength");
SetParameterDescription("model.autoencoder.beta",
"Sparsity regularization strength");
AddParameter(ParameterType_OutputFilename, "model.autoencoder.learningcurve", "Learning curve");
SetParameterDescription("model.autoencoder.learningcurve", "Learning error values");
MandatoryOff("model.autoencoder.learningcurve");
}
......@@ -146,6 +151,14 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
dimredTrainer->SetNoise(noise);
dimredTrainer->SetRho(rho);
dimredTrainer->SetBeta(beta);
// Forward the optional learning-curve file name to the trainer. Both checks are
// needed: HasValue guards against an unset optional parameter, IsParameterEnabled
// against a disabled one. (Removed a leftover `std::cout << "yo"` debug print.)
if (HasValue("model.autoencoder.learningcurve") && IsParameterEnabled("model.autoencoder.learningcurve"))
{
dimredTrainer->SetWriteLearningCurve(true);
dimredTrainer->SetLearningCurveFileName(GetParameterString("model.autoencoder.learningcurve"));
}
dimredTrainer->SetInputListSample(trainingListSample);
std::cout << "before train" << std::endl;
dimredTrainer->Train();
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment