From ef7227bd6785e931e25fe86358643ac90ddd2825 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Traizet?= <traizetc@cesbio.cnes.fr>
Date: Thu, 29 Jun 2017 11:55:56 +0200
Subject: [PATCH] The learning curve output file can now be set from the application

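The AutoencoderModel now exposes a WriteLearningCurve flag and a
LearningCurveFileName member, and the training application gains an
optional model.autoencoder.learningcurve output filename parameter.
When the parameter is set, the learning error values produced during
training are written to that file instead of a hard-coded path.

A minimal usage sketch of the new model-side interface; only the
Set* accessors and Train() come from this patch, while "model", its
creation and the file name below are assumptions for illustration:

    // Illustrative fragment: "model" stands for an already configured
    // otb::AutoencoderModel instance (its creation is not part of this patch).
    model->SetWriteLearningCurve(true);                     // turn the new flag on
    model->SetLearningCurveFileName("learning_curve.txt");  // hypothetical output file
    model->Train();                                         // error values are written during training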
---
 include/AutoencoderModel.h     |  7 +++++++
 include/AutoencoderModel.txx   | 29 ++++++++++++++++++++++++-----
 include/cbTrainAutoencoder.txx | 13 +++++++++++++
 3 files changed, 44 insertions(+), 5 deletions(-)

diff --git a/include/AutoencoderModel.h b/include/AutoencoderModel.h
index edbe4ae052..739f13f6cb 100644
--- a/include/AutoencoderModel.h
+++ b/include/AutoencoderModel.h
@@ -59,6 +59,12 @@ public:
 	itkGetMacro(Beta,itk::Array<double>);
 	itkSetMacro(Beta,itk::Array<double>);
 
+	itkGetMacro(WriteLearningCurve,bool);
+	itkSetMacro(WriteLearningCurve,bool);
+	
+	itkGetMacro(LearningCurveFileName,std::string);
+	itkSetMacro(LearningCurveFileName,std::string);
+
 	bool CanReadFile(const std::string & filename);
 	bool CanWriteFile(const std::string & filename);
 
@@ -98,6 +104,7 @@ private:
 	itk::Array<double> m_Beta; // Sparsity regularization parameter
 	
 	bool m_WriteLearningCurve; // Flag for writting the learning curve into a txt file
+	std::string m_LearningCurveFileName; // Name of the file where the learning curve is written during training
 	
 };
 } // end namespace otb
diff --git a/include/AutoencoderModel.txx b/include/AutoencoderModel.txx
index ecd39a468f..8788315b72 100644
--- a/include/AutoencoderModel.txx
+++ b/include/AutoencoderModel.txx
@@ -25,6 +25,7 @@ template <class TInputValue, class AutoencoderType>
 AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel()
 {
 	this->m_IsDoPredictBatchMultiThreaded = true;
+	this->m_WriteLearningCurve = false;
 }
 
    
@@ -36,6 +37,7 @@ AutoencoderModel<TInputValue,AutoencoderType>::~AutoencoderModel()
 template <class TInputValue, class AutoencoderType>
 void AutoencoderModel<TInputValue,AutoencoderType>::Train()
 {
+	std::cout << "Write learning curve: " << this->m_WriteLearningCurve << std::endl;
 	std::vector<shark::RealVector> features;
 	std::cout << "converting the input ListSample to Shark vector" << std::endl;
 	Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
@@ -43,9 +45,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
 	shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
 	
 	
+	std::ofstream ofs;
+	if (this->m_WriteLearningCurve)
+	{
+		ofs.open(m_LearningCurveFileName);
+		ofs << "learning curve" << std::endl; 
+	}
+	/*
 	std::ofstream ofs("/mnt/data/home/traizetc/computation/learning_curve.txt"); //learning curve
 	ofs << "learning curve" << std::endl; 
-	
+	*/
 	if (m_Epsilon > 0){
 		shark::TrainingProgress<> criterion(5,m_Epsilon);
 		
@@ -106,15 +115,19 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::Abstrac
 	optimizer.init(error);
 	
 	std::cout<<"error before training : " << optimizer.solution().value<<std::endl;
-	
-	File << "end layer" << std::endl;
-	
+	if (this->m_WriteLearningCurve)
+	{
+		File << "end layer" << std::endl;
+	}
 	
 	unsigned int i=0;
 	do{
 		i++;
 		optimizer.step(error);
+		if (this->m_WriteLearningCurve)
+		{
 		File << optimizer.solution().value << std::endl;
+		}
 	} while( !criterion.stop( optimizer.solution() ) );
 	std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
 	
@@ -149,10 +162,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::A
 	do{
 		i++;
 		optimizer.step(error);
+		if (this->m_WriteLearningCurve)
+		{
 		File << optimizer.solution().value << std::endl;
+		}
 	} while( !criterion.stop( optimizer.solution() ) );
 	std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
-	
+	if (this->m_WriteLearningCurve)
+	{
+		File << "end layer" << std::endl;
+	}
 	net.setParameterVector(optimizer.solution().point);
 	m_net.push_back(net);
 	samples = net.encode(samples);
diff --git a/include/cbTrainAutoencoder.txx b/include/cbTrainAutoencoder.txx
index 4efad5dae5..244441d918 100644
--- a/include/cbTrainAutoencoder.txx
+++ b/include/cbTrainAutoencoder.txx
@@ -80,6 +80,11 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
   AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength");
   SetParameterDescription("model.autoencoder.beta", 
                          "Sparsity regularization strength");
+
+  AddParameter(ParameterType_OutputFilename, "model.autoencoder.learningcurve", "Learning curve output file");
+  SetParameterDescription("model.autoencoder.learningcurve", "Output text file containing the learning error values recorded during training");
+  MandatoryOff("model.autoencoder.learningcurve");
+
 }
 
 
@@ -146,6 +151,14 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
 		dimredTrainer->SetNoise(noise);
 		dimredTrainer->SetRho(rho);
 		dimredTrainer->SetBeta(beta);
+		
+		if (HasValue("model.autoencoder.learningcurve") && IsParameterEnabled("model.autoencoder.learningcurve"))
+		{
+			std::cout << "yo" << std::endl;
+			dimredTrainer->SetWriteLearningCurve(true);
+			dimredTrainer->SetLearningCurveFileName(GetParameterString("model.autoencoder.learningcurve"));
+		}
+		
 		dimredTrainer->SetInputListSample(trainingListSample);
 		std::cout << "before train" << std::endl;
 		dimredTrainer->Train();
-- 
GitLab