diff --git a/Modules/Applications/AppClassification/app/CMakeLists.txt b/Modules/Applications/AppClassification/app/CMakeLists.txt
index 7613f5a2612a7c62bfe8ecc648dab3f8b0b40436..692aa710eba22f1cf1274dcd08171c063764c9b1 100644
--- a/Modules/Applications/AppClassification/app/CMakeLists.txt
+++ b/Modules/Applications/AppClassification/app/CMakeLists.txt
@@ -65,16 +65,19 @@ otb_create_application(
   SOURCES        otbKMeansClassification.cxx
   LINK_LIBRARIES ${${otb-module}_LIBRARIES})
 
-set(TrainImagesClassifier_SRC 
-  otbTrainSVM.cxx  otbTrainBoost.cxx otbTrainDecisionTree.cxx
-  otbTrainGradientBoostedTree.cxx otbTrainNeuralNetwork.cxx
-  otbTrainNormalBayes.cxx otbTrainRandomForests.cxx otbTrainKNN.cxx
-  otbTrainLibSVM.cxx
-  otbTrainImagesClassifier.cxx)
-
 otb_create_application(
   NAME           TrainImagesClassifier
-  SOURCES        ${TrainImagesClassifier_SRC}
+  SOURCES        otbTrainImagesClassifier.cxx
+  LINK_LIBRARIES ${${otb-module}_LIBRARIES})
+
+otb_create_application(
+  NAME           TrainRegression
+  SOURCES        otbTrainRegression.cxx
+  LINK_LIBRARIES ${${otb-module}_LIBRARIES})
+
+otb_create_application(
+  NAME           PredictRegression
+  SOURCES        otbPredictRegression.cxx
   LINK_LIBRARIES ${${otb-module}_LIBRARIES})
 
 otb_create_application(
diff --git a/Modules/Applications/AppClassification/app/otbPredictRegression.cxx b/Modules/Applications/AppClassification/app/otbPredictRegression.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..413d70f05d08d50efaef54a0bd38e0c58bd1ca7d
--- /dev/null
+++ b/Modules/Applications/AppClassification/app/otbPredictRegression.cxx
@@ -0,0 +1,275 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#include "otbWrapperApplication.h"
+#include "otbWrapperApplicationFactory.h"
+
+#include "itkUnaryFunctorImageFilter.h"
+#include "otbChangeLabelImageFilter.h"
+#include "otbStandardWriterWatcher.h"
+#include "otbStatisticsXMLFileReader.h"
+#include "otbShiftScaleVectorImageFilter.h"
+#include "otbImageClassificationFilter.h"
+#include "otbMultiToMonoChannelExtractROI.h"
+#include "otbImageToVectorImageCastFilter.h"
+#include "otbMachineLearningModelFactory.h"
+
+namespace otb
+{
+namespace Functor
+{
+/**
+ * Simple affine functor: y = a*x + b
+ */
+template<class TInput, class TOutput>
+class AffineFunctor
+{
+public:
+  typedef double InternalType;
+  
+  // constructor
+  AffineFunctor() : m_A(1.0),m_B(0.0) {}
+  
+  // destructor
+  virtual ~AffineFunctor() {}
+  
+  void SetA(InternalType a)
+    {
+    m_A = a;
+    }
+  
+  void SetB(InternalType b)
+    {
+    m_B = b;
+    }
+  
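+  // Applies the affine transform to a single pixel value; in this application it is
+  // used to de-normalize the predicted output (y = stddev*x + mean)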
+  inline TOutput operator()(const TInput & x) const
+    {
+    return static_cast<TOutput>( static_cast<InternalType>(x)*m_A + m_B);
+    }
+private:
+  InternalType m_A;
+  InternalType m_B;
+};
+  
+}
+
+namespace Wrapper
+{
+
+class PredictRegression : public Application
+{
+public:
+  /** Standard class typedefs. */
+  typedef PredictRegression             Self;
+  typedef Application                   Superclass;
+  typedef itk::SmartPointer<Self>       Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Standard macro */
+  itkNewMacro(Self);
+
+  itkTypeMacro(PredictRegression, otb::Application);
+
+  /** Filters typedef */
+  typedef UInt8ImageType                                                                       MaskImageType;
+  typedef itk::VariableLengthVector<FloatVectorImageType::InternalPixelType>                   MeasurementType;
+  typedef otb::StatisticsXMLFileReader<MeasurementType>                                        StatisticsReader;
+  typedef otb::ShiftScaleVectorImageFilter<FloatVectorImageType, FloatVectorImageType>         RescalerType;
+  typedef itk::UnaryFunctorImageFilter<
+      FloatImageType,
+      FloatImageType,
+      otb::Functor::AffineFunctor<float,float> >                                               OutputRescalerType;
+  typedef otb::ImageClassificationFilter<FloatVectorImageType, FloatImageType, MaskImageType>  ClassificationFilterType;
+  typedef ClassificationFilterType::Pointer                                                    ClassificationFilterPointerType;
+  typedef ClassificationFilterType::ModelType                                                  ModelType;
+  typedef ModelType::Pointer                                                                   ModelPointerType;
+  typedef ClassificationFilterType::ValueType                                                  ValueType;
+  typedef ClassificationFilterType::LabelType                                                  LabelType;
+  typedef otb::MachineLearningModelFactory<ValueType, LabelType>                               MachineLearningModelFactoryType;
+
+private:
+  void DoInit()
+  {
+    SetName("PredictRegression");
+    SetDescription("Performs a prediction of the input image according to a regression model file.");
+
+    // Documentation
+    SetDocName("Predict Regression");
+    SetDocLongDescription("This application predict output values from an input"
+                          " image, based on a regression model file produced by"
+                          " the TrainRegression application. Pixels of the "
+                          "output image will contain the predicted values from"
+                          "the regression model (single band). The input pixels"
+                          " can be optionally centered and reduced according "
+                          "to the statistics file produced by the "
+                          "ComputeImagesStatistics application. An optional "
+                          "input mask can be provided, in which case only "
+                          "input image pixels whose corresponding mask value "
+                          "is greater than 0 will be processed. The remaining"
+                          " of pixels will be given the value 0 in the output"
+                          " image.");
+
+    SetDocLimitations("The input image must contain the feature bands used for"
+                      " the model training (without the predicted value). "
+                      "If a statistics file was used during training by the "
+                      "TrainRegression, it is mandatory to use the same "
+                      "statistics file for prediction. If an input mask is "
+                      "used, its size must match the input image size.");
+    SetDocAuthors("OTB-Team");
+    SetDocSeeAlso("TrainRegression, ComputeImagesStatistics");
+
+    AddDocTag(Tags::Learning);
+
+    AddParameter(ParameterType_InputImage, "in",  "Input Image");
+    SetParameterDescription( "in", "The input image to predict.");
+
+    // TODO : use CSV input/output ?
+
+    AddParameter(ParameterType_InputImage,  "mask",   "Input Mask");
+    SetParameterDescription( "mask", "The mask allows to restrict "
+      "classification of the input image to the area where mask pixel values "
+      "are greater than 0.");
+    MandatoryOff("mask");
+
+    AddParameter(ParameterType_InputFilename, "model", "Model file");
+    SetParameterDescription("model", "A regression model file (produced by "
+      "TrainRegression application).");
+
+    AddParameter(ParameterType_InputFilename, "imstat", "Statistics file");
+    SetParameterDescription("imstat", "A XML file containing mean and standard"
+      " deviation to center and reduce samples before prediction "
+      "(produced by ComputeImagesStatistics application). If this file contains"
+      "one more band than the sample size, the last stat of last band will be"
+      "applied to expand the output predicted value");
+    MandatoryOff("imstat");
+
+    AddParameter(ParameterType_OutputImage, "out",  "Output Image");
+    SetParameterDescription( "out", "Output image containing predicted values");
+
+    AddRAMParameter();
+
+   // Doc example parameter settings
+    SetDocExampleParameterValue("in", "QB_1_ortho.tif");
+    SetDocExampleParameterValue("imstat", "EstimateImageStatisticsQB1.xml");
+    SetDocExampleParameterValue("model", "clsvmModelQB1.svm");
+    SetDocExampleParameterValue("out", "clLabeledImageQB1.tif");
+  }
+
+  void DoUpdateParameters()
+  {
+    // Nothing to do here : all parameters are independent
+  }
+
+  void DoExecute()
+  {
+    // Load input image
+    FloatVectorImageType::Pointer inImage = GetParameterImage("in");
+    inImage->UpdateOutputInformation();
+    unsigned int nbFeatures = inImage->GetNumberOfComponentsPerPixel();
+
+    // Load regression model
+    otbAppLogINFO("Loading model");
+    m_Model = MachineLearningModelFactoryType::CreateMachineLearningModel(GetParameterString("model"),
+                                                                          MachineLearningModelFactoryType::ReadMode);
+
+    if (m_Model.IsNull())
+      {
+      otbAppLogFATAL(<< "Error when loading model " << GetParameterString("model") << " : unsupported model type");
+      }
+
+    m_Model->Load(GetParameterString("model"));
+    m_Model->SetRegressionMode(true);
+    otbAppLogINFO("Model loaded");
+
+    // Classify
+    m_ClassificationFilter = ClassificationFilterType::New();
+    m_ClassificationFilter->SetModel(m_Model);
+    
+    FloatImageType::Pointer outputImage = m_ClassificationFilter->GetOutput();
+
+    // Normalize input image if asked
+    if(IsParameterEnabled("imstat")  )
+      {
+      otbAppLogINFO("Input image normalization activated.");
+      // Normalize input image (optional)
+      StatisticsReader::Pointer  statisticsReader = StatisticsReader::New();
+      MeasurementType  meanMeasurementVector;
+      MeasurementType  stddevMeasurementVector;
+      m_Rescaler = RescalerType::New();
+      // Load input image statistics
+      statisticsReader->SetFileName(GetParameterString("imstat"));
+      meanMeasurementVector   = statisticsReader->GetStatisticVectorByName("mean");
+      stddevMeasurementVector = statisticsReader->GetStatisticVectorByName("stddev");
+      otbAppLogINFO( "mean used: " << meanMeasurementVector );
+      otbAppLogINFO( "standard deviation used: " << stddevMeasurementVector );
+      if (meanMeasurementVector.Size() == nbFeatures + 1)
+        {
+        double outMean = meanMeasurementVector[nbFeatures];
+        double outStdDev = stddevMeasurementVector[nbFeatures];
+        meanMeasurementVector.SetSize(nbFeatures,false);
+        stddevMeasurementVector.SetSize(nbFeatures,false);
+        m_OutRescaler = OutputRescalerType::New();
+        m_OutRescaler->SetInput(m_ClassificationFilter->GetOutput());
+        m_OutRescaler->GetFunctor().SetA(outStdDev);
+        m_OutRescaler->GetFunctor().SetB(outMean);
+        outputImage = m_OutRescaler->GetOutput();
+        }
+      else if (meanMeasurementVector.Size() != nbFeatures)
+        {
+        otbAppLogFATAL("Wrong number of components in statistics file : "<<meanMeasurementVector.Size());
+        }
+        
+      // Rescale vector image
+      m_Rescaler->SetScale(stddevMeasurementVector);
+      m_Rescaler->SetShift(meanMeasurementVector);
+      m_Rescaler->SetInput(inImage);
+
+      m_ClassificationFilter->SetInput(m_Rescaler->GetOutput());
+      }
+    else
+      {
+      otbAppLogINFO("Input image normalization deactivated.");
+      m_ClassificationFilter->SetInput(inImage);
+      }
+
+
+    if(IsParameterEnabled("mask"))
+      {
+      otbAppLogINFO("Using input mask");
+      // Load the input mask image
+      MaskImageType::Pointer inMask = GetParameterUInt8Image("mask");
+
+      m_ClassificationFilter->SetInputMask(inMask);
+      }
+
+    SetParameterOutputImage<FloatImageType>("out", outputImage);
+
+  }
+
+  ClassificationFilterType::Pointer m_ClassificationFilter;
+  ModelPointerType m_Model;
+  RescalerType::Pointer m_Rescaler;
+  OutputRescalerType::Pointer m_OutRescaler;
+
+};
+
+
+}
+}
+
+OTB_APPLICATION_EXPORT(otb::Wrapper::PredictRegression)
diff --git a/Modules/Applications/AppClassification/app/otbTrainImagesClassifier.cxx b/Modules/Applications/AppClassification/app/otbTrainImagesClassifier.cxx
index 1b9358f04832bed66ec463d1510fbb371ea2a3de..de1520dfca4855e523513f8a4542a95bff1cde84 100644
--- a/Modules/Applications/AppClassification/app/otbTrainImagesClassifier.cxx
+++ b/Modules/Applications/AppClassification/app/otbTrainImagesClassifier.cxx
@@ -14,15 +14,95 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
+#include "otbLearningApplicationBase.h"
+#include "otbWrapperApplicationFactory.h"
 
-#include "otbTrainImagesClassifier.h"
+#include "otbListSampleGenerator.h"
+
+// Statistic XML Reader
+#include "otbStatisticsXMLFileReader.h"
+
+// Validation
+#include "otbConfusionMatrixCalculator.h"
+
+#include "itkTimeProbe.h"
+#include "otbStandardFilterWatcher.h"
+
+// Normalize the samples
+#include "otbShiftScaleSampleListFilter.h"
+
+// List sample concatenation
+#include "otbConcatenateSampleListFilter.h"
+
+// Balancing ListSample
+#include "otbListSampleToBalancedListSampleFilter.h"
+
+// VectorData projection filter
+
+// Extract a ROI of the vectordata
+#include "otbVectorDataIntoImageProjectionFilter.h"
+
+// Elevation handler
+#include "otbWrapperElevationParametersHandler.h"
 
 namespace otb
 {
 namespace Wrapper
 {
 
-void TrainImagesClassifier::DoInit()
+class TrainImagesClassifier: public LearningApplicationBase<float,int>
+{
+public:
+  /** Standard class typedefs. */
+  typedef TrainImagesClassifier Self;
+  typedef LearningApplicationBase<float,int> Superclass;
+  typedef itk::SmartPointer<Self> Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Standard macro */
+  itkNewMacro(Self)
+
+  itkTypeMacro(TrainImagesClassifier, otb::Wrapper::LearningApplicationBase)
+  
+  typedef Superclass::SampleType              SampleType;
+  typedef Superclass::ListSampleType          ListSampleType;
+  typedef Superclass::TargetSampleType        TargetSampleType;
+  typedef Superclass::TargetListSampleType    TargetListSampleType;
+  
+  typedef Superclass::SampleImageType         SampleImageType;
+  typedef SampleImageType::PixelType          PixelType;
+
+  // SampleList manipulation
+  typedef otb::ListSampleGenerator<SampleImageType, VectorDataType> ListSampleGeneratorType;
+
+  typedef otb::Statistics::ConcatenateSampleListFilter<ListSampleType> ConcatenateListSampleFilterType;
+  typedef otb::Statistics::ConcatenateSampleListFilter<TargetListSampleType> ConcatenateLabelListSampleFilterType;
+
+  // Statistic XML file Reader
+  typedef otb::StatisticsXMLFileReader<SampleType> StatisticsReader;
+
+  // Enhance List Sample
+  //typedef otb::Statistics::ListSampleToBalancedListSampleFilter<ListSampleType, LabelListSampleType>      BalancingListSampleFilterType;
+  typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType;
+ 
+  // Estimate performance on validation sample
+  typedef otb::ConfusionMatrixCalculator<TargetListSampleType, TargetListSampleType> ConfusionMatrixCalculatorType;
+  typedef ConfusionMatrixCalculatorType::ConfusionMatrixType ConfusionMatrixType;
+  typedef ConfusionMatrixCalculatorType::MapOfIndicesType MapOfIndicesType;
+  typedef ConfusionMatrixCalculatorType::ClassLabelType ClassLabelType;
+
+  // VectorData projection filter
+  typedef otb::VectorDataProjectionFilter<VectorDataType, VectorDataType> VectorDataProjectionFilterType;
+
+  // Extract ROI
+  typedef otb::VectorDataIntoImageProjectionFilter<VectorDataType, SampleImageType> VectorDataReprojectionType;
+
+protected:
+  //using Superclass::AddParameter;
+  //friend void InitSVMParams(TrainImagesClassifier & app);
+
+private:
+  
+void DoInit()
 {
   SetName("TrainImagesClassifier");
   SetDescription(
@@ -46,8 +126,6 @@ void TrainImagesClassifier::DoInit()
   SetDocAuthors("OTB-Team");
   SetDocSeeAlso("OpenCV documentation for machine learning http://docs.opencv.org/modules/ml/doc/ml.html ");
 
-  AddDocTag(Tags::Learning);
-
   //Group IO
   AddParameter(ParameterType_Group, "io", "Input and output data");
   SetParameterDescription("io", "This group of parameters allows to set input and output data.");
@@ -101,24 +179,7 @@ void TrainImagesClassifier::DoInit()
   SetParameterDescription("sample.vfn", "Name of the field used to discriminate class labels in the input vector data files.");
   SetParameterString("sample.vfn", "Class");
 
-  AddParameter(ParameterType_Choice, "classifier", "Classifier to use for the training");
-  SetParameterDescription("classifier", "Choice of the classifier to use for the training.");
-
-  //Group LibSVM
-#ifdef OTB_USE_LIBSVM 
-  InitLibSVMParams();
-#endif
-
-#ifdef OTB_USE_OPENCV
-  InitSVMParams();
-  InitBoostParams();
-  InitDecisionTreeParams();
-  InitGradientBoostedTreeParams();
-  InitNeuralNetworkParams();
-  InitNormalBayesParams();
-  InitRandomForestsParams();
-  InitKNNParams();
-#endif
+  Superclass::DoInit();
 
   AddRANDParameter();
   // Doc example parameter settings
@@ -136,15 +197,14 @@ void TrainImagesClassifier::DoInit()
   SetDocExampleParameterValue("classifier.libsvm.opt", "false");
   SetDocExampleParameterValue("io.out", "svmModelQB1.txt");
   SetDocExampleParameterValue("io.confmatout", "svmConfusionMatrixQB1.csv");
-}
+}  
 
-void TrainImagesClassifier::DoUpdateParameters()
+void DoUpdateParameters()
 {
   // Nothing to do here : all parameters are independent
 }
 
-
-void TrainImagesClassifier::LogConfusionMatrix(ConfusionMatrixCalculatorType* confMatCalc)
+void LogConfusionMatrix(ConfusionMatrixCalculatorType* confMatCalc)
 {
   ConfusionMatrixCalculatorType::ConfusionMatrixType matrix = confMatCalc->GetConfusionMatrix();
 
@@ -215,24 +275,7 @@ void TrainImagesClassifier::LogConfusionMatrix(ConfusionMatrixCalculatorType* co
   otbAppLogINFO("Confusion matrix (rows = reference labels, columns = produced labels):\n" << os.str());
 }
 
-void TrainImagesClassifier::Classify(ListSampleType::Pointer validationListSample, LabelListSampleType::Pointer predictedList)
-{
-  //Classification
-  ModelPointerType model = MachineLearningModelFactoryType::CreateMachineLearningModel(GetParameterString("io.out"),
-                                                                                       MachineLearningModelFactoryType::ReadMode);
-
-  if (model.IsNull())
-    {
-    otbAppLogFATAL(<< "Error when loading model " << GetParameterString("io.out"));
-    }
-
-  model->Load(GetParameterString("io.out"));
-  model->SetInputListSample(validationListSample);
-  model->SetTargetListSample(predictedList);
-  model->PredictAll();
-}
-
-void TrainImagesClassifier::DoExecute()
+void DoExecute()
 {
   GetLogger()->Debug("Entering DoExecute\n");
   //Create training and validation for list samples and label list samples
@@ -243,8 +286,8 @@ void TrainImagesClassifier::DoExecute()
     ConcatenateLabelListSampleFilterType::New();
   ConcatenateListSampleFilterType::Pointer concatenateValidationSamples = ConcatenateListSampleFilterType::New();
 
-  MeasurementType meanMeasurementVector;
-  MeasurementType stddevMeasurementVector;
+  SampleType meanMeasurementVector;
+  SampleType stddevMeasurementVector;
 
   //--------------------------
   // Load measurements from images
@@ -358,7 +401,7 @@ void TrainImagesClassifier::DoExecute()
     }
 
   ListSampleType::Pointer listSample;
-  LabelListSampleType::Pointer labelListSample;
+  TargetListSampleType::Pointer labelListSample;
   //--------------------------
   // Balancing training sample (if needed)
   // if (IsParameterEnabled("sample.b"))
@@ -384,9 +427,9 @@ void TrainImagesClassifier::DoExecute()
   //--------------------------
   // Split the data set into training/validation set
   ListSampleType::Pointer trainingListSample = listSample;
-  LabelListSampleType::Pointer trainingLabeledListSample = labelListSample;
+  TargetListSampleType::Pointer trainingLabeledListSample = labelListSample;
 
-  LabelListSampleType::Pointer validationLabeledListSample = concatenateValidationLabels->GetOutput();
+  TargetListSampleType::Pointer validationLabeledListSample = concatenateValidationLabels->GetOutput();
   otbAppLogINFO("Size of training set: " << trainingListSample->Size());
   otbAppLogINFO("Size of validation set: " << validationListSample->Size());
   otbAppLogINFO("Size of labeled training set: " << trainingLabeledListSample->Size());
@@ -395,88 +438,14 @@ void TrainImagesClassifier::DoExecute()
   //--------------------------
   // Estimate model
   //--------------------------
-  LabelListSampleType::Pointer predictedList = LabelListSampleType::New();
-  const std::string classifierType = GetParameterString("classifier");
-
-  if (classifierType == "libsvm")
-    {
-	#ifdef OTB_USE_LIBSVM
-    TrainLibSVM(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module LIBSVM is not installed. You should consider turning OTB_USE_LIBSVM on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "svm")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainSVM(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "boost")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainBoost(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "dt")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainDecisionTree(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "gbt")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainGradientBoostedTree(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "ann")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainNeuralNetwork(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "bayes")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainNormalBayes(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "rf")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainRandomForests(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-  else if (classifierType == "knn")
-    {
-	#ifdef OTB_USE_OPENCV
-    TrainKNN(trainingListSample, trainingLabeledListSample);
-    #else
-    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
-    #endif
-    }
-
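+  // Training is now delegated to LearningApplicationBase::Train(), which dispatches
+  // to the classifier selected through the "classifier" parameter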
+  this->Train(trainingListSample,trainingLabeledListSample,GetParameterString("io.out"));
 
   //--------------------------
   // Performances estimation
   //--------------------------
+  TargetListSampleType::Pointer predictedList = TargetListSampleType::New();
   ListSampleType::Pointer performanceListSample=ListSampleType::New();
-  LabelListSampleType::Pointer performanceLabeledListSample=LabelListSampleType::New();
+  TargetListSampleType::Pointer performanceLabeledListSample=TargetListSampleType::New();
 
   //Test the input validation set size
   if(validationLabeledListSample->Size() != 0)
@@ -491,7 +460,7 @@ void TrainImagesClassifier::DoExecute()
     performanceLabeledListSample = trainingLabeledListSample;
     }
 
-    Classify(performanceListSample, predictedList);
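+    // Predictions on the performance samples now go through the generic Classify()
+    // method provided by LearningApplicationBase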
+    this->Classify(performanceListSample, predictedList, GetParameterString("io.out"));
 
     ConfusionMatrixCalculatorType::Pointer confMatCalc = ConfusionMatrixCalculatorType::New();
 
@@ -605,11 +574,12 @@ void TrainImagesClassifier::DoExecute()
       } // END if (this->HasValue("io.confmatout"))
 
   // TODO: implement hyperplane distance classifier and performance validation (cf. object detection) ?
-
 }
 
+  VectorDataReprojectionType::Pointer vdreproj;
+};
 
-}
-}
+} // end namespace Wrapper
+} // end namespace otb
 
 OTB_APPLICATION_EXPORT(otb::Wrapper::TrainImagesClassifier)
diff --git a/Modules/Applications/AppClassification/app/otbTrainImagesClassifier.h b/Modules/Applications/AppClassification/app/otbTrainImagesClassifier.h
deleted file mode 100644
index 235613fd56eaa5a265dc66f1e60ad26c849e1a27..0000000000000000000000000000000000000000
--- a/Modules/Applications/AppClassification/app/otbTrainImagesClassifier.h
+++ /dev/null
@@ -1,198 +0,0 @@
-/*=========================================================================
- Program:   ORFEO Toolbox
- Language:  C++
- Date:      $Date$
- Version:   $Revision$
-
-
- Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
- See OTBCopyright.txt for details.
-
-
- This software is distributed WITHOUT ANY WARRANTY; without even
- the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
- PURPOSE.  See the above copyright notices for more information.
-
- =========================================================================*/
-#include "otbConfigure.h"
-
-#include "otbWrapperApplicationFactory.h"
-
-#include <iostream>
-
-//Image
-#include "otbVectorImage.h"
-#include "otbVectorData.h"
-#include "otbListSampleGenerator.h"
-
-// ListSample
-#include "itkVariableLengthVector.h"
-
-//Estimator
-#include "otbMachineLearningModelFactory.h"
-
-#ifdef OTB_USE_OPENCV
-# include "otbKNearestNeighborsMachineLearningModel.h"
-# include "otbRandomForestsMachineLearningModel.h"
-# include "otbSVMMachineLearningModel.h"
-# include "otbBoostMachineLearningModel.h"
-# include "otbDecisionTreeMachineLearningModel.h"
-# include "otbGradientBoostedTreeMachineLearningModel.h"
-# include "otbNormalBayesMachineLearningModel.h"
-# include "otbNeuralNetworkMachineLearningModel.h"
-#endif
-
-#ifdef OTB_USE_LIBSVM 
-#include "otbLibSVMMachineLearningModel.h"
-#endif
-
-// Statistic XML Reader
-#include "otbStatisticsXMLFileReader.h"
-
-// Validation
-#include "otbConfusionMatrixCalculator.h"
-
-#include "itkTimeProbe.h"
-#include "otbStandardFilterWatcher.h"
-
-// Normalize the samples
-#include "otbShiftScaleSampleListFilter.h"
-
-// List sample concatenation
-#include "otbConcatenateSampleListFilter.h"
-
-// Balancing ListSample
-#include "otbListSampleToBalancedListSampleFilter.h"
-
-// VectorData projection filter
-
-// Extract a ROI of the vectordata
-#include "otbVectorDataIntoImageProjectionFilter.h"
-
-// Elevation handler
-#include "otbWrapperElevationParametersHandler.h"
-
-namespace otb
-{
-namespace Wrapper
-{
-
-class TrainImagesClassifier: public Application
-{
-public:
-  /** Standard class typedefs. */
-  typedef TrainImagesClassifier Self;
-  typedef Application Superclass;
-  typedef itk::SmartPointer<Self> Pointer;
-  typedef itk::SmartPointer<const Self> ConstPointer;
-
-  /** Standard macro */
-  itkNewMacro(Self)
-
-  itkTypeMacro(TrainImagesClassifier, otb::Application)
-
-  typedef FloatVectorImageType::PixelType         PixelType;
-  typedef FloatVectorImageType::InternalPixelType InternalPixelType;
-
-  // Training vectordata
-  typedef itk::VariableLengthVector<InternalPixelType> MeasurementType;
-
-  // SampleList manipulation
-  typedef otb::ListSampleGenerator<FloatVectorImageType, VectorDataType> ListSampleGeneratorType;
-
-  typedef ListSampleGeneratorType::ListSampleType ListSampleType;
-  typedef ListSampleGeneratorType::LabelType LabelType;
-  typedef ListSampleGeneratorType::ListLabelType LabelListSampleType;
-  typedef otb::Statistics::ConcatenateSampleListFilter<ListSampleType> ConcatenateListSampleFilterType;
-  typedef otb::Statistics::ConcatenateSampleListFilter<LabelListSampleType> ConcatenateLabelListSampleFilterType;
-
-  // Statistic XML file Reader
-  typedef otb::StatisticsXMLFileReader<MeasurementType> StatisticsReader;
-
-  // Enhance List Sample  typedef otb::Statistics::ListSampleToBalancedListSampleFilter<ListSampleType, LabelListSampleType>      BalancingListSampleFilterType;
-  typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType;
-
-  // Machine Learning models
-  typedef otb::MachineLearningModelFactory<InternalPixelType, ListSampleGeneratorType::ClassLabelType> MachineLearningModelFactoryType;
-  typedef MachineLearningModelFactoryType::MachineLearningModelTypePointer ModelPointerType;
-  
-#ifdef OTB_USE_OPENCV
-  typedef otb::RandomForestsMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> RandomForestType;
-  typedef otb::KNearestNeighborsMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> KNNType;
-  typedef otb::SVMMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> SVMType;
-  typedef otb::BoostMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> BoostType;
-  typedef otb::DecisionTreeMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> DecisionTreeType;
-  typedef otb::GradientBoostedTreeMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> GradientBoostedTreeType;
-  typedef otb::NeuralNetworkMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> NeuralNetworkType;
-  typedef otb::NormalBayesMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> NormalBayesType;
-#endif
-
-#ifdef OTB_USE_LIBSVM 
-  typedef otb::LibSVMMachineLearningModel<InternalPixelType, ListSampleGeneratorType::ClassLabelType> LibSVMType;
-#endif
- 
-  // Estimate performance on validation sample
-  typedef otb::ConfusionMatrixCalculator<LabelListSampleType, LabelListSampleType> ConfusionMatrixCalculatorType;
-  typedef ConfusionMatrixCalculatorType::ConfusionMatrixType ConfusionMatrixType;
-  typedef ConfusionMatrixCalculatorType::MapOfIndicesType MapOfIndicesType;
-  typedef ConfusionMatrixCalculatorType::ClassLabelType ClassLabelType;
-
-
-  // VectorData projection filter
-  typedef otb::VectorDataProjectionFilter<VectorDataType, VectorDataType> VectorDataProjectionFilterType;
-
-  // Extract ROI
-  typedef otb::VectorDataIntoImageProjectionFilter<VectorDataType, FloatVectorImageType> VectorDataReprojectionType;
-
-protected:
-  using Superclass::AddParameter;
-  friend void InitSVMParams(TrainImagesClassifier & app);
-
-private:
-  void DoInit();
-
-  void DoUpdateParameters();
-
-  void LogConfusionMatrix(ConfusionMatrixCalculatorType* confMatCalc);
-
-#ifdef OTB_USE_LIBSVM 
-  void InitLibSVMParams();
-#endif  
-  
-#ifdef OTB_USE_OPENCV
-  void InitBoostParams();
-  void InitSVMParams();
-  void InitDecisionTreeParams();
-  void InitGradientBoostedTreeParams();
-  void InitNeuralNetworkParams();
-  void InitNormalBayesParams();
-  void InitRandomForestsParams();
-  void InitKNNParams();
-#endif
-
-#ifdef OTB_USE_LIBSVM 
-  void TrainLibSVM(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-#endif 
-  
-#ifdef OTB_USE_OPENCV
-  void TrainBoost(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-  void TrainSVM(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-  void TrainDecisionTree(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-  void TrainGradientBoostedTree(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-  void TrainNeuralNetwork(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-  void TrainNormalBayes(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-  void TrainRandomForests(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-  void TrainKNN(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
-#endif
-
-  void Classify(ListSampleType::Pointer validationListSample, LabelListSampleType::Pointer predictedList);
-
-  void DoExecute();
-
-  VectorDataReprojectionType::Pointer vdreproj;
-};
-
-}
-}
-
-
diff --git a/Modules/Applications/AppClassification/app/otbTrainKNN.cxx b/Modules/Applications/AppClassification/app/otbTrainKNN.cxx
deleted file mode 100644
index bc31cd4fce6c1c0207570161bd6df033e8fbff45..0000000000000000000000000000000000000000
--- a/Modules/Applications/AppClassification/app/otbTrainKNN.cxx
+++ /dev/null
@@ -1,52 +0,0 @@
-/*=========================================================================
- Program:   ORFEO Toolbox
- Language:  C++
- Date:      $Date$
- Version:   $Revision$
-
-
- Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
- See OTBCopyright.txt for details.
-
-
- This software is distributed WITHOUT ANY WARRANTY; without even
- the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
- PURPOSE.  See the above copyright notices for more information.
-
- =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
-
-
-namespace otb
-{
-namespace Wrapper
-{
-#ifdef OTB_USE_OPENCV
-  void TrainImagesClassifier::InitKNNParams()
-  {
-    AddChoice("classifier.knn", "KNN classifier");
-    SetParameterDescription("classifier.knn", "This group of parameters allows to set KNN classifier parameters. "
-        "See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/k_nearest_neighbors.html}.");
-
-    //K parameter
-    AddParameter(ParameterType_Int, "classifier.knn.k", "Number of Neighbors");
-    SetParameterInt("classifier.knn.k", 32);
-    SetParameterDescription("classifier.knn.k","The number of neighbors to use.");
-
-  }
-
-
-  void TrainImagesClassifier::TrainKNN(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
-  {
-    KNNType::Pointer knnClassifier = KNNType::New();
-    knnClassifier->SetInputListSample(trainingListSample);
-    knnClassifier->SetTargetListSample(trainingLabeledListSample);
-    knnClassifier->SetK(GetParameterInt("classifier.knn.k"));
-
-    knnClassifier->Train();
-    knnClassifier->Save(GetParameterString("io.out"));
-  }
-#endif
-} //end namespace wrapper
-} //end namespace otb
diff --git a/Modules/Applications/AppClassification/app/otbTrainRegression.cxx b/Modules/Applications/AppClassification/app/otbTrainRegression.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..82e865194dd0418e98d04c17550df9919f19ff76
--- /dev/null
+++ b/Modules/Applications/AppClassification/app/otbTrainRegression.cxx
@@ -0,0 +1,543 @@
+/*=========================================================================
+ Program:   ORFEO Toolbox
+ Language:  C++
+ Date:      $Date$
+ Version:   $Revision$
+
+
+ Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+ See OTBCopyright.txt for details.
+
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notices for more information.
+
+ =========================================================================*/
+#include "otbLearningApplicationBase.h"
+#include "otbWrapperApplicationFactory.h"
+
+#include "otbListSampleGenerator.h"
+
+#include "otbImageToEnvelopeVectorDataFilter.h"
+#include "itkPreOrderTreeIterator.h"
+
+// Statistic XML Reader
+#include "otbStatisticsXMLFileReader.h"
+
+#include "itkTimeProbe.h"
+#include "otbStandardFilterWatcher.h"
+
+// Normalize the samples
+#include "otbShiftScaleSampleListFilter.h"
+
+// List sample concatenation
+#include "otbConcatenateSampleListFilter.h"
+
+// Balancing ListSample
+#include "otbListSampleToBalancedListSampleFilter.h"
+
+#include "itkMersenneTwisterRandomVariateGenerator.h"
+
+// Elevation handler
+#include "otbWrapperElevationParametersHandler.h"
+
+namespace otb
+{
+namespace Wrapper
+{
+
+class TrainRegression: public LearningApplicationBase<float,float>
+{
+public:
+  /** Standard class typedefs. */
+  typedef TrainRegression Self;
+  typedef LearningApplicationBase<float,float> Superclass;
+  typedef itk::SmartPointer<Self> Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Standard macro */
+  itkNewMacro(Self)
+
+  itkTypeMacro(TrainRegression, otb::Wrapper::LearningApplicationBase)
+  
+  typedef Superclass::SampleType              SampleType;
+  typedef Superclass::ListSampleType          ListSampleType;
+  typedef Superclass::TargetSampleType        TargetSampleType;
+  typedef Superclass::TargetListSampleType    TargetListSampleType;
+  
+  typedef Superclass::SampleImageType         SampleImageType;
+  typedef SampleImageType::PixelType          PixelType;
+
+  // SampleList manipulation
+  typedef otb::ListSampleGenerator<SampleImageType, VectorDataType> ListSampleGeneratorType;
+
+  typedef otb::Statistics::ConcatenateSampleListFilter<ListSampleType> ConcatenateListSampleFilterType;
+  typedef otb::Statistics::ConcatenateSampleListFilter<TargetListSampleType> ConcatenateLabelListSampleFilterType;
+
+  // Statistic XML file Reader
+  typedef otb::StatisticsXMLFileReader<SampleType> StatisticsReader;
+
+  // Enhance List Sample
+  //typedef otb::Statistics::ListSampleToBalancedListSampleFilter<ListSampleType, LabelListSampleType>      BalancingListSampleFilterType;
+  typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType;
+
+  typedef otb::ImageToEnvelopeVectorDataFilter<SampleImageType,VectorDataType> EnvelopeFilterType;
+  
+  typedef itk::PreOrderTreeIterator<VectorDataType::DataTreeType> TreeIteratorType;
+
+  typedef itk::Statistics::MersenneTwisterRandomVariateGenerator RandomGeneratorType;
+
+protected:
+  TrainRegression()
+    {
+    this->m_RegressionFlag = true;
+    }
+
+private:
+
+void DoInit()
+{
+  SetName("TrainRegression");
+  SetDescription(
+    "Train a classifier from multiple images to perform regression.");
+
+  // Documentation
+  SetDocName("Train a regression model");
+  SetDocLongDescription(
+    "This application trains a classifier from multiple input images or a csv "
+    "file, in order to perform regression. Predictors are composed of pixel "
+    "values in each band optionally centered and reduced using an XML "
+    "statistics file produced by the ComputeImagesStatistics application.\n "
+    "The output value for each predictor is assumed to be the last band "
+    "(or the last column for CSV files). Training and validation predictor "
+    "lists are built such that their size is inferior to maximum bounds given "
+    "by the user, and the proportion corresponds to the balance parameter. "
+    "Several classifier parameters can be set depending on the chosen "
+    "classifier. In the validation process, the mean square error is computed\n"
+    " This application is based on LibSVM and on OpenCV Machine Learning "
+    "classifiers, and is compatible with OpenCV 2.3.1 and later.");
+  SetDocLimitations("None");
+  SetDocAuthors("OTB-Team");
+  SetDocSeeAlso("OpenCV documentation for machine learning http://docs.opencv.org/modules/ml/doc/ml.html ");
+
+  //Group IO
+  AddParameter(ParameterType_Group, "io", "Input and output data");
+  SetParameterDescription("io", "This group of parameters allows to set input and output data.");
+  AddParameter(ParameterType_InputImageList, "io.il", "Input Image List");
+  SetParameterDescription("io.il", "A list of input images. First (n-1) bands should contain the predictor. The last band should contain the output value to predict.");
+  AddParameter(ParameterType_InputFilename, "io.csv", "Input CSV file");
+  SetParameterDescription("io.csv","Input CSV file containing the predictors, and the output values in last column. Only used when no input image is given");
+  MandatoryOff("io.csv");
+  
+  AddParameter(ParameterType_InputFilename, "io.imstat", "Input XML image statistics file");
+  MandatoryOff("io.imstat");
+  SetParameterDescription("io.imstat",
+                          "Input XML file containing the mean and the standard deviation of the input images.");
+  AddParameter(ParameterType_OutputFilename, "io.out", "Output regression model");
+  SetParameterDescription("io.out", "Output file containing the model estimated (.txt format).");
+  
+  AddParameter(ParameterType_Float,"io.mse","Mean Square Error");
+  SetParameterDescription("io.mse","Mean square error computed with the validation predictors");
+  SetParameterRole("io.mse",Role_Output);
+  DisableParameter("io.mse");
+
+  //Group Sample list
+  AddParameter(ParameterType_Group, "sample", "Training and validation samples parameters");
+  SetParameterDescription("sample",
+                          "This group of parameters allows to set training and validation sample lists parameters.");
+
+  AddParameter(ParameterType_Int, "sample.mt", "Maximum training predictors");
+  //MandatoryOff("mt");
+  SetDefaultParameterInt("sample.mt", 1000);
+  SetParameterDescription("sample.mt", "Maximum number of training predictors (default = 1000) (no limit = -1).");
+  
+  AddParameter(ParameterType_Int, "sample.mv", "Maximum validation predictors");
+  // MandatoryOff("mv");
+  SetDefaultParameterInt("sample.mv", 1000);
+  SetParameterDescription("sample.mv", "Maximum number of validation predictors (default = 1000) (no limit = -1).");
+
+  AddParameter(ParameterType_Float, "sample.vtr", "Training and validation sample ratio");
+  SetParameterDescription("sample.vtr",
+                          "Ratio between training and validation samples (0.0 = all training, 1.0 = all validation) (default = 0.5).");
+  SetParameterFloat("sample.vtr", 0.5);
+
+  Superclass::DoInit();
+
+  AddRANDParameter();
+
+  // Doc example parameter settings
+  SetDocExampleParameterValue("io.il", "training_dataset.tif");
+  SetDocExampleParameterValue("io.out", "regression_model.txt");
+  SetDocExampleParameterValue("io.imstat", "training_statistics.xml");
+  SetDocExampleParameterValue("classifier", "svm");
+}
+
+void DoUpdateParameters()
+{
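+  // The input image list is only mandatory when no CSV file of predictors is given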
+  if (HasValue("io.csv") && IsParameterEnabled("io.csv"))
+    {
+    MandatoryOff("io.il");
+    }
+  else
+    {
+    MandatoryOn("io.il");
+    }
+}
+
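+// Read predictors from a CSV file into a ListSample. The separator is
+// auto-detected among tab, space, semicolon and comma; commented lines
+// (starting with '#') are skipped and each remaining row becomes one
+// measurement vector.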
+void ParseCSVPredictors(std::string path, ListSampleType* outputList)
+{
+  std::ifstream ifs;
+  ifs.open(path.c_str());
+  unsigned int nbCols = 0;
+  char sep = '\t';
+  std::istringstream iss;
+  SampleType elem;
+  std::string line;
+  while(std::getline(ifs,line))
+    {
+    // filter current line
+    while (!line.empty() && (line[0] == ' ' || line[0] == '\t'))
+      {
+      line.erase(line.begin());
+      }
+    while (!line.empty() && ( *(line.end()-1) == ' ' || *(line.end()-1) == '\t' || *(line.end()-1) == '\r'))
+      {
+      line.erase(line.end()-1);
+      }
+
+    // Avoid commented lines or too short ones
+    if (!line.empty() && line[0] != '#')
+      {
+      std::vector<itksys::String> words = itksys::SystemTools::SplitString(line.c_str(),sep);
+      if (nbCols == 0)
+        {
+        // detect separator and feature size
+        if (words.size() < 2)
+          {
+          sep = ' ';
+          words = itksys::SystemTools::SplitString(line.c_str(),sep);
+          }
+        if (words.size() < 2)
+          {
+          sep = ';';
+          words = itksys::SystemTools::SplitString(line.c_str(),sep);
+          }
+        if (words.size() < 2)
+          {
+          sep = ',';
+          words = itksys::SystemTools::SplitString(line.c_str(),sep);
+          }
+        if (words.size() < 2)
+          {
+          otbAppLogFATAL(<< "Can't parse CSV file : less than 2 columns or unknonw separator (knowns ones are tab, space, comma and semi-colon)");
+          }
+        nbCols = words.size();
+        elem.SetSize(nbCols,false);
+        outputList->SetMeasurementVectorSize(nbCols);
+        }
+      else if (words.size() != nbCols )
+        {
+        otbAppLogWARNING(<< "Skip CSV line, wrong number of columns : got "<<words.size() << ", expected "<<nbCols);
+        continue;
+        }
+      elem.Fill(0.0);
+      for (unsigned int i=0 ; i<nbCols ; ++i)
+        {
+        iss.str(words[i]);
+        iss >> elem[i];
+        }
+      outputList->PushBack(elem);
+      }
+    }
+  ifs.close();
+}
+
+void DoExecute()
+{
+  GetLogger()->Debug("Entering DoExecute\n");
+  //Create training and validation for list samples and label list samples
+  ConcatenateListSampleFilterType::Pointer concatenateTrainingSamples = ConcatenateListSampleFilterType::New();
+  ConcatenateListSampleFilterType::Pointer concatenateValidationSamples = ConcatenateListSampleFilterType::New();
+
+  SampleType meanMeasurementVector;
+  SampleType stddevMeasurementVector;
+
+  //--------------------------
+  // Load measurements from images
+  unsigned int nbBands = 0;
+  unsigned int nbFeatures = 0;
+  //Iterate over all input images
+
+  FloatVectorImageListType* imageList = GetParameterImageList("io.il");
+  
+  //Iterate over all input images
+  for (unsigned int imgIndex = 0; imgIndex < imageList->Size(); ++imgIndex)
+    {
+    FloatVectorImageType::Pointer image = imageList->GetNthElement(imgIndex);
+    image->UpdateOutputInformation();
+
+    if (imgIndex == 0)
+      {
+      nbBands = image->GetNumberOfComponentsPerPixel();
+      nbFeatures = static_cast<unsigned int>(static_cast<int>(nbBands) - 1);
+      if (nbBands < 2)
+        {
+        otbAppLogFATAL(<< "Need at least two bands per image, got "<<nbBands);
+        }
+      }
+    else if (nbBands != image->GetNumberOfComponentsPerPixel())
+      {
+      // Check that subsequent images have the same number of components as the first one
+      otbAppLogFATAL(<< "Image has a different number of components than "
+        "the first one, expected "<<nbBands<<", got "<< image->GetNumberOfComponentsPerPixel());
+      }
+
+    // Extract image envelope to feed in sampleGenerator
+    EnvelopeFilterType::Pointer envelopeFilter = EnvelopeFilterType::New();
+    envelopeFilter->SetInput(image);
+    envelopeFilter->SetSamplingRate(0);
+    if (!image->GetProjectionRef().empty())
+      {
+      envelopeFilter->SetOutputProjectionRef(image->GetProjectionRef());
+      }
+
+    // Setup the DEM Handler
+    // otb::Wrapper::ElevationParametersHandler::SetupDEMHandlerFromElevationParameters(this,"elev");
+    
+    envelopeFilter->Update();
+    
+    VectorDataType::Pointer envelope = envelopeFilter->GetOutput();
+    
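+    // Tag every polygon of the envelope with a "class" field so that the
+    // ListSampleGenerator can sample pixels over the whole image extent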
+    TreeIteratorType itVector(envelope->GetDataTree());
+    for (itVector.GoToBegin(); !itVector.IsAtEnd(); ++itVector)
+      {
+      if (itVector.Get()->IsPolygonFeature())
+        {
+        itVector.Get()->SetFieldAsInt(std::string("class"),1);
+        }
+      }
+
+
+    //Sample list generator
+    ListSampleGeneratorType::Pointer sampleGenerator = ListSampleGeneratorType::New();
+
+    sampleGenerator->SetInput(image);
+    sampleGenerator->SetInputVectorData(envelope);
+
+    sampleGenerator->SetClassKey("class");
+    sampleGenerator->SetMaxTrainingSize(GetParameterInt("sample.mt"));
+    sampleGenerator->SetMaxValidationSize(GetParameterInt("sample.mv"));
+    sampleGenerator->SetValidationTrainingProportion(GetParameterFloat("sample.vtr"));
+    sampleGenerator->SetBoundByMin(false);
+    sampleGenerator->SetPolygonEdgeInclusion(true);
+
+    sampleGenerator->Update();
+
+    //Concatenate training and validation samples from the image
+    concatenateTrainingSamples->AddInput(sampleGenerator->GetTrainingListSample());
+    concatenateValidationSamples->AddInput(sampleGenerator->GetValidationListSample());
+    }
+
+  // if no input image, try CSV
+  if (imageList->Size() == 0)
+    {
+    if (HasValue("io.csv") && IsParameterEnabled("io.csv"))
+      {
+      ListSampleType::Pointer csvListSample = ListSampleType::New();
+      this->ParseCSVPredictors(this->GetParameterString("io.csv"), csvListSample);
+      unsigned int totalCSVSize = csvListSample->Size();
+      if (totalCSVSize == 0)
+        {
+        otbAppLogFATAL("No input image and empty CSV file. Missing input data");
+        }
+      nbBands = csvListSample->GetMeasurementVectorSize();
+      nbFeatures = static_cast<unsigned int>(static_cast<int>(nbBands) - 1);
+      ListSampleType::Pointer csvTrainListSample = ListSampleType::New();
+      ListSampleType::Pointer csvValidListSample = ListSampleType::New();
+      csvTrainListSample->SetMeasurementVectorSize(nbBands);
+      csvValidListSample->SetMeasurementVectorSize(nbBands);
+      double ratio = this->GetParameterFloat("sample.vtr");
+      int trainSize = static_cast<int>(static_cast<double>(totalCSVSize)*(1.0-ratio));
+      int validSize = static_cast<int>(static_cast<double>(totalCSVSize)*(ratio));
+      if (trainSize > this->GetParameterInt("sample.mt"))
+        {
+        trainSize = this->GetParameterInt("sample.mt");
+        }
+      if (validSize > this->GetParameterInt("sample.mv"))
+        {
+        validSize = this->GetParameterInt("sample.mv");
+        }
+      double probaTrain = static_cast<double>(trainSize)/static_cast<double>(totalCSVSize);
+      double probaValid = static_cast<double>(validSize)/static_cast<double>(totalCSVSize);
+
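+      // Each CSV sample is randomly assigned to the training or validation set, with
+      // probabilities chosen so that the expected set sizes match trainSize and validSize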
+      RandomGeneratorType::Pointer randomGenerator = RandomGeneratorType::GetInstance();
+      for (unsigned int i=0; i<totalCSVSize; ++i)
+        {
+        double random = randomGenerator->GetUniformVariate(0.0, 1.0);
+        if (random < probaTrain)
+          {
+          csvTrainListSample->PushBack(csvListSample->GetMeasurementVector(i));
+          }
+        else if (random < probaTrain + probaValid)
+          {
+          csvValidListSample->PushBack(csvListSample->GetMeasurementVector(i));
+          }
+        }
+      concatenateTrainingSamples->AddInput(csvTrainListSample);
+      concatenateValidationSamples->AddInput(csvValidListSample);
+      }
+    }
+
+  // Update
+  concatenateTrainingSamples->Update();
+  concatenateValidationSamples->Update();
+
+  if (concatenateTrainingSamples->GetOutput()->Size() == 0)
+    {
+    otbAppLogFATAL("No training samples, cannot perform training.");
+    }
+
+  if (concatenateValidationSamples->GetOutput()->Size() == 0)
+    {
+    otbAppLogWARNING("No validation samples.");
+    }
+
+  if (IsParameterEnabled("io.imstat"))
+    {
+    StatisticsReader::Pointer statisticsReader = StatisticsReader::New();
+    statisticsReader->SetFileName(GetParameterString("io.imstat"));
+    meanMeasurementVector = statisticsReader->GetStatisticVectorByName("mean");
+    stddevMeasurementVector = statisticsReader->GetStatisticVectorByName("stddev");
+    // handle stat file without output normalization
+    if (meanMeasurementVector.Size() == nbFeatures)
+      {
+      meanMeasurementVector.SetSize(nbBands,false);
+      meanMeasurementVector[nbFeatures] = 0.0;
+      stddevMeasurementVector.SetSize(nbBands,false);
+      stddevMeasurementVector[nbFeatures] = 1.0;
+      }
+    }
+  else
+    {
+    meanMeasurementVector.SetSize(nbBands);
+    meanMeasurementVector.Fill(0.);
+    stddevMeasurementVector.SetSize(nbBands);
+    stddevMeasurementVector.Fill(1.);
+    }
+
+  // Shift scale the samples
+  ShiftScaleFilterType::Pointer trainingShiftScaleFilter = ShiftScaleFilterType::New();
+  trainingShiftScaleFilter->SetInput(concatenateTrainingSamples->GetOutput());
+  trainingShiftScaleFilter->SetShifts(meanMeasurementVector);
+  trainingShiftScaleFilter->SetScales(stddevMeasurementVector);
+  trainingShiftScaleFilter->Update();
+
+  ListSampleType::Pointer rawValidationListSample=ListSampleType::New();
+
+  //Test if the validation test is empty
+  if ( concatenateValidationSamples->GetOutput()->Size() != 0 )
+    {
+    ShiftScaleFilterType::Pointer validationShiftScaleFilter = ShiftScaleFilterType::New();
+    validationShiftScaleFilter->SetInput(concatenateValidationSamples->GetOutput());
+    validationShiftScaleFilter->SetShifts(meanMeasurementVector);
+    validationShiftScaleFilter->SetScales(stddevMeasurementVector);
+    validationShiftScaleFilter->Update();
+    rawValidationListSample = validationShiftScaleFilter->GetOutput();
+    }
+
+  // Split between predictors and output values
+  ListSampleType::Pointer rawlistSample = trainingShiftScaleFilter->GetOutput();
+  ListSampleType::Pointer listSample = ListSampleType::New();
+  listSample->SetMeasurementVectorSize(nbFeatures);
+  listSample->Resize(rawlistSample->Size());
+  TargetListSampleType::Pointer labelListSample = TargetListSampleType::New();
+  labelListSample->SetMeasurementVectorSize(1);
+  labelListSample->Resize(rawlistSample->Size());
+  
+  ListSampleType::Pointer validationListSample = ListSampleType::New();
+  validationListSample->SetMeasurementVectorSize(nbFeatures);
+  validationListSample->Resize(rawValidationListSample->Size());
+  TargetListSampleType::Pointer validationLabeledListSample = TargetListSampleType::New();
+  validationLabeledListSample->SetMeasurementVectorSize(1);
+  validationLabeledListSample->Resize(rawValidationListSample->Size());
+  
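+  // For each sample, the last component is moved to the target list and the
+  // remaining components form the predictor vector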
+  ListSampleType::MeasurementVectorType elem;
+  TargetListSampleType::MeasurementVectorType outElem;
+  for (ListSampleType::InstanceIdentifier i=0; i<rawlistSample->Size() ; ++i)
+    {
+    elem = rawlistSample->GetMeasurementVector(i);
+    outElem[0] = elem[nbFeatures];
+    labelListSample->SetMeasurementVector(i,outElem);
+    elem.SetSize(nbFeatures,false);
+    listSample->SetMeasurementVector(i,elem);
+    }
+  for (ListSampleType::InstanceIdentifier i=0; i<rawValidationListSample->Size() ; ++i)
+    {
+    elem = rawValidationListSample->GetMeasurementVector(i);
+    outElem[0] = elem[nbFeatures];
+    validationLabeledListSample->SetMeasurementVector(i,outElem);
+    elem.SetSize(nbFeatures,false);
+    validationListSample->SetMeasurementVector(i,elem);
+    }
+  
+
+  otbAppLogINFO("Number of training samples: " << concatenateTrainingSamples->GetOutput()->Size());
+  //--------------------------
+  // Split the data set into training/validation set
+  ListSampleType::Pointer trainingListSample = listSample;
+  TargetListSampleType::Pointer trainingLabeledListSample = labelListSample;
+
+  otbAppLogINFO("Size of training set: " << trainingListSample->Size());
+  otbAppLogINFO("Size of validation set: " << validationListSample->Size());
+
+  //--------------------------
+  // Estimate model
+  //--------------------------
+  this->Train(trainingListSample,trainingLabeledListSample,GetParameterString("io.out"));
+
+  //--------------------------
+  // Performances estimation
+  //--------------------------
+  ListSampleType::Pointer performanceListSample;
+  TargetListSampleType::Pointer predictedList = TargetListSampleType::New();
+  predictedList->SetMeasurementVectorSize(1);
+  TargetListSampleType::Pointer performanceLabeledListSample;
+
+  //Test the input validation set size
+  if(validationLabeledListSample->Size() != 0)
+    {
+    performanceListSample = validationListSample;
+    performanceLabeledListSample = validationLabeledListSample;
+    }
+  else
+    {
+    otbAppLogWARNING("The validation set is empty. The performance estimation is done using the input training set in this case.");
+    performanceListSample = trainingListSample;
+    performanceLabeledListSample = trainingLabeledListSample;
+    }
+
+  this->Classify(performanceListSample, predictedList, GetParameterString("io.out"));
+
+  otbAppLogINFO("Training performances");
+  double mse=0.0;
+  TargetListSampleType::MeasurementVectorType predictedElem;
+  for (TargetListSampleType::InstanceIdentifier i=0; i<performanceListSample->Size() ; ++i)
+    {
+    outElem = performanceLabeledListSample->GetMeasurementVector(i);
+    predictedElem = predictedList->GetMeasurementVector(i);
+    mse += (outElem[0] - predictedElem[0]) * (outElem[0] - predictedElem[0]);
+    }
+  mse /= static_cast<double>(performanceListSample->Size());
+  otbAppLogINFO("Mean Square Error = "<<mse);
+  this->SetParameterFloat("io.mse",mse);
+}
+
+};
+
+} // end namespace Wrapper
+} // end namespace otb
+
+OTB_APPLICATION_EXPORT(otb::Wrapper::TrainRegression)
diff --git a/Modules/Applications/AppClassification/include/otbLearningApplicationBase.h b/Modules/Applications/AppClassification/include/otbLearningApplicationBase.h
new file mode 100644
index 0000000000000000000000000000000000000000..9af1259c73ff6356d515588f52887db569f63c43
--- /dev/null
+++ b/Modules/Applications/AppClassification/include/otbLearningApplicationBase.h
@@ -0,0 +1,222 @@
+/*=========================================================================
+ Program:   ORFEO Toolbox
+ Language:  C++
+ Date:      $Date$
+ Version:   $Revision$
+
+
+ Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+ See OTBCopyright.txt for details.
+
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notices for more information.
+
+ =========================================================================*/
+#ifndef __otbLearningApplicationBase_h
+#define __otbLearningApplicationBase_h
+
+#include "otbConfigure.h"
+
+#include "otbWrapperApplication.h"
+
+#include <iostream>
+
+// ListSample
+#include "itkListSample.h"
+#include "itkVariableLengthVector.h"
+
+//Estimator
+#include "otbMachineLearningModelFactory.h"
+
+#ifdef OTB_USE_OPENCV
+# include "otbKNearestNeighborsMachineLearningModel.h"
+# include "otbRandomForestsMachineLearningModel.h"
+# include "otbSVMMachineLearningModel.h"
+# include "otbBoostMachineLearningModel.h"
+# include "otbDecisionTreeMachineLearningModel.h"
+# include "otbGradientBoostedTreeMachineLearningModel.h"
+# include "otbNormalBayesMachineLearningModel.h"
+# include "otbNeuralNetworkMachineLearningModel.h"
+#endif
+
+#ifdef OTB_USE_LIBSVM 
+#include "otbLibSVMMachineLearningModel.h"
+#endif
+
+namespace otb
+{
+namespace Wrapper
+{
+
+/** \class LearningApplicationBase
+ *  \brief LearningApplicationBase is the base class for applications that
+ *         use a machine learning model.
+ *
+ * This base class offers a DoInit() method to initialize all the parameters
+ * related to machine learning models. They will all be in the choice parameter
+ * named "classifier". The class also offers generic Train() and Classify()
+ * methods. The classes derived from LearningApplicationBase only need these
+ * 3 methods to handle the machine learning model.
+ *
+ * There are multiple machine learning models in OTB, some imported from OpenCV,
+ * and one imported from LibSVM. They all have different parameters. The
+ * purpose of this class is to handle the creation of all parameters related to
+ * machine learning models (in DoInit()), and to dispatch the calls to the
+ * specific train functions in Train().
+ *
+ * This class is templated over scalar types for input and output values.
+ * Typically, the input value type will be either float or double. The choice
+ * of an output value type depends on the learning mode. This base class
+ * supports both classification and regression modes. For classification
+ * (enabled by default), the output value type corresponds to a class
+ * identifier so integer types suit well. For regression, the output value
+ * should not be an integer type, but rather a floating point type. In addition,
+ * an application deriving this base class for regression should initialize
+ * m_RegressionFlag to true in its constructor.
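+ *
+ * For illustration, a minimal regression application derived from this base
+ * class could look like the sketch below (the class name is hypothetical and
+ * only meant as an example, not an existing OTB application):
+ *
+ * \code
+ * class MyTrainRegression : public LearningApplicationBase<float, float>
+ * {
+ * public:
+ *   typedef MyTrainRegression                      Self;
+ *   typedef LearningApplicationBase<float, float>  Superclass;
+ *   typedef itk::SmartPointer<Self>                Pointer;
+ *   itkNewMacro(Self)
+ *
+ * private:
+ *   MyTrainRegression()
+ *     {
+ *     this->m_RegressionFlag = true; // switch the base class to regression mode
+ *     }
+ *
+ *   void DoInit()
+ *     {
+ *     // declare the application-specific parameters (inputs, "io.out", ...)
+ *     Superclass::DoInit(); // adds the "classifier" choice and its sub-parameters
+ *     }
+ *
+ *   void DoExecute()
+ *     {
+ *     // fill a ListSampleType and a TargetListSampleType from the input data,
+ *     // then train and optionally evaluate the model:
+ *     //   this->Train(samples, targets, GetParameterString("io.out"));
+ *     //   this->Classify(samples, predictions, GetParameterString("io.out"));
+ *     }
+ * };
+ * \endcode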
+ *
+ * \sa TrainImagesClassifier
+ * \sa TrainRegression
+ *
+ * \ingroup OTBAppClassification
+ */
+template <class TInputValue, class TOutputValue>
+class LearningApplicationBase: public Application
+{
+public:
+  /** Standard class typedefs. */
+  typedef LearningApplicationBase Self;
+  typedef Application             Superclass;
+  typedef itk::SmartPointer<Self> Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Standard macro */
+  itkTypeMacro(LearningApplicationBase, otb::Application)
+
+  typedef TInputValue                             InputValueType;
+  typedef TOutputValue                            OutputValueType;
+
+  typedef otb::VectorImage<InputValueType>        SampleImageType;
+  typedef typename SampleImageType::PixelType     PixelType;
+
+  // Machine Learning models
+  typedef otb::MachineLearningModelFactory<
+            InputValueType, OutputValueType>             ModelFactoryType;
+  typedef typename ModelFactoryType::MachineLearningModelTypePointer ModelPointerType;
+  typedef typename ModelFactoryType::MachineLearningModelType        ModelType;
+  
+  typedef typename ModelType::InputSampleType     SampleType;
+  typedef typename ModelType::InputListSampleType ListSampleType;
+  
+  typedef typename ModelType::TargetSampleType      TargetSampleType;
+  typedef typename ModelType::TargetListSampleType  TargetListSampleType;
+  typedef typename ModelType::TargetValueType       TargetValueType;
+  
+#ifdef OTB_USE_OPENCV
+  typedef otb::RandomForestsMachineLearningModel<InputValueType, OutputValueType> RandomForestType;
+  typedef otb::KNearestNeighborsMachineLearningModel<InputValueType, OutputValueType> KNNType;
+  typedef otb::SVMMachineLearningModel<InputValueType, OutputValueType> SVMType;
+  typedef otb::BoostMachineLearningModel<InputValueType, OutputValueType> BoostType;
+  typedef otb::DecisionTreeMachineLearningModel<InputValueType, OutputValueType> DecisionTreeType;
+  typedef otb::GradientBoostedTreeMachineLearningModel<InputValueType, OutputValueType> GradientBoostedTreeType;
+  typedef otb::NeuralNetworkMachineLearningModel<InputValueType, OutputValueType> NeuralNetworkType;
+  typedef otb::NormalBayesMachineLearningModel<InputValueType, OutputValueType> NormalBayesType;
+#endif
+
+#ifdef OTB_USE_LIBSVM 
+  typedef otb::LibSVMMachineLearningModel<InputValueType, OutputValueType> LibSVMType;
+#endif
+ 
+protected:
+  LearningApplicationBase();
+
+  /** Generic method to train and save the machine learning model. This method
+   * uses specific train methods depending on the chosen model.*/
+  void Train(typename ListSampleType::Pointer trainingListSample,
+             typename TargetListSampleType::Pointer trainingLabeledListSample,
+             std::string modelPath);
+
+  /** Generic method to load a model file and use it to classify a sample list*/
+  void Classify(typename ListSampleType::Pointer validationListSample,
+                typename TargetListSampleType::Pointer predictedList,
+                std::string modelPath);
+
+  /** Init method that creates all the parameters for machine learning models */
+  void DoInit();
+
+  /** Flag to switch between classification and regression mode.
+   * False by default, child classes may change it in their constructor */
+  bool m_RegressionFlag;
+
+private:
+
+  /** Specific Init and Train methods for each machine learning model */
+  //@{
+#ifdef OTB_USE_LIBSVM 
+  void InitLibSVMParams();
+
+  void TrainLibSVM(typename ListSampleType::Pointer trainingListSample,
+                   typename TargetListSampleType::Pointer trainingLabeledListSample,
+                   std::string modelPath);
+#endif  
+
+#ifdef OTB_USE_OPENCV
+  void InitBoostParams();
+  void InitSVMParams();
+  void InitDecisionTreeParams();
+  void InitGradientBoostedTreeParams();
+  void InitNeuralNetworkParams();
+  void InitNormalBayesParams();
+  void InitRandomForestsParams();
+  void InitKNNParams();
+
+  void TrainBoost(typename ListSampleType::Pointer trainingListSample,
+                  typename TargetListSampleType::Pointer trainingLabeledListSample,
+                  std::string modelPath);
+  void TrainSVM(typename ListSampleType::Pointer trainingListSample,
+                typename TargetListSampleType::Pointer trainingLabeledListSample,
+                std::string modelPath);
+  void TrainDecisionTree(typename ListSampleType::Pointer trainingListSample,
+                         typename TargetListSampleType::Pointer trainingLabeledListSample,
+                         std::string modelPath);
+  void TrainGradientBoostedTree(typename ListSampleType::Pointer trainingListSample,
+                                typename TargetListSampleType::Pointer trainingLabeledListSample,
+                                std::string modelPath);
+  void TrainNeuralNetwork(typename ListSampleType::Pointer trainingListSample,
+                          typename TargetListSampleType::Pointer trainingLabeledListSample,
+                          std::string modelPath);
+  void TrainNormalBayes(typename ListSampleType::Pointer trainingListSample,
+                        typename TargetListSampleType::Pointer trainingLabeledListSample,
+                        std::string modelPath);
+  void TrainRandomForests(typename ListSampleType::Pointer trainingListSample,
+                          typename TargetListSampleType::Pointer trainingLabeledListSample,
+                          std::string modelPath);
+  void TrainKNN(typename ListSampleType::Pointer trainingListSample,
+                typename TargetListSampleType::Pointer trainingLabeledListSample,
+                std::string modelPath);
+#endif
+  //@}
+};
+
+} // end namespace Wrapper
+} // end namespace otb
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "otbLearningApplicationBase.txx"
+#ifdef OTB_USE_OPENCV
+#include "otbTrainBoost.txx"
+#include "otbTrainDecisionTree.txx"
+#include "otbTrainGradientBoostedTree.txx"
+#include "otbTrainKNN.txx"
+#include "otbTrainNeuralNetwork.txx"
+#include "otbTrainNormalBayes.txx"
+#include "otbTrainRandomForests.txx"
+#include "otbTrainSVM.txx"
+#endif
+#ifdef OTB_USE_LIBSVM
+#include "otbTrainLibSVM.txx"
+#endif
+#endif
+
+#endif
diff --git a/Modules/Applications/AppClassification/include/otbLearningApplicationBase.txx b/Modules/Applications/AppClassification/include/otbLearningApplicationBase.txx
new file mode 100644
index 0000000000000000000000000000000000000000..a545538656efbafd98fa520944b09a183ef87fcb
--- /dev/null
+++ b/Modules/Applications/AppClassification/include/otbLearningApplicationBase.txx
@@ -0,0 +1,177 @@
+/*=========================================================================
+ Program:   ORFEO Toolbox
+ Language:  C++
+ Date:      $Date$
+ Version:   $Revision$
+
+
+ Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+ See OTBCopyright.txt for details.
+
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notices for more information.
+
+ =========================================================================*/
+#ifndef __otbLearningApplicationBase_txx
+#define __otbLearningApplicationBase_txx
+
+#include "otbLearningApplicationBase.h"
+
+namespace otb
+{
+namespace Wrapper
+{
+
+template <class TInputValue, class TOutputValue>
+LearningApplicationBase<TInputValue,TOutputValue>
+::LearningApplicationBase() : m_RegressionFlag(false)
+{
+} 
+
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::DoInit()
+{
+  AddDocTag(Tags::Learning);
+
+  // main choice parameter that will contain all machine learning options
+  AddParameter(ParameterType_Choice, "classifier", "Classifier to use for the training");
+  SetParameterDescription("classifier", "Choice of the classifier to use for the training.");
+
+  //Group LibSVM
+#ifdef OTB_USE_LIBSVM 
+  InitLibSVMParams();
+#endif
+
+#ifdef OTB_USE_OPENCV
+  InitSVMParams();
+  if (!m_RegressionFlag)
+    {
+    InitBoostParams();  // Regression not supported
+    }
+  InitDecisionTreeParams();
+  InitGradientBoostedTreeParams();
+  InitNeuralNetworkParams();
+  if (!m_RegressionFlag)
+    {
+    InitNormalBayesParams(); // Regression not supported
+    }
+  InitRandomForestsParams();
+  InitKNNParams();
+#endif
+}
+
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::Classify(typename ListSampleType::Pointer validationListSample,
+           typename TargetListSampleType::Pointer predictedList,
+           std::string modelPath)
+{
+  // load a machine learning model from file and predict the input sample list
+  ModelPointerType model = ModelFactoryType::CreateMachineLearningModel(modelPath,
+                                                                        ModelFactoryType::ReadMode);
+
+  if (model.IsNull())
+    {
+    otbAppLogFATAL(<< "Error when loading model " << modelPath);
+    }
+
+  model->Load(modelPath);
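+  // propagate the regression flag so that the model outputs continuous values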
+  model->SetRegressionMode(this->m_RegressionFlag);
+  model->SetInputListSample(validationListSample);
+  model->SetTargetListSample(predictedList);
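+  // PredictAll() runs the model on every input sample and fills predictedList with the results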
+  model->PredictAll();
+}
+
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::Train(typename ListSampleType::Pointer trainingListSample,
+        typename TargetListSampleType::Pointer trainingLabeledListSample,
+        std::string modelPath)
+{
+  // get the name of the chosen machine learning model
+  const std::string modelName = GetParameterString("classifier");
+  // call specific train function
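+  // (available keys: "libsvm", "svm", "boost", "dt", "gbt", "ann", "bayes", "rf", "knn")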
+  if (modelName == "libsvm")
+    {
+    #ifdef OTB_USE_LIBSVM
+    TrainLibSVM(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module LIBSVM is not installed. You should consider turning OTB_USE_LIBSVM on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "svm")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainSVM(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "boost")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainBoost(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "dt")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainDecisionTree(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "gbt")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainGradientBoostedTree(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "ann")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainNeuralNetwork(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "bayes")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainNormalBayes(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "rf")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainRandomForests(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+  else if (modelName == "knn")
+    {
+    #ifdef OTB_USE_OPENCV
+    TrainKNN(trainingListSample, trainingLabeledListSample, modelPath);
+    #else
+    otbAppLogFATAL("Module OPENCV is not installed. You should consider turning OTB_USE_OPENCV on during cmake configuration.");
+    #endif
+    }
+}
+
+} // end namespace Wrapper
+} // end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainBoost.cxx b/Modules/Applications/AppClassification/include/otbTrainBoost.txx
similarity index 82%
rename from Modules/Applications/AppClassification/app/otbTrainBoost.cxx
rename to Modules/Applications/AppClassification/include/otbTrainBoost.txx
index c107f8016ea1bdecaf2ca9a68c8af259184715c2..23e666bfc1d877072b41312ce0478a8a41036500 100644
--- a/Modules/Applications/AppClassification/app/otbTrainBoost.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainBoost.txx
@@ -14,16 +14,19 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
-
+#ifndef __otbTrainBoost_txx
+#define __otbTrainBoost_txx
+#include "otbLearningApplicationBase.h"
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_OPENCV
-  void TrainImagesClassifier::InitBoostParams()
+
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::InitBoostParams()
   {
     AddChoice("classifier.boost", "Boost classifier");
     SetParameterDescription("classifier.boost", "This group of parameters allows to set Boost classifier parameters. "
@@ -55,10 +58,15 @@ namespace Wrapper
     SetParameterDescription("classifier.boost.m","Maximum depth of the tree.");
   }
 
-
-  void TrainImagesClassifier::TrainBoost(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::TrainBoost(typename ListSampleType::Pointer trainingListSample,
+               typename TargetListSampleType::Pointer trainingLabeledListSample,
+               std::string modelPath)
   {
-    BoostType::Pointer boostClassifier = BoostType::New();
+    typename BoostType::Pointer boostClassifier = BoostType::New();
+    boostClassifier->SetRegressionMode(this->m_RegressionFlag);
     boostClassifier->SetInputListSample(trainingListSample);
     boostClassifier->SetTargetListSample(trainingLabeledListSample);
     boostClassifier->SetBoostType(GetParameterInt("classifier.boost.t"));
@@ -67,9 +75,10 @@ namespace Wrapper
     boostClassifier->SetMaxDepth(GetParameterInt("classifier.boost.m"));
 
     boostClassifier->Train();
-    boostClassifier->Save(GetParameterString("io.out"));
+    boostClassifier->Save(modelPath);
   }
-#endif
 
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainDecisionTree.cxx b/Modules/Applications/AppClassification/include/otbTrainDecisionTree.txx
similarity index 86%
rename from Modules/Applications/AppClassification/app/otbTrainDecisionTree.cxx
rename to Modules/Applications/AppClassification/include/otbTrainDecisionTree.txx
index 9f94a7fc6050bb13a2169242619e8232f32d47c3..04df9380caacca8975caf05f8bb0b6b8237d0ff5 100644
--- a/Modules/Applications/AppClassification/app/otbTrainDecisionTree.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainDecisionTree.txx
@@ -14,15 +14,19 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
+#ifndef __otbTrainDecisionTree_txx
+#define __otbTrainDecisionTree_txx
+#include "otbLearningApplicationBase.h"
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_OPENCV
-void TrainImagesClassifier::InitDecisionTreeParams()
+
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::InitDecisionTreeParams()
 {
   AddChoice("classifier.dt", "Decision Tree classifier");
   SetParameterDescription("classifier.dt",
@@ -81,10 +85,15 @@ void TrainImagesClassifier::InitDecisionTreeParams()
 
 }
 
-void TrainImagesClassifier::TrainDecisionTree(ListSampleType::Pointer trainingListSample,
-                                                             LabelListSampleType::Pointer trainingLabeledListSample)
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::TrainDecisionTree(typename ListSampleType::Pointer trainingListSample,
+                    typename TargetListSampleType::Pointer trainingLabeledListSample,
+                    std::string modelPath)
 {
-  DecisionTreeType::Pointer classifier = DecisionTreeType::New();
+  typename DecisionTreeType::Pointer classifier = DecisionTreeType::New();
+  classifier->SetRegressionMode(this->m_RegressionFlag);
   classifier->SetInputListSample(trainingListSample);
   classifier->SetTargetListSample(trainingLabeledListSample);
   classifier->SetMaxDepth(GetParameterInt("classifier.dt.max"));
@@ -101,8 +110,10 @@ void TrainImagesClassifier::TrainDecisionTree(ListSampleType::Pointer trainingLi
     classifier->SetTruncatePrunedTree(false);
     }
   classifier->Train();
-  classifier->Save(GetParameterString("io.out"));
+  classifier->Save(modelPath);
 }
-#endif
+
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainGradientBoostedTree.cxx b/Modules/Applications/AppClassification/include/otbTrainGradientBoostedTree.txx
similarity index 63%
rename from Modules/Applications/AppClassification/app/otbTrainGradientBoostedTree.cxx
rename to Modules/Applications/AppClassification/include/otbTrainGradientBoostedTree.txx
index c703ad4c57a0d8efb8103dadc64a92701a16d162..b55612581cd1140185ae53eab3ae407c51536b6b 100644
--- a/Modules/Applications/AppClassification/app/otbTrainGradientBoostedTree.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainGradientBoostedTree.txx
@@ -14,23 +14,34 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
+#ifndef __otbTrainGradientBoostedTree_txx
+#define __otbTrainGradientBoostedTree_txx
+#include "otbLearningApplicationBase.h"
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_OPENCV
-void TrainImagesClassifier::InitGradientBoostedTreeParams()
+
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::InitGradientBoostedTreeParams()
 {
   AddChoice("classifier.gbt", "Gradient Boosted Tree classifier");
   SetParameterDescription(
       "classifier.gbt",
       "This group of parameters allows to set Gradient Boosted Tree classifier parameters. "
       "See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/gradient_boosted_trees.html}.");
-  //LossFunctionType : not exposed, as only one type is used for Classification,
-  // the other three are used for regression.
+
+  if (m_RegressionFlag)
+    {
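+    // in regression mode the loss function becomes a user choice;
+    // in classification mode DEVIANCE_LOSS is always used (see TrainGradientBoostedTree())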
+    AddParameter(ParameterType_Choice, "classifier.gbt.t", "Loss Function Type");
+    SetParameterDescription("classifier.gbt.t","Type of loss function used for training.");
+    AddChoice("classifier.gbt.t.sqr","Squared Loss");
+    AddChoice("classifier.gbt.t.abs","Absolute Loss");
+    AddChoice("classifier.gbt.t.hub","Huber Loss");
+    }
 
   //WeakCount
   AddParameter(ParameterType_Int, "classifier.gbt.w", "Number of boosting algorithm iterations");
@@ -67,10 +78,15 @@ void TrainImagesClassifier::InitGradientBoostedTreeParams()
 
 }
 
-void TrainImagesClassifier::TrainGradientBoostedTree(
-    ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::TrainGradientBoostedTree(typename ListSampleType::Pointer trainingListSample,
+                           typename TargetListSampleType::Pointer trainingLabeledListSample,
+                           std::string modelPath)
 {
-  GradientBoostedTreeType::Pointer classifier = GradientBoostedTreeType::New();
+  typename GradientBoostedTreeType::Pointer classifier = GradientBoostedTreeType::New();
+  classifier->SetRegressionMode(this->m_RegressionFlag);
   classifier->SetInputListSample(trainingListSample);
   classifier->SetTargetListSample(trainingLabeledListSample);
   classifier->SetWeakCount(GetParameterInt("classifier.gbt.w"));
@@ -78,9 +94,34 @@ void TrainImagesClassifier::TrainGradientBoostedTree(
   classifier->SetSubSamplePortion(GetParameterFloat("classifier.gbt.p"));
   classifier->SetMaxDepth(GetParameterInt("classifier.gbt.max"));
 
+  if (m_RegressionFlag)
+    {
+    switch (GetParameterInt("classifier.gbt.t"))
+      {
+      case 0: // SQUARED_LOSS
+        classifier->SetLossFunctionType(CvGBTrees::SQUARED_LOSS);
+        break;
+      case 1: // ABSOLUTE_LOSS
+        classifier->SetLossFunctionType(CvGBTrees::ABSOLUTE_LOSS);
+        break;
+      case 2: // HUBER_LOSS
+        classifier->SetLossFunctionType(CvGBTrees::HUBER_LOSS);
+        break;
+      default:
+        classifier->SetLossFunctionType(CvGBTrees::SQUARED_LOSS);
+        break;
+      }
+    }
+  else
+    {
+    classifier->SetLossFunctionType(CvGBTrees::DEVIANCE_LOSS);
+    }
+
   classifier->Train();
-  classifier->Save(GetParameterString("io.out"));
+  classifier->Save(modelPath);
 }
-#endif
+
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/include/otbTrainKNN.txx b/Modules/Applications/AppClassification/include/otbTrainKNN.txx
new file mode 100644
index 0000000000000000000000000000000000000000..93656003a1b24e601a630393983934bb65b5e797
--- /dev/null
+++ b/Modules/Applications/AppClassification/include/otbTrainKNN.txx
@@ -0,0 +1,86 @@
+/*=========================================================================
+ Program:   ORFEO Toolbox
+ Language:  C++
+ Date:      $Date$
+ Version:   $Revision$
+
+
+ Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+ See OTBCopyright.txt for details.
+
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notices for more information.
+
+ =========================================================================*/
+#ifndef __otbTrainKNN_txx
+#define __otbTrainKNN_txx
+#include "otbLearningApplicationBase.h"
+
+namespace otb
+{
+namespace Wrapper
+{
+
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::InitKNNParams()
+  {
+    AddChoice("classifier.knn", "KNN classifier");
+    SetParameterDescription("classifier.knn", "This group of parameters allows to set KNN classifier parameters. "
+        "See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/k_nearest_neighbors.html}.");
+
+    //K parameter
+    AddParameter(ParameterType_Int, "classifier.knn.k", "Number of Neighbors");
+    SetParameterInt("classifier.knn.k", 32);
+    SetParameterDescription("classifier.knn.k","The number of neighbors to use.");
+
+    if (this->m_RegressionFlag)
+      {
+      // Decision rule : mean / median
+      AddParameter(ParameterType_Choice, "classifier.knn.rule", "Decision rule");
+      SetParameterDescription("classifier.knn.rule", "Decision rule used to compute the regression output");
+
+      AddChoice("classifier.knn.rule.mean", "Mean of the neighbors' values");
+      SetParameterDescription("classifier.knn.rule.mean","Returns the mean of the neighbors' values");
+
+      AddChoice("classifier.knn.rule.median", "Median of the neighbors' values");
+      SetParameterDescription("classifier.knn.rule.median","Returns the median of the neighbors' values");
+      }
+  }
+
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::TrainKNN(typename ListSampleType::Pointer trainingListSample,
+             typename TargetListSampleType::Pointer trainingLabeledListSample,
+             std::string modelPath)
+  {
+    typename KNNType::Pointer knnClassifier = KNNType::New();
+    knnClassifier->SetRegressionMode(this->m_RegressionFlag);
+    knnClassifier->SetInputListSample(trainingListSample);
+    knnClassifier->SetTargetListSample(trainingLabeledListSample);
+    knnClassifier->SetK(GetParameterInt("classifier.knn.k"));
+    if (this->m_RegressionFlag)
+      {
+      std::string decision = this->GetParameterString("classifier.knn.rule");
+      if (decision == "mean")
+        {
+        knnClassifier->SetDecisionRule(KNNType::KNN_MEAN);
+        }
+      else if (decision == "median")
+        {
+        knnClassifier->SetDecisionRule(KNNType::KNN_MEDIAN);
+        }
+      }
+
+    knnClassifier->Train();
+    knnClassifier->Save(modelPath);
+  }
+
+} //end namespace wrapper
+} //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainLibSVM.cxx b/Modules/Applications/AppClassification/include/otbTrainLibSVM.txx
similarity index 52%
rename from Modules/Applications/AppClassification/app/otbTrainLibSVM.cxx
rename to Modules/Applications/AppClassification/include/otbTrainLibSVM.txx
index e117337708d3effd267a462a1d24f8b2f6736e18..928245655fd30d983c970947feef81dbd0c8304f 100644
--- a/Modules/Applications/AppClassification/app/otbTrainLibSVM.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainLibSVM.txx
@@ -14,16 +14,19 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
-
+#ifndef __otbTrainLibSVM_txx
+#define __otbTrainLibSVM_txx
+#include "otbLearningApplicationBase.h"
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_LIBSVM
-  void TrainImagesClassifier::InitLibSVMParams()
+
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::InitLibSVMParams()
   {
     AddChoice("classifier.libsvm", "LibSVM classifier");
     SetParameterDescription("classifier.libsvm", "This group of parameters allows to set SVM classifier parameters.");
@@ -34,6 +37,21 @@ namespace Wrapper
     AddChoice("classifier.libsvm.k.sigmoid", "Sigmoid");
     SetParameterString("classifier.libsvm.k", "linear");
     SetParameterDescription("classifier.libsvm.k", "SVM Kernel Type.");
+    AddParameter(ParameterType_Choice, "classifier.libsvm.m", "SVM Model Type");
+    SetParameterDescription("classifier.libsvm.m", "Type of SVM formulation.");
+    if (this->m_RegressionFlag)
+      {
+      AddChoice("classifier.libsvm.m.epssvr", "Epsilon Support Vector Regression");
+      AddChoice("classifier.libsvm.m.nusvr", "Nu Support Vector Regression");
+      SetParameterString("classifier.libsvm.m", "epssvr");
+      }
+    else
+      {
+      AddChoice("classifier.libsvm.m.csvc", "C support vector classification");
+      AddChoice("classifier.libsvm.m.nusvc", "Nu support vector classification");
+      AddChoice("classifier.libsvm.m.oneclass", "Distribution estimation (One Class SVM)");
+      SetParameterString("classifier.libsvm.m", "csvc");
+      }
     AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C");
     SetParameterFloat("classifier.libsvm.c", 1.0);
     SetParameterDescription(
@@ -45,12 +63,25 @@ namespace Wrapper
     AddParameter(ParameterType_Empty, "classifier.libsvm.prob", "Probability estimation");
     MandatoryOff("classifier.libsvm.prob");
     SetParameterDescription("classifier.libsvm.prob", "Probability estimation flag.");
-  }
 
+    if (this->m_RegressionFlag)
+      {
+      AddParameter(ParameterType_Float, "classifier.libsvm.eps", "Epsilon");
+      SetParameterFloat("classifier.libsvm.eps", 1e-3);
+      AddParameter(ParameterType_Float, "classifier.libsvm.nu", "Nu");
+      SetParameterFloat("classifier.libsvm.nu", 0.5);
+      }
+  }
 
-  void TrainImagesClassifier::TrainLibSVM(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::TrainLibSVM(typename ListSampleType::Pointer trainingListSample,
+                typename TargetListSampleType::Pointer trainingLabeledListSample,
+                std::string modelPath)
   {
-    LibSVMType::Pointer libSVMClassifier = LibSVMType::New();
+    typename LibSVMType::Pointer libSVMClassifier = LibSVMType::New();
+    libSVMClassifier->SetRegressionMode(this->m_RegressionFlag);
     libSVMClassifier->SetInputListSample(trainingListSample);
     libSVMClassifier->SetTargetListSample(trainingLabeledListSample);
     //SVM Option
@@ -83,9 +114,47 @@ namespace Wrapper
         libSVMClassifier->SetKernelType(LINEAR);
         break;
       }
+    if (this->m_RegressionFlag)
+      {
+      switch (GetParameterInt("classifier.libsvm.m"))
+        {
+        case 0: // EPSILON_SVR
+          libSVMClassifier->SetSVMType(EPSILON_SVR);
+          break;
+        case 1: // NU_SVR
+          libSVMClassifier->SetSVMType(NU_SVR);
+          break;
+        default:
+          libSVMClassifier->SetSVMType(EPSILON_SVR);
+          break;
+        }
+      libSVMClassifier->SetEpsilon(GetParameterFloat("classifier.libsvm.eps"));
+      libSVMClassifier->SetNu(GetParameterFloat("classifier.libsvm.nu"));
+      }
+    else
+      {
+      switch (GetParameterInt("classifier.libsvm.m"))
+        {
+        case 0: // C_SVC
+          libSVMClassifier->SetSVMType(C_SVC);
+          break;
+        case 1: // NU_SVC
+          libSVMClassifier->SetSVMType(NU_SVC);
+          break;
+        case 2: // ONE_CLASS
+          libSVMClassifier->SetSVMType(ONE_CLASS);
+          break;
+        default:
+          libSVMClassifier->SetSVMType(C_SVC);
+          break;
+        }
+      }
+
     libSVMClassifier->Train();
-    libSVMClassifier->Save(GetParameterString("io.out"));
+    libSVMClassifier->Save(modelPath);
   }
-#endif
+
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainNeuralNetwork.cxx b/Modules/Applications/AppClassification/include/otbTrainNeuralNetwork.txx
similarity index 87%
rename from Modules/Applications/AppClassification/app/otbTrainNeuralNetwork.cxx
rename to Modules/Applications/AppClassification/include/otbTrainNeuralNetwork.txx
index 29b3657ad2ef612a89a83f923cf370c3a9bbbc5c..40f120f69bcaa1df15454070d63e38feda46e960 100644
--- a/Modules/Applications/AppClassification/app/otbTrainNeuralNetwork.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainNeuralNetwork.txx
@@ -14,16 +14,20 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
+#ifndef __otbTrainNeuralNetwork_txx
+#define __otbTrainNeuralNetwork_txx
 #include <boost/lexical_cast.hpp>
-#include "otbTrainImagesClassifier.h"
+#include "otbLearningApplicationBase.h"
+#include <set>
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_OPENCV
-void TrainImagesClassifier::InitNeuralNetworkParams()
+
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::InitNeuralNetworkParams()
 {
   AddChoice("classifier.ann", "Artificial Neural Network classifier");
   SetParameterDescription("classifier.ann",
@@ -119,10 +123,15 @@ void TrainImagesClassifier::InitNeuralNetworkParams()
 
 }
 
-void TrainImagesClassifier::TrainNeuralNetwork(ListSampleType::Pointer trainingListSample,
-                                                              LabelListSampleType::Pointer trainingLabeledListSample)
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::TrainNeuralNetwork(typename ListSampleType::Pointer trainingListSample,
+                     typename TargetListSampleType::Pointer trainingLabeledListSample,
+                     std::string modelPath)
 {
-  NeuralNetworkType::Pointer classifier = NeuralNetworkType::New();
+  typename NeuralNetworkType::Pointer classifier = NeuralNetworkType::New();
+  classifier->SetRegressionMode(this->m_RegressionFlag);
   classifier->SetInputListSample(trainingListSample);
   classifier->SetTargetListSample(trainingLabeledListSample);
 
@@ -153,18 +162,23 @@ void TrainImagesClassifier::TrainNeuralNetwork(ListSampleType::Pointer trainingL
 
 
   unsigned int nbClasses = 0;
-  LabelType currentLabel = 0, prevLabel = 0;
-  for (unsigned int itLab = 0; itLab < trainingLabeledListSample->Size(); ++itLab)
+  if (this->m_RegressionFlag)
+    {
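+    // regression: the output layer holds a single neuron carrying the predicted value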
+    layerSizes.push_back(1);
+    }
+  else
     {
-    currentLabel = trainingLabeledListSample->GetMeasurementVector(itLab);
-    if ((currentLabel != prevLabel) || (itLab == 0))
+    std::set<TargetValueType> labelSet;
+    TargetSampleType currentLabel;
+    for (unsigned int itLab = 0; itLab < trainingLabeledListSample->Size(); ++itLab)
       {
-      ++nbClasses;
+      currentLabel = trainingLabeledListSample->GetMeasurementVector(itLab);
+      labelSet.insert(currentLabel[0]);
       }
-    prevLabel = currentLabel;
+    nbClasses = labelSet.size();
+    layerSizes.push_back(nbClasses);
     }
 
-  layerSizes.push_back(nbClasses);
   classifier->SetLayerSizes(layerSizes);
 
   switch (GetParameterInt("classifier.ann.f"))
@@ -208,8 +222,10 @@ void TrainImagesClassifier::TrainNeuralNetwork(ListSampleType::Pointer trainingL
   classifier->SetEpsilon(GetParameterFloat("classifier.ann.eps"));
   classifier->SetMaxIter(GetParameterInt("classifier.ann.iter"));
   classifier->Train();
-  classifier->Save(GetParameterString("io.out"));
+  classifier->Save(modelPath);
 }
-#endif
+
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainNormalBayes.cxx b/Modules/Applications/AppClassification/include/otbTrainNormalBayes.txx
similarity index 59%
rename from Modules/Applications/AppClassification/app/otbTrainNormalBayes.cxx
rename to Modules/Applications/AppClassification/include/otbTrainNormalBayes.txx
index d33e6eaf9cd79a99120a5176b096a94fb98b190c..d5f3cce8e3ef2ae3fbe02a2b21e99d0004df3944 100644
--- a/Modules/Applications/AppClassification/app/otbTrainNormalBayes.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainNormalBayes.txx
@@ -14,16 +14,19 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
-
+#ifndef __otbTrainNormalBayes_txx
+#define __otbTrainNormalBayes_txx
+#include "otbLearningApplicationBase.h"
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_OPENCV
-  void TrainImagesClassifier::InitNormalBayesParams()
+
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::InitNormalBayesParams()
   {
     AddChoice("classifier.bayes", "Normal Bayes classifier");
     SetParameterDescription("classifier.bayes", "Use a Normal Bayes Classifier. "
@@ -31,15 +34,22 @@ namespace Wrapper
 
   }
 
-
-  void TrainImagesClassifier::TrainNormalBayes(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::TrainNormalBayes(typename ListSampleType::Pointer trainingListSample,
+                     typename TargetListSampleType::Pointer trainingLabeledListSample,
+                     std::string modelPath)
   {
-    NormalBayesType::Pointer classifier = NormalBayesType::New();
+    typename NormalBayesType::Pointer classifier = NormalBayesType::New();
+    classifier->SetRegressionMode(this->m_RegressionFlag);
     classifier->SetInputListSample(trainingListSample);
     classifier->SetTargetListSample(trainingLabeledListSample);
     classifier->Train();
-    classifier->Save(GetParameterString("io.out"));
+    classifier->Save(modelPath);
   }
-#endif
+
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainRandomForests.cxx b/Modules/Applications/AppClassification/include/otbTrainRandomForests.txx
similarity index 87%
rename from Modules/Applications/AppClassification/app/otbTrainRandomForests.cxx
rename to Modules/Applications/AppClassification/include/otbTrainRandomForests.txx
index 1798b9d9b59b7ffa2323936084736b6b423a3cb5..a856243a300dfc35d559e683aef21268d8c25a12 100644
--- a/Modules/Applications/AppClassification/app/otbTrainRandomForests.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainRandomForests.txx
@@ -14,15 +14,19 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
+#ifndef __otbTrainRandomForests_txx
+#define __otbTrainRandomForests_txx
+#include "otbLearningApplicationBase.h"
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_OPENCV
-void TrainImagesClassifier::InitRandomForestsParams()
+
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::InitRandomForestsParams()
 {
   AddChoice("classifier.rf", "Random forests classifier");
   SetParameterDescription("classifier.rf",
@@ -95,10 +99,15 @@ void TrainImagesClassifier::InitRandomForestsParams()
   //TerminationCriteria not exposed
 }
 
-void TrainImagesClassifier::TrainRandomForests(ListSampleType::Pointer trainingListSample,
-                                                              LabelListSampleType::Pointer trainingLabeledListSample)
+template <class TInputValue, class TOutputValue>
+void
+LearningApplicationBase<TInputValue,TOutputValue>
+::TrainRandomForests(typename ListSampleType::Pointer trainingListSample,
+                     typename TargetListSampleType::Pointer trainingLabeledListSample,
+                     std::string modelPath)
 {
-  RandomForestType::Pointer classifier = RandomForestType::New();
+  typename RandomForestType::Pointer classifier = RandomForestType::New();
+  classifier->SetRegressionMode(this->m_RegressionFlag);
   classifier->SetInputListSample(trainingListSample);
   classifier->SetTargetListSample(trainingLabeledListSample);
   classifier->SetMaxDepth(GetParameterInt("classifier.rf.max"));
@@ -110,8 +119,10 @@ void TrainImagesClassifier::TrainRandomForests(ListSampleType::Pointer trainingL
   classifier->SetForestAccuracy(GetParameterFloat("classifier.rf.acc"));
 
   classifier->Train();
-  classifier->Save(GetParameterString("io.out"));
+  classifier->Save(modelPath);
 }
-#endif
+
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/app/otbTrainSVM.cxx b/Modules/Applications/AppClassification/include/otbTrainSVM.txx
similarity index 57%
rename from Modules/Applications/AppClassification/app/otbTrainSVM.cxx
rename to Modules/Applications/AppClassification/include/otbTrainSVM.txx
index a41e103746f5f5d7968f26220791436232caac74..ac0f153ed4f78df8c5a7d838c3e32beed62ddd9f 100644
--- a/Modules/Applications/AppClassification/app/otbTrainSVM.cxx
+++ b/Modules/Applications/AppClassification/include/otbTrainSVM.txx
@@ -14,28 +14,38 @@
  PURPOSE.  See the above copyright notices for more information.
 
  =========================================================================*/
-
-#include "otbTrainImagesClassifier.h"
-
+#ifndef __otbTrainSVM_txx
+#define __otbTrainSVM_txx
+#include "otbLearningApplicationBase.h"
 
 namespace otb
 {
 namespace Wrapper
 {
-#ifdef OTB_USE_OPENCV
-  void TrainImagesClassifier::InitSVMParams()
+
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::InitSVMParams()
   {
     AddChoice("classifier.svm", "SVM classifier (OpenCV)");
     SetParameterDescription("classifier.svm", "This group of parameters allows to set SVM classifier parameters. "
         "See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/support_vector_machines.html}.");
     AddParameter(ParameterType_Choice, "classifier.svm.m", "SVM Model Type");
-    AddChoice("classifier.svm.m.csvc", "C support vector classification");
-    AddChoice("classifier.svm.m.nusvc", "Nu support vector classification");
-    AddChoice("classifier.svm.m.oneclass", "Distribution estimation (One Class SVM)");
-    //AddChoice("classifier.svm.m.epssvr", "Epsilon Support Vector Regression");
-    //AddChoice("classifier.svm.m.nusvr", "Nu Support Vector Regression");
-    SetParameterString("classifier.svm.m", "csvc");
     SetParameterDescription("classifier.svm.m", "Type of SVM formulation.");
+    if (this->m_RegressionFlag)
+      {
+      AddChoice("classifier.svm.m.epssvr", "Epsilon Support Vector Regression");
+      AddChoice("classifier.svm.m.nusvr", "Nu Support Vector Regression");
+      SetParameterString("classifier.svm.m", "epssvr");
+      }
+    else
+      {
+      AddChoice("classifier.svm.m.csvc", "C support vector classification");
+      AddChoice("classifier.svm.m.nusvc", "Nu support vector classification");
+      AddChoice("classifier.svm.m.oneclass", "Distribution estimation (One Class SVM)");
+      SetParameterString("classifier.svm.m", "csvc");
+      }
     AddParameter(ParameterType_Choice, "classifier.svm.k", "SVM Kernel Type");
     AddChoice("classifier.svm.k.linear", "Linear");
     AddChoice("classifier.svm.k.rbf", "Gaussian radial basis function");
@@ -52,9 +62,26 @@ namespace Wrapper
                  "Parameter nu of a SVM optimization problem (NU_SVC / ONE_CLASS)");
     SetParameterFloat("classifier.svm.nu", 0.0);
     SetParameterDescription("classifier.svm.nu", "Parameter nu of a SVM optimization problem.");
-    //AddParameter(ParameterType_Float, "classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR)");
-    //SetParameterFloat("classifier.svm.p", 0.0);
-    //SetParameterDescription("classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR).");
+    if (this->m_RegressionFlag)
+      {
+      AddParameter(ParameterType_Float, "classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR)");
+      SetParameterFloat("classifier.svm.p", 1.0);
+      SetParameterDescription("classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR).");
+
+      AddParameter(ParameterType_Choice, "classifier.svm.term", "Termination criteria");
+      SetParameterDescription("classifier.svm.term","Termination criteria of the iterative optimization");
+      AddChoice("classifier.svm.term.iter", "Stops when the maximum iteration count is reached.");
+      AddChoice("classifier.svm.term.eps", "Stops when the accuracy becomes lower than epsilon.");
+      AddChoice("classifier.svm.term.all", "Stops when either of the two criteria above is met.");
+
+      AddParameter(ParameterType_Float, "classifier.svm.iter", "Maximum iteration");
+      SetParameterFloat("classifier.svm.iter", 1000);
+      SetParameterDescription("classifier.svm.iter", "Maximum number of iterations (corresponds to the termination criterion 'iter').");
+
+      AddParameter(ParameterType_Float, "classifier.svm.eps", "Epsilon accuracy threshold");
+      SetParameterFloat("classifier.svm.eps", FLT_EPSILON);
+      SetParameterDescription("classifier.svm.eps", "Epsilon accuracy (corresponds to the termination criterion 'eps').");
+      }
     AddParameter(ParameterType_Float, "classifier.svm.coef0", "Parameter coef0 of a kernel function (POLY / SIGMOID)");
     SetParameterFloat("classifier.svm.coef0", 0.0);
     SetParameterDescription("classifier.svm.coef0", "Parameter coef0 of a kernel function (POLY / SIGMOID).");
@@ -76,9 +103,15 @@ namespace Wrapper
                             "because the samples are not identically processed within OpenCV.");
   }
 
-  void TrainImagesClassifier::TrainSVM(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
+  template <class TInputValue, class TOutputValue>
+  void
+  LearningApplicationBase<TInputValue,TOutputValue>
+  ::TrainSVM(typename ListSampleType::Pointer trainingListSample,
+             typename TargetListSampleType::Pointer trainingLabeledListSample,
+             std::string modelPath)
   {
-    SVMType::Pointer SVMClassifier = SVMType::New();
+    typename SVMType::Pointer SVMClassifier = SVMType::New();
+    SVMClassifier->SetRegressionMode(this->m_RegressionFlag);
     SVMClassifier->SetInputListSample(trainingListSample);
     SVMClassifier->SetTargetListSample(trainingLabeledListSample);
     switch (GetParameterInt("classifier.svm.k"))
@@ -104,36 +137,69 @@ namespace Wrapper
         std::cout << "CvSVM::LINEAR = " << CvSVM::LINEAR << std::endl;
         break;
       }
-    switch (GetParameterInt("classifier.svm.m"))
+    if (this->m_RegressionFlag)
       {
-      case 0: // C_SVC
-        SVMClassifier->SetSVMType(CvSVM::C_SVC);
-        std::cout << "CvSVM::C_SVC = " << CvSVM::C_SVC << std::endl;
-        break;
-      case 1: // NU_SVC
-        SVMClassifier->SetSVMType(CvSVM::NU_SVC);
-        std::cout << "CvSVM::NU_SVC = " << CvSVM::NU_SVC << std::endl;
-        break;
-      case 2: // ONE_CLASS
-        SVMClassifier->SetSVMType(CvSVM::ONE_CLASS);
-        std::cout << "CvSVM::ONE_CLASS = " << CvSVM::ONE_CLASS << std::endl;
-        break;
-        /*case 3: // EPS_SVR
-         SVMClassifier->SetSVMType(CvSVM::EPS_SVR);
-         std::cout<<"CvSVM::EPS_SVR = "<<CvSVM::EPS_SVR<<std::endl;
-         break;
-         case 4: // NU_SVR
-         SVMClassifier->SetSVMType(CvSVM::NU_SVR);
-         std::cout<<"CvSVM::NU_SVR = "<<CvSVM::NU_SVR<<std::endl;
-         break; */
-      default: // DEFAULT = C_SVC
-        SVMClassifier->SetSVMType(CvSVM::C_SVC);
-        std::cout << "CvSVM::C_SVC = " << CvSVM::C_SVC << std::endl;
-        break;
+      switch (GetParameterInt("classifier.svm.m"))
+        {
+        case 0: // EPS_SVR
+           SVMClassifier->SetSVMType(CvSVM::EPS_SVR);
+           std::cout<<"CvSVM::EPS_SVR = "<<CvSVM::EPS_SVR<<std::endl;
+           break;
+        case 1: // NU_SVR
+           SVMClassifier->SetSVMType(CvSVM::NU_SVR);
+           std::cout<<"CvSVM::NU_SVR = "<<CvSVM::NU_SVR<<std::endl;
+           break;
+        default: // DEFAULT = EPS_SVR
+          SVMClassifier->SetSVMType(CvSVM::EPS_SVR);
+          std::cout << "CvSVM::EPS_SVR = " << CvSVM::EPS_SVR << std::endl;
+          break;
+        }
+      }
+    else
+      {
+      switch (GetParameterInt("classifier.svm.m"))
+        {
+        case 0: // C_SVC
+          SVMClassifier->SetSVMType(CvSVM::C_SVC);
+          std::cout << "CvSVM::C_SVC = " << CvSVM::C_SVC << std::endl;
+          break;
+        case 1: // NU_SVC
+          SVMClassifier->SetSVMType(CvSVM::NU_SVC);
+          std::cout << "CvSVM::NU_SVC = " << CvSVM::NU_SVC << std::endl;
+          break;
+        case 2: // ONE_CLASS
+          SVMClassifier->SetSVMType(CvSVM::ONE_CLASS);
+          std::cout << "CvSVM::ONE_CLASS = " << CvSVM::ONE_CLASS << std::endl;
+          break;
+        default: // DEFAULT = C_SVC
+          SVMClassifier->SetSVMType(CvSVM::C_SVC);
+          std::cout << "CvSVM::C_SVC = " << CvSVM::C_SVC << std::endl;
+          break;
+        }
       }
     SVMClassifier->SetC(GetParameterFloat("classifier.svm.c"));
     SVMClassifier->SetNu(GetParameterFloat("classifier.svm.nu"));
-    //SVMClassifier->SetP(GetParameterFloat("classifier.svm.p"));
+    if (this->m_RegressionFlag)
+      {
+      SVMClassifier->SetP(GetParameterFloat("classifier.svm.p"));
+      switch (GetParameterInt("classifier.svm.term"))
+        {
+        case 0: // ITER
+          SVMClassifier->SetTermCriteriaType(CV_TERMCRIT_ITER);
+          break;
+        case 1: // EPS
+          SVMClassifier->SetTermCriteriaType(CV_TERMCRIT_EPS);
+          break;
+        case 2: // ITER+EPS
+          SVMClassifier->SetTermCriteriaType(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS);
+          break;
+        default:
+          SVMClassifier->SetTermCriteriaType(CV_TERMCRIT_ITER);
+          break;
+        }
+      SVMClassifier->SetMaxIter(GetParameterInt("classifier.svm.iter"));
+      SVMClassifier->SetEpsilon(GetParameterFloat("classifier.svm.eps"));
+      }
     SVMClassifier->SetCoef0(GetParameterFloat("classifier.svm.coef0"));
     SVMClassifier->SetGamma(GetParameterFloat("classifier.svm.gamma"));
     SVMClassifier->SetDegree(GetParameterFloat("classifier.svm.degree"));
@@ -142,17 +208,21 @@ namespace Wrapper
       SVMClassifier->SetParameterOptimization(true);
     }
     SVMClassifier->Train();
-    SVMClassifier->Save(GetParameterString("io.out"));
+    SVMClassifier->Save(modelPath);
 
     // Update the displayed parameters in the GUI after the training process, for further use of them
     SetParameterFloat("classifier.svm.c", static_cast<float> (SVMClassifier->GetOutputC()));
     SetParameterFloat("classifier.svm.nu", static_cast<float> (SVMClassifier->GetOutputNu()));
-    //SetParameterFloat("classifier.svm.p", static_cast<float> (SVMClassifier->GetOutputP()));
+    if (this->m_RegressionFlag)
+      {
+      SetParameterFloat("classifier.svm.p", static_cast<float> (SVMClassifier->GetOutputP()));
+      }
     SetParameterFloat("classifier.svm.coef0", static_cast<float> (SVMClassifier->GetOutputCoef0()));
     SetParameterFloat("classifier.svm.gamma", static_cast<float> (SVMClassifier->GetOutputGamma()));
     SetParameterFloat("classifier.svm.degree", static_cast<float> (SVMClassifier->GetOutputDegree()));
   }
-#endif
 
 } //end namespace wrapper
 } //end namespace otb
+
+#endif
diff --git a/Modules/Applications/AppClassification/test/CMakeLists.txt b/Modules/Applications/AppClassification/test/CMakeLists.txt
index c4e275fe5a33fc8e629a13c9016a5584c8a4b0a3..63a5c0918f5df9138ab25e78968b8305743f2409 100644
--- a/Modules/Applications/AppClassification/test/CMakeLists.txt
+++ b/Modules/Applications/AppClassification/test/CMakeLists.txt
@@ -841,3 +841,36 @@ otb_test_application(NAME cdbTvVectorDataDSValidationWrongRoads_LI
                              ${OTBAPP_BASELINE_FILES}/cdbTvVectorDataDSValidationOutpout_LI_wr.shp
                              ${TEMP}/cdbTvVectorDataDSValidationOutpout_LI_wr.shp)
 
+if(OTB_USE_OPENCV)
+#----------- TrainRegression TESTS ----------------
+# y = 0.01*x^2 + 1.5*x - 300
+otb_test_application(NAME apTvClTrainRegressionTest_monovar
+                     APP  TrainRegression
+                     OPTIONS -io.il ${INPUTDATA}/QB_Toulouse_Ortho_regression.tif
+                             -io.imstat ${INPUTDATA}/QB_Toulouse_Ortho_regression.xml
+                             -io.out ${TEMP}/apTvClTrainRegressionTest_monovar.rf
+                             -sample.mt 20000
+                             -sample.mv 20000
+                             -sample.vtr 0.5
+                             -rand 121212
+                             -classifier rf
+                             -classifier.rf.ra 0.0001
+                             -classifier.rf.max 6
+                             -classifier.rf.acc 0.0005
+                    VALID    --compare-ascii ${NOTOL}
+                             ${OTBAPP_BASELINE_FILES}/apTvClTrainRegressionTest_monovar.rf
+                             ${TEMP}/apTvClTrainRegressionTest_monovar.rf)
+
+#----------- PredictRegression TESTS ----------------
+otb_test_application(NAME apTvClPredictRegressionTest_monovar
+                     APP PredictRegression
+                     OPTIONS -in ${INPUTDATA}/QB_Toulouse_Ortho_PAN.tif
+                             -model ${OTBAPP_BASELINE_FILES}/apTvClTrainRegressionTest_monovar.rf
+                             -imstat ${INPUTDATA}/QB_Toulouse_Ortho_regression.xml
+                             -out ${TEMP}/apTvClPredictRegressionTest_monovar.tif
+                    VALID    --compare-image 1
+                             ${OTBAPP_BASELINE}/apTvClPredictRegressionTest_monovar.tif
+                             ${TEMP}/apTvClPredictRegressionTest_monovar.tif)
+
+endif()
+
diff --git a/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.h b/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.h
index 185d10ba44bf26db0a9444c5eb61025ef4f2899a..a812eca2b8f36708b19e186d7221a0a23cc07f4f 100644
--- a/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.h
@@ -92,6 +92,11 @@ public:
   itkGetMacro(MaxDepth, int);
   itkSetMacro(MaxDepth, int);
 
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
+
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
 
@@ -117,11 +122,6 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   BoostMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
diff --git a/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.txx
index 493107f9ff2b8da6c652fc67bc31376664eed1cc..15a9d5c293b8fa61a87b2f714aef15d0379a0686 100644
--- a/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbBoostMachineLearningModel.txx
@@ -52,7 +52,7 @@ BoostMachineLearningModel<TInputValue,TOutputValue>
 template <class TInputValue, class TOutputValue>
 void
 BoostMachineLearningModel<TInputValue,TOutputValue>
-::TrainClassification()
+::Train()
 {
   //convert listsample to opencv matrix
   cv::Mat samples;
@@ -77,7 +77,7 @@ template <class TInputValue, class TOutputValue>
 typename BoostMachineLearningModel<TInputValue,TOutputValue>
 ::TargetSampleType
 BoostMachineLearningModel<TInputValue,TOutputValue>
-::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
diff --git a/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.h b/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.h
index 6415fa940e273e5d9074a130ba95a8d8105a4978..651a48f6ee319e2c4171f15741df7e9615205c2d 100644
--- a/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.h
@@ -147,11 +147,10 @@ public:
     return m_Priors;
   }
 
-  /** Setters/Getters to IsRegression flag
-   *  Default is False
-   */
-  itkGetMacro(IsRegression, bool);
-  itkSetMacro(IsRegression, bool);
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
 
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
@@ -178,11 +177,6 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   DecisionTreeMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
@@ -196,7 +190,6 @@ private:
   int m_MaxCategories;
   int m_CVFolds;
   bool m_Use1seRule;
-  bool m_IsRegression;
   bool m_TruncatePrunedTree;
   std::vector<float> m_Priors;
 
diff --git a/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.txx
index ada456e94c35ff1467e97b8301a5e4a315fa35f6..283fb039c35fd04aea4cc2a63c580ee665014ab4 100644
--- a/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbDecisionTreeMachineLearningModel.txx
@@ -38,9 +38,9 @@ DecisionTreeMachineLearningModel<TInputValue,TOutputValue>
  m_MaxCategories(10),
  m_CVFolds(10),
  m_Use1seRule(true),
- m_IsRegression(false),
  m_TruncatePrunedTree(true)
 {
+  this->m_IsRegressionSupported = true;
 }
 
 
@@ -55,7 +55,7 @@ DecisionTreeMachineLearningModel<TInputValue,TOutputValue>
 template <class TInputValue, class TOutputValue>
 void
 DecisionTreeMachineLearningModel<TInputValue,TOutputValue>
-::TrainClassification()
+::Train()
 {
   //convert listsample to opencv matrix
   cv::Mat samples;
@@ -73,7 +73,7 @@ DecisionTreeMachineLearningModel<TInputValue,TOutputValue>
   cv::Mat var_type = cv::Mat(this->GetInputListSample()->GetMeasurementVectorSize() + 1, 1, CV_8U );
   var_type.setTo(cv::Scalar(CV_VAR_NUMERICAL) ); // all inputs are numerical
 
-  if (!m_IsRegression) //Classification
+  if (!this->m_RegressionMode) //Classification
     var_type.at<uchar>(this->GetInputListSample()->GetMeasurementVectorSize(), 0) = CV_VAR_CATEGORICAL;
 
   m_DTreeModel->train(samples,CV_ROW_SAMPLE,labels,cv::Mat(),cv::Mat(),var_type,cv::Mat(),params);
@@ -83,7 +83,7 @@ template <class TInputValue, class TOutputValue>
 typename DecisionTreeMachineLearningModel<TInputValue,TOutputValue>
 ::TargetSampleType
 DecisionTreeMachineLearningModel<TInputValue,TOutputValue>
-::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
diff --git a/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.h b/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.h
index 3126f44c0ff7073def41f025df615e74013b590b..f10a89c68066f8ee6bbacc252b4088fa057157be 100644
--- a/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.h
@@ -100,6 +100,11 @@ public:
   itkGetMacro(UseSurrogates, bool);
   itkSetMacro(UseSurrogates, bool);
 
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
+
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
 
@@ -125,11 +130,6 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   GradientBoostedTreeMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
@@ -142,7 +142,6 @@ private:
   double m_SubSamplePortion;
   int m_MaxDepth;
   bool m_UseSurrogates;
-  bool m_IsRegression;
 
 
 };
diff --git a/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.txx
index 595f1bb060b8ffbcb533352915f41b69c2bd1458..d08abec0290776a5d6a7ddfd3df27e6b5216d620 100644
--- a/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbGradientBoostedTreeMachineLearningModel.txx
@@ -36,9 +36,9 @@ GradientBoostedTreeMachineLearningModel<TInputValue,TOutputValue>
  m_Shrinkage(0.01),
  m_SubSamplePortion(0.8),
  m_MaxDepth(3),
- m_UseSurrogates(false),
- m_IsRegression(false)
+ m_UseSurrogates(false)
 {
+  this->m_IsRegressionSupported = true;
 }
 
 
@@ -53,7 +53,7 @@ GradientBoostedTreeMachineLearningModel<TInputValue,TOutputValue>
 template <class TInputValue, class TOutputValue>
 void
 GradientBoostedTreeMachineLearningModel<TInputValue,TOutputValue>
-::TrainClassification()
+::Train()
 {
   //convert listsample to opencv matrix
   cv::Mat samples;
@@ -62,7 +62,6 @@ GradientBoostedTreeMachineLearningModel<TInputValue,TOutputValue>
   cv::Mat labels;
   otb::ListSampleToMat<TargetListSampleType>(this->GetTargetListSample(),labels);
 
-
   CvGBTreesParams params = CvGBTreesParams(m_LossFunctionType, m_WeakCount, m_Shrinkage, m_SubSamplePortion,
                                            m_MaxDepth, m_UseSurrogates);
 
@@ -70,7 +69,7 @@ GradientBoostedTreeMachineLearningModel<TInputValue,TOutputValue>
   cv::Mat var_type = cv::Mat(this->GetInputListSample()->GetMeasurementVectorSize() + 1, 1, CV_8U );
   var_type.setTo(cv::Scalar(CV_VAR_NUMERICAL) ); // all inputs are numerical
 
-  if (!m_IsRegression) //Classification
+  if (!this->m_RegressionMode) //Classification
     var_type.at<uchar>(this->GetInputListSample()->GetMeasurementVectorSize(), 0) = CV_VAR_CATEGORICAL;
 
   m_GBTreeModel->train(samples,CV_ROW_SAMPLE,labels,cv::Mat(),cv::Mat(),var_type,cv::Mat(),params, false);
@@ -80,7 +79,7 @@ template <class TInputValue, class TOutputValue>
 typename GradientBoostedTreeMachineLearningModel<TInputValue,TOutputValue>
 ::TargetSampleType
 GradientBoostedTreeMachineLearningModel<TInputValue,TOutputValue>
-::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
diff --git a/Modules/Learning/Supervised/include/otbImageClassificationFilter.txx b/Modules/Learning/Supervised/include/otbImageClassificationFilter.txx
index 88d2ad012441455cc2c2d221e79e175731b3e31f..b0a5d5e3e842ec986dfc2400736b84e2a405bb53 100644
--- a/Modules/Learning/Supervised/include/otbImageClassificationFilter.txx
+++ b/Modules/Learning/Supervised/include/otbImageClassificationFilter.txx
@@ -118,7 +118,7 @@ ImageClassificationFilter<TInputImage, TOutputImage, TMaskImage>
     }
 
   // setup iterator for confidence map
-  bool computeConfidenceMap(m_UseConfidenceMap && m_Model->HasConfidenceIndex());
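+  // note : the confidence map is only relevant in classification mode, so it is disabled for regression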
+  bool computeConfidenceMap(m_UseConfidenceMap && m_Model->HasConfidenceIndex() && !m_Model->GetRegressionMode());
   ConfidenceMapIteratorType confidenceIt;
   if (computeConfidenceMap)
     {
diff --git a/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.h b/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.h
index b8524b7fb677aff35b706a54b87b6a917251754b..3f29a10af3ed6b46d700684eddeab80a5076c17a 100644
--- a/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.h
@@ -58,12 +58,23 @@ public:
   itkGetMacro(K, int);
   itkSetMacro(K, int);
 
-  /** Setters/Getters to IsRegression flag
-   *  Default is False
-   *  \see http://docs.opencv.org/modules/ml/doc/k_nearest_neighbors.html
+  /** Decision rule applied once the K nearest neighbors are found:
+   *  [for classification]
+   *   - KNN_VOTING : output the value with the maximum number of occurrences
+   *  [for regression]
+   *   - KNN_MEAN : output the mean value of the neighbors
+   *   - KNN_MEDIAN : output the median value of the neighbors
    */
-  itkGetMacro(IsRegression, bool);
-  itkSetMacro(IsRegression, bool);
+  enum {KNN_VOTING, KNN_MEAN, KNN_MEDIAN};
+
+  /** Setters/Getters to the decision rule */
+  itkGetMacro(DecisionRule, int);
+  itkSetMacro(DecisionRule, int);
+
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
 
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
@@ -90,18 +101,14 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   KNearestNeighborsMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
 
   CvKNearest * m_KNearestModel;
   int m_K;
-  bool m_IsRegression;
+
+  int m_DecisionRule;
 };
 } // end namespace otb
 
diff --git a/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.txx
index e890b9137ac284848eda3c10885bfd736a4c28ac..fd33b7c9061fc3ce66c70cf9ea5c4c7780c42dc8 100644
--- a/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbKNearestNeighborsMachineLearningModel.txx
@@ -23,6 +23,7 @@
 #include "otbOpenCVUtils.h"
 
 #include <fstream>
+#include <set>
 #include "itkMacro.h"
 
 namespace otb
@@ -33,9 +34,10 @@ KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
 ::KNearestNeighborsMachineLearningModel() :
  m_KNearestModel (new CvKNearest),
  m_K(32),
- m_IsRegression(false)
+ m_DecisionRule(KNN_VOTING)
 {
   this->m_ConfidenceIndex = true;
+  this->m_IsRegressionSupported = true;
 }
 
 
@@ -50,7 +52,7 @@ KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
 template <class TInputValue, class TTargetValue>
 void
 KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
-::TrainClassification()
+::Train()
 {
   //convert listsample to opencv matrix
   cv::Mat samples;
@@ -59,26 +61,44 @@ KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
   cv::Mat labels;
   otb::ListSampleToMat<TargetListSampleType>(this->GetTargetListSample(), labels);
 
+  // update decision rule if needed
+  if (this->m_RegressionMode)
+    {
+    if (this->m_DecisionRule == KNN_VOTING)
+      {
+      this->SetDecisionRule(KNN_MEAN);
+      }
+    }
+  else
+    {
+    if (this->m_DecisionRule != KNN_VOTING)
+      {
+      this->SetDecisionRule(KNN_VOTING);
+      }
+    }
+
   //train the KNN model
-  m_KNearestModel->train(samples, labels, cv::Mat(), m_IsRegression, m_K, false);
+  m_KNearestModel->train(samples, labels, cv::Mat(), this->m_RegressionMode, m_K, false);
 }
 
 template <class TInputValue, class TTargetValue>
 typename KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
 ::TargetSampleType
 KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
-::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
   otb::SampleToMat<InputSampleType>(input, sample);
 
   float result;
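+  // find_nearest() fills 'nearest' with the responses of the K neighbors and returns the default prediction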
+  cv::Mat nearest(1,m_K,CV_32FC1);
+  result = m_KNearestModel->find_nearest(sample, m_K,0,0,&nearest,0);
 
+  // compute the quality (confidence index) if requested; this only happens in classification mode
   if (quality != NULL)
     {
-    cv::Mat nearest(1,m_K,CV_32FC1);
-    result = m_KNearestModel->find_nearest(sample, m_K,0,0,&nearest,0);
+    assert(!this->m_RegressionMode);
     unsigned int accuracy = 0;
     for (int k=0 ; k < m_K ; ++k)
       {
@@ -89,9 +109,22 @@ KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
       }
     (*quality) = static_cast<ConfidenceValueType>(accuracy);
     }
-  else
+
+  // Decision rule :
+  //  VOTING is the default OpenCV behaviour for classification
+  //  MEAN is the default OpenCV behaviour for regression
+  //  MEDIAN is the only case that must be handled explicitly here
+  if (this->m_DecisionRule == KNN_MEDIAN)
     {
-    result = m_KNearestModel->find_nearest(sample, m_K);
+    std::multiset<float> values;
+    for (int k=0 ; k < m_K ; ++k)
+      {
+      values.insert(nearest.at<float>(0,k));
+      }
+    std::multiset<float>::iterator median = values.begin();
+    int pos = (m_K >> 1);
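+    // advance the iterator to the middle element of the sorted responses (index m_K/2)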
+    for (int k=0 ; k < pos ; ++k , ++median) {}
+    result = *median;
     }
 
   TargetSampleType target;
@@ -106,12 +139,17 @@ KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
 ::Save(const std::string & filename, const std::string & itkNotUsed(name))
 {
   //there is no m_KNearestModel->save(filename.c_str(), name.c_str()).
-  //We need to save the K parameter and IsRegression flag used and the samples.
+  //We need to save the K parameter, IsRegression flag, DecisionRule and the samples.
 
   std::ofstream ofs(filename.c_str());
   //Save K parameter and IsRegression flag.
   ofs << "K=" << m_K << "\n";
-  ofs << "IsRegression=" << m_IsRegression << "\n";
+  ofs << "IsRegression=" << this->m_RegressionMode << "\n";
+  // Save the DecisionRule if regression
+  if (this->m_RegressionMode)
+    {
+    ofs << "DecisionRule=" << m_DecisionRule << "\n";
+    }
 
   //Save the samples. First column is the Label and other columns are the sample data.
   typename InputListSampleType::ConstIterator sampleIt = this->GetInputListSample()->Begin();
@@ -154,8 +192,15 @@ KNearestNeighborsMachineLearningModel<TInputValue,TTargetValue>
   std::getline(ifs, line);
   pos = line.find_first_of("=", 0);
   nextpos = line.find_first_of(" \n\r", pos+1);
-  this->SetIsRegression(boost::lexical_cast<bool>(line.substr(pos+1, nextpos-pos-1)));
-
+  this->SetRegressionMode(boost::lexical_cast<bool>(line.substr(pos+1, nextpos-pos-1)));
+  //third line is the DecisionRule parameter (only for regression)
+  if (this->m_RegressionMode)
+    {
+    std::getline(ifs, line);
+    pos = line.find_first_of("=", 0);
+    nextpos = line.find_first_of(" \n\r", pos+1);
+    this->SetDecisionRule(boost::lexical_cast<int>(line.substr(pos+1, nextpos-pos-1)));
+    }
   //Clear previous listSample (if any)
   typename InputListSampleType::Pointer samples = InputListSampleType::New();
   typename TargetListSampleType::Pointer labels = TargetListSampleType::New();
diff --git a/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h b/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h
index 18eba7fbb28d336967ac669d05cda51bc5ebdc4c..622923753e37d1216de669a9724c72b979015f83 100644
--- a/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h
@@ -59,6 +59,11 @@ public:
   itkNewMacro(Self);
   itkTypeMacro(SVMMachineLearningModel, MachineLearningModel);
 
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
+
   /** Save the model to file */
   virtual void Save(const std::string &filename, const std::string & name="");
 
@@ -75,23 +80,40 @@ public:
   //@}
 
   //Setters/Getters to SVM model
-  // itkGetMacro(SVMType, int);
-  // itkSetMacro(SVMType, int);
+  otbGetObjectMemberMacro(SVMestimator, SVMType, int);
+  otbSetObjectMemberMacro(SVMestimator, SVMType, int);
+
+  otbGetObjectMemberMacro(SVMestimator, KernelType, int);
+  otbSetObjectMemberMacro(SVMestimator, KernelType, int);
+
+  otbGetObjectMemberMacro(SVMestimator, C, double);
+  otbSetObjectMemberMacro(SVMestimator, C, double);
 
-  itkGetMacro(KernelType, int);
-  itkSetMacro(KernelType, int);
+  // TODO : we should harmonize this parameter name : ParameterOptimization -> ParametersOptimization
+  bool GetParameterOptimization()
+    {
+    return this->m_SVMestimator->GetParametersOptimization();
+    }
+  void SetParameterOptimization(bool value)
+    {
+    this->m_SVMestimator->SetParametersOptimization(value);
+    this->Modified();
+    }
 
-  itkGetMacro(C, float);
-  itkSetMacro(C, float);
+  otbGetObjectMemberMacro(SVMestimator, DoProbabilityEstimates, bool);
+  void SetDoProbabilityEstimates(bool value)
+    {
+    this->m_SVMestimator->DoProbabilityEstimates(value);
+    }
 
-  itkGetMacro(ParameterOptimization, bool);
-  itkSetMacro(ParameterOptimization, bool);
- 
-  itkGetMacro(DoProbabilityEstimates, bool);
-  itkSetMacro(DoProbabilityEstimates, bool);
+  otbGetObjectMemberMacro(SVMestimator, Epsilon, double);
+  otbSetObjectMemberMacro(SVMestimator, Epsilon, double);
 
-  // itkGetMacro(Epsilon, int);
-  // itkSetMacro(Epsilon, int);
+  otbGetObjectMemberMacro(SVMestimator, P, double);
+  otbSetObjectMemberMacro(SVMestimator, P, double);
+
+  otbGetObjectMemberMacro(SVMestimator, Nu, double);
+  otbSetObjectMemberMacro(SVMestimator, Nu, double);
 
 protected:
   /** Constructor */
@@ -103,19 +125,10 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   LibSVMMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
 
-  int m_KernelType;
-  float m_C;
-  bool m_ParameterOptimization;
-  bool m_DoProbabilityEstimates;
   typename SVMEstimatorType::Pointer m_SVMestimator;
 };
 } // end namespace otb
diff --git a/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.txx
index 24e91e54a25406d100a8cae8f4b09741f124e80f..e95e97543d3a67537c848c8933165d5907370400 100644
--- a/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.txx
@@ -20,10 +20,6 @@
 
 #include <fstream>
 #include "otbLibSVMMachineLearningModel.h"
-//#include "otbOpenCVUtils.h"
-
-// SVM estimator
-//#include "otbSVMSampleListModelEstimator.h"
 
 namespace otb
 {
@@ -32,15 +28,14 @@ template <class TInputValue, class TOutputValue>
 LibSVMMachineLearningModel<TInputValue,TOutputValue>
 ::LibSVMMachineLearningModel()
 {
-  // m_SVMModel = new CvSVM;
-  // m_SVMType = CvSVM::C_SVC;
-  m_KernelType = LINEAR;
-  // m_TermCriteriaType = CV_TERMCRIT_ITER;
-  m_C = 1.0;
-  // m_Epsilon = 1e-6;
-  m_ParameterOptimization = false;
-  m_DoProbabilityEstimates = false;
   m_SVMestimator = SVMEstimatorType::New();
+  m_SVMestimator->SetSVMType(C_SVC);
+  m_SVMestimator->SetC(1.0);
+  m_SVMestimator->SetKernelType(LINEAR);
+  m_SVMestimator->SetParametersOptimization(false);
+  m_SVMestimator->DoProbabilityEstimates(false);
+  //m_SVMestimator->SetEpsilon(1e-6);
+  this->m_IsRegressionSupported = true;
 }
 
 
@@ -55,7 +50,7 @@ LibSVMMachineLearningModel<TInputValue,TOutputValue>
 template <class TInputValue, class TOutputValue>
 void
 LibSVMMachineLearningModel<TInputValue,TOutputValue>
-::TrainClassification()
+::Train()
 {
   // Set up SVM's parameters
   // CvSVMParams params;
@@ -64,25 +59,19 @@ LibSVMMachineLearningModel<TInputValue,TOutputValue>
   // params.term_crit   = cvTermCriteria(m_TermCriteriaType, m_MaxIter, m_Epsilon);
 
   // // Train the SVM
-
-  m_SVMestimator->SetC(m_C);
-  m_SVMestimator->SetKernelType(m_KernelType);
-  m_SVMestimator->SetParametersOptimization(m_ParameterOptimization);
-  m_SVMestimator->DoProbabilityEstimates(m_DoProbabilityEstimates);
-
   m_SVMestimator->SetInputSampleList(this->GetInputListSample());
   m_SVMestimator->SetTrainingSampleList(this->GetTargetListSample());
 
   m_SVMestimator->Update();
 
-  this->m_ConfidenceIndex = m_DoProbabilityEstimates;
+  this->m_ConfidenceIndex = this->GetDoProbabilityEstimates();
 }
 
 template <class TInputValue, class TOutputValue>
 typename LibSVMMachineLearningModel<TInputValue,TOutputValue>
 ::TargetSampleType
 LibSVMMachineLearningModel<TInputValue,TOutputValue>
-::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   TargetSampleType target;
 
diff --git a/Modules/Learning/Supervised/include/otbMachineLearningModel.h b/Modules/Learning/Supervised/include/otbMachineLearningModel.h
index ba0ca60d224fa138fe5a9e8e702993555394559b..b18e39e15c89fabff9ff2c640122740e710972f5 100644
--- a/Modules/Learning/Supervised/include/otbMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbMachineLearningModel.h
@@ -97,10 +97,10 @@ public:
   //@}
 
   /** Train the machine learning model */
-  void Train();
+  virtual void Train() =0;
 
   /** Predict values using the model */
-  TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality = NULL) const;
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality = NULL) const = 0;
 
   /** Classify all samples in InputListSample and fill TargetListSample with the associated label */
   void PredictAll();
@@ -140,6 +140,12 @@ public:
   itkGetObjectMacro(TargetListSample,TargetListSampleType);
   //@}
 
+  /**\name Use model in regression mode */
+  //@{
+  itkGetMacro(RegressionMode,bool);
+  void SetRegressionMode(bool flag);
+  //@}
+
 protected:
   /** Constructor */
   MachineLearningModel();
@@ -156,22 +162,14 @@ protected:
   /** Target list sample */
   typename TargetListSampleType::Pointer m_TargetListSample;
 
-  /** Train the machine learning model */
-  virtual void TrainRegression()
-  {
-    itkGenericExceptionMacro(<< "Regression mode not implemented.");
-  }
-  virtual void TrainClassification() = 0;
-  /** Predict values using the model */
-  virtual TargetSampleType PredictRegression(const InputSampleType& itkNotUsed(input)) const
-  {
-  itkGenericExceptionMacro(<< "Regression mode not implemented.");
-  }
-
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality = NULL) const = 0;
-
+  /** flag to choose between classification and regression modes */
   bool m_RegressionMode;
   
+  /** flag that indicates if the model supports regression, child
+   *  classes should modify it in their constructor if they support
+   *  regression mode */
+  bool m_IsRegressionSupported;
+
   /** flag that tells if the model support confidence index output */
   bool m_ConfidenceIndex;
 private:
diff --git a/Modules/Learning/Supervised/include/otbMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbMachineLearningModel.txx
index b4b6127c11c252c3d5a73d0a50716d48da6e7a49..8cc82055fe1a7a08594a01f3d9aeba6dfc12d927 100644
--- a/Modules/Learning/Supervised/include/otbMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbMachineLearningModel.txx
@@ -25,7 +25,10 @@ namespace otb
 
 template <class TInputValue, class TOutputValue, class TConfidenceValue>
 MachineLearningModel<TInputValue,TOutputValue,TConfidenceValue>
-::MachineLearningModel() : m_RegressionMode(false),m_ConfidenceIndex(false)
+::MachineLearningModel() :
+  m_RegressionMode(false),
+  m_IsRegressionSupported(false),
+  m_ConfidenceIndex(false)
 {}
 
 
@@ -37,23 +40,17 @@ MachineLearningModel<TInputValue,TOutputValue,TConfidenceValue>
 template <class TInputValue, class TOutputValue, class TConfidenceValue>
 void
 MachineLearningModel<TInputValue,TOutputValue,TConfidenceValue>
-::Train()
+::SetRegressionMode(bool flag)
 {
-  if(m_RegressionMode)
-    return this->TrainRegression();
-  else
-    return this->TrainClassification();
-}
-
-template <class TInputValue, class TOutputValue, class TConfidenceValue>
-typename MachineLearningModel<TInputValue,TOutputValue,TConfidenceValue>::TargetSampleType
-MachineLearningModel<TInputValue,TOutputValue,TConfidenceValue>
-::Predict(const InputSampleType& input, ConfidenceValueType *quality) const
-{
-  if(m_RegressionMode)
-    return this->PredictRegression(input);
-  else
-    return this->PredictClassification(input,quality);
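+  // regression mode can only be enabled on models that declare support for it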
+  if (flag && !m_IsRegressionSupported)
+    {
+    itkGenericExceptionMacro(<< "Regression mode not implemented.");
+    }
+  if (m_RegressionMode != flag)
+    {
+    m_RegressionMode = flag;
+    this->Modified();
+    }
 }
 
 template <class TInputValue, class TOutputValue, class TConfidenceValue>
diff --git a/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.h b/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.h
index 2eab72042d9ff5b3228df2d628ee0618127b01f4..fa1abade18cce80abb493406fabbcebbe97c9ddc 100644
--- a/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.h
@@ -151,6 +151,11 @@ public:
   itkGetMacro(Epsilon, double);
   itkSetMacro(Epsilon, double);
 
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
+
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
 
@@ -178,15 +183,14 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   NeuralNetworkMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
 
+  void CreateNetwork();
+  CvANN_MLP_TrainParams SetNetworkParameters();
+  void SetupNetworkAndTrain(cv::Mat& labels);
+
   CvANN_MLP * m_ANNModel;
   int m_TrainMethod;
   int m_ActivateFunction;
diff --git a/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.txx
index 01c31811d7c793630dfcfe5f0026cc6b7be73356..87eed252e00649ba21cd6c98e8c44441b99ef678 100644
--- a/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbNeuralNetworkMachineLearningModel.txx
@@ -43,6 +43,7 @@ NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::NeuralNetworkMachi
   m_CvMatOfLabels(0)
 {
   this->m_ConfidenceIndex = true;
+  this->m_IsRegressionSupported = true;
 }
 
 template<class TInputValue, class TOutputValue>
@@ -52,7 +53,7 @@ NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::~NeuralNetworkMach
   cvReleaseMat(&m_CvMatOfLabels);
 }
 
-/** Train the machine learning model */
+/** Sets the topology of the NN */
 template<class TInputValue, class TOutputValue>
 void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::SetLayerSizes(const std::vector<unsigned int> layers)
 {
@@ -130,9 +131,8 @@ void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::LabelsToMat(c
     }
 }
 
-/** Train the machine learning model */
 template<class TInputValue, class TOutputValue>
-void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::TrainClassification()
+void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::CreateNetwork()
 {
   //Create the neural network
   const unsigned int nbLayers = m_LayerSizes.size();
@@ -147,14 +147,11 @@ void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::TrainClassifi
     }
 
   m_ANNModel->create(layers, m_ActivateFunction, m_Alpha, m_Beta);
+}
 
-  //convert listsample to opencv matrix
-  cv::Mat samples;
-  otb::ListSampleToMat<InputListSampleType>(this->GetInputListSample(), samples);
-
-  cv::Mat matOutputANN;
-  LabelsToMat(this->GetTargetListSample(), matOutputANN);
-
+template<class TInputValue, class TOutputValue>
+CvANN_MLP_TrainParams NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::SetNetworkParameters()
+{
   CvANN_MLP_TrainParams params;
   params.train_method = m_TrainMethod;
   params.bp_dw_scale = m_BackPropDWScale;
@@ -163,14 +160,43 @@ void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::TrainClassifi
   params.rp_dw_min = m_RegPropDWMin;
   CvTermCriteria term_crit = cvTermCriteria(m_TermCriteriaType, m_MaxIter, m_Epsilon);
   params.term_crit = term_crit;
+  return params;
+}
 
+template<class TInputValue, class TOutputValue>
+void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::SetupNetworkAndTrain(cv::Mat& labels)
+{
+  //convert listsample to opencv matrix
+  cv::Mat samples;
+  otb::ListSampleToMat<InputListSampleType>(this->GetInputListSample(), samples);
+  this->CreateNetwork();
+  CvANN_MLP_TrainParams params = this->SetNetworkParameters();
   //train the Neural network model
-  m_ANNModel->train(samples, matOutputANN, cv::Mat(), cv::Mat(), params);
+  m_ANNModel->train(samples, labels, cv::Mat(), cv::Mat(), params);
+}
+
+/** Train the machine learning model (classification or regression) */
+template<class TInputValue, class TOutputValue>
+void NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::Train()
+{
+  //Transform the targets into a matrix of labels
+  cv::Mat matOutputANN;
+  if (this->m_RegressionMode)
+    {
+    // REGRESSION MODE : use the raw target values as network outputs
+    otb::ListSampleToMat<TargetListSampleType>(this->GetTargetListSample(), matOutputANN);
+    }
+  else
+    {
+    // CLASSIFICATION MODE : store the map between internal labels and output labels
+    LabelsToMat(this->GetTargetListSample(), matOutputANN);
+    }
+  this->SetupNetworkAndTrain(matOutputANN);
 }
 
 template<class TInputValue, class TOutputValue>
 typename NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::TargetSampleType NeuralNetworkMachineLearningModel<
-  TInputValue, TOutputValue>::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+  TInputValue, TOutputValue>::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
@@ -183,6 +209,15 @@ typename NeuralNetworkMachineLearningModel<TInputValue, TOutputValue>::TargetSam
   TargetSampleType target;
   float currentResponse = 0;
   float maxResponse = response.at<float> (0, 0);
+
+  if (this->m_RegressionMode)
+    {
+    // REGRESSION MODE : output only the first response
+    target[0] = maxResponse;
+    return target;
+    }
+
+  // CLASSIFICATION MODE : find the class with the highest response
   float secondResponse = -1e10;
   target[0] = m_CvMatOfLabels->data.i[0];
 
diff --git a/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.h b/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.h
index 31b5373ce16510512aa90065e151408a764898ed..8ecfcd2b51d607169b30dcc86dd59d2096d728f4 100644
--- a/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.h
@@ -52,6 +52,11 @@ public:
   itkNewMacro(Self);
   itkTypeMacro(NormalBayesMachineLearningModel, MachineLearningModel);
 
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
+
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
 
@@ -77,11 +82,6 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   NormalBayesMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
diff --git a/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.txx
index bfc163553ddb58eb7e3e9f2150fc9cb63dbfaca2..7538f02c8818fcd07851f26044724992d4747af5 100644
--- a/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbNormalBayesMachineLearningModel.txx
@@ -45,7 +45,7 @@ NormalBayesMachineLearningModel<TInputValue,TOutputValue>
 template <class TInputValue, class TOutputValue>
 void
 NormalBayesMachineLearningModel<TInputValue,TOutputValue>
-::TrainClassification()
+::Train()
 {
   //convert listsample to opencv matrix
   cv::Mat samples;
@@ -61,7 +61,7 @@ template <class TInputValue, class TOutputValue>
 typename NormalBayesMachineLearningModel<TInputValue,TOutputValue>
 ::TargetSampleType
 NormalBayesMachineLearningModel<TInputValue,TOutputValue>
-::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
diff --git a/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.h b/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.h
index 20aff52a7e200f47f9127005addc442d25fd5799..ec62b761358634daa8e268c1488770bb801d3543 100644
--- a/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.h
@@ -59,6 +59,11 @@ public:
   itkNewMacro(Self);
   itkTypeMacro(RandomForestsMachineLearningModel, MachineLearningModel);
 
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
+
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
 
@@ -115,9 +120,6 @@ public:
   itkGetMacro(TerminationCriteria, int);
   itkSetMacro(TerminationCriteria, int);
 
-  itkGetMacro(RegressionMode, bool);
-  itkSetMacro(RegressionMode, bool);
-
   /** Returns a matrix containing variable importance */
   VariableImportanceMatrixType GetVariableImportance();
   
@@ -139,11 +141,6 @@ protected:
   /* /\** Target list sample *\/ */
   /* typename TargetListSampleType::Pointer m_TargetListSample; */
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   RandomForestsMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
@@ -208,8 +205,6 @@ private:
   float m_ForestAccuracy;
   /** The type of the termination criteria */
   int m_TerminationCriteria;
-  /** Perform regression instead of classification */
-  bool m_RegressionMode;
 };
 } // end namespace otb
 
diff --git a/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.txx
index 0797c2fabf18077612470b70604d8a3492a64446..78642f1212ac9d75dc75d3bd9e9482d73bc948dc 100644
--- a/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbRandomForestsMachineLearningModel.txx
@@ -32,17 +32,17 @@ RandomForestsMachineLearningModel<TInputValue,TOutputValue>
  m_RFModel (new CvRTrees),
  m_MaxDepth(5),
  m_MinSampleCount(10),
- m_RegressionAccuracy(0),
+ m_RegressionAccuracy(0.01),
  m_ComputeSurrogateSplit(false),
  m_MaxNumberOfCategories(10),
  m_CalculateVariableImportance(false),
  m_MaxNumberOfVariables(0),
  m_MaxNumberOfTrees(100),
  m_ForestAccuracy(0.01),
- m_TerminationCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS),
- m_RegressionMode(false)
+ m_TerminationCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS)
 {
   this->m_ConfidenceIndex = true;
+  this->m_IsRegressionSupported = true;
 }
 
 
@@ -65,7 +65,7 @@ RandomForestsMachineLearningModel<TInputValue,TOutputValue>
 template <class TInputValue, class TOutputValue>
 void
 RandomForestsMachineLearningModel<TInputValue,TOutputValue>
-::TrainClassification()
+::Train()
 {
   //convert listsample to opencv matrix
   cv::Mat samples;
@@ -96,7 +96,7 @@ RandomForestsMachineLearningModel<TInputValue,TOutputValue>
   cv::Mat var_type = cv::Mat(this->GetInputListSample()->GetMeasurementVectorSize() + 1, 1, CV_8U );
   var_type.setTo(cv::Scalar(CV_VAR_NUMERICAL) ); // all inputs are numerical
 
-  if(m_RegressionMode)
+  if(this->m_RegressionMode)
     var_type.at<uchar>(this->GetInputListSample()->GetMeasurementVectorSize(), 0) = CV_VAR_NUMERICAL;
   else
     var_type.at<uchar>(this->GetInputListSample()->GetMeasurementVectorSize(), 0) = CV_VAR_CATEGORICAL;
@@ -110,7 +110,7 @@ template <class TInputValue, class TOutputValue>
 typename RandomForestsMachineLearningModel<TInputValue,TOutputValue>
 ::TargetSampleType
 RandomForestsMachineLearningModel<TInputValue,TOutputValue>
-::PredictClassification(const InputSampleType & value, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & value, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
diff --git a/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.h b/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.h
index 406bdd10e6b04a808fbd2be168e0f7504aa79578..569ab321427925e1e662d941d2be1a398fda54cc 100644
--- a/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.h
@@ -51,6 +51,11 @@ public:
   itkNewMacro(Self);
   itkTypeMacro(SVMMachineLearningModel, MachineLearningModel);
 
+  /** Train the machine learning model */
+  virtual void Train();
+  /** Predict values using the model */
+  virtual TargetSampleType Predict(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
+
   /** Save the model to file */
   virtual void Save(const std::string & filename, const std::string & name="");
 
@@ -126,11 +131,6 @@ protected:
   /** PrintSelf method */
   void PrintSelf(std::ostream& os, itk::Indent indent) const;
 
-  /** Train the machine learning model */
-  virtual void TrainClassification();
-  /** Predict values using the model */
-  virtual TargetSampleType PredictClassification(const InputSampleType& input, ConfidenceValueType *quality=NULL) const;
-
 private:
   SVMMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
diff --git a/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.txx
index 064db060d1670146d4b618d94fa529017475758e..9d0b7d685948ebf67b070c6a16fcc9e6f760ea34 100644
--- a/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbSVMMachineLearningModel.txx
@@ -50,6 +50,7 @@ SVMMachineLearningModel<TInputValue,TOutputValue>
  m_OutputP(0)
 {
   this->m_ConfidenceIndex = true;
+  this->m_IsRegressionSupported = true;
 }
 
 
@@ -64,8 +65,16 @@ SVMMachineLearningModel<TInputValue,TOutputValue>
 template <class TInputValue, class TOutputValue>
 void
 SVMMachineLearningModel<TInputValue,TOutputValue>
-::TrainClassification()
+::Train()
 {
+  // Check that the SVM type is compatible with the chosen mode (classif/regression)
+  if ( bool(m_SVMType == CvSVM::NU_SVR || m_SVMType == CvSVM::EPS_SVR) != this->m_RegressionMode)
+    {
+    itkGenericExceptionMacro("SVM type incompatible with chosen mode (classification or regression."
+                             "SVM types for classification are C_SVC, NU_SVC, ONE_CLASS. "
+                             "SVM types for regression are NU_SVR, EPS_SVR");
+    }
+
   //convert listsample to opencv matrix
   cv::Mat samples;
   otb::ListSampleToMat<InputListSampleType>(this->GetInputListSample(), samples);
@@ -109,7 +118,7 @@ template <class TInputValue, class TOutputValue>
 typename SVMMachineLearningModel<TInputValue,TOutputValue>
 ::TargetSampleType
 SVMMachineLearningModel<TInputValue,TOutputValue>
-::PredictClassification(const InputSampleType & input, ConfidenceValueType *quality) const
+::Predict(const InputSampleType & input, ConfidenceValueType *quality) const
 {
   //convert listsample to Mat
   cv::Mat sample;
@@ -135,10 +144,10 @@ void
 SVMMachineLearningModel<TInputValue,TOutputValue>
 ::Save(const std::string & filename, const std::string & name)
 {
-       if (name == "")
-              m_SVMModel->save(filename.c_str(), 0);
-       else
-              m_SVMModel->save(filename.c_str(), name.c_str());
+  if (name == "")
+    m_SVMModel->save(filename.c_str(), 0);
+  else
+    m_SVMModel->save(filename.c_str(), name.c_str());
 }
 
 template <class TInputValue, class TOutputValue>
@@ -147,9 +156,9 @@ SVMMachineLearningModel<TInputValue,TOutputValue>
 ::Load(const std::string & filename, const std::string & name)
 {
   if (name == "")
-         m_SVMModel->load(filename.c_str(), 0);
+    m_SVMModel->load(filename.c_str(), 0);
   else
-         m_SVMModel->load(filename.c_str(), name.c_str());
+    m_SVMModel->load(filename.c_str(), name.c_str());
 }
 
 template <class TInputValue, class TOutputValue>
@@ -157,29 +166,29 @@ bool
 SVMMachineLearningModel<TInputValue,TOutputValue>
 ::CanReadFile(const std::string & file)
 {
-   std::ifstream ifs;
-   ifs.open(file.c_str());
-
-   if(!ifs)
-   {
-      std::cerr<<"Could not read file "<<file<<std::endl;
-      return false;
-   }
-
-   while (!ifs.eof())
-   {
-      std::string line;
-      std::getline(ifs, line);
-
-      //if (line.find(m_SVMModel->getName()) != std::string::npos)
-      if (line.find(CV_TYPE_NAME_ML_SVM) != std::string::npos)
+  std::ifstream ifs;
+  ifs.open(file.c_str());
+
+  if(!ifs)
+    {
+    std::cerr<<"Could not read file "<<file<<std::endl;
+    return false;
+    }
+
+  while (!ifs.eof())
+    {
+    std::string line;
+    std::getline(ifs, line);
+
+    //if (line.find(m_SVMModel->getName()) != std::string::npos)
+    if (line.find(CV_TYPE_NAME_ML_SVM) != std::string::npos)
       {
-         //std::cout<<"Reading a "<<CV_TYPE_NAME_ML_SVM<<" model"<<std::endl;
-         return true;
+      //std::cout<<"Reading a "<<CV_TYPE_NAME_ML_SVM<<" model"<<std::endl;
+      return true;
       }
-   }
-   ifs.close();
-   return false;
+    }
+  ifs.close();
+  return false;
 }
 
 template <class TInputValue, class TOutputValue>
diff --git a/Modules/Learning/Supervised/test/CMakeLists.txt b/Modules/Learning/Supervised/test/CMakeLists.txt
index 7ffe8757b490c64081ffb6fe424a582d382d4df5..abca168f4f7794ea1a9133370c65555593b9bf19 100644
--- a/Modules/Learning/Supervised/test/CMakeLists.txt
+++ b/Modules/Learning/Supervised/test/CMakeLists.txt
@@ -7,6 +7,7 @@ otbConfusionMatrixMeasurementsTest.cxx
 otbMachineLearningModelCanRead.cxx
 otbTrainMachineLearningModel.cxx
 otbImageClassificationFilter.cxx
+otbMachineLearningRegressionTests.cxx
 )
 
 add_executable(otbSupervisedTestDriver ${OTBSupervisedTests})
diff --git a/Modules/Learning/Supervised/test/otbMachineLearningRegressionTests.cxx b/Modules/Learning/Supervised/test/otbMachineLearningRegressionTests.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..70330c391e4e508c5eb8d3e121d32e03c5c3f741
--- /dev/null
+++ b/Modules/Learning/Supervised/test/otbMachineLearningRegressionTests.cxx
@@ -0,0 +1,669 @@
+/*=========================================================================
+
+ Program:   ORFEO Toolbox
+ Language:  C++
+ Date:      $Date$
+ Version:   $Revision$
+
+
+ Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+ See OTBCopyright.txt for details.
+
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notices for more information.
+
+ =========================================================================*/
+
+#include "otbConfigure.h"
+
+#include "itkMersenneTwisterRandomVariateGenerator.h"
+
+#ifdef OTB_USE_OPENCV
+#include "otbNeuralNetworkMachineLearningModel.h"
+#include "otbSVMMachineLearningModel.h"
+#include "otbBoostMachineLearningModel.h"
+#include "otbDecisionTreeMachineLearningModel.h"
+#include "otbGradientBoostedTreeMachineLearningModel.h"
+#include "otbKNearestNeighborsMachineLearningModel.h"
+#include "otbRandomForestsMachineLearningModel.h"
+#endif
+
+#ifdef OTB_USE_LIBSVM
+#include "otbLibSVMMachineLearningModel.h"
+#endif
+
+typedef float PrecisionType;
+typedef otb::MachineLearningModel<PrecisionType,PrecisionType>   MachineLearningModelRegressionType;
+typedef MachineLearningModelRegressionType::InputValueType       InputValueRegressionType;
+typedef MachineLearningModelRegressionType::InputSampleType      InputSampleRegressionType;
+typedef MachineLearningModelRegressionType::InputListSampleType  InputListSampleRegressionType;
+typedef MachineLearningModelRegressionType::TargetValueType      TargetValueRegressionType;
+typedef MachineLearningModelRegressionType::TargetSampleType     TargetSampleRegressionType;
+typedef MachineLearningModelRegressionType::TargetListSampleType TargetListSampleRegressionType;
+typedef itk::Statistics::MersenneTwisterRandomVariateGenerator   RandomGeneratorType;
+
+const double epsilon = 0.1;
+
+typedef struct RegressionTestParamStruct
+{
+  double vMin;
+  double vMax;
+  size_t count;
+  double eps;
+} RegressionTestParam;
+
+template <typename TPrecision>
+struct LinearFunctionSampleGenerator
+{
+  typedef TPrecision PrecisionType;
+  LinearFunctionSampleGenerator(TPrecision a, TPrecision b)
+    : m_a(a), m_b(b), m_NbInputVars(1), m_NbOutputVars(1) {
+    m_isl = InputListSampleRegressionType::New();
+    m_tsl = TargetListSampleRegressionType::New();
+  }
+  void GenerateSamples(TPrecision sMin, TPrecision sMax, size_t nbSamples)
+  {
+    m_isl->Clear();
+    m_tsl->Clear();
+    m_isl->SetMeasurementVectorSize(m_NbInputVars);
+    m_tsl->SetMeasurementVectorSize(m_NbOutputVars);
+
+    RandomGeneratorType::Pointer randomGenerator = RandomGeneratorType::GetInstance();
+    InputSampleRegressionType inputSample;
+    inputSample.SetSize(m_NbInputVars);
+    TargetSampleRegressionType outputSample;
+
+    TPrecision sampleStep = (sMax-sMin)/nbSamples;
+    for(size_t i=0; i<nbSamples; ++i)
+      {
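+      // draw a random abscissa uniformly distributed in [sMin, sMax] and evaluate y = a*x + b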
+      TPrecision x = randomGenerator->GetUniformVariate(0.0, 1.0) * static_cast<TPrecision>(nbSamples);
+      TPrecision inputValue = sMin+ x*sampleStep;
+      inputSample[0] = inputValue;
+      outputSample[0] = m_a*inputValue+m_b;
+      m_isl->PushBack(inputSample);
+      m_tsl->PushBack(outputSample);
+      }
+  }
+
+  TPrecision m_a;
+  TPrecision m_b;
+  const size_t m_NbInputVars;
+  const size_t m_NbOutputVars;
+  InputListSampleRegressionType::Pointer m_isl;
+  TargetListSampleRegressionType::Pointer m_tsl;
+};
+
+template <typename TPrecision>
+struct BilinearFunctionSampleGenerator
+{
+  typedef TPrecision PrecisionType;
+  BilinearFunctionSampleGenerator(TPrecision a, TPrecision b, TPrecision c)
+    : m_a(a), m_b(b), m_c(c), m_NbInputVars(2), m_NbOutputVars(1) {
+    m_isl = InputListSampleRegressionType::New();
+    m_tsl = TargetListSampleRegressionType::New();
+  }
+  void GenerateSamples(TPrecision sMin, TPrecision sMax, size_t nbSamples)
+  {
+    m_isl->Clear();
+    m_tsl->Clear();
+    m_isl->SetMeasurementVectorSize(m_NbInputVars);
+    m_tsl->SetMeasurementVectorSize(m_NbOutputVars);
+
+    RandomGeneratorType::Pointer randomGenerator = RandomGeneratorType::GetInstance();
+    InputSampleRegressionType inputSample;
+    inputSample.SetSize(m_NbInputVars);
+    TargetSampleRegressionType outputSample;
+
+    TPrecision sampleStep = (sMax-sMin)/nbSamples;
+    for(size_t i=0; i<nbSamples; ++i)
+      {
+      TPrecision x = randomGenerator->GetUniformVariate(0.0, 1.0) * static_cast<TPrecision>(nbSamples);
+      TPrecision inputValue1 = sMin+ x*sampleStep;
+      x = randomGenerator->GetUniformVariate(0.0, 1.0) * static_cast<TPrecision>(nbSamples);
+      TPrecision inputValue2 = sMin+ x*sampleStep;
+      inputSample[0] = inputValue1;
+      inputSample[1] = inputValue2;
+      outputSample[0] = m_a*inputValue1+m_b*inputValue2+m_c;
+      m_isl->PushBack(inputSample);
+      m_tsl->PushBack(outputSample);
+      }
+  }
+
+  TPrecision m_a;
+  TPrecision m_b;
+  TPrecision m_c;
+  const size_t m_NbInputVars;
+  const size_t m_NbOutputVars;
+  InputListSampleRegressionType::Pointer m_isl;
+  TargetListSampleRegressionType::Pointer m_tsl;
+};
+
+template <typename TPrecision>
+struct PolynomialFunctionSampleGenerator
+{
+  typedef TPrecision PrecisionType;
+  PolynomialFunctionSampleGenerator(std::vector<TPrecision> c)
+    : m_c(c), m_NbInputVars(1), m_NbOutputVars(1) {
+    m_isl = InputListSampleRegressionType::New();
+    m_tsl = TargetListSampleRegressionType::New();
+  }
+  void GenerateSamples(TPrecision sMin, TPrecision sMax, size_t nbSamples)
+  {
+    m_isl->Clear();
+    m_tsl->Clear();
+    m_isl->SetMeasurementVectorSize(m_NbInputVars);
+    m_tsl->SetMeasurementVectorSize(m_NbOutputVars);
+
+    RandomGeneratorType::Pointer randomGenerator = RandomGeneratorType::GetInstance();
+    InputSampleRegressionType inputSample;
+    inputSample.SetSize(m_NbInputVars);
+    TargetSampleRegressionType outputSample;
+
+    TPrecision sampleStep = (sMax-sMin)/nbSamples;
+    for(size_t i=0; i<nbSamples; ++i)
+      {
+      TPrecision x = randomGenerator->GetUniformVariate(0.0, 1.0) * static_cast<TPrecision>(nbSamples);
+      TPrecision inputValue = sMin+ x*sampleStep;
+      inputSample[0] = inputValue;
+      TPrecision y = 0.0;
+      for (unsigned int j=0; j<m_c.size() ; ++j)
+        {
+        y += m_c[j] * pow(static_cast<double>(inputValue), static_cast<double>(j));
+        }
+      outputSample[0] = y;
+      m_isl->PushBack(inputSample);
+      m_tsl->PushBack(outputSample);
+      }
+  }
+
+  std::vector<TPrecision> m_c;
+  const size_t m_NbInputVars;
+  const size_t m_NbOutputVars;
+  InputListSampleRegressionType::Pointer m_isl;
+  TargetListSampleRegressionType::Pointer m_tsl;
+};
+
+template <typename SampleGeneratorType, typename RegressionType>
+int testRegression(SampleGeneratorType& sg, RegressionType& rgrsn, RegressionTestParam param)
+{
+  std::cout << "Generating training samples" << std::endl;
+  sg.GenerateSamples(param.vMin, param.vMax, param.count);
+
+  rgrsn->SetInputListSample(sg.m_isl);
+  rgrsn->SetTargetListSample(sg.m_tsl);
+  std::cout << "Training" << std::endl;
+  rgrsn->Train();
+
+  std::cout << "Generate validation samples"<<std::endl;
+  sg.GenerateSamples(param.vMin, param.vMax, param.count);
+
+  std::cout << "Validation" << std::endl;
+  //Check the prediction accuracy
+  typename InputListSampleRegressionType::Iterator sampleIt = sg.m_isl->Begin();
+  typename TargetListSampleRegressionType::Iterator resultIt = sg.m_tsl->Begin();
+  typename InputListSampleRegressionType::Iterator sampleLast = sg.m_isl->End();
+  typename TargetListSampleRegressionType::Iterator resultLast = sg.m_tsl->End();
+  typename SampleGeneratorType::PrecisionType rmse = 0.0;
+  while(sampleIt != sampleLast && resultIt != resultLast)
+    {
+    //typename SampleGeneratorType::PrecisionType invalue = sampleIt.GetMeasurementVector()[0];
+    typename SampleGeneratorType::PrecisionType prediction = rgrsn->Predict(sampleIt.GetMeasurementVector())[0];
+    typename SampleGeneratorType::PrecisionType expected = resultIt.GetMeasurementVector()[0];
+    rmse += pow(prediction - expected, 2.0);
+    ++sampleIt;
+    ++resultIt;
+    } 
+
+  rmse = sqrt( rmse / static_cast<double>(param.count) );
+  std::cout << "RMSE = "<< rmse << std::endl;
+  if(rmse > param.eps)
+    {
+    std::cout << "Failed : RMSE above expected precision !" << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
+
+#ifdef OTB_USE_LIBSVM
+MachineLearningModelRegressionType::Pointer getLibSVMRegressionModel()
+{
+  typedef otb::LibSVMMachineLearningModel<InputValueRegressionType,
+                                       TargetValueRegressionType>
+    libsvmType;
+  libsvmType::Pointer regression = libsvmType::New();
+  regression->SetRegressionMode(true);
+  regression->SetSVMType(EPSILON_SVR);
+  regression->SetKernelType(RBF);
+  regression->SetEpsilon(1e-5);
+  regression->SetParameterOptimization(true);
+  return regression.GetPointer();
+}
+
+int otbLibSVMRegressionTests(int itkNotUsed(argc),
+                             char * itkNotUsed(argv) [])
+{
+  int status = EXIT_SUCCESS;
+  int ret;
+  MachineLearningModelRegressionType::Pointer regression;
+
+  RegressionTestParam param;
+  param.vMin = -0.5;
+  param.vMax = 0.5;
+  param.count = 200;
+  param.eps = 0.1;
+
+  std::cout << "Testing regression on a linear monovariate function" << std::endl;
+  LinearFunctionSampleGenerator<PrecisionType> lfsg(2.0, 1.0);
+  regression = getLibSVMRegressionModel();
+  ret = testRegression(lfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a bilinear function" << std::endl;
+  BilinearFunctionSampleGenerator<PrecisionType> bfsg(2.0,-1.0,1.0);
+  regression = getLibSVMRegressionModel();
+  ret = testRegression(bfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a polynomial function" << std::endl;
+  std::vector<PrecisionType> coeffs;
+  coeffs.push_back(0.0);
+  coeffs.push_back(-1.0);
+  coeffs.push_back(0.0);
+  coeffs.push_back(4.0);
+  PolynomialFunctionSampleGenerator<PrecisionType> pfsg(coeffs);
+  regression = getLibSVMRegressionModel();
+  ret = testRegression(pfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  return status;
+}
+
+#endif
+
+#ifdef OTB_USE_OPENCV
+MachineLearningModelRegressionType::Pointer getNeuralNetworkRegressionModel(unsigned int nbInputVar)
+{
+  typedef otb::NeuralNetworkMachineLearningModel<InputValueRegressionType,
+                                                 TargetValueRegressionType>
+    NeuralNetworkType;
+  NeuralNetworkType::Pointer regression = NeuralNetworkType::New();
+
+  regression->SetRegressionMode(1);
+  regression->SetTrainMethod(CvANN_MLP_TrainParams::BACKPROP);
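+  // topology: nbInputVar inputs, one hidden layer of 5 neurons, a single output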
+  std::vector<unsigned int> layerSizes;
+  layerSizes.push_back(nbInputVar);
+  layerSizes.push_back(5);
+  layerSizes.push_back(1);
+  regression->SetLayerSizes(layerSizes);
+  regression->SetActivateFunction(CvANN_MLP::SIGMOID_SYM);
+  regression->SetAlpha(1.0);
+  regression->SetBeta(1.0);
+  regression->SetBackPropDWScale(0.1);
+  regression->SetBackPropMomentScale(0.1);
+  regression->SetRegPropDW0(0.1);
+  regression->SetRegPropDWMin(1e-7);
+  regression->SetTermCriteriaType(CV_TERMCRIT_EPS);
+  regression->SetEpsilon(1e-5);
+  regression->SetMaxIter(1e4);
+  return regression.GetPointer();
+}
+
+int otbNeuralNetworkRegressionTests(int itkNotUsed(argc),
+                                    char * itkNotUsed(argv) [])
+{
+  int status = EXIT_SUCCESS;
+  int ret;
+  MachineLearningModelRegressionType::Pointer regression;
+
+  RegressionTestParam param;
+  param.vMin = -0.5;
+  param.vMax = 0.5;
+  param.count = 20000;
+  param.eps = 0.1;
+
+  std::cout << "Testing regression on a linear monovariate function" << std::endl;
+  LinearFunctionSampleGenerator<PrecisionType> lfsg(2.0, 1.0);
+  regression = getNeuralNetworkRegressionModel(1);
+  ret = testRegression(lfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a bilinear function" << std::endl;
+  BilinearFunctionSampleGenerator<PrecisionType> bfsg(2.0,-1.0,1.0);
+  regression = getNeuralNetworkRegressionModel(2);
+  ret = testRegression(bfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a polynomial function" << std::endl;
+  std::vector<PrecisionType> coeffs;
+  coeffs.push_back(0.0);
+  coeffs.push_back(-1.0);
+  coeffs.push_back(0.0);
+  coeffs.push_back(4.0);
+  PolynomialFunctionSampleGenerator<PrecisionType> pfsg(coeffs);
+  regression = getNeuralNetworkRegressionModel(1);
+  ret = testRegression(pfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  return status;
+}
+
+
+MachineLearningModelRegressionType::Pointer getSVMRegressionModel()
+{
+  typedef otb::SVMMachineLearningModel<InputValueRegressionType,
+                                       TargetValueRegressionType>
+    SVMType;
+  SVMType::Pointer regression = SVMType::New();
+
+  regression->SetRegressionMode(1);
+  regression->SetSVMType(CvSVM::NU_SVR);
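+  // nu-SVR: nu in (0,1] bounds the fraction of support vectors / training errors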
+  regression->SetNu(0.5);
+  regression->SetKernelType(CvSVM::RBF);
+  regression->SetTermCriteriaType(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS);
+  regression->SetMaxIter(100000);
+  regression->SetEpsilon(1e-5);
+  regression->SetParameterOptimization(true);
+  return regression.GetPointer();
+}
+
+int otbSVMRegressionTests(int itkNotUsed(argc),
+                                      char * itkNotUsed(argv) [])
+{
+  int status = EXIT_SUCCESS;
+  int ret;
+  MachineLearningModelRegressionType::Pointer regression;
+
+  RegressionTestParam param;
+  param.vMin = -0.5;
+  param.vMax = 0.5;
+  param.count = 200;
+  param.eps = 0.1;
+
+  std::cout << "Testing regression on a linear monovariate function" << std::endl;
+  LinearFunctionSampleGenerator<PrecisionType> lfsg(2.0, 1.0);
+  regression = getSVMRegressionModel();
+  ret = testRegression(lfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a bilinear function" << std::endl;
+  BilinearFunctionSampleGenerator<PrecisionType> bfsg(2.0,-1.0,1.0);
+  regression = getSVMRegressionModel();
+  ret = testRegression(bfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a polynomial function" << std::endl;
+  std::vector<PrecisionType> coeffs;
+  coeffs.push_back(0.0);
+  coeffs.push_back(-1.0);
+  coeffs.push_back(0.0);
+  coeffs.push_back(4.0);
+  PolynomialFunctionSampleGenerator<PrecisionType> pfsg(coeffs);
+  regression = getSVMRegressionModel();
+  ret = testRegression(pfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  return status;
+}
+
+
+MachineLearningModelRegressionType::Pointer getDecisionTreeRegressionModel()
+{
+  typedef otb::DecisionTreeMachineLearningModel<InputValueRegressionType,
+                                       TargetValueRegressionType>
+    DTreeType;
+  DTreeType::Pointer regression = DTreeType::New();
+  regression->SetRegressionMode(true);
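+  // stop splitting a node once its residual error drops below this threshold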
+  regression->SetRegressionAccuracy(0.005);
+  return regression.GetPointer();
+}
+
+int otbDecisionTreeRegressionTests(int itkNotUsed(argc),
+                                      char * itkNotUsed(argv) [])
+{
+  int status = EXIT_SUCCESS;
+  int ret;
+  MachineLearningModelRegressionType::Pointer regression;
+
+  RegressionTestParam param;
+  param.vMin = -0.5;
+  param.vMax = 0.5;
+  param.count = 200;
+  param.eps = 0.1;
+
+  std::cout << "Testing regression on a linear monovariate function" << std::endl;
+  LinearFunctionSampleGenerator<PrecisionType> lfsg(2.0, 1.0);
+  regression = getDecisionTreeRegressionModel();
+  ret = testRegression(lfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a bilinear function" << std::endl;
+  BilinearFunctionSampleGenerator<PrecisionType> bfsg(2.0,-1.0,1.0);
+  regression = getDecisionTreeRegressionModel();
+  // increase the number of training samples for bilinear function
+  param.count = 1000;
+  ret = testRegression(bfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a polynomial function" << std::endl;
+  std::vector<PrecisionType> coeffs;
+  coeffs.push_back(0.0);
+  coeffs.push_back(-1.0);
+  coeffs.push_back(0.0);
+  coeffs.push_back(4.0);
+  PolynomialFunctionSampleGenerator<PrecisionType> pfsg(coeffs);
+  param.count = 200;
+  regression = getDecisionTreeRegressionModel();
+  ret = testRegression(pfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  return status;
+}
+
+
+MachineLearningModelRegressionType::Pointer getGradientBoostedTreeRegressionModel()
+{
+  typedef otb::GradientBoostedTreeMachineLearningModel<InputValueRegressionType,
+                                       TargetValueRegressionType>
+    GBTreeType;
+  GBTreeType::Pointer regression = GBTreeType::New();
+  regression->SetRegressionMode(true);
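+  // shrinkage acts as the learning rate, SubSamplePortion is the fraction of
+  // samples used at each boosting iteration, and squared loss suits regression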
+  regression->SetShrinkage(0.1);
+  regression->SetSubSamplePortion(0.8);
+  regression->SetLossFunctionType(CvGBTrees::SQUARED_LOSS);
+  return regression.GetPointer();
+}
+
+int otbGradientBoostedTreeRegressionTests(int itkNotUsed(argc),
+                                      char * itkNotUsed(argv) [])
+{
+  int status = EXIT_SUCCESS;
+  int ret;
+  MachineLearningModelRegressionType::Pointer regression;
+
+  RegressionTestParam param;
+  param.vMin = -0.5;
+  param.vMax = 0.5;
+  param.count = 200;
+  param.eps = 0.1;
+
+  std::cout << "Testing regression on a linear monovariate function" << std::endl;
+  LinearFunctionSampleGenerator<PrecisionType> lfsg(2.0, 1.0);
+  regression = getGradientBoostedTreeRegressionModel();
+  ret = testRegression(lfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a bilinear function" << std::endl;
+  BilinearFunctionSampleGenerator<PrecisionType> bfsg(2.0,-1.0,1.0);
+  // increase number of training samples for bilinear function
+  param.count = 1000;
+  regression = getGradientBoostedTreeRegressionModel();
+  ret = testRegression(bfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a polynomial function" << std::endl;
+  std::vector<PrecisionType> coeffs;
+  coeffs.push_back(0.0);
+  coeffs.push_back(-1.0);
+  coeffs.push_back(0.0);
+  coeffs.push_back(4.0);
+  PolynomialFunctionSampleGenerator<PrecisionType> pfsg(coeffs);
+  param.count = 200;
+  regression = getGradientBoostedTreeRegressionModel();
+  ret = testRegression(pfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  return status;
+}
+
+
+MachineLearningModelRegressionType::Pointer getKNearestNeighborsRegressionModel()
+{
+  typedef otb::KNearestNeighborsMachineLearningModel<InputValueRegressionType,
+                                       TargetValueRegressionType>
+    KNNType;
+  KNNType::Pointer regression = KNNType::New();
+  regression->SetRegressionMode(true);
+  regression->SetK(5);
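+  // use the median of the K nearest targets as the predicted value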
+  regression->SetDecisionRule(KNNType::KNN_MEDIAN);
+  return regression.GetPointer();
+}
+
+int otbKNearestNeighborsRegressionTests(int itkNotUsed(argc),
+                                      char * itkNotUsed(argv) [])
+{
+  int status = EXIT_SUCCESS;
+  int ret;
+  MachineLearningModelRegressionType::Pointer regression;
+
+  RegressionTestParam param;
+  param.vMin = -0.5;
+  param.vMax = 0.5;
+  param.count = 200;
+  param.eps = 0.1;
+
+  std::cout << "Testing regression on a linear monovariate function" << std::endl;
+  LinearFunctionSampleGenerator<PrecisionType> lfsg(2.0, 1.0);
+  regression = getKNearestNeighborsRegressionModel();
+  ret = testRegression(lfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a bilinear function" << std::endl;
+  BilinearFunctionSampleGenerator<PrecisionType> bfsg(2.0,-1.0,1.0);
+  regression = getKNearestNeighborsRegressionModel();
+  ret = testRegression(bfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a polynomial function" << std::endl;
+  std::vector<PrecisionType> coeffs;
+  coeffs.push_back(0.0);
+  coeffs.push_back(-1.0);
+  coeffs.push_back(0.0);
+  coeffs.push_back(4.0);
+  PolynomialFunctionSampleGenerator<PrecisionType> pfsg(coeffs);
+  regression = getKNearestNeighborsRegressionModel();
+  ret = testRegression(pfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  return status;
+}
+
+
+MachineLearningModelRegressionType::Pointer getRandomForestsRegressionModel()
+{
+  typedef otb::RandomForestsMachineLearningModel<InputValueRegressionType,
+                                       TargetValueRegressionType>
+    RFType;
+  RFType::Pointer regression = RFType::New();
+  regression->SetRegressionMode(true);
+  regression->SetRegressionAccuracy(0.005);
+  return regression.GetPointer();
+}
+
+
+int otbRandomForestsRegressionTests(int itkNotUsed(argc),
+                                      char * itkNotUsed(argv) [])
+{
+  int status = EXIT_SUCCESS;
+  int ret;
+  MachineLearningModelRegressionType::Pointer regression;
+
+  RegressionTestParam param;
+  param.vMin = -0.5;
+  param.vMax = 0.5;
+  param.count = 200;
+  param.eps = 0.1;
+
+  std::cout << "Testing regression on a linear monovariate function" << std::endl;
+  LinearFunctionSampleGenerator<PrecisionType> lfsg(2.0, 1.0);
+  regression = getRandomForestsRegressionModel();
+  ret = testRegression(lfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a bilinear function" << std::endl;
+  BilinearFunctionSampleGenerator<PrecisionType> bfsg(2.0,-1.0,1.0);
+  // increase number of training samples for bilinear function
+  param.count = 1000;
+  regression = getRandomForestsRegressionModel();
+  ret = testRegression(bfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  std::cout << "Testing regression on a polynomial function" << std::endl;
+  std::vector<PrecisionType> coeffs;
+  coeffs.push_back(0.0);
+  coeffs.push_back(-1.0);
+  coeffs.push_back(0.0);
+  coeffs.push_back(4.0);
+  PolynomialFunctionSampleGenerator<PrecisionType> pfsg(coeffs);
+  param.count = 200;
+  regression = getRandomForestsRegressionModel();
+  ret = testRegression(pfsg,regression,param);
+  if (ret == EXIT_FAILURE)
+    {
+    status = EXIT_FAILURE;
+    }
+  return status;
+}
+#endif
diff --git a/Modules/Learning/Supervised/test/otbSupervisedTestDriver.cxx b/Modules/Learning/Supervised/test/otbSupervisedTestDriver.cxx
index e498bd082ed53ec2862ea5b99913b6b7de897b38..2bf373c87f9f0579df333951b83ae818739d25c2 100644
--- a/Modules/Learning/Supervised/test/otbSupervisedTestDriver.cxx
+++ b/Modules/Learning/Supervised/test/otbSupervisedTestDriver.cxx
@@ -28,6 +28,7 @@ void RegisterTests()
   #ifdef OTB_USE_LIBSVM
   REGISTER_TEST(otbLibSVMMachineLearningModelNew);
   REGISTER_TEST(otbLibSVMMachineLearningModel);
+  REGISTER_TEST(otbLibSVMRegressionTests);
   #endif
   
   #ifdef OTB_USE_OPENCV
@@ -48,8 +49,14 @@ void RegisterTests()
   REGISTER_TEST(otbDecisionTreeMachineLearningModel);
   REGISTER_TEST(otbGradientBoostedTreeMachineLearningModelNew);
   REGISTER_TEST(otbGradientBoostedTreeMachineLearningModel);
+  REGISTER_TEST(otbNeuralNetworkRegressionTests);
+  REGISTER_TEST(otbSVMRegressionTests);
+  REGISTER_TEST(otbDecisionTreeRegressionTests);
+  REGISTER_TEST(otbGradientBoostedTreeRegressionTests);
+  REGISTER_TEST(otbKNearestNeighborsRegressionTests);
+  REGISTER_TEST(otbRandomForestsRegressionTests);
   #endif
   
   REGISTER_TEST(otbImageClassificationFilterNew);
   REGISTER_TEST(otbImageClassificationFilter);
 }
diff --git a/Modules/Learning/Supervised/test/otbTrainMachineLearningModel.cxx b/Modules/Learning/Supervised/test/otbTrainMachineLearningModel.cxx
index 39a2a2f7628c98aca338093af70d1d75d35badee..64d69a120a2dd2cd8e3418ef248ff8c12324f6ed 100644
--- a/Modules/Learning/Supervised/test/otbTrainMachineLearningModel.cxx
+++ b/Modules/Learning/Supervised/test/otbTrainMachineLearningModel.cxx
@@ -381,20 +381,12 @@ int otbSVMMachineLearningRegressionModel(int argc, char * argv[])
   SVMType::Pointer classifier = SVMType::New();
 
   //Init SVM type in regression mode
-  //Available mode for regression in  openCV are eps_svr and nu_svr
+  classifier->SetRegressionMode(1);
   classifier->SetSVMType(CvSVM::EPS_SVR);
-  //classifier->SetSVMType(CvSVM::NU_SVR);
-
-  //P should be >0. Increasing value give better result. Need to investigate why.
   classifier->SetP(10);
-
-  //IN case you're using nu_svr you should set nu to a positive value between 0
-  //and 1.
-  //classifier->SetNu(0.9);
-
-  //Use RBF kernel.Don't know what is recommended in case of svm regression
   classifier->SetKernelType(CvSVM::RBF);
 
+
   classifier->SetInputListSample(samples);
   classifier->SetTargetListSample(labels);
   classifier->Train();
@@ -432,6 +424,7 @@ int otbSVMMachineLearningRegressionModel(int argc, char * argv[])
     }
   else
     {
+    std::cout << age << "\t" << predicted->GetMeasurementVector(0)[0] << "\n";
     return EXIT_FAILURE;
     }
 }
@@ -447,9 +440,9 @@ int otbKNearestNeighborsMachineLearningModel(int argc, char * argv[])
 {
   if (argc != 3 )
     {
-      std::cout<<"Wrong number of arguments "<<std::endl;
-      std::cout<<"Usage : sample file, output file"<<std::endl;
-      return EXIT_FAILURE;
+    std::cout<<"Wrong number of arguments "<<std::endl;
+    std::cout<<"Usage : sample file, output file"<<std::endl;
+    return EXIT_FAILURE;
     }
 
   typedef otb::KNearestNeighborsMachineLearningModel<InputValueType,TargetValueType> KNearestNeighborsType;
@@ -528,11 +521,11 @@ int otbRandomForestsMachineLearningModelNew(int itkNotUsed(argc), char * itkNotU
 int otbRandomForestsMachineLearningModel(int argc, char * argv[])
 {
   if (argc != 3 )
-  {
+    {
     std::cout<<"Wrong number of arguments "<<std::endl;
     std::cout<<"Usage : sample file, output file "<<std::endl;
     return EXIT_FAILURE;
-  }
+    }
 
   typedef otb::RandomForestsMachineLearningModel<InputValueType,TargetValueType> RandomForestType;
   InputListSampleType::Pointer samples = InputListSampleType::New();
@@ -620,9 +613,9 @@ int otbBoostMachineLearningModel(int argc, char * argv[])
 {
   if (argc != 3 )
     {
-      std::cout<<"Wrong number of arguments "<<std::endl;
-      std::cout<<"Usage : sample file, output file "<<std::endl;
-      return EXIT_FAILURE;
+    std::cout<<"Wrong number of arguments "<<std::endl;
+    std::cout<<"Usage : sample file, output file "<<std::endl;
+    return EXIT_FAILURE;
     }
 
   typedef otb::BoostMachineLearningModel<InputValueType, TargetValueType> BoostType;
diff --git a/Modules/Learning/Supervised/test/tests-libsvm.cmake b/Modules/Learning/Supervised/test/tests-libsvm.cmake
index 04764df301cb2b2fa7f1951d023495ddf93b5b4b..30dc7eec130d6baa87a8fd37296ce7501548dc7c 100644
--- a/Modules/Learning/Supervised/test/tests-libsvm.cmake
+++ b/Modules/Learning/Supervised/test/tests-libsvm.cmake
@@ -22,3 +22,7 @@ otb_add_test(NAME leTuLibSVMMachineLearningModelCanRead COMMAND otbSupervisedTes
   ${TEMP}/libsvm_model.txt
   )
 set_property(TEST leTuLibSVMMachineLearningModelCanRead PROPERTY DEPENDS leTvLibSVMMachineLearningModel)
+
+otb_add_test(NAME leTvLibSVMMachineLearningModelReg COMMAND otbSupervisedTestDriver
+  otbLibSVMRegressionTests
+  )
diff --git a/Modules/Learning/Supervised/test/tests-opencv.cmake b/Modules/Learning/Supervised/test/tests-opencv.cmake
index 55855a4aa912a3b3497bb9b8d9f2bc69d6dccea4..3be269fe8d8e7f17f85f9e4d11c944feb2dc3e02 100644
--- a/Modules/Learning/Supervised/test/tests-opencv.cmake
+++ b/Modules/Learning/Supervised/test/tests-opencv.cmake
@@ -10,6 +10,32 @@ otb_add_test(NAME leTvANNMachineLearningModel COMMAND otbSupervisedTestDriver
   ${TEMP}/ann_model.txt
   )
 
+# ------------------ Regression tests --------------------
+otb_add_test(NAME leTvANNMachineLearningModelReg COMMAND otbSupervisedTestDriver
+  otbNeuralNetworkRegressionTests
+  )
+
+otb_add_test(NAME leTvSVMMachineLearningModelReg COMMAND otbSupervisedTestDriver
+  otbSVMRegressionTests
+  )
+
+otb_add_test(NAME leTvDecisionTreeMachineLearningModelReg COMMAND otbSupervisedTestDriver
+  otbDecisionTreeRegressionTests
+  )
+
+otb_add_test(NAME leTvGradientBoostedTreeMachineLearningModelReg COMMAND otbSupervisedTestDriver
+  otbGradientBoostedTreeRegressionTests
+  )
+
+otb_add_test(NAME leTvKNearestNeighborsMachineLearningModelReg COMMAND otbSupervisedTestDriver
+  otbKNearestNeighborsRegressionTests
+  )
+
+otb_add_test(NAME leTvRandomForestsMachineLearningModelReg COMMAND otbSupervisedTestDriver
+  otbRandomForestsRegressionTests
+  )
+# --------------------------------------------------------------
+
 otb_add_test(NAME leTuSVMMachineLearningModelNew COMMAND otbSupervisedTestDriver
   otbSVMMachineLearningModelNew)