diff --git a/CMake/OTBModuleMacros.cmake b/CMake/OTBModuleMacros.cmake
index 0938a0f2fb9ed50fe44edf7017d9a4ac325e41b7..afb9a29f5c45b2509d0cd0bfe350df5a59ea846f 100644
--- a/CMake/OTBModuleMacros.cmake
+++ b/CMake/OTBModuleMacros.cmake
@@ -277,6 +277,12 @@ macro(otb_module_test)
   foreach(dep IN LISTS OTB_MODULE_${otb-module-test}_DEPENDS)
     list(APPEND ${otb-module-test}_LIBRARIES "${${dep}_LIBRARIES}")
   endforeach()
+  # make sure the test can link with optional libs
+  foreach(dep IN LISTS OTB_MODULE_${otb-module}_OPTIONAL_DEPENDS)
+    if (${dep}_ENABLED)
+      list(APPEND ${otb-module-test}_LIBRARIES "${${dep}_LIBRARIES}")
+    endif()
+  endforeach()
 endmacro()
 
 macro(otb_module_warnings_disable)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 985c8e349de0fdee5191f10d5e6b56167e6c6ae7..408e015d961232587c6809b168192fe0c11ce867 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -58,6 +58,13 @@ then send a merge request.
 Note that we also accept PRs on our [GitHub mirror](https://github.com/orfeotoolbox/OTB)
 which we will manually merge.
 
+Feature branches are tested on multiple platforms on the OTB test infrastructure (a.k.a. the [Dashboard](https://dash.orfeo-toolbox.org/)). They appear in the FeatureBranches section.
+
+Caveat: even if the Dashboard build on the develop branch is broken, pushing
+fixes directly to develop is not allowed. The developer trying to fix the
+build should create a merge request and submit it for review. Direct pushes to
+develop without review must be avoided.
+
 ### Commit message
 
 On your feature branch, write a good [commit message](https://xkcd.com/1296/):
@@ -93,7 +100,11 @@ OTB team.
 * Merge requests **must receive at least 2 positives votes from core developers** (members of Main Repositories group in Gitlab with at least "Developer" level; this includes PSC members) before being merged
 * The merger is responsible for checking that the branch is up-to-date with develop
 * Merge requests can be merged by anyone (not just PSC or RM) with push access to develop
-* Merge requests can be merged once the dashboard is proven green for this branch
+* Merge requests can be merged once the dashboard is proven green for this branch.
+  This condition is mandatory unless reviewers and authors explicitly agree that
+  it can be skipped (for instance in case of documentation merges or compilation
+  fixes on develop). Branches of that sort can be identified with the ~patch label,
+  which tells the reviewer that the author would like to merge without dashboard testing.
 
 Branches can be registered for dashboard testing by adding one line in `Config/feature_branches.txt` in [otb-devutils repository](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb-devutils.git).
 
@@ -162,6 +173,7 @@ Regarding labels, we use the following set:
   correspond to a Request for Comments that has turned into a development action
 * ~bug: Bug, crash or unexpected behavior, reported by a user or a developer
 * ~feature: Feature request expressed by an OTB user/developer
+* ~patch: A small patch fixing build warnings, compilation errors, typos in logs or documentation
 * ~"To Do": action is planned
 * ~Doing: work in progress
 * ~api ~app ~documentation ~monteverdi ~packaging ~qgis: optional context information
diff --git a/Documentation/Cookbook/rst/conf.py.in b/Documentation/Cookbook/rst/conf.py.in
index 30ce0dc728056846e6a2bdf192840a2beb1b0df6..1e4442b07f398766735fa3898e75692c5b9d52d4 100644
--- a/Documentation/Cookbook/rst/conf.py.in
+++ b/Documentation/Cookbook/rst/conf.py.in
@@ -211,7 +211,7 @@ latex_elements = {
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-  ('index_TOC', 'CookBook-@OTB_VERSION_MAJOR@.@OTB_VERSION_MINOR@.tex', u'OTB CookBook Documentation',
+  ('index_TOC', 'CookBook-@OTB_VERSION_MAJOR@.@OTB_VERSION_MINOR@.@OTB_VERSION_PATCH@.tex', u'OTB CookBook Documentation',
    u'OTB Team', 'manual'),
 ]
 
diff --git a/Modules/Applications/AppClassification/app/otbKMeansClassification.cxx b/Modules/Applications/AppClassification/app/otbKMeansClassification.cxx
index 52e0d6b426d7defb6343113029c17abe6dcacd15..ee04d9cd0fb2ff3035ca2208a8cd64c735cd93d8 100644
--- a/Modules/Applications/AppClassification/app/otbKMeansClassification.cxx
+++ b/Modules/Applications/AppClassification/app/otbKMeansClassification.cxx
@@ -291,12 +291,15 @@ protected:
         itkExceptionMacro(<< "File : " << modelFileName << " couldn't be opened");
       }
 
-      // get the end line with the centroids
+      // get the line with the centroids (starts with "2 ")
       std::string line, centroidLine;
       while(std::getline(infile,line))
       {
-        if (!line.empty())
+        if (line.size() > 2 && line[0] == '2' && line[1] == ' ')
+          {
           centroidLine = line;
+          break;
+          }
       }
 
       std::vector<std::string> centroidElm;
diff --git a/Modules/Applications/AppClassification/app/otbSampleAugmentation.cxx b/Modules/Applications/AppClassification/app/otbSampleAugmentation.cxx
index 6bb7382856e619cce13f501acea90530cfd18f37..093e690739de5eba191f11ac0dba3c48e93ae64d 100644
--- a/Modules/Applications/AppClassification/app/otbSampleAugmentation.cxx
+++ b/Modules/Applications/AppClassification/app/otbSampleAugmentation.cxx
@@ -51,7 +51,7 @@ public:
 private:
   SampleAugmentation() {}
 
-  void DoInit()
+  void DoInit() override
   {
     SetName("SampleAugmentation");
     SetDescription("Generates synthetic samples from a sample data file.");
@@ -145,7 +145,7 @@ private:
     SetOfficialDocLink();
   }
 
-  void DoUpdateParameters()
+  void DoUpdateParameters() override
   {
     if ( HasValue("in") )
       {
@@ -182,7 +182,7 @@ private:
       }
   }
 
-  void DoExecute()
+  void DoExecute() override
     {
     ogr::DataSource::Pointer vectors;
     ogr::DataSource::Pointer output;
diff --git a/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx b/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx
index 1df2c463fc26c531095154515039a9245d0ba5c1..d9dd8e8816fec2cce2502a81b624a59ce33817ab 100644
--- a/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx
+++ b/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx
@@ -124,12 +124,12 @@ protected:
 private:
   void DoInit() override
   {
-    SetName("DimensionalityReduction");
+    SetName("ImageDimensionalityReduction");
     SetDescription("Performs dimensionality reduction of the input image "
       "according to a dimensionality reduction model file.");
 
     // Documentation
-    SetDocName("DimensionalityReduction");
+    SetDocName("Image Dimensionality Reduction");
     SetDocLongDescription("This application reduces the dimension of an input"
                           " image, based on a machine learning model file produced by"
                           " the TrainDimensionalityReduction application. Pixels of the "
diff --git a/Modules/Applications/AppImageUtils/app/otbExtractROI.cxx b/Modules/Applications/AppImageUtils/app/otbExtractROI.cxx
index 66f2886edc0c2b5d47acca8bef211319d023726e..adcb2f3f43e3596bd5ce3030bbf94940df8033ed 100644
--- a/Modules/Applications/AppImageUtils/app/otbExtractROI.cxx
+++ b/Modules/Applications/AppImageUtils/app/otbExtractROI.cxx
@@ -255,31 +255,39 @@ private:
       inImage->UpdateOutputInformation();
       ImageType::RegionType  largestRegion = inImage->GetLargestPossibleRegion();
 
-      bool userExtent = !HasUserValue( "mode.extent.ulx" ) \
-                     && !HasUserValue( "mode.extent.uly" ) \
-                     && !HasUserValue( "mode.extent.lrx" ) \
-                     && !HasUserValue( "mode.extent.lry" );
-
-      bool userRadius = !HasUserValue( "mode.radius.r" ) \
-                     && !HasUserValue( "mode.radius.cx" ) \
-                     && !HasUserValue( "mode.radius.cy" );
-
-      // Update the sizes only if the user has not defined a size
-      if (!HasUserValue("sizex")  && !HasUserValue("sizey") )
+      ImageType::RegionType currentLargest;
+      currentLargest.SetSize( 0 , GetDefaultParameterInt("sizex") );
+      currentLargest.SetSize( 1 , GetDefaultParameterInt("sizey") );
+      currentLargest.SetIndex( 1 , GetDefaultParameterInt("starty") );
+      currentLargest.SetIndex( 0 , GetDefaultParameterInt("startx") );
+      // Update default only if largest has changed
+      if ( currentLargest != largestRegion )
         {
-        SetParameterInt("sizex",largestRegion.GetSize()[0]);
-        SetParameterInt("sizey",largestRegion.GetSize()[1]);
-
-        // Compute extent parameter with default sizex and sizey
-        if ( GetParameterString( "mode" ) == "extent" && userExtent )
-          ComputeExtentFromIndex( inImage, largestRegion );
-
-        // Compute radius parameter with default sizex and sizey
-        if ( GetParameterString( "mode" ) == "radius" && userRadius )
-          ComputeRadiusFromIndex( inImage , largestRegion );
+        // Setting maximum value
+        SetMaximumParameterIntValue( "sizex" , largestRegion.GetSize(0) );
+        SetMaximumParameterIntValue( "sizey" , largestRegion.GetSize(1) );
+        SetMaximumParameterIntValue( "startx" , 
+          largestRegion.GetIndex(0) + largestRegion.GetSize(0) );
+        SetMaximumParameterIntValue( "starty" , 
+          largestRegion.GetIndex(1) + largestRegion.GetSize(1) );
+        // Setting default value
+        SetDefaultParameterInt( "sizex" , largestRegion.GetSize(0) );
+        SetDefaultParameterInt( "sizey" , largestRegion.GetSize(1) );
+        SetDefaultParameterInt( "startx" , largestRegion.GetIndex(0) );
+        SetDefaultParameterInt( "starty" , largestRegion.GetIndex(1) );
+        // Setting actual value
+        if ( !HasUserValue("sizex") )
+          SetParameterFloat( "sizex" , 
+            GetDefaultParameterFloat( "sizex" ) );
+        if ( !HasUserValue("sizey") )
+          SetParameterFloat( "sizey" , 
+            GetDefaultParameterFloat( "sizey" ) );
+        // Compute radius parameters default value
+        ComputeRadiusFromIndex( inImage , largestRegion );
+        // Compute extent parameters default value
+        ComputeExtentFromIndex( inImage, largestRegion );
         }
 
-
       unsigned int nbComponents = inImage->GetNumberOfComponentsPerPixel();
       ListViewParameter *clParam = 
                   dynamic_cast<ListViewParameter*>(GetParameterByKey("cl"));
@@ -296,55 +304,25 @@ private:
           }
         }
 
-      // Put the limit of the index and the size relative the image
-      
-      SetMaximumParameterIntValue("sizex", largestRegion.GetSize(0));      
-      SetMaximumParameterIntValue("sizey", largestRegion.GetSize(1));      
-      SetMaximumParameterIntValue("startx", largestRegion.GetSize(0));
-      SetMaximumParameterIntValue("starty", largestRegion.GetSize(1));
-
-      
       // Update the start and size parameter depending on the mode
-      if ( GetParameterString("mode") == "extent" && !userExtent)
+      if ( GetParameterString("mode") == "extent" )
           ComputeIndexFromExtent();
-      if (GetParameterString("mode") == "radius" && !userRadius)
+      if (GetParameterString("mode") == "radius" )
           ComputeIndexFromRadius();
 
-      
-      // Crop the roi region to be included in the largest possible
-      // region
-      if(!this->CropRegionOfInterest())
-        {
-        // Put the index of the ROI to origin and try to crop again
-        SetParameterInt("startx",0);
-        SetParameterInt("starty",0);
-        this->CropRegionOfInterest();
-        }
-
       if(GetParameterString("mode")=="fit")
         {
-        this->SetParameterRole("startx",Role_Output);
-        this->SetParameterRole("starty",Role_Output);
-        this->SetParameterRole("sizex",Role_Output);
-        this->SetParameterRole("sizey",Role_Output);
-        this->DisableParameter("startx");
-        this->DisableParameter("starty");
-        this->DisableParameter("sizex");
-        this->DisableParameter("sizey");
+        SetParameterRole("startx",Role_Output);
+        SetParameterRole("starty",Role_Output);
+        SetParameterRole("sizex",Role_Output);
+        SetParameterRole("sizey",Role_Output);
         }
-
-      else if(GetParameterString("mode")=="standard" || 
-              GetParameterString("mode")=="extent" ||
-              GetParameterString("mode")== "radius" )
+      else
         {
-        this->SetParameterRole("startx",Role_Input);
-        this->SetParameterRole("starty",Role_Input);
-        this->SetParameterRole("sizex",Role_Input);
-        this->SetParameterRole("sizey",Role_Input);
-        this->EnableParameter("startx");
-        this->EnableParameter("starty");
-        this->EnableParameter("sizex");
-        this->EnableParameter("sizey");
+        SetParameterRole("startx",Role_Input);
+        SetParameterRole("starty",Role_Input);
+        SetParameterRole("sizex",Role_Input);
+        SetParameterRole("sizey",Role_Input);
         }
       }
 
@@ -355,6 +333,10 @@ private:
       MandatoryOff("starty");
       MandatoryOff("sizex");
       MandatoryOff("sizey");
+      DisableParameter("startx");
+      DisableParameter("starty");
+      DisableParameter("sizex");
+      DisableParameter("sizey");
       }
     else
       {
@@ -362,6 +344,10 @@ private:
       MandatoryOn("starty");
       MandatoryOn("sizex");
       MandatoryOn("sizey");
+      EnableParameter("startx");
+      EnableParameter("starty");
+      EnableParameter("sizex");
+      EnableParameter("sizey");
       }
 
     if ( GetParameterString( "mode" ) == "fit" && HasValue( "mode.fit.im" ) )
@@ -384,18 +370,15 @@ private:
     region.SetSize(1,  GetParameterInt("sizey"));
     region.SetIndex(0, GetParameterInt("startx"));
     region.SetIndex(1, GetParameterInt("starty"));
-    if ( HasValue("in") )
+    ImageType* inImage = GetParameterImage("in");
+    inImage->UpdateOutputInformation();
+    if (region.Crop(inImage->GetLargestPossibleRegion()))
       {
-      ImageType* inImage = GetParameterImage("in");
-      inImage->UpdateOutputInformation();
-      if (region.Crop(inImage->GetLargestPossibleRegion()))
-        {
-        SetParameterInt("sizex",region.GetSize(0));
-        SetParameterInt("sizey",region.GetSize(1));
-        SetParameterInt("startx",region.GetIndex(0));
-        SetParameterInt("starty",region.GetIndex(1));
-        return true;
-        }
+      SetParameterInt("sizex",region.GetSize(0));
+      SetParameterInt("sizey",region.GetSize(1));
+      SetParameterInt("startx",region.GetIndex(0));
+      SetParameterInt("starty",region.GetIndex(1));
+      return true;
       }
     return false;
   }
@@ -404,20 +387,14 @@ private:
   ComputeIndexFromExtent()
   {
     assert( GetParameterString( "mode" ) == "extent" );
-    int pixelValue = -1 ;
     // Compute standard parameter depending on the unit chosen by the user
+    FloatVectorImageType::IndexType uli , lri;
     if (GetParameterString( "mode.extent.unit" ) == "pxl" )
       {
-      pixelValue = std::round( GetParameterFloat( "mode.extent.ulx" ) );
-      SetParameterInt( "startx", pixelValue);
-      pixelValue = std::round( GetParameterFloat( "mode.extent.lrx" ) \
-                   - pixelValue ) + 1 ;
-      SetParameterInt( "sizex", pixelValue);
-      pixelValue = std::round( GetParameterFloat( "mode.extent.uly" ) );
-      SetParameterInt( "starty", pixelValue);
-      pixelValue = std::round( GetParameterFloat( "mode.extent.lry" ) \
-                   - pixelValue ) + 1 ;
-      SetParameterInt( "sizey", pixelValue);
+      uli[0] = std::round( GetParameterFloat( "mode.extent.ulx" ) );
+      uli[1] = std::round( GetParameterFloat( "mode.extent.uly" ) );
+      lri[0] = std::round( GetParameterFloat( "mode.extent.lrx" ) );
+      lri[1] = std::round( GetParameterFloat( "mode.extent.lry" ) );
       }
     else if( GetParameterString( "mode.extent.unit" ) == "phy" )
       {
@@ -426,20 +403,11 @@ private:
       ulp[ 1 ] = GetParameterFloat( "mode.extent.uly" );
       lrp[ 0 ] = GetParameterFloat( "mode.extent.lrx" );
       lrp[ 1 ] = GetParameterFloat( "mode.extent.lry" );
-
       ImageType * inImage = GetParameterImage("in");
-      FloatVectorImageType::IndexType uli , lri;
       inImage->TransformPhysicalPointToIndex(ulp,uli);
-      inImage->TransformPhysicalPointToIndex(lrp,lri);
-
-      SetParameterInt( "startx", uli[0]);
-      SetParameterInt( "starty", uli[1]);
-
-      SetParameterInt( "sizex", lri[0] - uli[0] + 1);
-      SetParameterInt( "sizey", lri[1] - uli[1] + 1);
-      
+      inImage->TransformPhysicalPointToIndex(lrp,lri);    
       }
-    else if( GetParameterString( "mode.extent.unit" ) == "lonlat" )
+    else // if( GetParameterString( "mode.extent.unit" ) == "lonlat" )
       {
       RSTransformType::Pointer rsTransform = RSTransformType::New();
       ImageType* inImage = GetParameterImage("in");
@@ -453,18 +421,13 @@ private:
       lrp_in[ 1 ] = GetParameterFloat( "mode.extent.lry" );
       ulp_out = rsTransform->TransformPoint(ulp_in);
       lrp_out = rsTransform->TransformPoint(lrp_in);
-
-      FloatVectorImageType::IndexType uli_out , lri_out;
-      inImage->TransformPhysicalPointToIndex(ulp_out,uli_out);
-      inImage->TransformPhysicalPointToIndex(lrp_out,lri_out);
-
-      SetParameterInt( "startx", uli_out[0]);
-      SetParameterInt( "starty", uli_out[1]);
-
-      SetParameterInt( "sizex", lri_out[0] - uli_out[0] + 1);
-      SetParameterInt( "sizey", lri_out[1] - uli_out[1] + 1);
+      inImage->TransformPhysicalPointToIndex(ulp_out,uli);
+      inImage->TransformPhysicalPointToIndex(lrp_out,lri);
       }
-      this->CropRegionOfInterest();
+      SetParameterInt( "startx", uli[0]);
+      SetParameterInt( "starty", uli[1]);
+      SetParameterInt( "sizex", lri[0] - uli[0] + 1);
+      SetParameterInt( "sizey", lri[1] - uli[1] + 1);
   }
 
   void
@@ -477,24 +440,24 @@ private:
     lri[ 1 ] = largestRegion.GetSize()[1];
     if ( GetParameterString( "mode.extent.unit" ) == "pxl" )
       {
-      SetParameterFloat("mode.extent.ulx", uli[0]);
-      SetParameterFloat("mode.extent.uly", uli[1]);
-      SetParameterFloat("mode.extent.lrx", lri[0]);
-      SetParameterFloat("mode.extent.lry", lri[1]);
+      SetDefaultParameterFloat("mode.extent.ulx", uli[0]);
+      SetDefaultParameterFloat("mode.extent.uly", uli[1]);
+      SetDefaultParameterFloat("mode.extent.lrx", lri[0]);
+      SetDefaultParameterFloat("mode.extent.lry", lri[1]);
       }
     else if ( GetParameterString( "mode.extent.unit" ) == "phy" )
       {
       itk::Point<float, 2> ulp,  lrp;
 
       input->TransformIndexToPhysicalPoint(uli,ulp);
-      SetParameterFloat("mode.extent.ulx",ulp[0]);
-      SetParameterFloat("mode.extent.uly",ulp[1]);
+      SetDefaultParameterFloat("mode.extent.ulx",ulp[0]);
+      SetDefaultParameterFloat("mode.extent.uly",ulp[1]);
 
       input->TransformIndexToPhysicalPoint(lri,lrp);
-      SetParameterFloat("mode.extent.lrx",lrp[0]);
-      SetParameterFloat("mode.extent.lry",lrp[1]);  
+      SetDefaultParameterFloat("mode.extent.lrx",lrp[0]);
+      SetDefaultParameterFloat("mode.extent.lry",lrp[1]);  
       }
-    else if ( GetParameterString( "mode.extent.unit" ) == "lonlat" )
+    else // if ( GetParameterString( "mode.extent.unit" ) == "lonlat" )
       {
       RSTransformType::Pointer rsTransform = RSTransformType::New();
       rsTransform->SetInputKeywordList( input->GetImageKeywordlist() );
@@ -503,31 +466,42 @@ private:
       itk::Point<float, 2> ulp_in,  lrp_in , ulp_out , lrp_out;
       input->TransformIndexToPhysicalPoint(uli,ulp_in);
       ulp_out = rsTransform->TransformPoint( ulp_in );
-      SetParameterFloat( "mode.extent.ulx" , ulp_out[ 0 ]);
-      SetParameterFloat( "mode.extent.uly" , ulp_out[ 1 ]);
+      SetDefaultParameterFloat( "mode.extent.ulx" , ulp_out[ 0 ]);
+      SetDefaultParameterFloat( "mode.extent.uly" , ulp_out[ 1 ]);
 
       input->TransformIndexToPhysicalPoint( lri , lrp_in );
       lrp_out = rsTransform->TransformPoint( lrp_in );
-      SetParameterFloat( "mode.extent.lrx" , lrp_out[ 0 ]);
-      SetParameterFloat( "mode.extent.lry" , lrp_out[ 1 ]);
+      SetDefaultParameterFloat( "mode.extent.lrx" , lrp_out[ 0 ]);
+      SetDefaultParameterFloat( "mode.extent.lry" , lrp_out[ 1 ]);
       }
+    if ( !HasUserValue( "mode.extent.ulx" ) )
+      SetParameterFloat( "mode.extent.ulx" , 
+        GetDefaultParameterFloat( "mode.extent.ulx" ) );
+    if ( !HasUserValue( "mode.extent.uly" ) )
+      SetParameterFloat( "mode.extent.uly" , 
+        GetDefaultParameterFloat( "mode.extent.uly" ) );
+    if ( !HasUserValue( "mode.extent.lrx" ) )
+      SetParameterFloat( "mode.extent.lrx" , 
+        GetDefaultParameterFloat( "mode.extent.lrx" ) );
+    if ( !HasUserValue( "mode.extent.lry" ) )
+      SetParameterFloat( "mode.extent.lry" , 
+        GetDefaultParameterFloat( "mode.extent.lry" ) );
   }
 
   void
   ComputeIndexFromRadius()
   {
-    int pixelValue = -1;
+    FloatVectorImageType::SizeType radiusi ;
+    radiusi.Fill(0);
     assert( GetParameterString( "mode" ) == "radius" );
-    // First compute sizex sizey thanks to the radius
-    if ( HasUserValue( "mode.radius.r" ) )
+    if ( HasValue( "mode.radius.r" ) )
       {
       if ( GetParameterString( "mode.radius.unitr" ) == "pxl" )
         {
-        pixelValue = std::floor( 2 * GetParameterFloat( "mode.radius.r" ) ) + 1;
-        SetParameterInt( "sizey", pixelValue);
-        SetParameterInt( "sizex", pixelValue);
+        radiusi[0] = std::floor( GetParameterFloat( "mode.radius.r" ) );
+        radiusi[1] = std::floor( GetParameterFloat( "mode.radius.r" ) );
         }
-      if ( GetParameterString( "mode.radius.unitr" ) == "phy" )
+      else //if ( GetParameterString( "mode.radius.unitr" ) == "phy" )
         {
         ImageType * inImage = GetParameterImage("in");
         itk::Point<float, 2> radxp , radyp , ulp ;
@@ -540,64 +514,34 @@ private:
         radyp[1] += GetParameterFloat( "mode.radius.r" );
         bool lgtx = inImage->TransformPhysicalPointToIndex( radxp , radxi );
         bool lgty = inImage->TransformPhysicalPointToIndex( radyp , radyi );
-        FloatVectorImageType::IndexValueType maxR = 
-                std::min( inImage->GetLargestPossibleRegion().GetSize()[0] , 
-                          inImage->GetLargestPossibleRegion().GetSize()[1] );
-        maxR = maxR / 2 - ( (maxR + 1) % 2 );
-        if ( lgtx && lgty)
-          {
-          pixelValue = std::max( radxi[0] , radyi[1] );
-          if ( maxR<pixelValue )
-            {
-            pixelValue = std::min( std::min( radxi[0] , radyi[1] ) , maxR );
-            }
-          }
-        else if ( lgtx )
-          {
-          pixelValue = std::min( radxi[0] , maxR );
-          }
-        else if ( lgty )
-          {
-          pixelValue = std::min( radyi[1] , maxR );
-          }
+        if ( lgtx )
+          radiusi[0] = radxp[0];
         else
-          {
-          pixelValue = maxR;
-          }
-        SetParameterInt( "sizey", 2 * pixelValue + 1);
-        SetParameterInt( "sizex", 2 * pixelValue + 1);
+          radiusi[0] = GetDefaultParameterInt( "sizex");
+        if ( lgty )
+          radiusi[1] = radyp[1];
+        else 
+          radiusi[1] = GetDefaultParameterInt( "sizey");
         }
       }
-
-    // Then compute startx and starty
-    bool size = ( HasValue("sizex")  && HasValue("sizey") );
-    if ( size ) 
+    FloatVectorImageType::IndexType centeri ;
+    bool isIn(true);
+    if ( HasValue("sizex") && HasValue("sizey") ) 
       {
-      int radiusxi = GetParameterInt("sizex") / 2 ;
-      int radiusyi = GetParameterInt("sizey") / 2 ;
-
-      if ( GetParameterString( "mode.radius.unitc" ) == "pxl" && size )
+      if ( GetParameterString( "mode.radius.unitc" ) == "pxl" )
         {
-        pixelValue = std::round(GetParameterFloat( "mode.radius.cx" ));
-        SetParameterInt( "startx", pixelValue - radiusxi);
-        pixelValue = std::round(GetParameterFloat( "mode.radius.cy" ));
-        SetParameterInt( "starty", pixelValue - radiusyi);
+        centeri[0] = std::round(GetParameterFloat( "mode.radius.cx" ));
+        centeri[1] = std::round(GetParameterFloat( "mode.radius.cy" ));
         }
-      if ( GetParameterString( "mode.radius.unitc" ) == "phy" && size ) 
+      else if ( GetParameterString( "mode.radius.unitc" ) == "phy" ) 
         {
         ImageType * inImage = GetParameterImage("in");
         itk::Point<float, 2> centerp;
         centerp[ 0 ] = GetParameterFloat( "mode.radius.cx" );
         centerp[ 1 ] = GetParameterFloat( "mode.radius.cy" );
-        FloatVectorImageType::IndexType centeri ;
-        bool isIn = inImage->TransformPhysicalPointToIndex( centerp , centeri );
-          if ( isIn )
-          {
-          SetParameterInt( "startx", centeri[0] - radiusxi);
-          SetParameterInt( "starty", centeri[1] - radiusyi);
-          }
+        isIn = inImage->TransformPhysicalPointToIndex( centerp , centeri );     
         }
-      if ( GetParameterString( "mode.radius.unitc" ) == "lonlat" && size )
+      else // if ( GetParameterString( "mode.radius.unitc" ) == "lonlat" )
         {
         ImageType* inImage = GetParameterImage("in");
         RSTransformType::Pointer rsTransform = RSTransformType::New();
@@ -608,16 +552,21 @@ private:
         centerp_in[ 0 ] = GetParameterFloat( "mode.radius.cx" );
         centerp_in[ 1 ] = GetParameterFloat( "mode.radius.cy" );
         centerp_out = rsTransform->TransformPoint(centerp_in);
-        FloatVectorImageType::IndexType centeri_out;
-        bool isIn = inImage->TransformPhysicalPointToIndex( centerp_out , 
-                                                            centeri_out );
-        if ( isIn )
-          {
-          SetParameterInt( "startx", centeri_out[0] - radiusxi);
-          SetParameterInt( "starty", centeri_out[1] - radiusyi);
-          }
+        isIn = inImage->TransformPhysicalPointToIndex( centerp_out , 
+                                                            centeri );
         }
       }
+    if ( isIn )
+      {  
+      SetParameterInt( "startx", centeri[0] - radiusi[0]);
+      SetParameterInt( "sizex", centeri[0] + radiusi[0] + 1 );
+      SetParameterInt( "starty", centeri[1] - radiusi[1]);
+      SetParameterInt( "sizey", centeri[1] + radiusi[1] + 1 );
+      }
+    else
+      {
+      // log
+      }
   }
 
   void
@@ -636,30 +585,36 @@ private:
     if ( GetParameterString("mode.radius.unitr") == "pxl" )
       {
       int rad = std::min( centeri[ 0 ], centeri[ 1 ] );
-      SetParameterFloat( "mode.radius.r" , rad);
+      SetDefaultParameterFloat( "mode.radius.r" , rad);
       }
-    if ( GetParameterString("mode.radius.unitr") == "phy" )
+    else // if  ( GetParameterString("mode.radius.unitr") == "phy" )
       {
       itk::Point<float, 2> centerp , helpRxp, helpRyp;
       input->TransformIndexToPhysicalPoint(centeri,centerp);
       input->TransformIndexToPhysicalPoint(helpRxi,helpRxp);
       input->TransformIndexToPhysicalPoint(helpRyi,helpRyp);
       float rad = std::min( helpRxp[0] - helpRyp[0] , helpRyp[1] - helpRxp[1] );
-      SetParameterFloat( "mode.radius.r" , rad);
+      SetDefaultParameterFloat( "mode.radius.r" , rad);
       }
+
+    if ( !HasUserValue( "mode.radius.r" ) )
+      SetParameterFloat( "mode.radius.r" , 
+        GetDefaultParameterFloat( "mode.radius.r" ) );
+
+    // Center
     if ( GetParameterString("mode.radius.unitc") == "pxl" )
       {
-      SetParameterFloat( "mode.radius.cx" , centeri[0]);
-      SetParameterFloat( "mode.radius.cy" , centeri[1]) ;
+      SetDefaultParameterFloat( "mode.radius.cx" , centeri[0] );
+      SetDefaultParameterFloat( "mode.radius.cy" , centeri[1] );
       }
-    if ( GetParameterString("mode.radius.unitc") == "phy" )
+    else if ( GetParameterString("mode.radius.unitc") == "phy" )
       {
-      itk::Point<float, 2> centerp , helpRp;
+      itk::Point<float, 2> centerp ;
       input->TransformIndexToPhysicalPoint(centeri,centerp);
-      SetParameterFloat( "mode.radius.cx" , centerp[0]);
-      SetParameterFloat( "mode.radius.cy" , centerp[1]) ;
+      SetDefaultParameterFloat( "mode.radius.cx" , centerp[0] );
+      SetDefaultParameterFloat( "mode.radius.cy" , centerp[1] );
       }
-    if ( GetParameterString("mode.radius.unitc") == "lonlat" )
+    else // if ( GetParameterString("mode.radius.unitc") == "lonlat" )
       {
       RSTransformType::Pointer rsTransform = RSTransformType::New();
       rsTransform->SetInputKeywordList( input->GetImageKeywordlist() );
@@ -668,9 +623,15 @@ private:
       itk::Point<float, 2> centerp_in,  centerp_out;
       input->TransformIndexToPhysicalPoint(centeri,centerp_in);
       centerp_out = rsTransform->TransformPoint( centerp_in );
-      SetParameterFloat( "mode.radius.cx" , centerp_out[ 0 ]);
-      SetParameterFloat( "mode.radius.cy" , centerp_out[ 1 ]);
+      SetDefaultParameterFloat( "mode.radius.cx" , centerp_out[ 0 ]);
+      SetDefaultParameterFloat( "mode.radius.cy" , centerp_out[ 1 ]);
       }
+    if ( !HasUserValue( "mode.radius.cx") )
+      SetParameterFloat( "mode.radius.cx" , 
+        GetDefaultParameterFloat( "mode.radius.cx" ) );
+    if ( !HasUserValue( "mode.radius.cy") )
+      SetParameterFloat( "mode.radius.cy" , 
+        GetDefaultParameterFloat( "mode.radius.cy" ) );
   }
 
   void 
@@ -766,7 +727,7 @@ private:
       // Setup the DEM Handler
       otb::Wrapper::ElevationParametersHandler::SetupDEMHandlerFromElevationParameters(this,"elev");
 
-      FloatVectorImageType::Pointer referencePtr = this->GetParameterImage("mode.fit.im");
+      FloatVectorImageType::Pointer referencePtr = GetParameterImage("mode.fit.im");
       referencePtr->UpdateOutputInformation();
 
       RSTransformType::Pointer rsTransform = RSTransformType::New();
@@ -826,7 +787,9 @@ private:
 
       }
 
-    this->CropRegionOfInterest();
+    if ( !CropRegionOfInterest() )
+      otbAppLogWARNING(<<"Could not extract the ROI as it is out of the "
+        "input image.");
 
     ExtractROIFilterType::Pointer extractROIFilter = ExtractROIFilterType::New();
     extractROIFilter->SetInput(inImage);
diff --git a/Modules/Applications/AppOpticalCalibration/test/CMakeLists.txt b/Modules/Applications/AppOpticalCalibration/test/CMakeLists.txt
index a3068e1a64d109373ccb799f839f62a9c74c03b1..7be8316cb8ba425920d426bf402e6640b3c06001 100644
--- a/Modules/Applications/AppOpticalCalibration/test/CMakeLists.txt
+++ b/Modules/Applications/AppOpticalCalibration/test/CMakeLists.txt
@@ -87,7 +87,7 @@ otb_test_application(NAME apTvRaOpticalCalibration_UnknownSensor
            -acqui.sun.elev 62.7
            -acqui.sun.azim 152.7
            -acqui.view.elev 87.5
-           -acqui.view.azim -77.0
+           -acqui.view.azim 283
            -acqui.solarilluminations ${INPUTDATA}/apTvRaOpticalCalibrationUnknownSensorSolarIllumations2.txt
            -atmo.rsr ${INPUTDATA}/apTvRaOpticalCalibrationUnknownSensorRSR.txt
            -atmo.pressure 1013.0
diff --git a/Modules/Core/Common/include/otbLogger.h b/Modules/Core/Common/include/otbLogger.h
index ef420c7b2389a38af6be6d1f7cf19faaa4b7f1b5..0da8d382194a6815c31c19ccc21a3856a10fd383 100644
--- a/Modules/Core/Common/include/otbLogger.h
+++ b/Modules/Core/Common/include/otbLogger.h
@@ -59,8 +59,15 @@ public:
   // Overwrite this to provide custom formatting of log entries
   std::string BuildFormattedEntry(itk::Logger::PriorityLevelType, std::string const&) override;
 
+  /** Output logs about the RAM, caching and multi-threading settings */
   void LogSetupInformation();
-  
+
+  /** Return true if the LogSetupInformation has already been called*/
+  bool IsLogSetupInformationDone();
+
+  /** Set the flag m_LogSetupInfoDone to true */
+  void LogSetupInformationDone();
+
 protected:
   Logger();
   virtual ~Logger() ITK_OVERRIDE;
@@ -71,6 +78,8 @@ private:
 
   static Pointer CreateInstance();
 
+  bool m_LogSetupInfoDone;
+
 }; // class Logger
 
 } // namespace otb
diff --git a/Modules/Core/Common/src/otbLogger.cxx b/Modules/Core/Common/src/otbLogger.cxx
index b3730cf445a5816ca693da32de7d2a71f3aaba37..6678aa16320e4498b1a93310abbf346479467795 100644
--- a/Modules/Core/Common/src/otbLogger.cxx
+++ b/Modules/Core/Common/src/otbLogger.cxx
@@ -38,9 +38,6 @@ Logger::Pointer Logger::CreateInstance()
   defaultOutput->SetStream(std::cout);
   
   instance->AddLogOutput(defaultOutput);
-  
-  // Log setup information
-  instance->LogSetupInformation();
 
   return instance;
 }
@@ -61,6 +58,8 @@ Logger::Logger()
 
   this->SetTimeStampFormat(itk::LoggerBase::HUMANREADABLE);
   this->SetHumanReadableFormat("%Y-%m-%d %H:%M:%S");
+
+  m_LogSetupInfoDone = false;
 }
 
 Logger::~Logger()
@@ -69,22 +68,29 @@ Logger::~Logger()
 
 void Logger::LogSetupInformation()
 {
-  std::ostringstream oss;
-  
-  oss<<"Default RAM limit for OTB is "<<otb::ConfigurationManager::GetMaxRAMHint()<<" MB"<<std::endl;
-  this->Info(oss.str());
-  oss.str("");
-  oss.clear();
-
-  oss<<"GDAL maximum cache size is "<<GDALGetCacheMax64()/(1024*1024)<<" MB"<<std::endl;
-  this->Info(oss.str());
-  oss.str("");
-  oss.clear();
-
-  oss<<"OTB will use at most "<<itk::MultiThreader::GetGlobalDefaultNumberOfThreads()<<" threads"<<std::endl;
-  this->Info(oss.str());
-  oss.str("");
-  oss.clear();
+  if (! IsLogSetupInformationDone())
+    {
+    std::ostringstream oss;
+
+    oss<<"Default RAM limit for OTB is "<<otb::ConfigurationManager::GetMaxRAMHint()<<" MB"<<std::endl;
+    this->Info(oss.str());
+    oss.str("");
+    oss.clear();
+
+    oss<<"GDAL maximum cache size is "<<GDALGetCacheMax64()/(1024*1024)<<" MB"<<std::endl;
+    this->Info(oss.str());
+    oss.str("");
+    oss.clear();
+
+    oss<<"OTB will use at most "<<itk::MultiThreader::GetGlobalDefaultNumberOfThreads()<<" threads"<<std::endl;
+    this->Info(oss.str());
+    oss.str("");
+    oss.clear();
+
+    // only switch the flag for the singleton, so that other instances can call
+    // LogSetupInformation() several times
+    Instance()->LogSetupInformationDone();
+    }
 }
 
 std::string Logger::BuildFormattedEntry(itk::Logger::PriorityLevelType level, std::string const & content)
@@ -116,4 +122,14 @@ std::string Logger::BuildFormattedEntry(itk::Logger::PriorityLevelType level, st
   return s.str();
 }
 
+bool Logger::IsLogSetupInformationDone()
+{
+  return m_LogSetupInfoDone;
+}
+
+void Logger::LogSetupInformationDone()
+{
+  m_LogSetupInfoDone = true;
+}
+
 } // namespace otb
diff --git a/Modules/Core/Streaming/include/otbStreamingImageVirtualWriter.txx b/Modules/Core/Streaming/include/otbStreamingImageVirtualWriter.txx
index ec0ae5ae6d760b11ca6238deb582174752bb7f21..7336993ed392de64e980c05a7178fdc4cbe66ca7 100644
--- a/Modules/Core/Streaming/include/otbStreamingImageVirtualWriter.txx
+++ b/Modules/Core/Streaming/include/otbStreamingImageVirtualWriter.txx
@@ -183,6 +183,8 @@ void
 StreamingImageVirtualWriter<TInputImage>
 ::GenerateData(void)
 {
+  otb::Logger::Instance()->LogSetupInformation();
+
   /**
    * Prepare all the outputs. This may deallocate previous bulk data.
    */
diff --git a/Modules/Core/Streaming/include/otbStreamingManager.txx b/Modules/Core/Streaming/include/otbStreamingManager.txx
index 24e487a4e55998ee7d28b1be1a58cbda21ee8768..9d2f3ddbd8243ed40946058cafe96d6060d2926d 100644
--- a/Modules/Core/Streaming/include/otbStreamingManager.txx
+++ b/Modules/Core/Streaming/include/otbStreamingManager.txx
@@ -156,7 +156,7 @@ StreamingManager<TImage>::EstimateOptimalNumberOfDivisions(itk::DataObject * inp
   unsigned int optimalNumberOfDivisions =
       otb::PipelineMemoryPrintCalculator::EstimateOptimalNumberOfStreamDivisions(pipelineMemoryPrint, availableRAMInBytes);
 
-  otbLogMacro(Info,<<"Estimated memory for full processing: "<<pipelineMemoryPrint * otb::PipelineMemoryPrintCalculator::ByteToMegabyte<<"MB (avail.: "<<availableRAMInBytes * otb::PipelineMemoryPrintCalculator::ByteToMegabyte<<" NB), optimal image partitioning: "<<optimalNumberOfDivisions<<" blocks");
+  otbLogMacro(Info,<<"Estimated memory for full processing: "<<pipelineMemoryPrint * otb::PipelineMemoryPrintCalculator::ByteToMegabyte<<"MB (avail.: "<<availableRAMInBytes * otb::PipelineMemoryPrintCalculator::ByteToMegabyte<<" MB), optimal image partitioning: "<<optimalNumberOfDivisions<<" blocks");
   
   return optimalNumberOfDivisions;
 }
diff --git a/Modules/Filtering/Statistics/include/otbStreamingStatisticsVectorImageFilter.txx b/Modules/Filtering/Statistics/include/otbStreamingStatisticsVectorImageFilter.txx
index af68ad38efab6be1f3465a7b5d2c9626fbc481d7..c721aa0a9b032bc563aff963c7c37f5dc8eedd4e 100644
--- a/Modules/Filtering/Statistics/include/otbStreamingStatisticsVectorImageFilter.txx
+++ b/Modules/Filtering/Statistics/include/otbStreamingStatisticsVectorImageFilter.txx
@@ -591,7 +591,7 @@ PersistentStreamingStatisticsVectorImageFilter<TInputImage, TPrecision>
             {
             for (unsigned int c = 0; c < threadSecondOrder.Cols(); ++c)
               {
-              threadSecondOrder(r, c) += vectorValue[r] * vectorValue[c];
+              threadSecondOrder(r, c) += static_cast<PrecisionType>(vectorValue[r]) * static_cast<PrecisionType>(vectorValue[c]);
               }
             }
           threadSecondOrderComponent += vectorValue.GetSquaredNorm();
diff --git a/Modules/IO/ImageIO/include/otbImageFileReader.txx b/Modules/IO/ImageIO/include/otbImageFileReader.txx
index c8d4fb2d0f13c9a9add354d34fbff848b2397100..512be71058983155680a18eab313c6f6fbb3f51f 100644
--- a/Modules/IO/ImageIO/include/otbImageFileReader.txx
+++ b/Modules/IO/ImageIO/include/otbImageFileReader.txx
@@ -380,6 +380,17 @@ ImageFileReader<TOutputImage, ConvertPixelTraits>
         spacing[i] = 1.0;
         }
       origin[i] = 0.5*spacing[i];
+      for (unsigned j = 0; j < TOutputImage::ImageDimension; ++j)
+        {
+        if (i == j)
+          {
+          direction[j][i] = 1.0;
+          }
+        else
+          {
+          direction[j][i] = 0.0;
+          }
+        }
       }
     }
 
diff --git a/Modules/IO/ImageIO/include/otbImageFileWriter.txx b/Modules/IO/ImageIO/include/otbImageFileWriter.txx
index 6cfff64f8ae1df38f26344110537bb1c47d0336c..f61c624d02470aa03296ffd03499c85a6c2749e1 100644
--- a/Modules/IO/ImageIO/include/otbImageFileWriter.txx
+++ b/Modules/IO/ImageIO/include/otbImageFileWriter.txx
@@ -279,6 +279,8 @@ ImageFileWriter<TInputImage>
     itkExceptionMacro(<< "No input to writer");
     }
 
+  otb::Logger::Instance()->LogSetupInformation();
+
   /** Parse streaming modes */
   if(m_FilenameHelper->StreamingTypeIsSet())
     {
diff --git a/Modules/IO/ImageIO/include/otbMultiImageFileWriter.h b/Modules/IO/ImageIO/include/otbMultiImageFileWriter.h
index 5d2cf24f80ddbcf858d00becb3fbbd3489e5e9f9..d8b05f5c4bdaa69a60606dc5a5ed4db1ef47b416 100644
--- a/Modules/IO/ImageIO/include/otbMultiImageFileWriter.h
+++ b/Modules/IO/ImageIO/include/otbMultiImageFileWriter.h
@@ -41,6 +41,8 @@ namespace otb
  *  When the user gives a number of lines per strip or a tile size, the value
  *  is interpreted on the first input to deduce the number of streams. This
  *  number of streams is then used to split the other inputs.
+ *
+ * \ingroup OTBImageIO
  */
 class OTBImageIO_EXPORT MultiImageFileWriter: public itk::ProcessObject
 {
@@ -226,7 +228,11 @@ private:
   bool m_IsObserving;
   unsigned long m_ObserverID;
 
-  /** Internal base wrapper class to handle each ImageFileWriter */
+  /** \class SinkBase
+   * Internal base wrapper class to handle each ImageFileWriter
+   *
+   * \ingroup OTBImageIO
+   */
   class SinkBase
   {
   public:
@@ -248,6 +254,8 @@ private:
 
   /** \class Sink
    *  Wrapper class for each ImageFileWriter
+   *
+   * \ingroup OTBImageIO
    */
   template <class TImage>
   class Sink : public SinkBase
diff --git a/Modules/IO/TestKernel/include/otbTestMain.h b/Modules/IO/TestKernel/include/otbTestMain.h
index ccb70a639cc5ec5a295d775affc2d5a510747c7d..7a833306b5ecfd8e6b434488a0932f3fdc2a5933 100644
--- a/Modules/IO/TestKernel/include/otbTestMain.h
+++ b/Modules/IO/TestKernel/include/otbTestMain.h
@@ -28,7 +28,7 @@
 #include <iostream>
 
 #include "itkMultiThreader.h"
-#include "itkMacro.h"
+#include "otbMacro.h"
 
 #include "otbOGRDriversInit.h"
 #include "otbTestHelper.h"
@@ -298,6 +298,7 @@ int main(int ac, char* av[])
       }
   else
     {
+    otb::Logger::Instance()->LogSetupInformation();
     MainFuncPointer f = j->second;
     int             result;
     try
diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.h b/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.h
index 7bf3231a9b8d372473fd5ed0f598d3f2fd7c7c06..5fe7ec2f27c9f91f21a2ce2417502f5d4beac54d 100644
--- a/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.h
+++ b/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.h
@@ -33,8 +33,9 @@
 #endif
 #include "otb_shark.h"
 #include <shark/Algorithms/StoppingCriteria/AbstractStoppingCriterion.h>
-#include <shark/Models/FFNet.h>
-#include <shark/Models/Autoencoder.h>
+#include <shark/Models/LinearModel.h>
+#include <shark/Models/ConcatenatedModel.h>
+#include <shark/Models/NeuronLayers.h>
 #if defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic pop
 #endif
@@ -76,9 +77,9 @@ public:
   typedef typename Superclass::ConfidenceListSampleType         ConfidenceListSampleType;
 
   /// Neural network related typedefs
-  typedef shark::Autoencoder<NeuronType,shark::LinearNeuron> OutAutoencoderType;
-  typedef shark::Autoencoder<NeuronType,NeuronType> AutoencoderType;
-  typedef shark::FFNet<NeuronType,shark::LinearNeuron> NetworkType;
+  typedef shark::ConcatenatedModel<shark::RealVector> ModelType;
+  typedef shark::LinearModel<shark::RealVector,NeuronType> LayerType;
+  typedef shark::LinearModel<shark::RealVector, shark::LinearNeuron> OutLayerType;
 
   itkNewMacro(Self);
   itkTypeMacro(AutoencoderModel, DimensionalityReductionModel);
@@ -127,18 +128,16 @@ public:
 
   void Train() override;
 
-  template <class T, class Autoencoder>
+  template <class T>
   void TrainOneLayer(
     shark::AbstractStoppingCriterion<T> & criterion,
-    Autoencoder &,
     unsigned int,
     shark::Data<shark::RealVector> &,
     std::ostream&);
 
-  template <class T, class Autoencoder>
+  template <class T>
   void TrainOneSparseLayer(
     shark::AbstractStoppingCriterion<T> & criterion,
-    Autoencoder &,
     unsigned int,
     shark::Data<shark::RealVector> &,
     std::ostream&);
@@ -166,7 +165,9 @@ protected:
 
 private:
   /** Internal Network */
-  NetworkType m_Net;
+  ModelType m_Encoder;
+  std::vector<LayerType> m_InLayers;
+  OutLayerType m_OutLayer;
   itk::Array<unsigned int> m_NumberOfHiddenNeurons;
   /** Training parameters */
   unsigned int m_NumberOfIterations; // stop the training after a fixed number of iterations
diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.txx b/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.txx
index 33f1c28e247c43f80ac28a1d608b1c15967c6a5e..e5a26e9ee3dc8cbf4918222f4b7b45bc93e925cb 100644
--- a/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.txx
+++ b/Modules/Learning/DimensionalityReductionLearning/include/otbAutoencoderModel.txx
@@ -34,18 +34,17 @@
 #include "otbSharkUtils.h"
 //include train function
 #include <shark/ObjectiveFunctions/ErrorFunction.h>
-#include <shark/ObjectiveFunctions/SparseAutoencoderError.h>//the error function performing the regularisation of the hidden neurons
+//~ #include <shark/ObjectiveFunctions/SparseAutoencoderError.h>//the error function performing the regularisation of the hidden neurons
 
 #include <shark/Algorithms/GradientDescent/Rprop.h>// the RProp optimization algorithm
 #include <shark/ObjectiveFunctions/Loss/SquaredLoss.h> // squared loss used for regression
 #include <shark/ObjectiveFunctions/Regularizer.h> //L2 regulariziation
-#include <shark/Models/ImpulseNoiseModel.h> //noise source to corrupt the inputs
-#include <shark/Models/ConcatenatedModel.h>//to concatenate the noise with the model
+//~ #include <shark/Models/ImpulseNoiseModel.h> //noise source to corrupt the inputs
 
 #include <shark/Algorithms/StoppingCriteria/MaxIterations.h> //A simple stopping criterion that stops after a fixed number of iterations
 #include <shark/Algorithms/StoppingCriteria/TrainingProgress.h> //Stops when the algorithm seems to converge, Tracks the progress of the training error over a period of time
 
-#include <shark/Algorithms/GradientDescent/SteepestDescent.h>
+#include <shark/Algorithms/GradientDescent/Adam.h>
 #if defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic pop
 #endif
@@ -83,96 +82,56 @@ AutoencoderModel<TInputValue,NeuronType>
     }
 
   // Initialization of the feed forward neural network
-  std::vector<size_t> layers;
-  layers.push_back(shark::dataDimension(inputSamples));
+  m_Encoder = ModelType();
+  m_InLayers.clear();
+  size_t previousShape = shark::dataDimension(inputSamples);
   for (unsigned int i = 0 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
     {
-    layers.push_back(m_NumberOfHiddenNeurons[i]);
+    m_InLayers.push_back( LayerType(previousShape, m_NumberOfHiddenNeurons[i]) );
+    previousShape = m_NumberOfHiddenNeurons[i];
+    m_Encoder.add(&(m_InLayers.back()), true);
     }
-
   for (unsigned int i = std::max(0,static_cast<int>(m_NumberOfHiddenNeurons.Size()-1)) ; i > 0; --i)
     {
-    layers.push_back(m_NumberOfHiddenNeurons[i-1]);
-    }
-
-  layers.push_back(shark::dataDimension(inputSamples));
-  m_Net.setStructure(layers);
-  shark::initRandomNormal(m_Net,0.1);
-
-  // Training of the first Autoencoder (first and last layer of the FF network)
-  if (m_Epsilon > 0)
-    {
-    shark::TrainingProgress<> criterion(5,m_Epsilon);
-
-    OutAutoencoderType net;
-    // Shark doesn't allow to train a layer using a sparsity term AND a noisy input. 
-    if (m_Noise[0] != 0)
-      {
-      TrainOneLayer(criterion, net, 0, inputSamples, ofs);
-      }
-    else
-      {
-      TrainOneSparseLayer(criterion, net, 0, inputSamples, ofs);
-      }
-    criterion.reset();
+    m_InLayers.push_back( LayerType(previousShape, m_NumberOfHiddenNeurons[i-1]) );
+    previousShape = m_NumberOfHiddenNeurons[i-1];
     }
-  else
-    {
-    shark::MaxIterations<> criterion(m_NumberOfIterations);
+  m_OutLayer = OutLayerType(previousShape, shark::dataDimension(inputSamples));
 
-    OutAutoencoderType net;
-    // Shark doesn't allow to train a layer using a sparsity term AND a noisy input.
-    if (m_Noise[0] != 0)
-      {
-      TrainOneLayer(criterion, net, 0, inputSamples, ofs);
-      otbMsgDevMacro(<< "m_Noise " << m_Noise[0]);
-      }
-    else
-      {
-      TrainOneSparseLayer(criterion, net, 0, inputSamples, ofs);
-      }
-    criterion.reset();
-    }
-
-  // Training of the other autoencoders
-  if (m_Epsilon > 0)
+  // Training of the autoencoders pairwise, starting from the first and last layers
+  for (unsigned int i = 0 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
     {
-    shark::TrainingProgress<> criterion(5,m_Epsilon);
-
-    for (unsigned int i = 1 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
+    if (m_Epsilon > 0)
       {
-      AutoencoderType net;
+      shark::TrainingProgress<> criterion(5,m_Epsilon);
       // Shark doesn't allow to train a layer using a sparsity term AND a noisy input.
       if (m_Noise[i] != 0)
         {
-        TrainOneLayer(criterion, net, i, inputSamples, ofs);
+        TrainOneLayer(criterion, i, inputSamples, ofs);
         }
       else
         {
-        TrainOneSparseLayer(criterion, net, i, inputSamples, ofs);
+        TrainOneSparseLayer(criterion, i, inputSamples, ofs);
         }
       criterion.reset();
       }
-    }
-  else
-    {
-    shark::MaxIterations<> criterion(m_NumberOfIterations);
-
-    for (unsigned int i = 1 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
+    else
       {
-      AutoencoderType net;
+      shark::MaxIterations<> criterion(m_NumberOfIterations);
       // Shark doesn't allow to train a layer using a sparsity term AND a noisy input.
       if (m_Noise[i] != 0)
         {
-        TrainOneLayer(criterion, net, i, inputSamples, ofs);
+        TrainOneLayer(criterion, i, inputSamples, ofs);
         otbMsgDevMacro(<< "m_Noise " << m_Noise[0]);
         }
       else
         {
-        TrainOneSparseLayer( criterion, net, i, inputSamples, ofs);
+        TrainOneSparseLayer( criterion, i, inputSamples, ofs);
         }
       criterion.reset();
       }
+    // encode the samples with the last encoder trained
+    inputSamples = m_InLayers[i](inputSamples);
     }
   if (m_NumberOfIterationsFineTuning > 0)
     {
@@ -183,31 +142,37 @@ AutoencoderModel<TInputValue,NeuronType>
 }
 
 template <class TInputValue, class NeuronType>
-template <class T, class Autoencoder>
+template <class T>
 void
 AutoencoderModel<TInputValue,NeuronType>
 ::TrainOneLayer(
   shark::AbstractStoppingCriterion<T> & criterion,
-  Autoencoder & net,
   unsigned int layer_index,
   shark::Data<shark::RealVector> &samples,
   std::ostream& File)
 {
+  typedef shark::AbstractModel<shark::RealVector,shark::RealVector> BaseModelType;
+  ModelType net;
+  net.add(&(m_InLayers[layer_index]), true);
+  net.add( (layer_index ?
+    (BaseModelType*) &(m_InLayers[m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index]) :
+    (BaseModelType*) &m_OutLayer) , true);
+
   otbMsgDevMacro(<< "Noise " <<  m_Noise[layer_index]);
   std::size_t inputs = dataDimension(samples);
-  net.setStructure(inputs, m_NumberOfHiddenNeurons[layer_index]);
   initRandomUniform(net,-m_InitFactor*std::sqrt(1.0/inputs),m_InitFactor*std::sqrt(1.0/inputs));
 
-  shark::ImpulseNoiseModel noise(inputs,m_Noise[layer_index],1.0); //set an input pixel with probability m_Noise to 0
-  shark::ConcatenatedModel<shark::RealVector,shark::RealVector> model = noise>> net;
+  //~ shark::ImpulseNoiseModel noise(inputs,m_Noise[layer_index],1.0); //set an input pixel with probability m_Noise to 0
+  //~ shark::ConcatenatedModel<shark::RealVector,shark::RealVector> model = noise>> net;
   shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
   shark::SquaredLoss<shark::RealVector> loss;
-  shark::ErrorFunction error(trainSet, &model, &loss);
+  //~ shark::ErrorFunction error(trainSet, &model, &loss);
+  shark::ErrorFunction<> error(trainSet, &net, &loss);
 
-  shark::TwoNormRegularizer regularizer(error.numberOfVariables());
+  shark::TwoNormRegularizer<> regularizer(error.numberOfVariables());
   error.setRegularizer(m_Regularization[layer_index],&regularizer);
 
-  shark::IRpropPlusFull optimizer;
+  shark::Adam<> optimizer;
   error.init();
   optimizer.init(error);
 
@@ -230,35 +195,37 @@ AutoencoderModel<TInputValue,NeuronType>
     } while( !criterion.stop( optimizer.solution() ) );
 
   net.setParameterVector(optimizer.solution().point);
-  m_Net.setLayer(layer_index,net.encoderMatrix(),net.hiddenBias());  // Copy the encoder in the FF neural network
-  m_Net.setLayer( m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index,net.decoderMatrix(),net.outputBias()); // Copy the decoder in the FF neural network
-  samples = net.encode(samples);
 }
 
 template <class TInputValue, class NeuronType>
-template <class T, class Autoencoder>
+template <class T>
 void AutoencoderModel<TInputValue,NeuronType>::TrainOneSparseLayer(
   shark::AbstractStoppingCriterion<T> & criterion,
-  Autoencoder & net,
   unsigned int layer_index,
   shark::Data<shark::RealVector> &samples,
   std::ostream& File)
 {
-  //AutoencoderType net;
-  std::size_t inputs = dataDimension(samples);
-  net.setStructure(inputs, m_NumberOfHiddenNeurons[layer_index]);
+  typedef shark::AbstractModel<shark::RealVector,shark::RealVector> BaseModelType;
+  ModelType net;
+  net.add(&(m_InLayers[layer_index]), true);
+  net.add( (layer_index ?
+    (BaseModelType*) &(m_InLayers[m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index]) :
+    (BaseModelType*) &m_OutLayer) , true);
 
+  std::size_t inputs = dataDimension(samples);
   shark::initRandomUniform(net,-m_InitFactor*std::sqrt(1.0/inputs),m_InitFactor*std::sqrt(1.0/inputs));
 
   // Idea : set the initials value for the output weights higher than the input weights
 
   shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
   shark::SquaredLoss<shark::RealVector> loss;
-  shark::SparseAutoencoderError error(trainSet,&net, &loss, m_Rho[layer_index], m_Beta[layer_index]);
-
-  shark::TwoNormRegularizer regularizer(error.numberOfVariables());
+  //~ shark::SparseAutoencoderError error(trainSet,&net, &loss, m_Rho[layer_index], m_Beta[layer_index]);
+  // SparseAutoencoderError doesn't exist anymore; for now, use a plain ErrorFunction
+  shark::ErrorFunction<> error(trainSet, &net, &loss);
+  
+  shark::TwoNormRegularizer<> regularizer(error.numberOfVariables());
   error.setRegularizer(m_Regularization[layer_index],&regularizer);
-  shark::IRpropPlusFull optimizer;
+  shark::Adam<> optimizer;
   error.init();
   optimizer.init(error);
 
@@ -279,9 +246,6 @@ void AutoencoderModel<TInputValue,NeuronType>::TrainOneSparseLayer(
     File << "end layer" << std::endl;
     }
   net.setParameterVector(optimizer.solution().point);
-  m_Net.setLayer(layer_index,net.encoderMatrix(),net.hiddenBias());  // Copy the encoder in the FF neural network
-  m_Net.setLayer( m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index,net.decoderMatrix(),net.outputBias()); // Copy the decoder in the FF neural network
-  samples = net.encode(samples);
 }
 
 template <class TInputValue, class NeuronType>
@@ -293,15 +257,23 @@ AutoencoderModel<TInputValue,NeuronType>
   shark::Data<shark::RealVector> &samples,
   std::ostream& File)
 {
+  // create full network
+  ModelType net;
+  for (auto &layer : m_InLayers)
+    {
+    net.add(&layer, true);
+    }
+  net.add(&m_OutLayer, true);
+  
   //labels identical to inputs
   shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);
   shark::SquaredLoss<shark::RealVector> loss;
 
-  shark::ErrorFunction error(trainSet, &m_Net, &loss);
-  shark::TwoNormRegularizer regularizer(error.numberOfVariables());
+  shark::ErrorFunction<> error(trainSet, &net, &loss);
+  shark::TwoNormRegularizer<> regularizer(error.numberOfVariables());
   error.setRegularizer(m_Regularization[0],&regularizer);
 
-  shark::IRpropPlusFull optimizer;
+  shark::Adam<> optimizer;
   error.init();
   optimizer.init(error);
   otbMsgDevMacro(<<"Error before training : " << optimizer.solution().value);
@@ -326,7 +298,6 @@ AutoencoderModel<TInputValue,NeuronType>
   try
     {
     this->Load(filename);
-    m_Net.name();
     }
   catch(...)
     {
@@ -350,22 +321,15 @@ AutoencoderModel<TInputValue,NeuronType>
 {
   otbMsgDevMacro(<< "saving model ...");
   std::ofstream ofs(filename);
-  ofs << m_Net.name() << std::endl; // the first line of the model file contains a key
+  ofs << "Autoencoder" << std::endl; // the first line of the model file contains a key
+  ofs << (m_InLayers.size() + 1) << std::endl; // second line is the number of encoders/decoders
   shark::TextOutArchive oa(ofs);
-  oa << m_Net;
-  ofs.close();
-
-  if (this->m_WriteWeights == true)     // output the map vectors in a txt file
+  for (const auto &layer : m_InLayers)
     {
-    std::ofstream otxt(filename+".txt");
-    for (unsigned int i = 0 ; i < m_Net.layerMatrices().size(); ++i)
-      {
-      otxt << "layer " << i << std::endl;
-      otxt << m_Net.layerMatrix(i) << std::endl;
-      otxt << m_Net.bias(i) << std::endl;
-      otxt << std::endl;
-      }
+    oa << layer;
     }
+  oa << m_OutLayer;
+  ofs.close();
 }
 
 template <class TInputValue, class NeuronType>
@@ -373,23 +337,39 @@ void
 AutoencoderModel<TInputValue,NeuronType>
 ::Load(const std::string & filename, const std::string & /*name*/)
 {
-  NetworkType net;
   std::ifstream ifs(filename);
-  char autoencoder[256];
-  ifs.getline(autoencoder,256);
-  std::string autoencoderstr(autoencoder);
-
-  if (autoencoderstr != net.name()){
+  char buffer[256];
+  // check first line
+  ifs.getline(buffer,256);
+  std::string bufferStr(buffer);
+  if (bufferStr != "Autoencoder"){
     itkExceptionMacro(<< "Error opening " << filename.c_str() );
     }
+  // check second line
+  ifs.getline(buffer,256);
+  int nbLevels = boost::lexical_cast<int>(buffer);
+  if (nbLevels < 2 || nbLevels%2 == 1)
+    {
+    itkExceptionMacro(<< "Unexpected number of levels : "<<buffer );
+    }
+  m_InLayers.clear();
+  m_Encoder = ModelType();
   shark::TextInArchive ia(ifs);
-  ia >> m_Net;
+  for (int i=0 ; (i+1) < nbLevels ; i++)
+    {
+    LayerType layer;
+    ia >> layer;
+    m_InLayers.push_back(layer);
+    }
+  ia >> m_OutLayer;
   ifs.close();
 
-  // This gives us the dimension if we keep the encoder and decoder
-  size_t feature_layer_index = m_Net.layerMatrices().size()/2;
-  // number of neurons in the feature layer (second dimension of the first decoder weight matrix)
-  this->SetDimension(m_Net.layerMatrix(feature_layer_index).size2());
+  for (int i=0 ; i < nbLevels/2 ; i++)
+    {
+    m_Encoder.add(&(m_InLayers[i]) ,true);
+    }
+
+  this->SetDimension( m_Encoder.outputShape()[0] );
 }
 
 template <class TInputValue, class NeuronType>
@@ -409,7 +389,7 @@ AutoencoderModel<TInputValue,NeuronType>
   shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
 
   // features layer for a network containing the encoder and decoder part
-  data = m_Net.evalLayer( m_Net.layerMatrices().size()/2-1 ,data);
+  data = m_Encoder(data);
   TargetSampleType target;
   target.SetSize(this->m_Dimension);
 
@@ -435,7 +415,7 @@ AutoencoderModel<TInputValue,NeuronType>
   shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
   TargetSampleType target;
   // features layer for a network containing the encoder and decoder part
-  data = m_Net.evalLayer( m_Net.layerMatrices().size()/2-1 ,data);
+  data = m_Encoder(data);
 
   unsigned int id = startIndex;
   target.SetSize(this->m_Dimension);
diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx
index 9f39326a21bc5f1980a49d80ecdaea55b42a450a..a387852fecc386d9c5f2a6c27c7bf39cd7a3649d 100644
--- a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx
+++ b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx
@@ -137,11 +137,11 @@ PCAModel<TInputValue>::Load(const std::string & filename, const std::string & /*
   ifs.close();
   if (this->m_Dimension ==0)
   {
-    this->m_Dimension = m_Encoder.outputSize();
+    this->m_Dimension = m_Encoder.outputShape()[0];
   }
 
   auto eigenvectors = m_Encoder.matrix();
-  eigenvectors.resize(this->m_Dimension,m_Encoder.inputSize());
+  eigenvectors.resize(this->m_Dimension,m_Encoder.inputShape()[0]);
 
   m_Encoder.setStructure(eigenvectors, m_Encoder.offset() );
 }
diff --git a/Modules/Learning/LearningBase/otb-module.cmake b/Modules/Learning/LearningBase/otb-module.cmake
index afa2a339a1813cf16e5f6ea3700f079a36180dcd..c0af985032de6d4a2acd11988be1b9a177cf8219 100644
--- a/Modules/Learning/LearningBase/otb-module.cmake
+++ b/Modules/Learning/LearningBase/otb-module.cmake
@@ -28,7 +28,11 @@ otb_module(OTBLearningBase
     OTBImageBase
     OTBITK
 
-  TEST_DEPENDS
+    OPTIONAL_DEPENDS
+    OTBShark
+
+    TEST_DEPENDS
+    OTBBoost
     OTBTestKernel
     OTBImageIO
 
diff --git a/Modules/Learning/LearningBase/test/CMakeLists.txt b/Modules/Learning/LearningBase/test/CMakeLists.txt
index d1d16c3e65801e606c6e6903538b65264a4483a6..48e28cc5cad320ffa41eee0659ff6979d0bf4457 100644
--- a/Modules/Learning/LearningBase/test/CMakeLists.txt
+++ b/Modules/Learning/LearningBase/test/CMakeLists.txt
@@ -32,6 +32,10 @@ otbKMeansImageClassificationFilterNew.cxx
 otbMachineLearningModelTemplates.cxx
 )
 
+if(OTB_USE_SHARK)
+  set(OTBLearningBaseTests ${OTBLearningBaseTests} otbSharkUtilsTests.cxx)
+endif()
+
 add_executable(otbLearningBaseTestDriver ${OTBLearningBaseTests})
 target_link_libraries(otbLearningBaseTestDriver ${OTBLearningBase-Test_LIBRARIES})
 otb_module_target_label(otbLearningBaseTestDriver)
@@ -68,3 +72,7 @@ otb_add_test(NAME leTuDecisionTreeNew COMMAND otbLearningBaseTestDriver
 otb_add_test(NAME leTuKMeansImageClassificationFilterNew COMMAND otbLearningBaseTestDriver
   otbKMeansImageClassificationFilterNew)
 
+if(OTB_USE_SHARK)
+  otb_add_test(NAME leTuSharkNormalizeLabels COMMAND otbLearningBaseTestDriver
+    otbSharkNormalizeLabels)
+endif()
diff --git a/Modules/Learning/LearningBase/test/otbLearningBaseTestDriver.cxx b/Modules/Learning/LearningBase/test/otbLearningBaseTestDriver.cxx
index 5b38bf300dd4520c18e198b6e6643848cbdc937c..dc2d36b7943129ec6519ebbc4f194d1dd6078800 100644
--- a/Modules/Learning/LearningBase/test/otbLearningBaseTestDriver.cxx
+++ b/Modules/Learning/LearningBase/test/otbLearningBaseTestDriver.cxx
@@ -29,4 +29,7 @@ void RegisterTests()
   REGISTER_TEST(otbSEMClassifierNew);
   REGISTER_TEST(otbDecisionTreeNew);
   REGISTER_TEST(otbKMeansImageClassificationFilterNew);
+#ifdef OTB_USE_SHARK
+  REGISTER_TEST(otbSharkNormalizeLabels);
+#endif
 }
diff --git a/Modules/Learning/LearningBase/test/otbSharkUtilsTests.cxx b/Modules/Learning/LearningBase/test/otbSharkUtilsTests.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..bc3783cb728b0f5ad0f6b2d43620b18ba7939e30
--- /dev/null
+++ b/Modules/Learning/LearningBase/test/otbSharkUtilsTests.cxx
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
+ *
+ * This file is part of Orfeo Toolbox
+ *
+ *     https://www.orfeo-toolbox.org/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "itkMacro.h"
+#include "otbSharkUtils.h"
+
+
+int otbSharkNormalizeLabels(int itkNotUsed(argc), char* itkNotUsed(argv) [])
+{
+  std::vector<unsigned int> inLabels = {2, 2, 3, 20, 1};
+  std::vector<unsigned int> expectedDictionary = {2, 3, 20, 1};
+  std::vector<unsigned int> expectedLabels = {0, 0, 1, 2, 3};
+
+  auto newLabels = inLabels;
+  std::vector<unsigned int> labelDict;
+  otb::Shark::NormalizeLabelsAndGetDictionary(newLabels, labelDict);
+
+  if(newLabels != expectedLabels)
+    {
+    std::cout << "Wrong new labels\n";
+    for(size_t i = 0; i<newLabels.size(); ++i)
+      std::cout << "Got " << newLabels[i] << " expected " << expectedLabels[i] << '\n';
+
+    return EXIT_FAILURE;
+    }
+
+  if(labelDict != expectedDictionary)
+    {
+    std::cout << "Wrong dictionary\n";
+    for(size_t i = 0; i<labelDict.size(); ++i)
+      std::cout << "Got " << labelDict[i] << " expected " << expectedDictionary[i] << '\n';
+
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Modules/Learning/Sampling/src/otbSamplingRateCalculator.cxx b/Modules/Learning/Sampling/src/otbSamplingRateCalculator.cxx
index a7e8f2bc3ff14263ed453d6c46066e74d68c8356..86de38dfb0c5d8c596a1b0f73576c49b1d4200d3 100644
--- a/Modules/Learning/Sampling/src/otbSamplingRateCalculator.cxx
+++ b/Modules/Learning/Sampling/src/otbSamplingRateCalculator.cxx
@@ -233,7 +233,7 @@ SamplingRateCalculator
         std::string::size_type pos5 = line.find_first_not_of(" \t", parts[2].begin() - line.begin());
         std::string::size_type pos6 = line.find_last_not_of(" \t", parts[2].end() - line.begin() -1);
         std::string::size_type pos7 = line.find_first_not_of(" \t", parts[3].begin() - line.begin());
-        std::string::size_type pos8 = line.find_last_not_of(" \t", parts[3].end() - line.begin() -1);
+        std::string::size_type pos8 = line.find_last_not_of(" \t\r", parts[3].end() - line.begin() -1);
         if (pos2 != std::string::npos && pos1 <= pos2 &&
             pos4 != std::string::npos && pos3 <= pos4 &&
             pos6 != std::string::npos && pos5 <= pos6 &&
@@ -336,7 +336,7 @@ SamplingRateCalculator
         std::string::size_type pos1 = line.find_first_not_of(" \t", parts[0].begin() - line.begin());
         std::string::size_type pos2 = line.find_last_not_of(" \t", parts[0].end() - line.begin() -1);
         std::string::size_type pos3 = line.find_first_not_of(" \t", parts[1].begin() - line.begin());
-        std::string::size_type pos4 = line.find_last_not_of(" \t", parts[1].end() - line.begin() -1);
+        std::string::size_type pos4 = line.find_last_not_of(" \t\r", parts[1].end() - line.begin() -1);
         if (pos2 != std::string::npos && pos1 <= pos2 &&
             pos4 != std::string::npos && pos3 <= pos4)
           {
diff --git a/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.h b/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.h
index 7dd41d6eed22208d512ecf0e5d4eb5116f2bf5c0..41015ee9dc7f5f6bb6c3d1defbfad5ccb1c0c47b 100644
--- a/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.h
+++ b/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.h
@@ -33,7 +33,10 @@
 #pragma GCC diagnostic ignored "-Wsign-compare"
 #pragma GCC diagnostic ignored "-Wcast-align"
 #pragma GCC diagnostic ignored "-Wunknown-pragmas"
+#pragma GCC diagnostic ignored "-Wheader-guard"
+#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #endif
+#include <shark/Models/Classifier.h>
 #include "otb_shark.h"
 #include "shark/Algorithms/Trainers/RFTrainer.h"
 #if defined(__GNUC__) || defined(__clang__)
@@ -134,6 +137,10 @@ public:
   /** If true, margin confidence value will be computed */
   itkSetMacro(ComputeMargin, bool);
 
+  /** If true, class labels will be normalised in [0 ... nbClasses] */
+  itkGetMacro(NormalizeClassLabels, bool);
+  itkSetMacro(NormalizeClassLabels, bool);
+
 protected:
   /** Constructor */
   SharkRandomForestsMachineLearningModel();
@@ -154,8 +161,10 @@ private:
   SharkRandomForestsMachineLearningModel(const Self &); //purposely not implemented
   void operator =(const Self&); //purposely not implemented
 
-  shark::RFClassifier m_RFModel;
-  shark::RFTrainer m_RFTrainer;
+  shark::RFClassifier<unsigned int> m_RFModel;
+  shark::RFTrainer<unsigned int> m_RFTrainer;
+  std::vector<unsigned int> m_ClassDictionary;
+  bool m_NormalizeClassLabels;
 
   unsigned int m_NumberOfTrees;
   unsigned int m_MTry;
diff --git a/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.txx b/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.txx
index 207f1abdd77e4b5cfffd9bc5d104c4b40232f853..72c816069bebddc048a0f8af48f24579a55fa38b 100644
--- a/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.txx
+++ b/Modules/Learning/Supervised/include/otbSharkRandomForestsMachineLearningModel.txx
@@ -32,7 +32,6 @@
 #pragma GCC diagnostic ignored "-Woverloaded-virtual"
 #pragma GCC diagnostic ignored "-Wignored-qualifiers"
 #endif
-#include <shark/Models/Converter.h>
 #if defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic pop
 #endif
@@ -52,6 +51,7 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
   this->m_ConfidenceIndex = true;
   this->m_IsRegressionSupported = false;
   this->m_IsDoPredictBatchMultiThreaded = true;
+  this->m_NormalizeClassLabels = true;
 }
 
 
@@ -76,13 +76,17 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
 
   Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
   Shark::ListSampleToSharkVector(this->GetTargetListSample(), class_labels);
+  if(m_NormalizeClassLabels)
+    {
+    Shark::NormalizeLabelsAndGetDictionary(class_labels, m_ClassDictionary);
+    }
   shark::ClassificationDataset TrainSamples = shark::createLabeledDataFromRange(features,class_labels);
 
   //Set parameters
   m_RFTrainer.setMTry(m_MTry);
   m_RFTrainer.setNTrees(m_NumberOfTrees);
   m_RFTrainer.setNodeSize(m_NodeSize);
-  m_RFTrainer.setOOBratio(m_OobRatio);
+  //  m_RFTrainer.setOOBratio(m_OobRatio);
   m_RFTrainer.train(m_RFModel, TrainSamples);
 
 }
@@ -125,15 +129,20 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
     }
   if (quality != ITK_NULLPTR)
     {
-    shark::RealVector probas = m_RFModel(samples);
+    shark::RealVector probas = m_RFModel.decisionFunction()(samples);
     (*quality) = ComputeConfidence(probas, m_ComputeMargin);
     }
-  shark::ArgMaxConverter<shark::RFClassifier> amc;
-  amc.decisionFunction() = m_RFModel;
-  unsigned int res;
-  amc.eval(samples, res);
+  unsigned int res{0};
+  m_RFModel.eval(samples, res);
   TargetSampleType target;
-  target[0] = static_cast<TOutputValue>(res);
+  if(m_NormalizeClassLabels)
+    {
+    target[0] = m_ClassDictionary[static_cast<TOutputValue>(res)];
+    }
+  else
+    {
+    target[0] = static_cast<TOutputValue>(res);
+    }
   return target;
 }
 
@@ -157,13 +166,13 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
   Shark::ListSampleRangeToSharkVector(input, features,startIndex,size);
   shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange(features);
 
-  #ifdef _OPENMP
+#ifdef _OPENMP
   omp_set_num_threads(itk::MultiThreader::GetGlobalDefaultNumberOfThreads());
-  #endif
+#endif
   
   if(quality != ITK_NULLPTR)
     {
-    shark::Data<shark::RealVector> probas = m_RFModel(inputSamples);
+    shark::Data<shark::RealVector> probas = m_RFModel.decisionFunction()(inputSamples);
     unsigned int id = startIndex;
     for(shark::RealVector && p : probas.elements())
       {
@@ -175,14 +184,19 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
       }
     }
     
-  shark::ArgMaxConverter<shark::RFClassifier> amc;
-  amc.decisionFunction() = m_RFModel;
-  auto prediction = amc(inputSamples);
+  auto prediction = m_RFModel(inputSamples);
   unsigned int id = startIndex;
   for(const auto& p : prediction.elements())
     {
     TargetSampleType target;
-    target[0] = static_cast<TOutputValue>(p);
+    if(m_NormalizeClassLabels)
+      {
+      target[0] = m_ClassDictionary[static_cast<TOutputValue>(p)];
+      }
+    else
+      {
+      target[0] = static_cast<TOutputValue>(p);
+      }
     targets->SetMeasurementVector(id,target);
     ++id;
     }
@@ -199,7 +213,18 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
     itkExceptionMacro(<< "Error opening " << filename.c_str() );
     }
   // Add comment with model file name
-  ofs << "#" << m_RFModel.name() << std::endl;
+  ofs << "#" << m_RFModel.name();
+  if(m_NormalizeClassLabels) ofs  << " with_dictionary";
+  ofs << std::endl;
+  if(m_NormalizeClassLabels)
+    {
+    ofs << m_ClassDictionary.size() << " ";
+    for(const auto& l : m_ClassDictionary)
+      {
+      ofs << l << " ";
+      }
+    ofs << std::endl;
+    }
   shark::TextOutArchive oa(ofs);
   m_RFModel.save(oa,0);
 }
@@ -219,6 +244,10 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
       {
       if( line.find( m_RFModel.name() ) == std::string::npos )
         itkExceptionMacro( "The model file : " + filename + " cannot be read." );
+      if( line.find( "with_dictionary" ) == std::string::npos )
+        {
+        m_NormalizeClassLabels=false;
+        }
       }
     else
       {
@@ -226,6 +255,18 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
       ifs.clear();
       ifs.seekg( 0, std::ios::beg );
       }
+    if(m_NormalizeClassLabels)
+      {
+      size_t nbLabels{0};
+      ifs >> nbLabels;
+      m_ClassDictionary.resize(nbLabels);
+      for(size_t i=0; i<nbLabels; ++i)
+        {
+        unsigned int label;
+        ifs >> label;
+        m_ClassDictionary[i]=label;
+        }
+      }
     shark::TextInArchive ia( ifs );
     m_RFModel.load( ia, 0 );
     }
diff --git a/Modules/Learning/Unsupervised/include/otbSharkKMeansMachineLearningModel.txx b/Modules/Learning/Unsupervised/include/otbSharkKMeansMachineLearningModel.txx
index 9dd43948a719c9305dace0a6366ebfd40e4b3e24..1b08d538c943001279d9401f314d51e21e8dbf88 100644
--- a/Modules/Learning/Unsupervised/include/otbSharkKMeansMachineLearningModel.txx
+++ b/Modules/Learning/Unsupervised/include/otbSharkKMeansMachineLearningModel.txx
@@ -55,6 +55,7 @@ SharkKMeansMachineLearningModel<TInputValue, TOutputValue>
         m_Normalized( false ), m_K(2), m_MaximumNumberOfIterations( 10 )
 {
   // Default set HardClusteringModel
+  this->m_ConfidenceIndex = true;
   m_ClusteringModel = boost::make_shared<ClusteringModelType>( &m_Centroids );
 }
 
@@ -174,7 +175,7 @@ SharkKMeansMachineLearningModel<TInputValue, TOutputValue>
   // Change quality measurement only if SoftClustering or other clustering method is used.
   if( quality != ITK_NULLPTR )
     {
-    for( unsigned int qid = startIndex; qid < size; ++qid )
+    for( unsigned int qid = startIndex; qid < startIndex+size; ++qid )
       {
       quality->SetMeasurementVector( qid, static_cast<ConfidenceValueType>(1.) );
       }
diff --git a/Modules/ThirdParty/Boost/otb-module-init.cmake b/Modules/ThirdParty/Boost/otb-module-init.cmake
index a5f58041fb0bf5001d123ff2e0f772a26cff6d34..0a07bdfdbc7a9d33a03464925c95667a036bea59 100644
--- a/Modules/ThirdParty/Boost/otb-module-init.cmake
+++ b/Modules/ThirdParty/Boost/otb-module-init.cmake
@@ -31,3 +31,8 @@ if (BUILD_TESTING)
     message(STATUS "Found Boost components: unit_test_framework")
   endif()
 endif() #BUILD_TESTING
+
+if(WIN32)
+  # disable autolinking in boost
+	add_definitions( -DBOOST_ALL_NO_LIB )
+endif()
diff --git a/Modules/ThirdParty/Shark/include/otbSharkUtils.h b/Modules/ThirdParty/Shark/include/otbSharkUtils.h
index de3adf77401d0f131d2bd7d447627829b3df64ff..04c57b6d4e7f5a022b0c4fafa86ac41b134f690c 100644
--- a/Modules/ThirdParty/Shark/include/otbSharkUtils.h
+++ b/Modules/ThirdParty/Shark/include/otbSharkUtils.h
@@ -23,6 +23,7 @@
 
 #include <stdexcept>
 #include <string>
+#include <unordered_map>
 
 #if defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic push
@@ -127,6 +128,27 @@ template <class T> void ListSampleToSharkVector(const T * listSample, std::vecto
   assert(listSample != nullptr);
   ListSampleRangeToSharkVector(listSample,output,0, static_cast<unsigned int>(listSample->Size()));
 }
+
+/** Shark assumes that labels are 0 ... (nbClasses-1). This function modifies the labels contained in the input vector and returns a vector with size = nbClasses which allows the translation from the normalised labels to the new ones oldLabel = dictionary[newLabel].
+*/
+template <typename T> void NormalizeLabelsAndGetDictionary(std::vector<T>& labels, 
+                                                           std::vector<T>& dictionary)
+{
+  std::unordered_map<T, T> dictMap;
+  T labelCount{0};
+  for(const auto& l : labels)
+    {
+    if(dictMap.find(l)==dictMap.end())
+      dictMap.insert({l, labelCount++});
+    }
+  dictionary.resize(labelCount);
+  for(auto& l : labels)
+    {
+    auto newLabel = dictMap[l];
+    dictionary[newLabel] = l;
+    l = newLabel;
+    }
+}
   
 }
 }
diff --git a/Modules/Visualization/Monteverdi/include/mvdMainWindow.h b/Modules/Visualization/Monteverdi/include/mvdMainWindow.h
index 3ae223763f6392aa498f81f9fcaf5776f3211a9f..5b7d6b66cf3a055353803aae37cf06821d58c8be 100644
--- a/Modules/Visualization/Monteverdi/include/mvdMainWindow.h
+++ b/Modules/Visualization/Monteverdi/include/mvdMainWindow.h
@@ -150,7 +150,7 @@ public slots:
 
   /**
    */
-  void ImportImages( const QStringList & filenames );
+  void ImportImages( const QStringList & filenames, bool enableOverviews );
 
   /*-[ SIGNALS SECTION ]-----------------------------------------------------*/
 
diff --git a/Modules/Visualization/Monteverdi/src/main.cxx b/Modules/Visualization/Monteverdi/src/main.cxx
index e44dbb8459d0b239c8bc5c5283d5c41a6412d7ef..1765c73bec64c6ba02f19ef8ba06187c1289cf66 100644
--- a/Modules/Visualization/Monteverdi/src/main.cxx
+++ b/Modules/Visualization/Monteverdi/src/main.cxx
@@ -1,5 +1,6 @@
 /*
  * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
+ * Copyright (C) 2017 CS Systemes d'Information (CS SI)
  *
  * This file is part of Orfeo Toolbox
  *
@@ -31,6 +32,8 @@
 //
 // Qt includes (sorted by alphabetic order)
 //// Must be included before system/custom includes.
+#include <QCoreApplication>
+#include <QFile>
 #include <QPixmap>
 #include <QSplashScreen>
 
@@ -48,6 +51,7 @@
 
 //
 // Monteverdi includes (sorted by alphabetic order)
+#include "mvdAlgorithm.h"
 #include "mvdApplication.h"
 #include "mvdMainWindow.h"
 
@@ -65,34 +69,39 @@ struct Flags
 {
   Flags() :
     loadOTBApplications( false ),
-    forceNoGLSL( false )
+    forceNoGLSL( false ),
+    forceNoOverviews( false )
   {
   }
 
   bool loadOTBApplications: 1;
   bool forceNoGLSL: 1;
+  bool forceNoOverviews: 1;
 };
 
 
 /*****************************************************************************/
 /* FUNCTIONS DECLARATION                                                     */
 /*****************************************************************************/
+void
+DisplayUsage( const char * );
 
+void
+AppendFromTextFile( QStringList &, const QString & );
 
 /*****************************************************************************/
 /* MAIN                                                                      */
 /*****************************************************************************/
 int
-main( int argc, char* argv[] )
+main( int argc, char * argv[] )
 {
   QApplication qtApp( argc, argv );
-  Flags flags;
 
   //
   // 0. Splash-screen.
 #if USE_SPLASH_SCREEN
-  QPixmap pixmap(QLatin1String( ":/images/application_splash" ));
-  QSplashScreen splash(pixmap);
+  QPixmap pixmap( QLatin1String( ":/images/application_splash" ) );
+  QSplashScreen splash( pixmap );
   splash.show();
   qtApp.processEvents();//This is used to accept a click on the screen so that user can cancel the screen
 #endif
@@ -100,24 +109,16 @@ main( int argc, char* argv[] )
   //
   // 0bis. Parse pre-initialization command-line arguments.
   QStringList args( qtApp.arguments() );
+  Flags flags;
   {
+  QStringList filenames;
+
   for( QStringList::iterator it( args.begin() );
        it!=args.end(); )
     if( it->compare( "-h" )==0 ||
 	it->compare( "--help" )==0 )
       {
-      std::cout
-	<< mvd::ToLocalStdString(
-	  QCoreApplication::translate(
-	    PROJECT_NAME,
-	    "Usage: %1 [-h|--help] [-a|--applications] [<filename>...]\n"
-	    "  -n, --no-glsl      force OpenGL 1.x compatible rendering."
-	    "  -a, --applications load OTB-applications from OTB_APPLICATIONS_PATH."
-	    "  -h, --help         display this help message.\n"
-	  )
-	  .arg( QFileInfo( argv[ 0 ] ).baseName() )
-	)
-	<< std::endl;
+      DisplayUsage( argv[ 0 ] );
 
       return ERROR_CODE_USAGE;
       }
@@ -130,15 +131,44 @@ main( int argc, char* argv[] )
       it = args.erase( it );
       }
 
-    else if(it->compare( "-n" )==0 ||
+    else if(it->compare( "-g" )==0 ||
 	    it->compare( "--no-glsl" )==0 )
       {
       flags.forceNoGLSL = true;
 
       it = args.erase( it );
       }
+
+    else if(it->compare( "-o" )==0 ||
+	    it->compare( "--no-overviews" )==0 )
+      {
+      flags.forceNoOverviews = true;
+
+      it = args.erase( it );
+      }
+
+    else if(it->compare( "-t" )==0 ||
+	    it->compare( "--txt-file" )==0 )
+      {
+      it = args.erase( it );
+
+      if( it==args.end() ||
+	  it->startsWith( '-' ) )
+	{
+	DisplayUsage( argv[ 0 ] );
+
+	return ERROR_CODE_USAGE;
+	}
+
+      AppendFromTextFile( filenames, *it );
+
+      it = args.erase( it );
+      }
+
     else
       ++ it;
+
+  args << filenames;
   }
 
   //
@@ -205,14 +235,14 @@ main( int argc, char* argv[] )
 #if USE_OTB_APPS
     mainWindow.SetupOTBApplications();
 #else // USE_OTB_APPS
-    qWarning() << "OTB-applications support is not included in this build.";
+  qWarning() << "OTB-applications support is not included in this build.";
 #endif // USE_OTB_APPS
 
   //
   // 6. Load command-line filenames.
   args.pop_front();
 
-  mainWindow.ImportImages( args );
+  mainWindow.ImportImages( args, !flags.forceNoOverviews );
 
   //
   // 6. Let's go: run the application and return exit code.
@@ -232,3 +262,63 @@ main( int argc, char* argv[] )
 /*****************************************************************************/
 /* FUNCTIONS IMPLEMENTATION                                                  */
 /*****************************************************************************/
+void
+DisplayUsage( const char * argv0 )
+{
+  std::cout
+    << mvd::ToLocalStdString(
+      QCoreApplication::translate(
+	PROJECT_NAME,
+	"Usage: %1 "
+	"[-h|--help] "
+	"[-a|--applications] "
+	"[-g|--no-glsl] "
+	"[-o|--no-overviews] "
+	"[-t|--txt-file <filename>] "
+	"[<filename>...]\n"
+	"  -a, --applications    load OTB-applications from OTB_APPLICATIONS_PATH.\n"
+#if 0
+	"  -f, --file            load Monteverdi project file.\n"
+#endif
+	"  -h, --help            display this help message.\n"
+	"  -g, --no-glsl         force OpenGL 1.x compatible rendering.\n"
+	"  -o, --no-overviews    ignore build GDAL overviews step.\n"
+#if 0
+	"  -O, --force-overviews force build GDAL overviews step.\n"
+#endif
+	"  -t, --txt-file        read layer filenames from text file.\n"
+#if 0
+	"  -c, --csv-file        read layer filenames & settings from CSV file.\n"
+	"  -x, --xml-file        read layer filenames & settings from XML file.\n"
+#endif
+      )
+      .arg( QFileInfo( argv0 ).baseName() )
+    )
+    << std::endl;
+}
+
+/*****************************************************************************/
+void
+AppendFromTextFile( QStringList & strings,
+		    const QString & filename )
+{
+  QFile file( filename );
+
+  if( !file.open( QFile::ReadOnly | QFile::Text  ) )
+    throw mvd::SystemError(
+      mvd::ToStdString(
+	QCoreApplication::translate( "mvd::", "Failed to open '%1'" )
+	.arg( filename )
+      )
+    );
+
+  QTextStream is( &file );
+
+  while( !is.atEnd() )
+    {
+    QString line( is.readLine() );
+
+    if( !line.isNull() )
+      strings << line;
+    }
+}
diff --git a/Modules/Visualization/Monteverdi/src/mvdMainWindow.cxx b/Modules/Visualization/Monteverdi/src/mvdMainWindow.cxx
index 9420452e321f19bb13b9ba6521c5b79ae9415280..f02f3537d0a550a288d5cd17b108c8df503d59d2 100644
--- a/Modules/Visualization/Monteverdi/src/mvdMainWindow.cxx
+++ b/Modules/Visualization/Monteverdi/src/mvdMainWindow.cxx
@@ -1,5 +1,6 @@
 /*
  * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
+ * Copyright (C) 2017 CS Systemes d'Information (CS SI)
  *
  * This file is part of Orfeo Toolbox
  *
@@ -1362,7 +1363,7 @@ MainWindow
 /*****************************************************************************/
 void
 MainWindow
-::ImportImages( const QStringList & filenames )
+::ImportImages( const QStringList & filenames, bool enableOverviews )
 {
   if( filenames.isEmpty() )
     return;
@@ -1376,7 +1377,8 @@ MainWindow
       )
     );
 
-    if( !( value.isValid() ? value.toBool() : OVERVIEWS_ENABLED_DEFAULT ) ||
+    if( enableOverviews &&
+	( value.isValid() ? value.toBool() : OVERVIEWS_ENABLED_DEFAULT ) &&
 	!BuildGDALOverviews( filenames ) )
       return;
   }
@@ -1809,7 +1811,8 @@ MainWindow
   // Select filename.
   QString caption(tr("Open file..."));
   ImportImages(
-    otb::GetOpenFileNames( this, caption )
+    otb::GetOpenFileNames( this, caption ),
+    true
   );
 }
 
diff --git a/Modules/Wrappers/ApplicationEngine/include/otbWrapperApplication.h b/Modules/Wrappers/ApplicationEngine/include/otbWrapperApplication.h
index 4748ab0c5ddc71a9a1ffff72b8ba89585d86bc8d..421ef9a4a71e239ba313477d0d9b9fa25fdff714 100644
--- a/Modules/Wrappers/ApplicationEngine/include/otbWrapperApplication.h
+++ b/Modules/Wrappers/ApplicationEngine/include/otbWrapperApplication.h
@@ -307,7 +307,7 @@ public:
    */
   bool IsParameterMissing(const std::string &key) const;
 
-  /* Set an default integer value, must used in the
+  /* Set a default integer value, must be used in the
    * DoInit when setting a value by default
    * for the parameter
    *
@@ -319,7 +319,17 @@ public:
    */
   void SetDefaultParameterInt(std::string parameter, int value);
 
-  /* Set a default floating value, must used in the
+  /* Get the default integer value of a parameter
+   *
+   * Can be called for types :
+   * \li ParameterType_Int
+   * \li ParameterType_Float
+   * \li ParameterType_Radius
+   * \li ParameterType_Choice
+   */
+  int GetDefaultParameterInt(std::string parameter);
+
+  /* Set a default floating value, must be used in the
    * DoInit when setting a value by default
    * for the parameter
    *
@@ -328,6 +338,13 @@ public:
    */
   void SetDefaultParameterFloat(std::string parameter, float value);
 
+  /* Get the default floating value of a parameter
+   *
+   * Can be called for types :
+   * \li ParameterType_Float
+   */
+  float GetDefaultParameterFloat(std::string parameter);
+
   /** Set a default pixel type for an output image parameter
    *
    * \param[in] parameter Name of the output image parameter
diff --git a/Modules/Wrappers/ApplicationEngine/include/otbWrapperNumericalParameter.h b/Modules/Wrappers/ApplicationEngine/include/otbWrapperNumericalParameter.h
index de97a3471b75a47f0a8cdcafa67b965b76e7d199..54871040f393428f3febbba17ab0374de14b1934 100644
--- a/Modules/Wrappers/ApplicationEngine/include/otbWrapperNumericalParameter.h
+++ b/Modules/Wrappers/ApplicationEngine/include/otbWrapperNumericalParameter.h
@@ -62,8 +62,8 @@ public:
   /** Set the value */
   void SetValue( ScalarType value)
   {
-    // TODO check minimum/maximum
-    m_Value = value;
+    m_Value = ( value < m_MinimumValue ) ? m_MinimumValue :
+              ( value < m_MaximumValue ) ? value : m_MaximumValue ;
 
     // Set Active only if the parameter is not automatically set
     if (!GetAutomaticValue())
diff --git a/Modules/Wrappers/ApplicationEngine/src/otbWrapperApplication.cxx b/Modules/Wrappers/ApplicationEngine/src/otbWrapperApplication.cxx
index 31f46f3a019e6fe335ebcae5535b4b6284bc2f4a..cd34d9bddcb85596a19be8c969c11ad23dda56a5 100644
--- a/Modules/Wrappers/ApplicationEngine/src/otbWrapperApplication.cxx
+++ b/Modules/Wrappers/ApplicationEngine/src/otbWrapperApplication.cxx
@@ -645,6 +645,8 @@ int Application::ExecuteAndWriteOutput()
 {
   m_Chrono.Restart();
 
+  m_Logger->LogSetupInformation();
+
   int status = this->Execute();
 
   if (status == 0)
@@ -1040,6 +1042,37 @@ void Application::SetDefaultParameterInt(std::string parameter, int value)
     }
 }
 
+int Application::GetDefaultParameterInt(std::string parameter)
+{
+  Parameter* param = GetParameterByKey(parameter);
+  int ret = 0 ;
+  if (dynamic_cast<RadiusParameter*>(param))
+    {
+    RadiusParameter* paramRadius = dynamic_cast<RadiusParameter*>(param);
+    ret = paramRadius->GetDefaultValue();
+    }
+   else if (dynamic_cast<IntParameter*>(param))
+    {
+    IntParameter* paramInt = dynamic_cast<IntParameter*>(param);
+    ret = paramInt->GetDefaultValue();
+    }
+  else if (dynamic_cast<FloatParameter*>(param))
+    {
+    FloatParameter* paramFloat = dynamic_cast<FloatParameter*>(param);
+    ret = paramFloat->GetDefaultValue();
+    }
+  else if (dynamic_cast<RAMParameter*>(param))
+    {
+    RAMParameter* paramRAM = dynamic_cast<RAMParameter*>(param);
+    ret = paramRAM->GetDefaultValue();
+    }
+  else
+    {
+    // log
+    }
+  return ret;
+}
+
 void Application::SetDefaultParameterFloat(std::string parameter, float value)
 {
   Parameter* param = GetParameterByKey(parameter);
@@ -1052,6 +1085,18 @@ void Application::SetDefaultParameterFloat(std::string parameter, float value)
     }
 }
 
+float Application::GetDefaultParameterFloat(std::string parameter)
+{
+  Parameter* param = GetParameterByKey(parameter);
+
+  if (dynamic_cast<FloatParameter*>(param))
+    {
+    FloatParameter* paramFloat = dynamic_cast<FloatParameter*>(param);
+    return paramFloat->GetDefaultValue();
+    }
+  return 0;
+}
+
 void Application::SetDefaultOutputPixelType(std::string parameter, ImagePixelType type)
 {
   Parameter* param = GetParameterByKey(parameter);
diff --git a/SuperBuild/CMake/External_shark.cmake b/SuperBuild/CMake/External_shark.cmake
index ce8486db084935352b4266fc384f40be3604a29c..33934d4bb943dce00faceb9d6910534197a2342e 100644
--- a/SuperBuild/CMake/External_shark.cmake
+++ b/SuperBuild/CMake/External_shark.cmake
@@ -30,8 +30,8 @@ ADD_SUPERBUILD_CMAKE_VAR(SHARK BOOST_LIBRARYDIR)
 
 ExternalProject_Add(SHARK
   PREFIX SHARK
-  URL "https://github.com/Shark-ML/Shark/archive/349f29bd71c370e0f88f7fc9aa66fa5c4768fcb0.zip"
-  URL_MD5 d6e4310f943e8dda4a0151612b5c62ce
+  URL "https://github.com/Shark-ML/Shark/archive/2fd55e2b83f0666d05b403b291712668f4b76a13.zip"
+  URL_MD5 863bb5f0d94b01be5292867beb05a0bb
   SOURCE_DIR ${SHARK_SB_SRC}
   BINARY_DIR ${SHARK_SB_BUILD_DIR}
   INSTALL_DIR ${SB_INSTALL_PREFIX}
@@ -45,6 +45,7 @@ ExternalProject_Add(SHARK
   -DENABLE_HDF5:BOOL=OFF
   -DENABLE_CBLAS:BOOL=OFF
   -DENABLE_OPENMP:BOOL=${OTB_USE_OPENMP}
+  -DSHARK_INSTALL_LIB_DIR:STRING=lib/
   ${SHARK_SB_CONFIG}
   CMAKE_COMMAND ${SB_CMAKE_COMMAND}
   LOG_DOWNLOAD 1
diff --git a/SuperBuild/patches/SHARK/shark-2-ext-num-literals-all.diff b/SuperBuild/patches/SHARK/shark-2-ext-num-literals-all.diff
new file mode 100644
index 0000000000000000000000000000000000000000..0b964c1b9ada7aa4409f0f032285a70723caacfe
--- /dev/null
+++ b/SuperBuild/patches/SHARK/shark-2-ext-num-literals-all.diff
@@ -0,0 +1,13 @@
+diff -burN Shark.orig/CMakeLists.txt Shark/CMakeLists.txt
+--- Shark.orig/CMakeLists.txt	2018-02-05 18:04:58.012612932 +0100
++++ Shark/CMakeLists.txt	2018-02-05 18:20:50.032233165 +0100
+@@ -415,6 +415,9 @@
+ #####################################################################
+ #                       General Path settings
+ #####################################################################
++if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
++  add_definitions(-fext-numeric-literals)
++endif()
+ include_directories( ${shark_SOURCE_DIR}/include )
+ include_directories( ${shark_BINARY_DIR}/include )
+ add_subdirectory( include )
diff --git a/SuperBuild/patches/SHARK/shark-2-find-boost-all.diff b/SuperBuild/patches/SHARK/shark-2-find-boost-all.diff
deleted file mode 100644
index a97c1ac4afd1f56118fdba14cf7b993755bb5c00..0000000000000000000000000000000000000000
--- a/SuperBuild/patches/SHARK/shark-2-find-boost-all.diff
+++ /dev/null
@@ -1,16 +0,0 @@
-diff -burN Shark-349f29bd71c370e0f88f7fc9aa66fa5c4768fcb0.orig/CMakeLists.txt Shark-349f29bd71c370e0f88f7fc9aa66fa5c4768fcb0/CMakeLists.txt
---- Shark-349f29bd71c370e0f88f7fc9aa66fa5c4768fcb0.orig/CMakeLists.txt	2017-08-22 11:31:50.472052695 +0200
-+++ Shark-349f29bd71c370e0f88f7fc9aa66fa5c4768fcb0/CMakeLists.txt	2017-08-22 11:32:36.448358789 +0200
-@@ -141,10 +141,8 @@
- 
- find_package( 
- 	Boost 1.48.0 REQUIRED COMPONENTS
--	system date_time filesystem
--	program_options serialization thread
--	unit_test_framework
--)
-+	serialization
-+	)
- 
- if(NOT Boost_FOUND)
- 	message(FATAL_ERROR "Please make sure Boost 1.48.0 is installed on your system")