diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5abbcf10b0f5fa68ad0967544d9a0448669fe8db..2e150a5f241918ccc1ad0069e1c0465f87c279e9 100755
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
 # Change Log
-All notable changes to Let It Snow (LIS) will be documented in this file.
+
+Since 2020-05, and 1.7 version, all notable changes are directly recorded inside gitlab: https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/-/releases.
+
+Previously, all notable changes to Let It Snow (LIS) were documented in this file.
 
 
 ## [1.6] - 2020-04
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c2155125f7bad93e49a53e4755449fe567fefc9f..5c18ee6892f044a8284d230b174189800099ab3a 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -20,11 +20,12 @@
 
 PROJECT(lis)
 
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+CMAKE_MINIMUM_REQUIRED(VERSION 3.5)
 
 # Find necessary packages
 
 find_package(GDAL REQUIRED)
+find_package(Boost REQUIRED filesystem serialization)
 
 
 if(NOT GDAL_FOUND)
@@ -54,8 +55,8 @@ find_package( PythonLibs 3.8 REQUIRED)
 include_directories( ${PYTHON_INCLUDE_DIRS} )
  
 # Link to the Orfeo ToolBox
-# LIS required OTB 7.4
-SET(OTB_MIN_VERSION "7.4.1")
+# LIS required OTB 8.1
+SET(OTB_MIN_VERSION "8.1.2")
 
 find_package(OTB ${OTB_MIN_VERSION} REQUIRED)
 if(OTB_FOUND)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index eef5cb5c57bf63d91f2e08fb62f524aa22c8f0f8..59b879cd74bf2be98ec90a3383c53b3a5acd4988 100755
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -106,8 +106,8 @@ version.
 
 - Check that the [milestone](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/-/milestones) corresponding to the version to be realeased is up to date (with all issues fixed by this realease)
 - Check that all issues and corresponding merge requests have been merged to develop.
-- If necessary, create a merge request to update python/version.py file with the new version.
-- If necessary, push a new Data-LIS archive on Zenodo (https://zenodo.org/record/7352173), don't forget to update the main Readme to point to the new link.
+- Create a merge request to update python/setup.py.in file with the new version.
+- If necessary, push a new Data-LIS archive on Zenodo (https://zenodo.org/records/8214966), don't forget to update the main Readme to point to the new link.
 - Create a tag corresponding to the version you want to release. You can do this through gitlab or using git commands.
 - Launch validation tests on this tag.
 - Create a release associated to the tag through gitlab, describing new features and bug fixes (according to the milestone). Sort issues by category: Features, Fixes, Other changes.
diff --git a/Dockerfile b/Dockerfile
index d31568faf81d60d1937faf9f2df9ff51b32f93a8..7e337956847cb845b75238520aeb6ce3ff7f85c4 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -25,22 +25,20 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         software-properties-common \
         git \
         wget \
-        tar \
         unzip \
         # packages needed for compilation
         cmake \
         swig \
         ninja-build \
         python3-dev \
-        pkg-config \
+        libboost-filesystem-dev \
+        libboost-serialization-dev \
+        libfftw3-dev \
+        libgsl-dev \
         libinsighttoolkit4-dev \
-        libopenthreads-dev \
-        libossim-dev \
-        libtinyxml-dev \
         libmuparser-dev \
         libmuparserx-dev \
-        libfftw3-dev \
-        libgsl-dev \
+        libtinyxml-dev \
         mono-runtime-common \
         && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
@@ -66,8 +64,8 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     ln -s /usr/lib/python3/dist-packages/vtkgdcmPython.cpython-38-x86_64-linux-gnu.so /usr/lib/python/dist-packages/vtkgdcmPython.so && \
     mkdir -p /root/otb-build/build && \
     cd /root/otb-build && \
-    wget -q --ca-certificate=/usr/local/share/ca-certificates/ca-bundle.crt https://www.orfeo-toolbox.org/packages/archives/OTB/OTB-7.4.2.tar.gz -O /tmp/OTB.tar.gz && \
-    tar -xvzf /tmp/OTB.tar.gz && \
+    wget -q --ca-certificate=/usr/local/share/ca-certificates/ca-bundle.crt https://www.orfeo-toolbox.org/packages/archives/OTB/OTB-8.1.2.zip -O /tmp/OTB.zip && \
+    unzip /tmp/OTB.zip && \
     cd /root/otb-build/build && \
     cmake \
         "-DBUILD_COOKBOOK:BOOL=OFF" \
@@ -87,7 +85,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         "-DCMAKE_BUILD_TYPE=Release" \
         -DCMAKE_INSTALL_PREFIX="/install/otb" -GNinja .. && \
     ninja install && \
-    rm -rf /root/otb-build /tmp/OTB.tar.gz
+    rm -rf /root/otb-build /tmp/OTB.zip
 
 # Build LIS
 ADD . /LIS_src/
@@ -118,7 +116,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 FROM ${REGISTRY_URL}ubuntu:20.04
 
 LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu celine.raille@thalesgroup.com"
-LABEL org.opencontainers.image.description="LIS + OTB 7.4 Container"
+LABEL org.opencontainers.image.description="LIS + OTB 8.1 Container"
 
 # system packages
 COPY cert[s]/* /usr/local/share/ca-certificates/
@@ -126,7 +124,6 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     apt-get update -y --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         ca-certificates \
-        file \
         git \
         libpython3.8 \
         python3 \
@@ -135,15 +132,13 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         python3-lxml \
         software-properties-common \
         # for OTB
+        libboost-filesystem1.71.0 \
+        libfftw3-3 \
+        libgsl23 \
         libinsighttoolkit4.13 \
-        libopenthreads21 \
-        libossim1 \
-        libtinyxml2.6.2v5 \
         libmuparser2v5 \
         libmuparserx4.0.7 \
-        libfftw3-3 \
-        libgsl23 \
-        libgslcblas0 \
+        libtinyxml2.6.2v5 \
         && \
     # GDAL
     add-apt-repository ppa:ubuntugis/ubuntugis-unstable --yes && \
@@ -170,13 +165,13 @@ RUN mv /usr/local/lib/otbapp_* /usr/local/lib/otb/applications/
 # install DANS GINA from builder
 COPY --from=builder /install/dans /usr/local
 
-# install rastertools and additionnal dependancies
+# install rastertools and add additional dependencies
 ARG GIT_USER
 ARG GIT_TOKEN
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
     mkdir -p /install && \
     cd /install && \
-    git -c http.sslVerify=false clone --branch master https://${GIT_USER}:${GIT_TOKEN}@gitlab.cnes.fr/eolab/processing/rastertools.git && \
+    git clone --branch 0.6.1 https://github.com/CNES/rastertools.git && \
     export PYTHONWARNINGS="ignore:Unverified HTTPS request" && \
     pip3 install \
     --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org \
diff --git a/README.md b/README.md
index af641781c818d55ebaf18a5f61b92f9ba8bcc84b..a5143942f35e596664f4c71052e5851dac4335af 100755
--- a/README.md
+++ b/README.md
@@ -100,7 +100,7 @@ and can be **overwritten** by the following command line options:
 * "-d", "--densification_products_list" - Path to densification products, containing L8 snow products (optional)
 * "-b", "--date_start"                  - Start date defining the synthesis period
 * "-e", "--date_stop"                   - Stop date defining the synthesis period
-* "-m", "--date_margin"                 - date margin related to start and stop date
+* "-m", "--date_margin"                 - date margin related to start and stop date, set to 30 days by default
 * "-o", "--output_dir"                  - Path to output directory; which will contains synthesis product
 * "-l", "--log_level"                   - Log level ('INFO', 'DEBUG', 'WARNING', 'ERROR')
 * "-c", "--config_file"                 - Path to configuration file
@@ -135,6 +135,16 @@ LIS FSC generates the following files for S2 :
 or
 - Raster: **LIS\_S2-SNOW-FSC-TOC\_<*tag*>_<*chain_version*>_<*product_counter*>.tif** (if Tree cover density is not defined) 
 
+The QCFLAGS raster content is :
+  * 1st bit: sun too low for an accurate slope correction (from MAJA MG2 mask)
+  * 2nd bit: sun tangent to slope (from MAJA MG2 mask)
+  * 3rd bit: water mask
+  * 4th bit: tree cover density > 90%
+  * 5th bit: pixels classified under clouds
+  * 6th bit: tree cover density undefined or unavailable
+  * 7th bit: artificial 100 fsc due to shaded snow detection (only available if shaded_snow_hillshade or shaded_snow_casted
+  is set to True in lis_configuration.json file)
+
 LIS FSC generates the following files for L8 :
 - Raster: **LIS\_L8-SNOW-MSK\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, 
 
@@ -145,8 +155,8 @@ Moreover, in the tmp directory:
 * LIS_SNOW_ALL: Binary mask of snow and clouds.
   * 1st bit: Snow mask after pass1
   * 2nd bit: Snow mask after pass2
-  * 3rd bit: Clouds detected at pass0
-  * 4th bit: Clouds refined  at pass0
+  * 3rd bit: Clouds detected at pass1
+  * 4th bit: Clouds refined  at pass2
   * 5th bit: Clouds initial (all_cloud)
   * 6th bit: Slope flag (optional 1: bad slope correction)
 
@@ -181,9 +191,10 @@ LIS synthesis generates the following files:
 
 - Raster: **LIS\_<*mission*>-SNOW-SOD\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the date of snow appearance (Snow Onset Date), defined as the first date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
 
-- Raster: **LIS\_<*mission*>-SNOW-NOBS\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the number of clear observations to compute the SCD, SMOD and SOD syntheses
+- Raster: **LIS\_<*mission*>-SNOW-NOBS\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the number of clear observations to compute the SCD, SMOD and SOD syntheses. Only the clear sky observations from within the synthesis time period are taken into account. The NOBS value is raised by one if at least one clear sky observation is observed during the margin time period before the beginning of the synthesis time period. The value is also incremented by one if a clear sky observation is spotted during the margin time period after the end of the synthesis time period.
+
 
-- Raster: **LIS\_<*mission*>-SNOW-NSP\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the number of snow periods (NSP). A snow period is a duration with snow between two durations without snow.
+- Raster: **LIS\_<*mission*>-SNOW-NSP\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the number of snow periods (NSP). A snow period is a continuous duration with snow.
 
 with :
 - <**tag**> : <**tile**><**synthesis_start_date**><**synthesis_stop_date**>
@@ -219,8 +230,8 @@ LIS processing chain uses CMake (http://www.cmake.org) for building from source.
 
 Following a summary of the required dependencies: 
 
-* GDAL >=3.0
-* OTB >= 7.4
+* GDAL >=3.4
+* OTB >= 8.1
 * Python interpreter >= 3.8.4
 * Python libs >= 3.8.4
 * Python packages:
@@ -228,7 +239,7 @@ Following a summary of the required dependencies:
   * lxml
   * matplotlib
   * rasterio
-* Rastertools (https://gitlab.cnes.fr/eolab/processing/rastertools) branch master (since LIS-1.11, if not installed, shaded snow will not be fully detected)
+* Rastertools (https://github.com/CNES/rastertools) branch master (since LIS-1.11, if not installed, shaded snow will not be fully detected)
 Python package dependencies:
   * pyscaffold
   * geopandas
diff --git a/analysis/snow_annual_map.py b/analysis/snow_annual_map.py
index a706c562399652e149c0f4e90af078ad6c73c176..c9e270e8b8f1bbd45b0893bf97e03789798dc157 100755
--- a/analysis/snow_annual_map.py
+++ b/analysis/snow_annual_map.py
@@ -37,10 +37,7 @@ from analysis.app_wrappers import band_math, get_app_output, super_impose, band_
 from s2snow.utils import str_to_datetime, datetime_to_str
 from s2snow.utils import write_list_to_file, read_list_from_file
 from s2snow.snow_product_parser import load_snow_product
-
-# Build gdal option to generate maks of 1 byte using otb extended filename
-# syntaxx
-GDAL_OPT = "?&gdal:co:NBITS=1&gdal:co:COMPRESS=DEFLATE"
+from s2snow.lis_constant import GDAL_OPT, GDAL_OPT_1B
 
 
 def parse_xml(filepath):
@@ -72,12 +69,12 @@ def merge_masks_at_same_date(snow_product_list, merged_snow_product, threshold=1
     #   and then the densification products
     img_index = list(range(1, len(snow_product_list) + 1))
     expression_merging = "".join(["(im" + str(i) + "b1<=" + str(threshold) + "?im" + str(i) + "b1:" for i in img_index])
-    expression_merging += "im" + str(img_index[-1]) + "b1"
+    expression_merging += f"im{img_index[-1]}b1"
     expression_merging += "".join([")" for i in img_index])
 
     img_list = [i.get_snow_mask() for i in snow_product_list]
     bandMathApp = band_math(img_list,
-                            merged_snow_product,
+                            f"{merged_snow_product}?{GDAL_OPT}",
                             expression_merging,
                             ram,
                             otb.ImagePixelType_uint8)
@@ -179,7 +176,7 @@ class snow_annual_map():
                         if not os.path.exists(reprojected_mask):
                             super_impose_app = super_impose(s2_footprint_ref,
                                                             original_mask,
-                                                            reprojected_mask,
+                                                            f"{reprojected_mask}?{GDAL_OPT}",
                                                             "nn",
                                                             int(self.label_no_data),
                                                             self.ram,
@@ -230,14 +227,14 @@ class snow_annual_map():
                 self.resulting_snow_mask_dict[key] = self.product_dict[key][0].get_snow_mask()
 
         # convert the snow masks into binary snow masks
-        expression = "(im1b1==" + self.label_snow + ")?1:0"
-        self.binary_snowmask_list = self.convert_mask_list(expression, "snow", GDAL_OPT)
+        expression = f"(im1b1=={self.label_snow})?1:0"
+        self.binary_snowmask_list = self.convert_mask_list(expression, "snow", f"?{GDAL_OPT_1B}")
         logging.debug("Binary snow mask list:")
         logging.debug(self.binary_snowmask_list)
 
         # convert the snow masks into binary cloud masks
-        expression = "im1b1==" + self.label_cloud + "?1:(im1b1==" + self.label_no_data + "?1:(im1b1==" + self.label_no_data_old + "?1:0))"
-        self.binary_cloudmask_list = self.convert_mask_list(expression, "cloud", GDAL_OPT)
+        expression = f"im1b1=={self.label_cloud}?1:(im1b1=={self.label_no_data}?1:(im1b1=={self.label_no_data_old}?1:0))"
+        self.binary_cloudmask_list = self.convert_mask_list(expression, "cloud", f"?{GDAL_OPT_1B}")
         logging.debug("Binary cloud mask list:")
         logging.debug(self.binary_cloudmask_list)
 
@@ -255,7 +252,7 @@ class snow_annual_map():
         expression = "+".join(["im1b" + str(i) for i in band_index])
 
         bandMathApp = band_math([self.multitemp_cloud_vrt],
-                                self.cloud_occurence_img,
+                                f"{self.cloud_occurence_img}?{GDAL_OPT}",
                                 expression,
                                 self.ram,
                                 otb.ImagePixelType_uint16)
@@ -277,7 +274,7 @@ class snow_annual_map():
         logging.info("Scale by 100 multitemp snow mask vrt")
         multitemp_snow100 = op.join(self.path_tmp, "multitemp_snow100.tif")
         bandMathXApp = band_mathX([self.multitemp_snow_vrt],
-                                  multitemp_snow100,
+                                  f"{multitemp_snow100}?{GDAL_OPT}",
                                   "im1 mlt 100",
                                   self.ram,
                                   otb.ImagePixelType_uint8)
@@ -288,7 +285,7 @@ class snow_annual_map():
         multitemp_snow100_gapfilled = op.join(self.path_tmp, "multitemp_snow100_gapfilled.tif")
         app_gap_filling = gap_filling(multitemp_snow100,
                                       self.multitemp_cloud_vrt,
-                                      multitemp_snow100_gapfilled + "?&gdal:co:COMPRESS=DEFLATE",
+                                      f"{multitemp_snow100_gapfilled}?{GDAL_OPT}",
                                       self.input_dates_filename,
                                       self.output_dates_filename,
                                       self.ram,
@@ -306,7 +303,7 @@ class snow_annual_map():
         # threshold to 0 or 1
         logging.info("Round to binary series of snow occurrence")
         bandMathXApp = band_mathX([img_in],
-                                  self.gapfilled_timeserie + GDAL_OPT,
+                                  f"{self.gapfilled_timeserie}?{GDAL_OPT_1B}",
                                   "(im1 mlt 2) dv 100",
                                   self.ram,
                                   otb.ImagePixelType_uint8)
@@ -318,7 +315,7 @@ class snow_annual_map():
         expression = "+".join(["im1b" + str(i) for i in band_index])
 
         bandMathApp = band_math([self.gapfilled_timeserie],
-                                self.annual_snow_map,
+                                f"{self.annual_snow_map}?{GDAL_OPT}",
                                 expression,
                                 self.ram,
                                 otb.ImagePixelType_uint16)
@@ -373,7 +370,7 @@ class snow_annual_map():
         binary_mask_list = []
         for mask_date in sorted(self.resulting_snow_mask_dict):
             binary_mask = op.join(self.path_tmp,
-                                  mask_date + "_" + type_name + "_binary.tif")
+                                  f"{mask_date}_{type_name}_binary.tif")
             binary_mask = self.extract_binary_mask(self.resulting_snow_mask_dict[mask_date],
                                                    binary_mask,
                                                    expression,
diff --git a/analysis/snow_annual_map_analysis_schema.json b/analysis/snow_annual_map_analysis_schema.json
index 289310d6c806a04f9d917e2ae10397401fdb35e3..74fac52d640c529366c03936a7c1a8b3598f14ad 100755
--- a/analysis/snow_annual_map_analysis_schema.json
+++ b/analysis/snow_annual_map_analysis_schema.json
@@ -81,7 +81,7 @@
             "type": "string"
         },
         "date_margin": {
-            "default": 15,
+            "default": 30,
             "description": "The margin ouside the date range to use for better interpolation results (in days) (optional)",
             "id": "date_margin",
             "title": "The Date_margin schema.",
diff --git a/analysis/snow_annual_map_evaluation.py b/analysis/snow_annual_map_evaluation.py
index 7701ce720f70eadfce8811969fbd0d899707215d..916dc69762c142a94d788418c5be72e0a4f37890 100755
--- a/analysis/snow_annual_map_evaluation.py
+++ b/analysis/snow_annual_map_evaluation.py
@@ -37,14 +37,7 @@ from analysis.snow_annual_map import merge_masks_at_same_date
 from analysis.app_wrappers import band_math, super_impose, confusion_matrix
 from s2snow.utils import get_raster_as_array, str_to_datetime, write_list_to_file, read_list_from_file
 from analysis.snow_annual_map import snow_annual_map
-
-# Build gdal option to generate maks of 1 byte using otb extended filename
-# syntaxx
-GDAL_OPT = "?&gdal:co:NBITS=1&gdal:co:COMPRESS=DEFLATE"
-
-# Build gdal option to generate maks of 2 bytes using otb extended filename
-# syntax
-GDAL_OPT_2B = "?&gdal:co:NBITS=2&gdal:co:COMPRESS=DEFLATE"
+from s2snow.lis_constant import GDAL_OPT
 
 
 def get_raster_extent_as_poly(raster1):
@@ -176,8 +169,7 @@ class snow_annual_map_evaluation(snow_annual_map):
                 self.resulting_snow_mask_dict[comparison_tag] = self.product_dict[key][0].get_snow_mask()
 
         # convert the snow masks into binary snow masks
-        expression = "im1b1==" + self.label_cloud + "?2:(im1b1==" + self.label_no_data + "?2:" \
-                     + "(im1b1==" + self.label_snow + ")?1:0)"
+        expression = f"im1b1=={self.label_cloud}?2:(im1b1=={self.label_no_data}?2:(im1b1=={self.label_snow})?1:0)"
         self.binary_snowmask_list = self.convert_mask_list(expression, "snow_eval")
         logging.debug("Binary snow mask list:")
         logging.debug(self.binary_snowmask_list)
@@ -198,7 +190,7 @@ class snow_annual_map_evaluation(snow_annual_map):
             if not os.path.exists(mask_out):
                 super_impose_app = super_impose(self.annual_snow_map,
                                                 mask_in,
-                                                mask_out + GDAL_OPT_2B,
+                                                f"{mask_out}?&gdal:co:NBITS=2{GDAL_OPT}",
                                                 "linear",
                                                 2,
                                                 self.ram,
diff --git a/app/let_it_snow_synthesis.py b/app/let_it_snow_synthesis.py
index 183b4622fd168f3ddb53eba97784dcc04d3e2b90..dff3a09c4919515855d82f3cbddb2819fdd4a64b 100755
--- a/app/let_it_snow_synthesis.py
+++ b/app/let_it_snow_synthesis.py
@@ -30,7 +30,7 @@ from s2snow.lis_exception import LisConfigurationException, NoSnowProductFound,
 from s2snow.snow_synthesis import compute_snow_synthesis
 from s2snow.synthesis_config import SynthesisConfig
 from s2snow.lis_constant import INPUT_PARAMETER_ERROR, CONFIGURATION_ERROR, TMP_DIR, LOG_FILE, NO_SNOW_PRODUCT_FOUND, \
-    NO_ZIP_FOUND, NO_PRODUCT_MATCHING_SYNTHESIS, UNKNOWN_PLATFORM, OUTPUT_UNDEFINED
+    NO_ZIP_FOUND, NO_PRODUCT_MATCHING_SYNTHESIS, UNKNOWN_PLATFORM, OUTPUT_UNDEFINED, DEFAULT_DATE_MARGIN
 from s2snow.parser import create_synthesis_argument_parser
 
 
@@ -44,7 +44,7 @@ def main(args):
             output_dir = global_config.get("output_dir", None)
             date_start = global_config.get("date_start", None)
             date_stop = global_config.get("date_stop", None)
-            date_margin = global_config.get("date_margin", None)
+            date_margin = global_config.get("date_margin", DEFAULT_DATE_MARGIN)
             log_level = global_config.get("log_level", "INFO")
             config_file = global_config.get("config_file", None)
             h2_chain_version = global_config.get("chain_version", None)
@@ -57,7 +57,7 @@ def main(args):
         config_file = None
         date_start = None
         date_stop = None
-        date_margin = None
+        date_margin = DEFAULT_DATE_MARGIN
         h2_chain_version = None
         product_counter = None
         log_level = None
@@ -132,8 +132,10 @@ def main(args):
         logging.info("product counter: %s", args.product_counter)
         product_counter = args.product_counter
 
-    let_it_snow_synthesis(config_file, tile_id, input_products_list, densification_products_list, output_dir,
-                                 date_start, date_stop, date_margin, h2_chain_version, product_counter)
+    let_it_snow_synthesis(
+        config_file, tile_id, input_products_list, densification_products_list, output_dir,
+        date_start, date_stop, date_margin, h2_chain_version, product_counter
+    )
 
 
 def let_it_snow_synthesis(config_file, tile_id, input_products_list, densification_products_list, output_dir,
diff --git a/doc/LIS_configuration_for_experts.md b/doc/LIS_configuration_for_experts.md
index 4db86a13f46a1caadf2e1644ef94b954391f9461..4c94eac43455c877cf1f3e3cb0ce16c27ca196e7 100755
--- a/doc/LIS_configuration_for_experts.md
+++ b/doc/LIS_configuration_for_experts.md
@@ -22,10 +22,10 @@ This part describes the expert use of the fsc configuration file, in particular
 - tcd: command line value will be used first, launch configuration value will be used as second, and finally configuration value will be used at last
 
 #### shaded_snow
-- "detect_shaded_snow": Activate or de activate shaded snow detection
+- "shaded_snow_hillshade": Activate or deactivate shaded snow detection with the hillshade algorithm
 - "hillshade_lim": Define hillshade limitation threshold
 - "shaded_snow_pass": Define shaded snow pass threshold
-- "rastertools_use": Activate or de rastertools hillshade detection
+- "shaded_snow_casted": Activate or deactivate shaded snow detection with the casted shadow rastertools-based algorithm
 - "rastertools_window_size": Define size of tiles to distribute processing
 - "rastertools_radius": Max distance (in pixels) around a point to evaluate horizontal elevation angle
 
@@ -58,7 +58,7 @@ This part describes the expert use of the fsc configuration file, in particular
 - densification_products_list: The densification products list, containing the paths of L8 snow products (optional). Overload using : "-d" or "--densification_products_list"
 - date_start: Start of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY) (mandatory). Overload using : "-b" or "--date_start"
 - date_stop: Stop of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY) (mandatory). Overload using : "-e" or "--date_stop"
-- date_margin: The margin outside the date range to use for better interpolation results (in days) (optional). Overload using : "-m" or "--date_margin"
+- date_margin: The margin outside the date range to use for better interpolation results (in days) (optional). Overload using : "-m" or "--date_margin". If not specified, set by default to 30 days.
 - output_dir: Output directory containing LIS products (mandatory). Overload using : "-o" or "--output_dir"
 - log_level:Log level : INFO, DEBUG, WARNING, ERROR (optional). Overload using : "-l" or "--log_level"
 - config_file: LIS configuration file - see synthesis_config_schema.json (mandatory). Overload using : "-c" or "--config_file"
diff --git a/doc/atbd/fsc_config_schema.json b/doc/atbd/fsc_config_schema.json
index c916d59af89ebdc055b4eb7b37858dc3a65bbf48..545ec00499dd547af5c13c2135ad2dcbe25e2dcb 100755
--- a/doc/atbd/fsc_config_schema.json
+++ b/doc/atbd/fsc_config_schema.json
@@ -272,11 +272,11 @@
         "shaded_snow": {
             "id": "shaded_snow",
             "properties": {
-                "detect_shaded_snow": {
-                    "id": "detect_shaded_snow",
+                "shaded_snow_hillshade": {
+                    "id": "shaded_snow_hillshade",
                     "default": true,
                     "description": "Activate shaded snow detection",
-                    "title": "the detect_shaded_snow schema",
+                    "title": "the shaded_snow_hillshade schema",
                     "type": "boolean"
                 },
                 "hillshade_lim": {
@@ -293,26 +293,26 @@
                     "title": "the shaded_snow_pass schema",
                     "type": "integer"
                 },
-                "rastertools_use": {
-                    "id": "rastertools_use",
+                "shaded_snow_casted": {
+                    "id": "shaded_snow_casted",
                     "default": true,
                     "description": "Activate rastertools hillshade detection",
-                    "title": "the rastertools_use schema",
+                    "title": "the shaded_snow_casted schema",
                     "type": "boolean"
                 },
                 "rastertools_window_size": {
                     "id": "rastertools_window_size",
-                    "default": 1024,
+                    "default": 2048,
                     "description": "Define size of tiles to distribute processing",
                     "title": "the rastertools_window_size schema",
-                    "type": "boolean"
+                    "type": "integer"
                 },
                 "rastertools_radius": {
                     "id": "rastertools_radius",
-                    "default": null,
+                    "default": 512,
                     "description": "Max distance (in pixels) around a point to evaluate horizontal elevation angle",
                     "title": "the rastertools_radius schema",
-                    "type": "boolean"
+                    "type": "integer"
                 }
             }
         },
diff --git a/doc/atbd/snow_annual_map_schema.json b/doc/atbd/snow_annual_map_schema.json
index a66f4ab21e57ffc2d548af14c8e8f8d7d8b55609..9b7158a2eca7e91d45bc0409b9a1d31ed0494543 100755
--- a/doc/atbd/snow_annual_map_schema.json
+++ b/doc/atbd/snow_annual_map_schema.json
@@ -81,7 +81,7 @@
             "type": "string"
         },
         "date_margin": {
-            "default": 15,
+            "default": 30,
             "description": "The margin ouside the date range to use for better interpolation results (in days) (optional)",
             "id": "date_margin",
             "title": "The Date_margin schema.",
diff --git a/doc/atbd/synthesis_launch_schema.json b/doc/atbd/synthesis_launch_schema.json
index c727723766ef8801c144476129e385fcd134ff43..d8048f70f36c342d8ae35f11273522484ba9aa97 100755
--- a/doc/atbd/synthesis_launch_schema.json
+++ b/doc/atbd/synthesis_launch_schema.json
@@ -35,7 +35,7 @@
             "type": "string"
         },
         "date_margin": {
-            "default": 15,
+            "default": 30,
             "description": "The margin outside the date range to use for better interpolation results (in days) (optional)",
             "id": "date_margin",
             "title": "The Date_margin schema.",
diff --git a/doc/lis_default_configuration.json b/doc/lis_default_configuration.json
index 8147c5ea3fa716f93e799566192d51097a574819..e831c97d5dd2069285eb97f6ba77f6f8a444ddfe 100755
--- a/doc/lis_default_configuration.json
+++ b/doc/lis_default_configuration.json
@@ -43,12 +43,12 @@
         "fscToc_Eq": "0.5*tanh(2.65*ndsi-1.42)+0.5"
     },
     "shaded_snow": {
-        "detect_shaded_snow": true,
+        "shaded_snow_hillshade": true,
         "hillshade_lim": 0.2,
         "shaded_snow_pass": 160,
-        "rastertools_use": true,
-        "rastertools_window_size": 1024,
-        "rastertools_radius":null
+        "shaded_snow_casted": true,
+        "rastertools_window_size": 2048,
+        "rastertools_radius": 512
     },
     "water_mask":{
          "water_mask_raster_values":[1]
diff --git a/doc/lis_preproc_configuration.json b/doc/lis_preproc_configuration.json
new file mode 100755
index 0000000000000000000000000000000000000000..8220f5fb24f4b750c8979c86e56bebfb0cd419b3
--- /dev/null
+++ b/doc/lis_preproc_configuration.json
@@ -0,0 +1,57 @@
+{
+    "general":{
+        "multi":10,
+        "mode": "S2",
+        "no_data":-10000,
+        "ram":2024,
+        "nb_threads":2,
+        "preprocessing":true
+         },
+    "vector":{
+        "generate_vector":true,
+        "generate_intermediate_vectors":false,
+        "use_gdal_trace_outline": true,
+        "gdal_trace_outline_dp_toler": 0,
+        "gdal_trace_outline_min_area":0
+    },
+    "cloud":{
+        "all_cloud_mask":1,
+        "high_cloud_mask":128,
+        "shadow_in_mask":32,
+        "shadow_out_mask":64,
+        "red_back_to_cloud":100,
+        "resize_factor":12,
+        "red_dark_cloud":300,
+        "strict_cloud_mask":false,
+        "rm_snow_inside_cloud":false,
+        "rm_snow_inside_cloud_dilation_radius": 5,
+        "rm_snow_inside_cloud_threshold": 0.85,
+        "rm_snow_inside_cloud_min_area": 250000
+    },
+    "snow":{
+        "dz":100,
+        "ndsi_pass1":0.4,
+        "red_pass1":200,
+        "ndsi_pass2":0.15,
+        "red_pass2":40,
+        "fsnow_lim":0.1,
+        "fsnow_total_lim":0.001,
+        "fclear_lim": 0.1
+    },
+    "fsc":{
+        "fscOg_Eq":"fscToc/(1-tcd)",
+        "fscToc_Eq": "0.5*tanh(2.65*ndsi-1.42)+0.5"
+    },
+    "shaded_snow": {
+        "shaded_snow_hillshade": true,
+        "hillshade_lim": 0.2,
+        "shaded_snow_pass": 160,
+        "shaded_snow_casted": true,
+        "rastertools_window_size": 2048,
+        "rastertools_radius": 512
+    },
+    "water_mask":{
+         "water_mask_raster_values":[1]
+    }
+}
+
diff --git a/doc/synthesis_launch_example.json b/doc/synthesis_launch_example.json
index e7b181d9f24829bd55f67adb43b3c4b9e060f94f..2609ab1e9f4fc9bcb544626c56bad3deaa78152d 100755
--- a/doc/synthesis_launch_example.json
+++ b/doc/synthesis_launch_example.json
@@ -8,7 +8,7 @@
     "date_stop": "31/01/2018",
     "output_dir": "XXX/SNOW_SYNTHESIS",
     "config_file": "XXX/SNOW_SYNTHESIS/synthesis_configuration.json",
-    "date_margin": 10,
+    "date_margin": 30,
     "densification_products_list": [
         "XXX/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20180115-103629-617_L2A_T31TCH_D_V1-9",
         "XXX/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20180131-103619-890_L2A_T31TCH_D_V1-9"
diff --git a/doc/tutorials/prepare_snow_annual_map_data.md b/doc/tutorials/prepare_snow_annual_map_data.md
index bf6e56c485bf94722863767b792ab404bb135dcf..5537408433f91b43c86b4cc68f3e138b5de4440b 100755
--- a/doc/tutorials/prepare_snow_annual_map_data.md
+++ b/doc/tutorials/prepare_snow_annual_map_data.md
@@ -38,7 +38,7 @@ def main():
     params = {"tile_id":"T32TPS",
               "date_start":"01/09/2017",
               "date_stop":"31/08/2018",
-              "date_margin":15,
+              "date_margin":30,
               "mode":"DEBUG",
               "input_products_list":[],
               # path_tmp is an actual parameter but must only be uncomment with a correct path
diff --git a/docker/README.md b/docker/README.md
index 62105d12461e246bfbd9bdedfc92b4effba1f95e..b48409eaa520d7c0a477c5c8f49a558251d4ffd1 100755
--- a/docker/README.md
+++ b/docker/README.md
@@ -1,6 +1,6 @@
 # How to use this Dockerfile
 
-## Retrieve OTB 7
+## Retrieve OTB 8
 OTB and its dependances can be retrieved at https://www.orfeo-toolbox.org/
 We need :
 * otb.tar.gz 
@@ -15,4 +15,18 @@ LIS can be retrieve from this repository by git clone and compress into the Dock
 ## Run 
 `docker run --rm -it lis`
 
-or define an entrypoint
\ No newline at end of file
+or define an entrypoint.
+
+## Validation
+
+You can validate the docker image created with the tests available in the "docker/tests" folder.
+
+There are currently four tests:
+- FSC snow coverage on l8
+- FSC snow coverage on S2 with classic hillshade method
+- FSC snow coverage on S2 with rastertools
+- Snow synthesis on one month
+
+To launch a test: `docker run -v ./docker/tests:/tests -v __input_data_folder__:/input-data -v __baseline_folder__:/baseline lis /tests/test-<name_test>.sh /input-data /baseline`
+
+The input data folder and baseline folder must be the same folders used for LIS validation tests.
\ No newline at end of file
diff --git a/docker/tests/test-fsc-snow-l8.sh b/docker/tests/test-fsc-snow-l8.sh
new file mode 100755
index 0000000000000000000000000000000000000000..edea06061cc67da1cb2fde8dc5772b65a5b875b3
--- /dev/null
+++ b/docker/tests/test-fsc-snow-l8.sh
@@ -0,0 +1,45 @@
+#!/bin/bash -u
+# Usage: test-fsc-snow-l8.sh <data-test> <baseline>
+
+DATA_TEST=$1
+BASELINE=$2
+
+test_name="l8"
+
+# Cleanup
+rm -rf ${test_name}
+
+# Run test
+let_it_snow_fsc.py \
+  -c ${DATA_TEST}/Landsat/lis_configuration.json \
+  -i ${DATA_TEST}/Landsat/LANDSAT8_OLITIRS_XS_20170308_N2A_France-MetropoleD0004H0001 \
+  -d ${DATA_TEST}/Landsat/LANDSAT8_OLITIRS_XS_20170308_N2A_France-MetropoleD0004H0001/SRTM/dem.tif \
+  -o ${test_name}
+
+
+# Output comparison
+errors=0
+
+echo "-- Comparison pass1"
+gdalcompare.py ${BASELINE}/l8_test/pass1.tif ${test_name}/tmp/snow_pass1.tif
+errors=$((errors+$?))
+
+echo "-- Comparison pass2"
+gdalcompare.py ${BASELINE}/l8_test/pass2.tif ${test_name}/tmp/snow_pass2.tif
+errors=$((errors+$?))
+
+echo "-- Comparison pass3"
+gdalcompare.py ${BASELINE}/l8_test/pass3.tif ${test_name}/tmp/snow_pass3.tif
+errors=$((errors+$?))
+
+echo "-- Comparison snow_all"
+gdalcompare.py ${BASELINE}/l8_test/snow_all.tif ${test_name}/tmp/LIS_SNOW_ALL.TIF
+errors=$((errors+$?))
+
+echo "-- Comparison final_mask"
+gdalcompare.py ${BASELINE}/l8_test/final_mask.tif ${test_name}/tmp/LIS_SEB.TIF
+errors=$((errors+$?))
+
+exit ${errors}
+
+
diff --git a/docker/tests/test-fsc-snow-s2-hillshade.sh b/docker/tests/test-fsc-snow-s2-hillshade.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d75dfac64414510b02655b4d960114032205b53f
--- /dev/null
+++ b/docker/tests/test-fsc-snow-s2-hillshade.sh
@@ -0,0 +1,31 @@
+#!/bin/bash -u
+# Usage: test-fsc-snow-s2-hillshade.sh <data-test> <baseline>
+
+DATA_TEST=$1
+BASELINE=$2
+
+test_name="shaded_snow_no_relief_shadow_mask"
+
+# Cleanup
+rm -rf ${test_name}
+
+# Run test
+let_it_snow_fsc.py \
+  -c ${DATA_TEST}/S2/shaded_snow/global_parameters_shaded_snow.json \
+  -i ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0 \
+  -d ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/Leman_dem_merged.tif \
+  -o ${test_name} \
+  -V 1.11.0
+
+
+# Output comparison
+errors=0
+
+echo "-- Comparison FSC TOC"
+gdalcompare.py \
+  ${BASELINE}/shaded_snow_test/shaded_snow_no_relief_shadow_mask/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif \
+  ${test_name}/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
+
+errors=$((errors+$?))
+exit ${errors}
+
diff --git a/docker/tests/test-fsc-snow-s2-rastertools.sh b/docker/tests/test-fsc-snow-s2-rastertools.sh
new file mode 100755
index 0000000000000000000000000000000000000000..fee5736dcd61671f9c09668221af68bdfe3e8fa6
--- /dev/null
+++ b/docker/tests/test-fsc-snow-s2-rastertools.sh
@@ -0,0 +1,30 @@
+#!/bin/bash -u
+# Usage: test-fsc-snow-s2-rastertools.sh <data-test> <baseline>
+
+DATA_TEST=$1
+BASELINE=$2
+
+test_name="shaded_snow_rastertools"
+
+# Cleanup
+rm -rf ${test_name}
+
+# Run test
+let_it_snow_fsc.py \
+  -c ${DATA_TEST}/S2/shaded_snow/global_parameters_shaded_snow_rastertools.json \
+  -i ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0 \
+  -d ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/Leman_dem_merged.tif \
+  -o ${test_name} \
+  -V 1.11.0
+
+
+# Output comparison
+errors=0
+
+echo "-- Comparison FSC TOC"
+gdalcompare.py \
+  ${BASELINE}/shaded_snow_test/shaded_snow_detect_shaded_snow_rastertools/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif \
+  ${test_name}/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
+
+errors=$((errors+$?))
+exit ${errors}
diff --git a/docker/tests/test-snow-synthesis.sh b/docker/tests/test-snow-synthesis.sh
new file mode 100755
index 0000000000000000000000000000000000000000..bd96dd002b9256c34f3d930776e7cfbb3818bd73
--- /dev/null
+++ b/docker/tests/test-snow-synthesis.sh
@@ -0,0 +1,63 @@
+#!/bin/bash -u
+# Usage: test-snow-synthesis.sh <data-test> <baseline>
+
+DATA_TEST=$1
+BASELINE=$2
+
+test_name="snow_synthesis"
+
+# Cleanup
+rm -rf ${test_name}
+
+# Run test
+let_it_snow_synthesis.py \
+  -i ${DATA_TEST}/SNOW_PRODUCTS/SENTINEL2A_20180101-105435-457_L2A_T31TCH_D_V1-4 \
+  -i ${DATA_TEST}/SNOW_PRODUCTS/SENTINEL2A_20180131-105416-437_L2A_T31TCH_D_V1-4 \
+  -d ${DATA_TEST}/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20180115-103629-617_L2A_T31TCH_D_V1-9 \
+  -d ${DATA_TEST}/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20180131-103619-890_L2A_T31TCH_D_V1-9 \
+  -c ${DATA_TEST}/SYNTHESIS/synthesis_configuration.json \
+  -j ${DATA_TEST}/SYNTHESIS/synthesis_launch.json \
+  -o ${test_name} \
+  -V 1.11.0
+
+
+# Output comparison
+errors=0
+
+echo "-- Comparison SCD"
+gdalcompare.py \
+  ${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.11.0_1.tif \
+  ${test_name}/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.11.0_1.tif
+errors=$((errors+$?))
+
+echo "-- Comparison NOBS"
+gdalcompare.py \
+  ${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.11.0_1.tif \
+  ${test_name}/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.11.0_1.tif
+errors=$((errors+$?))
+
+echo "-- Comparison SOD"
+gdalcompare.py \
+  ${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.11.0_1.tif \
+  ${test_name}/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.11.0_1.tif
+errors=$((errors+$?))
+
+echo "-- Comparison SMOD"
+gdalcompare.py \
+  ${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.11.0_1.tif \
+  ${test_name}/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.11.0_1.tif
+errors=$((errors+$?))
+
+echo "-- Comparison NSP"
+gdalcompare.py \
+  ${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-NSP_T31TCH_20180101_20180131_1.11.0_1.tif \
+  ${test_name}/LIS_S2L8-SNOW-NSP_T31TCH_20180101_20180131_1.11.0_1.tif
+errors=$((errors+$?))
+
+echo "-- Comparison CCD"
+gdalcompare.py \
+  ${BASELINE}/snow_synthesis_test/CLOUD_OCCURENCE_T31TCH_20180101_20180131.tif \
+  ${test_name}/tmp/CLOUD_OCCURENCE_T31TCH_20180101_20180131.tif
+errors=$((errors+$?))
+
+exit ${errors}
diff --git a/hpc/prepare_data_for_snow_annual_map.py b/hpc/prepare_data_for_snow_annual_map.py
index f2a53bc4521404f96c3f25acb2dc6b5c7c848547..0065da30a1b3a0d7a962cdb357728e957b34d602 100755
--- a/hpc/prepare_data_for_snow_annual_map.py
+++ b/hpc/prepare_data_for_snow_annual_map.py
@@ -255,7 +255,7 @@ def main():
     params = {"tile_id":"T32TPS",
               "date_start":"01/09/2017",
               "date_stop":"31/08/2018",
-              "date_margin":15,
+              "date_margin":30,
               "mode":"DEBUG",
               "input_products_list":[],
               # path_tmp is an actual parameter but must only be uncomment with a correct path
diff --git a/python/s2snow/cloud_extraction.py b/python/s2snow/cloud_extraction.py
index db01f0d97e5868af4be3b482af4c85a5f7496b31..0a87b533aa2585a3dc7fde624258b6d252c07398 100755
--- a/python/s2snow/cloud_extraction.py
+++ b/python/s2snow/cloud_extraction.py
@@ -23,7 +23,7 @@ import logging
 import os.path as op
 
 from s2snow.otb_wrappers import compute_cloud_mask, band_math
-from s2snow.lis_constant import MODE_LASRC, MODE_SEN2COR, GDAL_OPT, ALL_CLOUD_MASK, SHADOW_MASK, HIGH_CLOUD_MASK, \
+from s2snow.lis_constant import MODE_LASRC, MODE_SEN2COR, GDAL_OPT_1B, ALL_CLOUD_MASK, SHADOW_MASK, HIGH_CLOUD_MASK, \
     MASK_BACK_TO_CLOUD, SHADOW_IN_MASK, SHADOW_OUT_MASK, CLOUD_REFINE, MODE_SENTINEL2
 
 
@@ -49,7 +49,10 @@ def extract_all_clouds(cloud_mask, output_dir, all_cloud_mask=1, mode=MODE_SENTI
     if mode == MODE_LASRC:
         # Extract shadow which corresponds to  all cloud shadows in larsc product
         logging.info("lasrc mode -> extract all clouds from LASRC product using ComputeCloudMask application...")
-        compute_cloud_mask(cloud_mask, all_cloud_mask_file + GDAL_OPT, str(all_cloud_mask), ram)
+        compute_cloud_mask(cloud_mask,
+                           f"{all_cloud_mask_file}?{GDAL_OPT_1B}",
+                           str(all_cloud_mask),
+                           ram)
     else:
         if mode == MODE_SEN2COR:
             logging.debug("sen2cor mode -> extract all clouds from SCL layer...")
@@ -62,7 +65,10 @@ def extract_all_clouds(cloud_mask, output_dir, all_cloud_mask=1, mode=MODE_SENTI
         else:
             condition_all_clouds = "im1b1 > 0"
         logging.debug("condition_all_clouds : " + condition_all_clouds)
-        band_math([cloud_mask], all_cloud_mask_file + GDAL_OPT, "(" + condition_all_clouds + " > 0)?1:0", ram)
+        band_math([cloud_mask],
+                  f"{all_cloud_mask_file}?{GDAL_OPT_1B}",
+                  f"({condition_all_clouds} > 0)?1:0",
+                  ram)
     logging.debug("End extract_all_clouds... : ")
     return all_cloud_mask_file
 
@@ -83,7 +89,10 @@ def extract_cloud_shadows(cloud_mask, output_dir, mode="", shadow_in_mask=32,
     if mode == MODE_SEN2COR:
         logging.debug("sen2cor mode -> extract all clouds from SCL layer...")
         logging.debug("- label == 3 -> Cloud shadows")
-        band_math([cloud_mask], shadow_mask_file + GDAL_OPT, "(im1b1 == 3)", ram)
+        band_math([cloud_mask],
+                  f"{shadow_mask_file}?{GDAL_OPT_1B}",
+                  "(im1b1 == 3)",
+                  ram)
     else:
         shadow_in_mask_file = op.join(output_dir, SHADOW_IN_MASK)
         logging.debug("shadow_in_mask_file : " + shadow_in_mask_file)
@@ -92,14 +101,22 @@ def extract_cloud_shadows(cloud_mask, output_dir, mode="", shadow_in_mask=32,
 
         # First extract shadow wich corresponds to shadow of clouds inside the
         # image
-        compute_cloud_mask(cloud_mask, shadow_in_mask_file + GDAL_OPT, str(shadow_in_mask), ram)
+        compute_cloud_mask(cloud_mask,
+                           f"{shadow_in_mask_file}?{GDAL_OPT_1B}",
+                           str(shadow_in_mask),
+                           ram)
 
         # Then extract shadow mask of shadows from clouds outside the image
-        compute_cloud_mask(cloud_mask, shadow_out_mask_file + GDAL_OPT, str(shadow_out_mask), ram )
+        compute_cloud_mask(cloud_mask,
+                           f"{shadow_out_mask_file}?{GDAL_OPT_1B}",
+                           str(shadow_out_mask),
+                           ram)
 
         # The output shadow mask corresponds to a OR logic between the 2 shadow masks
-        band_math([shadow_in_mask_file, shadow_out_mask_file], shadow_mask_file + GDAL_OPT,
-                  "(im1b1 == 1) || (im2b1 == 1)", ram)
+        band_math([shadow_in_mask_file, shadow_out_mask_file],
+                  f"{shadow_mask_file}?{GDAL_OPT_1B}",
+                  "(im1b1 == 1) || (im2b1 == 1)",
+                  ram)
 
     return shadow_mask_file
 
@@ -117,9 +134,15 @@ def extract_high_clouds(cloud_mask_file, output_dir, high_cloud_mask=128, mode="
     if mode == 'sen2cor':
         logging.debug("sen2cor mode -> extract all clouds from SCL layer...")
         logging.debug("- label == 10 -> Thin cirrus")
-        band_math([cloud_mask_file], high_cloud_mask_file + GDAL_OPT, "(im1b1 == 10)", ram)
+        band_math([cloud_mask_file],
+                  f"{high_cloud_mask_file}?{GDAL_OPT_1B}",
+                  "(im1b1 == 10)",
+                  ram)
     else:
-        compute_cloud_mask(cloud_mask_file, high_cloud_mask_file + GDAL_OPT, str(high_cloud_mask), ram)
+        compute_cloud_mask(cloud_mask_file,
+                           f"{high_cloud_mask_file}?{GDAL_OPT_1B}",
+                           str(high_cloud_mask),
+                           ram)
 
     return high_cloud_mask_file
 
@@ -147,10 +170,12 @@ def extract_back_to_cloud_mask(cloud_mask_file, red_band_file, output_dir, red_b
     else:
         condition_all_clouds = "im1b1 > 0"
 
-    condition_back_to_cloud = "(" + condition_all_clouds + ") and (im2b1 > " + str(red_back_to_cloud) + ")"
+    condition_back_to_cloud = f"({condition_all_clouds}) and (im2b1 > {str(red_back_to_cloud)})"
     logging.debug("condition_back_to_cloud : " + condition_back_to_cloud)
 
-    band_math([cloud_mask_file, red_band_file], mask_back_to_cloud_file + GDAL_OPT, condition_back_to_cloud + "?1:0",
+    band_math([cloud_mask_file, red_band_file],
+              f"{mask_back_to_cloud_file}?{GDAL_OPT_1B}",
+              f"{condition_back_to_cloud}?1:0",
               ram)
 
     return mask_back_to_cloud_file
@@ -180,21 +205,22 @@ def refine_cloud_mask(all_cloud_mask_file, shadow_mask_file, red_nn_file, high_c
     logging.debug("red_dark_cloud : " + str(red_dark_cloud))
     logging.debug("ram : " + str(ram))
 
-    condition_cloud2 = "im3b1>" + str(red_dark_cloud)
+    condition_cloud2 = f"im3b1 > {str(red_dark_cloud)}"
     logging.debug("condition_cloud2 : " + condition_cloud2)
 
     # this condition check if pass1_5 caused a cloud mask update
     condition_donuts = "(im1b1!=im5b1)"
     logging.debug("condition_donuts : " + condition_donuts)
 
-    condition_shadow = "((im1b1==1 and " + condition_cloud2 + \
-                       ") or im2b1==1 or im4b1==1 or " + condition_donuts + ")"
+    condition_shadow = f"((im1b1==1 and {condition_cloud2}) or im2b1==1 or im4b1==1 or {condition_donuts})"
     logging.debug("condition_shadow : " + condition_shadow)
 
     cloud_refine_file = op.join(output_dir, CLOUD_REFINE)
     logging.info("Refine cloud file: " + cloud_refine_file)
 
-    band_math([all_cloud_mask_file, shadow_mask_file, red_nn_file, high_cloud_mask_file,
-               cloud_pass1_file], cloud_refine_file + GDAL_OPT, condition_shadow, ram)
+    band_math([all_cloud_mask_file, shadow_mask_file, red_nn_file, high_cloud_mask_file, cloud_pass1_file],
+              f"{cloud_refine_file}?{GDAL_OPT_1B}",
+              condition_shadow,
+              ram)
 
     return cloud_refine_file
diff --git a/python/s2snow/compute_NOBS.py b/python/s2snow/compute_NOBS.py
index e40b58d79586e9084b099d2ca65127560e2f36cc..b6caecd703221e0549eca4e3f41b9e712de92436 100755
--- a/python/s2snow/compute_NOBS.py
+++ b/python/s2snow/compute_NOBS.py
@@ -25,12 +25,14 @@ import numpy as np
 import os, sys
 import logging
 from importlib.metadata import version
+from s2snow.lis_constant import BLOCKSIZE, COMPRESS_METHOD, TILED
 
 
-def compute_NOBS(input_file, synthesis_name=None, output_dir=None):
+def compute_NOBS(input_file, margin_info_dict, synthesis_name=None, output_dir=None):
     """
     computes NOBS_xxx.tif, the number of clear observations to compute the SCD, SMOD and SOD syntheses
     :param input_file: the cloud mask vrt generated using run_show_annual_map script, multitemp_cloud_mask.vrt
+    :param margin_info_dict: the dict containing the number of FSC products in the margins before and after the synthesis period.
     :param synthesis_name: synthesis_name
     :param output_dir: outpout directory
     :return:
@@ -54,16 +56,64 @@ def compute_NOBS(input_file, synthesis_name=None, output_dir=None):
 
     src = rasterio.open(input_file, 'r')
 
+    # Count number of clear sky observation between start_date and stop_date of the
+    # synthesis period.
     n = src.meta["count"]
-    W = src.read(range(1, n + 1))
-    S = n - np.sum(W, axis=0)
+    profile = src.profile
+    
+    W_synthesis = src.read(range(
+        1 + margin_info_dict['margin_before'],
+        n + 1 - margin_info_dict['margin_after']
+    ))
+    S_synthesis = (
+        n - margin_info_dict['margin_after'] - margin_info_dict['margin_before'] - np.sum(
+            W_synthesis, axis=0
+        )
+    )
+
+    if margin_info_dict['margin_before'] > 0:
+        # Count number of clear sky observations in the margin before the start_date
+        W_margin_before = src.read(range(
+            1,
+            margin_info_dict['margin_before'] + 1
+        ))
+        S_margin_before = margin_info_dict['margin_before'] - np.sum(W_margin_before, axis=0)
+        # Binarize information : was there at least one clear sky observation during the before
+        # margin period.
+        S_margin_before[S_margin_before > 0] = 1
+    else:
+        # If there is no product in the margin, initialize the array with zeros.
+        S_margin_before = np.zeros(S_synthesis.shape)
+
+    if margin_info_dict['margin_after'] > 0:
+        # Count number of clear sky observations in the margin after the stop_date
+        W_margin_after = src.read(range(
+            n + 1 - margin_info_dict['margin_after'],
+            n + 1
+        ))
+        S_margin_after = margin_info_dict['margin_after'] - np.sum(W_margin_after, axis=0)
+        # Binarize information : was there at least one clear sky observation during the after
+        # margin period.
+        S_margin_after[S_margin_after > 0] = 1
+    else:
+        # If there is no product in the margin, initialize the array with zeros.
+        S_margin_after = np.zeros(S_synthesis.shape)
+
+    # Add +1 if there is at least one clear sky observation in the margin before the start of the 
+    # synthesis period, and +1 if there is at least one clear sky observation in the margin after 
+    # the end of the synthesis period.
+    S = S_synthesis + S_margin_before + S_margin_after
+    
+    src.close()
 
     with rasterio.Env():
-        profile = src.profile
         profile.update(
             dtype=rasterio.uint16,
             driver='GTiff',
-            compress='deflate',
+            compress=COMPRESS_METHOD,
+            tiled=(TILED=="YES"),
+            blockxsize=BLOCKSIZE,
+            blockysize=BLOCKSIZE,
             count=1)
 
         with rasterio.open(output_file, 'w', **profile) as dst:
diff --git a/python/s2snow/compute_NSP.py b/python/s2snow/compute_NSP.py
index 2f72d09cd363310c8578843277d9617d8e6ad802..c2f86ac63ba7a43bb747e438885198a0f0e9708c 100755
--- a/python/s2snow/compute_NSP.py
+++ b/python/s2snow/compute_NSP.py
@@ -25,12 +25,13 @@ import numpy as np
 import os, sys
 import logging
 from importlib.metadata import version
+from s2snow.lis_constant import BLOCKSIZE, COMPRESS_METHOD, SIZE_WINDOW, TILED
 
 
 def compute_NSP(input_file, synthesis_name=None, output_dir=None):
     """
     computes NSP_xxx.tif, the number of snow periods in the input serie
-    :param input_file: the interpolated daily raster generated using run_show_annual_map script.
+    :param input_file: the interpolated daily raster
     :param synthesis_name: synthesis_name
     :param output_dir: outpout directory
     :return:
@@ -39,7 +40,7 @@ def compute_NSP(input_file, synthesis_name=None, output_dir=None):
     logging.debug("input_file : %s", input_file)
     logging.debug("synthesis_name : %s", synthesis_name)
     logging.debug("output_dir : %s", output_dir)
-
+    
     if not os.path.isfile(input_file):
         logging.error("Input file does not exist : %s", input_file)
         return
@@ -51,20 +52,41 @@ def compute_NSP(input_file, synthesis_name=None, output_dir=None):
         output_file = os.path.join(output_dir, "LIS_SNOW-NSP_{}.tif".format(synthesis_id))
     else:
         output_file = os.path.join(output_dir, synthesis_name.format("NSP"))
-
+    
+    # Open dataset
     src = rasterio.open(input_file, 'r')
-
-    n = src.meta["count"]
-    time_serie = src.read(range(1, n + 1))
-    nsp = (np.diff(time_serie, axis=0) > 0).sum(axis=0)
-
+    bands = src.meta["count"]
+    profile = src.profile
+        
+    n = profile["height"]
+    m = profile["width"]
+    nsp = np.zeros((n, m), dtype='uint16')
+    
+    # Calculation
+    i = 0
+    while i < n:
+        j = 0
+        height = min(n-i, SIZE_WINDOW)
+        while j < m:
+            width = min(m-j, SIZE_WINDOW)
+            time_serie = src.read(range(1, bands + 1), window=rasterio.windows.Window(j, i, width, height))
+            nsp[i:i+height, j:j+width] = (np.diff(time_serie, axis=0, prepend=0) == 1).sum(axis=0)
+            j += SIZE_WINDOW
+        i += SIZE_WINDOW
+    
+    # Close dataset
+    src.close()
+    del src
+
+    # Write output files
     with rasterio.Env():
-        profile = src.profile
         profile.update(
             dtype=rasterio.uint16,
-            driver='GTiff',
-            compress='deflate',
-            count=1)
+            count=1,
+            compress=COMPRESS_METHOD,
+            tiled=(TILED=="YES"),
+            blockxsize=BLOCKSIZE,
+            blockysize=BLOCKSIZE)
 
         with rasterio.open(output_file, 'w', **profile) as dst:
             dst.write(nsp.astype(rasterio.uint16), 1)
diff --git a/python/s2snow/compute_SOD_SMOD.py b/python/s2snow/compute_SOD_SMOD.py
index dbfc457c21be66c8605ee2bcf6a3d613b366c133..044a39a3b4f4043a43b71881176da7dddf5aad67 100755
--- a/python/s2snow/compute_SOD_SMOD.py
+++ b/python/s2snow/compute_SOD_SMOD.py
@@ -22,9 +22,20 @@
 
 import rasterio
 import numpy as np
-import itertools, operator, sys, os
+import sys, os 
 import logging
 from importlib.metadata import version
+from s2snow.lis_constant import BLOCKSIZE, COMPRESS_METHOD, SIZE_WINDOW, TILED
+
+
+def frequent_func(z):
+    if z[-1] < z.shape[0] - 10:
+        freq = np.bincount(z)
+        most_freq = freq.argmax()
+        index_start = np.where(z == most_freq)[0][0]
+        index_end = index_start + freq[most_freq] - 1
+        return [index_start + 1*(most_freq > 0), index_end]
+    return [0, 0]
 
 
 def compute_SOD_SMOD(input_file, synthesis_name=None, output_dir=None):
@@ -32,7 +43,7 @@ def compute_SOD_SMOD(input_file, synthesis_name=None, output_dir=None):
     Computes the snow onset date (SOD) and the snow melt-out date (SMOD) from a stack of daily snow maps.
     The dates are given in number of days since the first day of the synthesis (usually September 01).
     :param output_dir : output directory
-    :param input_file: the interpolated daily raster generated using run_show_annual_map script.
+    :param input_file: the interpolated daily raster
     :param synthesis_name: synthesis_name
     :return:
     """
@@ -41,14 +52,14 @@ def compute_SOD_SMOD(input_file, synthesis_name=None, output_dir=None):
     logging.debug("tmp_dir : %s", output_dir)
     logging.debug("input_file : %s", input_file)
     logging.debug("synthesis_name : %s", synthesis_name.format("XXX"))
-
+    
     if not os.path.isfile(input_file):
         msg = "Input file does not exist : {}".format(input_file)
         logging.error(msg)
         raise IOError(msg)
-
+    
     if output_dir is None:
-        output_dir = os.path.split(input_file)[0]
+        output_dir = os.path.split(os.path.split(input_file)[0])[0]
     if synthesis_name is None:
         synthesis_id = os.path.split(input_file)[1]
         sod_file = os.path.join(output_dir, "LIS_SNOW-SOD_{}.tif".format(synthesis_id))
@@ -57,37 +68,52 @@ def compute_SOD_SMOD(input_file, synthesis_name=None, output_dir=None):
         sod_file = os.path.join(output_dir, synthesis_name.format("SOD"))
         smod_file = os.path.join(output_dir, synthesis_name.format("SMOD"))
 
+    # Open dataset
     src = rasterio.open(input_file, 'r')
-    n = src.meta["count"]
-
-    W = src.read(range(1, n + 1))
-    n = np.shape(W)[1]
-    m = np.shape(W)[2]
+    bands = src.meta["count"]
+    profile = src.profile
+    
+    n = profile["height"]
+    m = profile["width"]
     sod = np.zeros((n, m), dtype='uint16')
     smod = np.zeros((n, m), dtype='uint16')
-    for i in range(0, n):
-        for j in range(0, m):
-            w = W[:, i, j]
-            if np.sum(w) > 10:
-                r = max((list(y) for (x, y) in itertools.groupby((enumerate(w)), operator.itemgetter(1)) if x == 1),
-                        key=len)
-                smod[i, j] = r[-1][0]
-                sod[i, j] = r[0][0]
-
+    
+    # Calculation
+    i = 0
+    while i < n:
+        j = 0
+        height = min(n-i, SIZE_WINDOW)
+        while j < m:
+            width = min(m-j, SIZE_WINDOW)         
+            W = src.read(range(1, bands + 1), window=rasterio.windows.Window(j, i, width, height))
+            W_cumsum = np.cumsum((W == 0), axis=0, dtype=np.uint16)
+            res = np.apply_along_axis(frequent_func, 0, W_cumsum)
+            sod[i:i+height, j:j+width] = res[0]
+            smod[i:i+height, j:j+width] = res[1]
+            j += SIZE_WINDOW
+        i += SIZE_WINDOW
+    
+    # Close dataset
+    src.close()
+    del src
+
+    # Write output files
     with rasterio.Env():
-        profile = src.profile
         profile.update(
             dtype=rasterio.uint16,
-            compress='deflate',
-            count=1)
-
+            count=1,
+            compress=COMPRESS_METHOD,
+            tiled=(TILED=="YES"),
+            blockxsize=BLOCKSIZE,
+            blockysize=BLOCKSIZE)
+        
         with rasterio.open(smod_file, 'w', **profile) as dst:
             dst.write(smod.astype(rasterio.uint16), 1)
 
         with rasterio.open(sod_file, 'w', **profile) as dst:
             dst.write(sod.astype(rasterio.uint16), 1)
 
-
+            
 
 def show_help():
     """
diff --git a/python/s2snow/dem_builder.py b/python/s2snow/dem_builder.py
index d7ccc1e93c5462444ba2f4f6a41248326b3b1ba2..29c7dac677f6aab7ce0d44b5009b58f7d6ff56a9 100755
--- a/python/s2snow/dem_builder.py
+++ b/python/s2snow/dem_builder.py
@@ -26,6 +26,7 @@ import logging
 import ast
 
 from osgeo import gdal, gdalconst, osr
+from s2snow.lis_constant import COMPRESS_METHOD, TILED, BLOCKSIZE
 
 def show_help():
     print("This script is used to compute srtm mask from a vrt file to a region extent")
@@ -107,6 +108,14 @@ def build_dem(psrtm, pimg, pout, ram, nb_threads):
          str(te[3]),
          "-t_srs",
          str(spatial_ref_projection),
+         "-co",
+         "COMPRESS=" + COMPRESS_METHOD,
+         "-co",
+         "TILED=" + TILED,
+         "-co",
+         "BLOCKXSIZE=" + str(BLOCKSIZE),
+         "-co",
+         "BLOCKYSIZE=" + str(BLOCKSIZE),
          str(psrtm),
          str(pout)
         ]
diff --git a/python/s2snow/fsc_config.py b/python/s2snow/fsc_config.py
index f588bef5492b95d7243bf3ba21659bca6cdce89e..448988612ecf0f65e720290e7d51324913f2b152 100755
--- a/python/s2snow/fsc_config.py
+++ b/python/s2snow/fsc_config.py
@@ -25,7 +25,7 @@ import os
 import zipfile
 import re
 
-from s2snow.lis_constant import MODE_SENTINEL2, MISSION_S2, MISSION_L8, MISSION_T5
+from s2snow.lis_constant import MODE_SENTINEL2, MISSION_S2, MISSION_L8, MISSION_T5, DEFAULT_MAX_RADIUS, DEFAULT_WINDOW_SIZE
 from s2snow.lis_exception import LisConfigurationException, UnknownProductException
 from s2snow.mission_config import mission_parameters
 from s2snow.utils import find_file
@@ -87,10 +87,13 @@ class FscConfig:
 
         
         # shaded snow
-        self.detect_shaded_snow = False
+        self.shaded_snow_hillshade = False
         self.relief_shadow_mask = None
         self.hillshade_lim = 0.2
         self.shaded_snow_pass = 160
+        self.shaded_snow_casted = False
+        self.rastertools_window_size = 2048
+        self.rastertools_radius = 512
         
         # fsc
         self.tcd_file = None
@@ -253,13 +256,13 @@ class FscConfig:
         if shaded_snow is not None:
             logging.info("Load shaded_snow parameters")
 
-            self.detect_shaded_snow = shaded_snow.get("detect_shaded_snow", False)
+            self.shaded_snow_hillshade = shaded_snow.get("shaded_snow_hillshade", True)
             self.relief_shadow_mask = shaded_snow.get("relief_shadow_mask", None)
             self.hillshade_lim = shaded_snow.get("hillshade_lim", 0.2)
             self.shaded_snow_pass = shaded_snow.get("shaded_snow_pass", 160)
-            self.rastertools_use = shaded_snow.get("rastertools_use", True)
-            self.rastertools_window_size = shaded_snow.get("rastertools_window_size", 1024)
-            self.rastertools_radius = shaded_snow.get("rastertools_radius", None)
+            self.shaded_snow_casted = shaded_snow.get("shaded_snow_casted", True)
+            self.rastertools_window_size = shaded_snow.get("rastertools_window_size", DEFAULT_WINDOW_SIZE)
+            self.rastertools_radius = shaded_snow.get("rastertools_radius", DEFAULT_MAX_RADIUS)
         else:
             logging.warning("shaded_snow parameters are not defined, let-it-snow will not detect shaded snow.")
         
@@ -402,7 +405,8 @@ class FscConfig:
                 logging.error(msg)
                 raise LisConfigurationException(msg)
         
-        if self.detect_shaded_snow and (self.relief_shadow_mask is None) and (self.metadata is None):
+        detect_shaded_snow = self.shaded_snow_hillshade or self.shaded_snow_casted
+        if detect_shaded_snow and (self.relief_shadow_mask is None) and (self.metadata is None):
             # If detect_shaded_snow is True, you need either the relief_shadow_mask or the metadata file
             msg= "No metadata nor relief_shadow_mask is given. One or the other should be defined in the configuration file"
             logging.error(msg)
@@ -562,7 +566,8 @@ class FscConfig:
             logging.error(msg)
             raise LisConfigurationException(msg)
 
-        if self.detect_shaded_snow:
+        detect_shaded_snow = self.shaded_snow_hillshade or self.shaded_snow_casted
+        if detect_shaded_snow:
             if self.blue_band_path is not None:
                 if not os.path.exists(self.blue_band_path):
                     msg = "Blue band " + self.blue_band_path + " does not exist."
@@ -648,10 +653,13 @@ class FscConfig:
         logging.debug("fclear_lim : " + str(self.fclear_lim))
         
         logging.debug("== Shaded_snow ==")
-        logging.debug("detect_shaded_snow : " + str(self.detect_shaded_snow))
-        logging.debug("shaded_snow_pass : " + str(self.shaded_snow_pass))
+        logging.debug("shaded_snow_hillshade : " + str(self.shaded_snow_hillshade))
         logging.debug("relief_shadow_mask : " + str(self.relief_shadow_mask))
         logging.debug("hillshade_lim : " + str(self.hillshade_lim))
+        logging.debug("shaded_snow_pass : " + str(self.shaded_snow_pass))
+        logging.debug("shaded_snow_casted : " + str(self.shaded_snow_casted))
+        logging.debug("rastertools_window_size : " + str(self.rastertools_window_size))
+        logging.debug("rastertools_radius : " + str(self.rastertools_radius))
 
         logging.debug("== Cloud ==")
         logging.debug("all_cloud_mask : " + str(self.all_cloud_mask))
diff --git a/python/s2snow/gdal_wrappers.py b/python/s2snow/gdal_wrappers.py
index 2df6766c38478c7fa0b2ebfc791e952ef5a07846..55cc4a681bf33e870c8c1e9e028488288719f93e 100755
--- a/python/s2snow/gdal_wrappers.py
+++ b/python/s2snow/gdal_wrappers.py
@@ -25,7 +25,7 @@ import numpy as np
 from osgeo import gdal
 from osgeo.gdalconst import GA_ReadOnly, GA_Update
 
-from s2snow.lis_constant import BAND_EXTRACTED, RED_BAND, RED_NN, RED_COARSE, IMG_VRT, N_RED
+from s2snow.lis_constant import BAND_EXTRACTED, RED_BAND, RED_NN, RED_COARSE, IMG_VRT, N_RED, CREATION_OPTIONS
 
 
 def extract_band(band_name, band_path, no_band, output_dir, no_data = -1000):
@@ -48,7 +48,8 @@ def extract_band(band_name, band_path, no_band, output_dir, no_data = -1000):
     extracted_band_file = op.join(output_dir, band_name + BAND_EXTRACTED)
     logging.info("Extract band " + extracted_band_file)
     gdal.Translate(extracted_band_file, band_path, format='GTiff',
-                   outputType=gdal.GDT_Int16, noData=no_data, bandList=[no_band])
+                   outputType=gdal.GDT_Int16, noData=no_data, bandList=[no_band],
+                   creationOptions=CREATION_OPTIONS)
 
     return extracted_band_file
 
@@ -69,7 +70,9 @@ def extract_red_band(img_vrt, output_dir, no_data=-10000):
     red_band_file = op.join(output_dir, RED_BAND)
     logging.info("Extract red band file from vrt : " + red_band_file)
 
-    gdal.Translate(red_band_file, img_vrt, format='GTiff', outputType=gdal.GDT_Int16, noData=no_data, bandList=[N_RED])
+    gdal.Translate(red_band_file, img_vrt,
+                   outputType=gdal.GDT_Int16, noData=no_data, bandList=[N_RED],
+                   creationOptions=CREATION_OPTIONS)
     return red_band_file
 
 
@@ -94,7 +97,7 @@ def resample_red_band(red_band, output_dir, xSize, ySize, resize_factor=12):
     logging.info("Resample red band using multiresolution pyramid : " + red_coarse_file)
 
     gdal.Warp(red_coarse_file, red_band, resampleAlg=gdal.GRIORA_Bilinear, width=xSize / resize_factor,
-              height=ySize / resize_factor)
+              height=ySize / resize_factor, creationOptions=CREATION_OPTIONS)
 
     return red_coarse_file
 
@@ -116,7 +119,8 @@ def resample_red_band_nn(red_coarse_file, output_dir, xSize, ySize):
     red_nn_file = op.join(output_dir, RED_NN)
     logging.info("Resample red band nn  : " + red_nn_file)
 
-    gdal.Warp(red_nn_file, red_coarse_file, resampleAlg=gdal.GRIORA_NearestNeighbour, width=xSize, height=ySize)
+    gdal.Warp(red_nn_file, red_coarse_file, resampleAlg=gdal.GRIORA_NearestNeighbour, width=xSize,
+              height=ySize, creationOptions=CREATION_OPTIONS)
 
     return red_nn_file
 
diff --git a/python/s2snow/hillshade.py b/python/s2snow/hillshade.py
index 40babc6a71e0ebff178582058ce365491ea4799f..07c59124964cead136c83c612e5043f0ede02c59 100755
--- a/python/s2snow/hillshade.py
+++ b/python/s2snow/hillshade.py
@@ -3,15 +3,18 @@ import os
 from datetime import datetime, timedelta
 import time
 import numpy as np
+import rasterio
 from lxml import etree
 from eolab.rastertools import Hillshade
 import logging
 import logging.config
 from osgeo import gdal, osr
-from s2snow.lis_constant import AZIMUTH_PATH, ZENITH_PATH
+from s2snow.lis_constant import AZIMUTH_PATH, ZENITH_PATH, CREATION_OPTIONS, CREATION_OPTIONS_1B
 from s2snow.lis_exception import UnknownProductException
 from s2snow.otb_wrappers import band_math
 
+from s2snow.lis_constant import DEFAULT_WINDOW_SIZE, DEFAULT_MAX_RADIUS
+
 _logger = logging.getLogger(__name__)
 """
 This scripts compute the hillshade from a DEM and a Sentinel2 Metadata file containing the sun angles.
@@ -84,8 +87,13 @@ def resample_angles(angle_array, angle_geoTransform, angle_spatialRef, ref_dem,
     angles.SetProjection(angle_spatialRef.ExportToWkt())
     angles.GetRasterBand(1).WriteArray(angle_array)
     angles.FlushCache()
-    gdal.Warp(output_angle, angles, resampleAlg=gdal.GRIORA_Cubic, width=ref_dem.RasterXSize, height=ref_dem.RasterYSize)
-
+    gdal.Warp(output_angle,
+              angles,
+              resampleAlg=gdal.GRIORA_Cubic,
+              width=ref_dem.RasterXSize,
+              height=ref_dem.RasterYSize,
+              creationOptions=CREATION_OPTIONS
+             )
     return gdal.Open(output_angle)
 
 
@@ -151,10 +159,9 @@ def save_alt(geo_array, ref_geo, name):
     :param name: the name of the final .tif file (ex: "Angles.tif")
     :return None:
     """
-
     [rows, cols] = geo_array.shape
     driver = gdal.GetDriverByName("GTiff")
-    outdata_water = driver.Create(name, cols, rows, 1, gdal.GDT_Float32)
+    outdata_water = driver.Create(name, cols, rows, 1, gdal.GDT_Byte, options=CREATION_OPTIONS_1B)
     outdata_water.SetGeoTransform(ref_geo.GetGeoTransform())##sets same geotransform as input
     outdata_water.SetProjection(ref_geo.GetProjection())##sets same projection as input
     outdata_water.GetRasterBand(1).WriteArray(geo_array)
@@ -170,7 +177,7 @@ def compute_hillshade_mask(output_file, dem_path, hillshade_lim, xml, output_fol
     :param output_file: the file where the mask will be written
     :param xml: the input directory containg the METADATA file. Should be Sentinel2 format
     :param output_folder: tmp_dir
-    """
+    """    
     if not os.path.exists(xml):
         raise UnknownProductException(f"Could not find the Metadata {xml} file.")
 
@@ -207,6 +214,8 @@ def compute_hillshade_mask(output_file, dem_path, hillshade_lim, xml, output_fol
     hillshade_mask = (hillshade<hillshade_lim).astype(dtype=np.int8)
     save_alt(hillshade_mask, dem, output_file)
 
+    return hillshade_mask
+
 def get_azimuth_angle_from_xml(xml_path):
     """Return the average solar azimuth angle of the product from the metadata
     xml_path: path to xml file
@@ -281,7 +290,7 @@ def local_solar_time(dt, long):
     return tst
 
 def compute_hillshade_mask_rastertools(output_file, dem_path, xml, output_folder, dem_resolution,
-                                       rastertools_window_size=1024, rastertools_radius=None):
+                                       rastertools_window_size=DEFAULT_WINDOW_SIZE, rastertools_radius=DEFAULT_MAX_RADIUS):
     """
     Compute the hillshade from the DEM, using rastertools
     :param output_file: the file where the mask will be written
@@ -289,17 +298,16 @@ def compute_hillshade_mask_rastertools(output_file, dem_path, xml, output_folder
     :param xml: the input directory containg the METADATA file. Should be Sentinel2 format
     :param output_folder: tmp_dir
     :param dem_resolution: spatial resolution of dem or target resolution (in meters)
-    :param rastertools_window_size: Size of tiles to distribute processing, default: 1024
-    :param rastertools_radius: Max dist (in pixels) arunnd a point to evaluate horizontal elevation angle, default: None
+    :param rastertools_window_size: Size of tiles to distribute processing, default: DEFAULT_WINDOW_SIZE = 2048
+    :param rastertools_radius: Max dist (in pixels) around a point to evaluate horizontal elevation angle, default: DEFAULT_MAX_RADIUS = 512
     """
-
     # Retrieve solar angles from xml metadata
     solar_elevation = get_elevation_angle_from_xml(xml)
     solar_azimuth = get_azimuth_angle_from_xml(xml)
     logging.info(f"solar elevation angle :{solar_elevation:.3f}; solar azimuth angle : {solar_azimuth:.3f}")
     logging.info(f"dem spatial resolution : {dem_resolution}")
     logging.info(f"windows_size is set to {rastertools_window_size}")
-    if rastertools_radius: logging.info(f"radius is set to {rastertools_radius}")
+    logging.info(f"radius threshold is set to {rastertools_radius}")
         
     # create the rastertool object and run hillshade computation
     start_t = time.time()
@@ -308,12 +316,38 @@ def compute_hillshade_mask_rastertools(output_file, dem_path, xml, output_folder
     tool.with_windows(rastertools_window_size)
     hillshade_str = tool.process_file(dem_path)
     logging.info(f"Rastertools execution successful. Result is stored at {hillshade_str[-1]}")
-    # Rename result file
+  
+    # Rename and optimize result file
+    gdal.Translate(output_file,
+                   hillshade_str[-1],
+                   outputType=gdal.GDT_Byte,
+                   creationOptions=CREATION_OPTIONS_1B)
     if os.path.exists(hillshade_str[-1]):
-        os.rename(hillshade_str[-1], output_file)
-    logging.info(f"Renaming file {hillshade_str[-1]} in {output_file} has been successful.")
+        os.remove(hillshade_str[-1])   
+    
     # measuring time computation
     end_t = time.time()
     logging.info(f"Computation time : {(end_t - start_t):.3f}")
 
-
+def compute_combined_hillshade_mask(output_file, output_folder, hillshade_mask):
+    """
+    Compute the hillshade combining both the gradient method hillshade and the rastertools method
+    :param output_file: the file where the rastertools mask is written and
+    where the final hillshade mask will be written
+    :param output_folder: tmp_dir
+    :param hillshade_mask: Numpy array of computed gradient hillshade mask
+    """  
+    # Load rastertools hillshade image
+    rastertools_file = os.path.join(output_folder, output_file)
+
+    with rasterio.open(rastertools_file, "r+") as rast:
+        try:
+            hillshade_rastertools = rast.read(1)
+            # Merge both mask
+            hillshade_final = hillshade_mask | hillshade_rastertools
+        except Exception as error:
+            hillshade_final = hillshade_mask
+            logging.warning(f"An error has been raised when combining hillshade masks: {error}."
+                             " Classic hillshade method is used instead.")
+        # Write final hillshade mask
+        rast.write(hillshade_final, 1)
\ No newline at end of file
diff --git a/python/s2snow/lis_constant.py b/python/s2snow/lis_constant.py
index c45122fc33fbcea03b5d62ee69a17a57eea62827..06f24468872ea8fbadf55c20f3d4ef68bf71a780 100755
--- a/python/s2snow/lis_constant.py
+++ b/python/s2snow/lis_constant.py
@@ -25,6 +25,7 @@ LABEL_NO_SNOW = "0"
 LABEL_SNOW = "100"
 LABEL_CLOUD = "205"
 LABEL_NO_DATA = "255"
+LABEL_NO_DATA_OLD = "254"
 
 N_SWIR = 1
 N_RED = 2
@@ -54,9 +55,24 @@ LANDSAT8 = "LANDSAT8"
 N2A = "N2A"
 FSC = "FSC"
 
-# Build gdal option to generate maks of 1 byte using otb extended filename
-# syntaxx
-GDAL_OPT = "?&gdal:co:NBITS=1&gdal:co:COMPRESS=DEFLATE"
+# GDAL creation options
+COMPRESS_METHOD = "LZW"
+TILED = "YES"
+BLOCKSIZE = 512
+
+GDAL_OPT = f"&gdal:co:COMPRESS={COMPRESS_METHOD}&gdal:co:TILED={TILED}&gdal:co:BLOCKXSIZE={BLOCKSIZE}&gdal:co:BLOCKYSIZE={BLOCKSIZE}"
+CREATION_OPTIONS = [f"COMPRESS={COMPRESS_METHOD}", f"TILED={TILED}", f"BLOCKXSIZE={BLOCKSIZE}", f"BLOCKYSIZE={BLOCKSIZE}"]
+
+GDAL_OPT_1B = f"&gdal:co:NBITS=1{GDAL_OPT}"
+CREATION_OPTIONS_1B = ["NBITS=1"] + CREATION_OPTIONS
+
+# Fsc optimisation
+DEFAULT_MAX_RADIUS = 512
+DEFAULT_WINDOW_SIZE = 2048
+
+# Synthesis optimisation
+SIZE_WINDOW = 1024
+DEFAULT_DATE_MARGIN = 30
 
 # Filenames
 TMP_DIR = "tmp"
diff --git a/python/s2snow/otb_wrappers.py b/python/s2snow/otb_wrappers.py
index bbec0ff0c536c2a502a246de7bfc7739e9fe509d..b2f150db0133ed838b872d445882f46a8366dce5 100755
--- a/python/s2snow/otb_wrappers.py
+++ b/python/s2snow/otb_wrappers.py
@@ -37,6 +37,7 @@ def band_math(il, out, exp, ram=None, out_type=otb.ImagePixelType_uint8):
             logging.info("out_type = " + str(out_type))
             band_math_app.SetParameterOutputImagePixelType("out", out_type)
         band_math_app.ExecuteAndWriteOutput()
+        band_math_app = None
     else:
         msg = "Parameters il, out and exp are required."
         logging.error(msg)
@@ -75,7 +76,8 @@ def band_mathX(il, out, exp, ram=None, out_type=None):
         if out_type is not None:
             logging.info("out_type = " + str(out_type))
             band_math_x_app.SetParameterOutputImagePixelType("out", out_type)
-        return band_math_x_app
+        band_math_x_app.ExecuteAndWriteOutput()
+        band_math_x_app = None
     else:
         msg = "Parameters il, out and exp are required."
         logging.error(msg)
@@ -117,7 +119,8 @@ def super_impose(img_in, mask_in, img_out, interpolator=None,
         if out_type is not None:
             logging.info("out_type = " + str(out_type))
             super_impose_app.SetParameterOutputImagePixelType("out", out_type)
-        return super_impose_app
+        super_impose_app.ExecuteAndWriteOutput()
+        super_impose_app = None
     else:
         msg = "Parameters img_in, img_out and mask_in are required."
         logging.error(msg)
diff --git a/python/s2snow/parser.py b/python/s2snow/parser.py
index 4ea2b655e6600a2ca723d242bc8a537455b9d811..5b824577faf12dd46227562c7278025c129300c7 100755
--- a/python/s2snow/parser.py
+++ b/python/s2snow/parser.py
@@ -135,7 +135,7 @@ def create_synthesis_argument_parser():
     arg_parser.add_argument("-m", "--date_margin",
                             help="date margin related to start and stop date",
                             type=float,
-                            default=15)
+                            default=None)
     arg_parser.add_argument("-o", "--output_dir",
                             help="Path to output directory; which will contains synthesis Product",
                             default=None)
diff --git a/python/s2snow/qc_flags.py b/python/s2snow/qc_flags.py
index 29b0a1131268969bd56024d3a25d0904b35dfb14..8d50d5b6553b05cc6ebbc2a5695a0116956bfa89 100755
--- a/python/s2snow/qc_flags.py
+++ b/python/s2snow/qc_flags.py
@@ -7,7 +7,7 @@ import numpy as np
 from osgeo import gdal
 import copy
 
-from s2snow.lis_constant import FSCCLD, QCFLAGS, QCOG, QCTOC
+from s2snow.lis_constant import FSCCLD, QCFLAGS, QCOG, QCTOC, CREATION_OPTIONS
 
 
 def edit_lis_fsc_qc_layers(fsc_toc_file, maja_cloud_mask_file, geophysical_mask_file, output_folder, fsc_og_file=None,
@@ -18,7 +18,7 @@ def edit_lis_fsc_qc_layers(fsc_toc_file, maja_cloud_mask_file, geophysical_mask_
     # cloud : there used to be a specific cloud processing but instead we are just copying the MAJA cloud mask
     output_cloud_file = op.join(output_folder, FSCCLD)
     copy_original(maja_cloud_mask_file, output_cloud_file)
-    compress_geotiff_file(output_cloud_file, add_overviews=True, use_default_cosims_config=True)
+    compress_geotiff_file(output_cloud_file)
 
     output_expert_file = op.join(output_folder, QCFLAGS)
 
@@ -36,10 +36,10 @@ def edit_lis_fsc_qc_layers(fsc_toc_file, maja_cloud_mask_file, geophysical_mask_
             # bit 3: tree cover density > 90%
             source_list += [{'sources': [{'filepath': tcd_path, 'bandnumber': 1, 'unpack_bits': False}], \
                              'operation': {3: 'np.logical_and(A0<101,A0>90)'}}]
-            # bit 4: snow detected under thin clouds
+            # bit 4: pixels classified under clouds
             source_list += [{'sources': [{'filepath': maja_cloud_mask_file, 'bandnumber': 1, 'unpack_bits': False},
                                          {'filepath': fsc_toc_file, 'bandnumber': 1, 'unpack_bits': False}], \
-                             'operation': {4: '(A0>0)*(A1>0)*(A1<101)'}}]
+                             'operation': {4: '(A0>0)*(A1>=0)*(A1<101)'}}]
             # bit 5: tree cover density undefined or unavailable
             source_list += [{'sources': [{'filepath': tcd_path, 'bandnumber': 1, 'unpack_bits': False}], \
                              'operation': {5: 'A0>100'}}]
@@ -47,8 +47,7 @@ def edit_lis_fsc_qc_layers(fsc_toc_file, maja_cloud_mask_file, geophysical_mask_
                 # bit 6: artificial 100 fsc due to shaded snow detection
                 source_list += [{'sources': [{'filepath': shaded_snow_path, 'bandnumber': 1, 'unpack_bits': False}, ], \
                                  'operation': {6: 'A0==1'}}]
-            bit_bandmath(output_expert_file, raster_gdal_info, [source_list], compress=True,
-                         use_default_cosims_config=True)
+            bit_bandmath(output_expert_file, raster_gdal_info, [source_list])
 
             if fsc_og_file is not None:
                 # QC layer on ground
@@ -61,8 +60,8 @@ def edit_lis_fsc_qc_layers(fsc_toc_file, maja_cloud_mask_file, geophysical_mask_
                 source_list += [{'sources': [{'filepath': fsc_og_file, 'bandnumber': 1, 'unpack_bits': False}], \
                                  'operation': 'B*(A0!=205)*(A0!=255) + 205*(A0==205) + 255*(A0==255)'}]
                 output_qc_og_file = op.join(output_folder, QCOG)
-                bit_bandmath(output_qc_og_file, raster_gdal_info, [source_list], no_data_values_per_band=[np.uint8(255)],
-                             compress=True, use_default_cosims_config=True)
+                bit_bandmath(output_qc_og_file, raster_gdal_info, [source_list],
+                             no_data_values_per_band=[np.uint8(255)])
             else:
                 logging.warning("FSCOG is not defined. QC Layer on ground file cannot be computed.")
 
@@ -75,8 +74,8 @@ def edit_lis_fsc_qc_layers(fsc_toc_file, maja_cloud_mask_file, geophysical_mask_
             source_list += [{'sources': [{'filepath': fsc_toc_file, 'bandnumber': 1, 'unpack_bits': False}], \
                              'operation': 'B*(A0!=205)*(A0!=255) + 205*(A0==205) + 255*(A0==255)'}]
             output_qc_toc_file = op.join(output_folder, QCTOC)
-            bit_bandmath(output_qc_toc_file, raster_gdal_info, [source_list], no_data_values_per_band=[np.uint8(255)],
-                         compress=True, use_default_cosims_config=True)
+            bit_bandmath(output_qc_toc_file, raster_gdal_info, [source_list],
+                         no_data_values_per_band=[np.uint8(255)])
         else:
             logging.warning("Water mask is not defined. QCFlags files cannot be computed.")
     else:
@@ -86,8 +85,7 @@ def edit_lis_fsc_qc_layers(fsc_toc_file, maja_cloud_mask_file, geophysical_mask_
     return output_expert_file
 
 
-def bit_bandmath(output_file, raster_info_dict, source_list_per_band, no_data_values_per_band=None, compress=True,
-                 add_overviews=True, use_default_cosims_config=True):
+def bit_bandmath(output_file, raster_info_dict, source_list_per_band, no_data_values_per_band=None):
     """Creates a copy of a monoband uint8 source model file and fills each bits independantly from different uint8 files.
 
     :param output_file: path to output TIF file
@@ -162,45 +160,20 @@ def bit_bandmath(output_file, raster_info_dict, source_list_per_band, no_data_va
         ds_out.SetProjection(raster_info_dict['coordinateSystem']['wkt'])
     ds_out = None
     del ds_out
-    if compress or add_overviews:
-        compress_geotiff_file(output_file, add_overviews=add_overviews,
-                              use_default_cosims_config=use_default_cosims_config)
+    compress_geotiff_file(output_file)
 
 
-def compress_geotiff_file(src_file, dest_file=None, tiled=True, compress='deflate', zlevel=4, predictor=1,
-                          add_overviews=False, use_default_cosims_config=False):
+def compress_geotiff_file(src_file, dest_file=None):
     if dest_file is None:
         dest_file = src_file
-
-    if add_overviews:
-        # build external overviews base on src image
-        tmp_img = gdal.Open(src_file, 0)  # 0 = read-only (external overview), 1 = read-write.
-        gdal.SetConfigOption('COMPRESS_OVERVIEW', 'DEFLATE')
-        if use_default_cosims_config:
-            gdal.SetConfigOption('GDAL_TIFF_OVR_BLOCKSIZE', '1024')
-        tmp_img.BuildOverviews("NEAREST", [2, 4, 8, 16, 32])
-        del tmp_img
-
-    compression_options = []
-    if tiled:
-        compression_options.append('tiled=yes')
-        if use_default_cosims_config:
-            compression_options.append('blockxsize=1024')
-            compression_options.append('blockysize=1024')
-    compression_options.append('compress=%s' % compress)
-    if compress == 'deflate':
-        compression_options.append('zlevel=%d' % zlevel)
-    if compress in ['deflate', 'lzw', 'lzma']:
-        compression_options.append('predictor=%d' % predictor)
-    # any src_file overviews will be internal in dest_file
-    compression_options.append('COPY_SRC_OVERVIEWS=YES')
+        
     if os.path.abspath(src_file) == os.path.abspath(dest_file):
         prefix, sufix = '.'.join(dest_file.split('.')[0:-1]), dest_file.split('.')[-1]
         temp_name = prefix + '_temp.' + sufix
-        gdal.Translate(temp_name, src_file, creationOptions=compression_options)
+        gdal.Translate(temp_name, src_file, creationOptions=CREATION_OPTIONS)
         shutil.move(temp_name, dest_file)
     else:
-        gdal.Translate(dest_file, src_file, creationOptions=compression_options)
+        gdal.Translate(dest_file, src_file, creationOptions=CREATION_OPTIONS)
     if os.path.exists(src_file + '.ovr'):
         # remove temporary external overview
         os.unlink(src_file + '.ovr')
diff --git a/python/s2snow/resolution.py b/python/s2snow/resolution.py
index 35ff8688a824b2bee0642d8e64c9c7677575750b..0d627ab85c399677282fb6a2d43f69cf1e882320 100755
--- a/python/s2snow/resolution.py
+++ b/python/s2snow/resolution.py
@@ -23,7 +23,7 @@ import logging
 import os.path as op
 from osgeo import gdal
 from osgeo.gdalconst import GA_ReadOnly
-from s2snow.lis_constant import BAND_RESAMPLED
+from s2snow.lis_constant import BAND_RESAMPLED, CREATION_OPTIONS
 
 
 def define_band_resolution(band):
@@ -64,9 +64,10 @@ def adapt_to_target_resolution(band_name, resolution, target_resolution, band_ex
             band_extracted_file,
             resampleAlg=gdal.GRIORA_Cubic,
             xRes=target_resolution,
-            yRes=target_resolution)
+            yRes=target_resolution,
+            creationOptions=CREATION_OPTIONS)
     else:
-        logging.info("Band resolution is already target resolution : " + str(target_resolution))
+        logging.info(band_name + " band resolution is already target resolution : " + str(target_resolution))
         band_resampled_file = band_extracted_file
     return band_resampled_file
 
diff --git a/python/s2snow/snow_detector.py b/python/s2snow/snow_detector.py
index 21e3fa8f10221c20d035a2a3efa276405bbba558..828547b550d9bcadc310d8fd782d0db16b58198a 100755
--- a/python/s2snow/snow_detector.py
+++ b/python/s2snow/snow_detector.py
@@ -27,7 +27,8 @@ import numpy as np
 import scipy.ndimage as nd
 from lxml import etree
 from s2snow.hillshade import (compute_hillshade_mask,
-                              compute_hillshade_mask_rastertools)
+                              compute_hillshade_mask_rastertools,
+                              compute_combined_hillshade_mask)
 from otbApplication import ImagePixelType_uint8
 from s2snow.cloud_extraction import (extract_all_clouds,
                                      extract_back_to_cloud_mask,
@@ -38,7 +39,7 @@ from s2snow.gdal_wrappers import (extract_band, extract_red_band,
                                   extract_red_nn, initialize_vrt,
                                   update_cloud_mask, update_snow_mask)
 from s2snow.lis_constant import (BLUE, CLOUD_PASS1, DEM_RESAMPLED, DOI_URL,
-                                 FSCTOC, FSCTOCHS, GDAL_OPT, GREEN,
+                                 FSCTOC, FSCTOCHS, GDAL_OPT, GDAL_OPT_1B, GREEN,
                                  HILLSHADE_MASK, HISTOGRAM, LABEL_CLOUD,
                                  LABEL_NO_DATA, LABEL_SNOW, METADATA,
                                  MISSION_S2, MODE_SENTINEL2, N_BLUE, N_GREEN,
@@ -85,7 +86,8 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
                                       tmp_dir, no_data=config.no_data)
     swir_extracted_band = extract_band(SWIR, config.swir_band_path, config.swir_band_no_band,
                                        tmp_dir, no_data=config.no_data)
-    if config.detect_shaded_snow:
+    detect_shaded_snow = config.shaded_snow_hillshade or config.shaded_snow_casted
+    if detect_shaded_snow:
         logging.debug("Extract bands Blue and NIR")
         blue_extracted_band = extract_band(BLUE, config.blue_band_path, config.blue_band_no_band, tmp_dir,
                                            no_data=config.no_data)
@@ -104,7 +106,7 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
     sb_resolution = define_band_resolution(swir_extracted_band)
     logging.debug(SWIR + " band resolution : " + str(sb_resolution))
 
-    if config.detect_shaded_snow:
+    if detect_shaded_snow:
         bb_resolution = define_band_resolution(blue_extracted_band)
         logging.debug(BLUE + " band resolution : " + str(bb_resolution))
         nb_resolution = define_band_resolution(nir_extracted_band)
@@ -125,7 +127,7 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
     swir_band_resampled = adapt_to_target_resolution(SWIR, sb_resolution, tg_resolution, swir_extracted_band,
                                                      tmp_dir)
 
-    if config.detect_shaded_snow:
+    if detect_shaded_snow:
         blue_band_resampled = adapt_to_target_resolution(BLUE, bb_resolution, tg_resolution, blue_extracted_band,
                                                          tmp_dir)
         nir_band_resampled = adapt_to_target_resolution(NIR, nb_resolution, tg_resolution, nir_extracted_band, tmp_dir)
@@ -150,27 +152,31 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
                      ram=config.ram)
 
     logging.info("Reading Relief shadow mask")
-    if config.detect_shaded_snow and (config.relief_shadow_mask is None):
+    if detect_shaded_snow and (config.relief_shadow_mask is None):
         logging.info("No relief shadow mask provided, computing it")
         relief_shadow_mask = op.join(tmp_dir, HILLSHADE_MASK)
-        if not config.rastertools_use:
+        hillshade_mask = None
+        if config.shaded_snow_hillshade:
             logging.info("Use classic hillshade methods as requested in configuration file.")
-            compute_hillshade_mask(relief_shadow_mask, dem, config.hillshade_lim, config.metadata, tmp_dir)
-        else:
+            hillshade_mask = compute_hillshade_mask(relief_shadow_mask, dem, config.hillshade_lim, config.metadata, tmp_dir)
+        if config.shaded_snow_casted:
             try:
                 logging.info("Use rastertools hillshade methods.")
-                compute_hillshade_mask_rastertools(relief_shadow_mask, dem, config.metadata, tmp_dir, tg_resolution,
-                                                    config.rastertools_window_size, config.rastertools_radius)
+                compute_hillshade_mask_rastertools(relief_shadow_mask, dem, config.metadata, tmp_dir, tg_resolution, 
+                                                   config.rastertools_window_size, config.rastertools_radius)
             except Exception as err:
                 logging.error(f"Rastertools hillshade methods encountered an error : {err}")
                 raise
+        if config.shaded_snow_hillshade and config.shaded_snow_casted:
+            logging.info("Combine both classic hillshade method and rastertools methods.")
+            compute_combined_hillshade_mask(relief_shadow_mask, tmp_dir, hillshade_mask)
     else:
         relief_shadow_mask = config.relief_shadow_mask
 
     # Initialize the no_data mask
     logging.info("Initialize no_data mask")
     no_data_mask_file = initialize_no_data_mask(img_vrt, tmp_dir, no_data=config.no_data, ram=config.ram)
-
+    
     logging.info("Launch pass0")
     # Extract red band
     red_band_file = extract_red_band(img_vrt, tmp_dir, no_data=config.no_data)
@@ -199,8 +205,7 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
     mask_back_to_cloud_file = extract_back_to_cloud_mask(config.cloud_mask, red_band_file, tmp_dir,
                                                          config.red_back_to_cloud, config.mode, config.ram)
 
-    ndsi_formula = "(im1b" + str(N_GREEN) + "-im1b" + str(N_SWIR) + ")/(im1b" + str(N_GREEN) + "+im1b" + str(
-        N_SWIR) + ")"
+    ndsi_formula = f"(im1b{N_GREEN}-im1b{N_SWIR})/(im1b{N_GREEN}+im1b{N_SWIR})"
     logging.info("ndsi formula : " + ndsi_formula)
 
     logging.info("Launch pass1")
@@ -254,7 +259,7 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
             compute_snow_pass2_vec(snow_pass2_file, tmp_dir, config.generate_intermediate_vectors,
                                    config.use_gdal_trace_outline, config.gdal_trace_outline_min_area,
                                    config.gdal_trace_outline_dp_toler)
-            if config.detect_shaded_snow:
+            if detect_shaded_snow:
                 shaded_snow_file = compute_shaded_snow(img_vrt, dem, relief_shadow_mask, cloud_refine_file, tmp_dir,
                                                        config.shaded_snow_pass, zs, ram=config.ram)
                 uncalibrated_shaded_snow_mask = compute_uncalibrated_shaded_snow_mask(snow_pass1_file, snow_pass2_file, tmp_dir, shaded_snow_file, ram=config.ram)
@@ -271,7 +276,9 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
 
     if snow_pass2_file is None:
         snow_pass2_file = op.join(tmp_dir, SNOW_PASS2)
-        band_math([snow_pass1_file], snow_pass2_file + GDAL_OPT, "0", config.ram, ImagePixelType_uint8)
+        band_math([snow_pass1_file],
+                  f"{snow_pass2_file}?{GDAL_OPT_1B}",
+                  "0", config.ram, ImagePixelType_uint8)
 
     compute_snow_pass3_vec(generic_snow_path, tmp_dir, config.generate_intermediate_vectors,
                            config.use_gdal_trace_outline,
@@ -286,9 +293,10 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
     # Compute the complete snow mask
     snow_all_file = op.join(tmp_dir, SNOW_ALL)
     logging.info("Compute snow all file")
-    compute_snow_mask(snow_pass1_file, snow_pass2_file, cloud_pass1_file, cloud_refine_file,
-                      all_cloud_mask_file, snow_all_file, slope_mask_file, config.ram,
-                      ImagePixelType_uint8)
+    compute_snow_mask(snow_pass1_file, snow_pass2_file, cloud_pass1_file,
+                      cloud_refine_file, all_cloud_mask_file,
+                      f"{snow_all_file}?{GDAL_OPT}",
+                      slope_mask_file, config.ram, ImagePixelType_uint8)
 
     # FSC has not been validated yet for L8 and Take5
     if config.mission is MISSION_S2:
@@ -310,7 +318,7 @@ def detect_snow(config, output_dir, chain_version=None, product_counter=None):
             logging.info("Compute FSC OG")
             fsc_og_file = compute_fsc_og(name, fsc_toc_file, config.tcd_file, final_mask_file, output_dir,
                                          config.fscOg_Eq, ram=config.ram)
-
+            
             logging.info("Compute QC Flags")
             qc_flags_file = edit_lis_fsc_qc_layers(fsc_toc_file, config.cloud_mask, config.div_mask,  tmp_dir,
                                                    fsc_og_file=fsc_og_file, water_mask_path=config.water_mask_path,
@@ -438,7 +446,10 @@ def initialize_no_data_mask(img_vrt, tmp_dir, no_data=-10000, ram=512):
     no_data_mask_file = op.join(tmp_dir, NO_DATA_MASK)
     logging.info("No data mask : %s", no_data_mask_file)
 
-    band_math([img_vrt], no_data_mask_file, no_data_mask_expr, ram)
+    band_math([img_vrt],
+              f"{no_data_mask_file}?{GDAL_OPT_1B}",
+              no_data_mask_expr,
+              ram)
 
     return no_data_mask_file
 
@@ -545,8 +556,11 @@ def manage_slope_mask(div_mask, div_slope_threshold, tmp_dir, ram=512):
             # Extract the bad slope correction flag
             expr = "im1b1>=" + str(div_slope_threshold) + "?1:0"
             logging.debug("expression : " + expr)
-
-            band_math([div_mask], slope_mask_file, expr, ram, out_type=ImagePixelType_uint8)
+            band_math([div_mask],
+                      f"{slope_mask_file}?{GDAL_OPT_1B}",
+                      expr,
+                      ram,
+                      out_type=ImagePixelType_uint8)
 
             return slope_mask_file
         else:
@@ -568,16 +582,19 @@ def compute_snow_pass1(img_vrt, all_cloud_mask_file, tmp_dir, ndsi_formula, ndsi
     :return: snow pass 1 file
     """
     # NDSI condition (ndsi > x and not cloud)
-    condition_ndsi = "(im2b1!=1 and (" + ndsi_formula + ")>" + str(ndsi_pass1) + " "
+    condition_ndsi = f"(im2b1!=1 and ({ndsi_formula}) > {ndsi_pass1} "
     logging.debug("condition_ndsi : " + condition_ndsi)
 
-    condition_pass1 = condition_ndsi + " and im1b" + str(N_RED) + "> " + str(red_pass1) + ")"
+    condition_pass1 = f"{condition_ndsi} and im1b{N_RED} > {red_pass1})"
     logging.debug("condition_pass1 : " + condition_pass1)
 
     pass1_file = op.join(tmp_dir, SNOW_PASS1)
     logging.info("snow_pass1_file : " + pass1_file)
 
-    band_math([img_vrt, all_cloud_mask_file], pass1_file + GDAL_OPT, condition_pass1 + "?1:0", ram)
+    band_math([img_vrt, all_cloud_mask_file],
+              f"{pass1_file}?{GDAL_OPT_1B}",
+              f"{condition_pass1}?1:0",
+              ram)
 
     return pass1_file
 
@@ -691,8 +708,10 @@ def compute_snow_pass2(img_vrt, dem, cloud_refine_file, tmp_dir, ndsi_formula, n
 
     snow_pass2_file = op.join(tmp_dir, SNOW_PASS2)
 
-    band_math([img_vrt, dem, cloud_refine_file], snow_pass2_file + GDAL_OPT,
-              condition_pass2 + "?1:0", ram)
+    band_math([img_vrt, dem, cloud_refine_file],
+              f"{snow_pass2_file}?{GDAL_OPT_1B}",
+              f"{condition_pass2}?1:0",
+              ram)
     return snow_pass2_file
 
 
@@ -731,16 +750,17 @@ def compute_shaded_snow(img_vrt, dem, relief_shadow_mask, cloud_refine_file, tmp
     :return: shaded snow file
     """
 
-    condition_shaded_snow = "(im3b1 != 1) and (im2b1>=" + str(zs) + ") and (im4b1!=0) and ((im1b" + str(
-        N_BLUE) + "-im1b" + str(N_NIR) + ")>" + str(shaded_snow_pass) + ")"
+    condition_shaded_snow = f"(im3b1 != 1) and (im2b1>={zs}) and (im4b1!=0) and ((im1b{N_BLUE}-im1b{N_NIR})>{shaded_snow_pass})"
 
     shaded_snow_file = op.join(tmp_dir, SHADED_SNOW)
 
     logging.debug("Compute_shaded_snow condition : " + condition_shaded_snow)
     logging.debug("shaded_snow file: " + shaded_snow_file)
 
-    band_math([img_vrt, dem, cloud_refine_file, relief_shadow_mask], shaded_snow_file + GDAL_OPT,
-              condition_shaded_snow + "?1:0", ram)
+    band_math([img_vrt, dem, cloud_refine_file, relief_shadow_mask],
+              f"{shaded_snow_file}?{GDAL_OPT_1B}",
+              f"{condition_shaded_snow}?1:0",
+              ram)
     return shaded_snow_file
 
 
@@ -814,7 +834,7 @@ def compute_final_mask(cloud_refine_file, generic_snow_path, mask_back_to_cloud_
     else:
         condition_snow = "(im2b1==1)"
 
-    condition_final = condition_snow + "?" + str(LABEL_SNOW) + ":((im1b1==1) or (im3b1==1))?" + str(LABEL_CLOUD) + ":0"
+    condition_final = f"{condition_snow}?{LABEL_SNOW}:((im1b1==1) or (im3b1==1))?{LABEL_CLOUD}:0"
 
     logging.info("Final condition for snow masking: " + condition_final)
 
@@ -822,8 +842,9 @@ def compute_final_mask(cloud_refine_file, generic_snow_path, mask_back_to_cloud_
               condition_final, ram)
 
     # Apply the no-data mask
-    band_math([final_mask_file, no_data_mask_file], final_mask_file,
-              "im2b1==1?" + str(LABEL_NO_DATA) + ":im1b1", ram)
+    band_math([final_mask_file, no_data_mask_file],
+              f"{final_mask_file}?{GDAL_OPT}",
+              f"im2b1==1?{LABEL_NO_DATA}:im1b1", ram)
 
     return final_mask_file
 
@@ -844,14 +865,18 @@ def compute_snow_pass3(snow_pass1_file, snow_pass2_file, tmp_dir, shaded_snow_fi
             logging.debug("condition pass3 : %s", condition_pass3)
             snow_pass3_file = op.join(tmp_dir, SNOW_PASS3)
             logging.debug("snow_pass3_file : %s", snow_pass3_file)
-            band_math([snow_pass1_file, snow_pass2_file, shaded_snow_file], snow_pass3_file + GDAL_OPT,
-                      condition_pass3 + "?1:0", ram)
+            band_math([snow_pass1_file, snow_pass2_file, shaded_snow_file],
+                      f"{snow_pass3_file}?{GDAL_OPT_1B}",
+                      f"{condition_pass3}?1:0",
+                      ram)
         else:
             condition_pass3 = "(im1b1 == 1 or im2b1 == 1)"
             logging.debug("condition pass3 : %s", condition_pass3)
             snow_pass3_file = op.join(tmp_dir, SNOW_PASS3)
             logging.debug("snow_pass3_file : %s", snow_pass3_file)
-            band_math([snow_pass1_file, snow_pass2_file], snow_pass3_file + GDAL_OPT, condition_pass3 + "?1:0",
+            band_math([snow_pass1_file, snow_pass2_file],
+                      f"{snow_pass3_file}?{GDAL_OPT_1B}",
+                      f"{condition_pass3}?1:0",
                       ram)
         generic_snow_path = snow_pass3_file
     else:
@@ -873,8 +898,10 @@ def compute_uncalibrated_shaded_snow_mask(snow_pass1_file, snow_pass2_file, tmp_
     logging.debug("condition uncalibrated shaded snow mask : %s", condition)
     uncalibrated_shaded_snow_file = op.join(tmp_dir, UNCALIBRATED_SHADED_SNOW)
     logging.debug("uncalibrated_shaded_snow_file : %s", uncalibrated_shaded_snow_file)
-    band_math([snow_pass1_file, snow_pass2_file, shaded_snow_file], uncalibrated_shaded_snow_file + GDAL_OPT,
-                condition + "?1:0", ram)
+    band_math([snow_pass1_file, snow_pass2_file, shaded_snow_file],
+              f"{uncalibrated_shaded_snow_file}?{GDAL_OPT_1B}",
+              f"{condition}?1:0",
+              ram)
     return uncalibrated_shaded_snow_file
 
 def compute_ndsi(img_vrt, final_mask_file, tmp_dir, ndsi_formula, ram=512):
@@ -890,9 +917,11 @@ def compute_ndsi(img_vrt, final_mask_file, tmp_dir, ndsi_formula, ram=512):
     # write NDSIx100 (0-100), nosnow (0) cloud (205) and nodata (255)
     ndsi_file = op.join(tmp_dir, NDSI)
     logging.debug("ndsi_file : %s", ndsi_file)
-    expression = "(im2b1 == " + LABEL_SNOW + ")?100*" + str(ndsi_formula) + ":im2b1"
+    expression = f"(im2b1 == {LABEL_SNOW})?100*{str(ndsi_formula)}:im2b1"
     logging.debug("expression : %s", expression)
-    band_math([img_vrt, final_mask_file], ndsi_file, expression, ram)
+    band_math([img_vrt, final_mask_file],
+              f"{ndsi_file}?{GDAL_OPT}",
+              expression, ram)
     edit_nodata_value(ndsi_file, nodata_value=int(LABEL_NO_DATA))
     return ndsi_file
 
@@ -912,10 +941,12 @@ def compute_fsc_toc(ndsi_file, final_mask_file, tmp_dir, fscToc_Eq, ram=512):
     logging.debug("eq : %s", eq)
     exp = eq.replace("ndsi", "im1b1/100")  # ndsi was written in %
     logging.debug("exp : %s", exp)
-    expression = "(im2b1 == " + LABEL_SNOW + ") ? 100*" + exp + " : im2b1"
+    expression = f"(im2b1 == {LABEL_SNOW}) ? 100*{exp} : im2b1"
     logging.debug("expression : %s", expression)
     fscToc_file = op.join(tmp_dir, FSCTOC)
-    band_math([ndsi_file, final_mask_file], fscToc_file, expression, ram)
+    band_math([ndsi_file, final_mask_file],
+              f"{fscToc_file}?{GDAL_OPT}",
+              expression, ram)
     edit_nodata_value(fscToc_file, nodata_value=int(LABEL_NO_DATA))
     return fscToc_file
 
@@ -932,7 +963,9 @@ def adjust_for_hillshade(uncalibrated_shaded_snow, fsc_toc_file, tmp_dir, ram):
     expression = "(im1b1 == 1) ? 100 : im2b1 "
     logging.debug("expression : %s", expression)
     fscTocHS_file = op.join(tmp_dir, FSCTOCHS)
-    band_math([uncalibrated_shaded_snow, fsc_toc_file], fscTocHS_file, expression, ram)
+    band_math([uncalibrated_shaded_snow, fsc_toc_file],
+              f"{fscTocHS_file}?{GDAL_OPT}",
+              expression, ram)
     return fscTocHS_file
 
 
@@ -955,10 +988,12 @@ def compute_fsc_og(name, fscToc_file, tcd_file, final_mask_file, output_dir, fsc
         exp = eq.replace("fscToc", "im1b1/100")  # fscToc was written in %
         logging.debug("exp : %s", exp)
         exp = exp.replace("tcd", "im3b1/100")  # tcd is given in %
-        expression = "(im2b1 == " + LABEL_SNOW + ") ? ( im3b1 > 100 ? im1b1 : 100*" + exp + " ) : im2b1"
+        expression = f"(im2b1 == {LABEL_SNOW}) ? ( im3b1 > 100 ? im1b1 : 100*{exp} ) : im2b1"
         logging.debug("expression : %s", expression)
         fscOg_file = op.join(output_dir, name)
-        band_math([fscToc_file, final_mask_file, tcd_file], fscOg_file, expression, ram)
+        band_math([fscToc_file, final_mask_file, tcd_file],
+                  f"{fscOg_file}?{GDAL_OPT}",
+                  expression, ram)
         edit_nodata_value(fscOg_file, nodata_value=int(LABEL_NO_DATA))
         return fscOg_file
     else:
diff --git a/python/s2snow/snow_synthesis.py b/python/s2snow/snow_synthesis.py
index 94fadc45de2611d39beed6b21c38008f19b3fd75..75cdb2f1426a3fe4ad75745dd8893745828795b8 100755
--- a/python/s2snow/snow_synthesis.py
+++ b/python/s2snow/snow_synthesis.py
@@ -36,27 +36,23 @@ from otbApplication import ImagePixelType_uint8, ImagePixelType_uint16
 from s2snow.compute_NOBS import compute_NOBS
 from s2snow.compute_NSP import compute_NSP
 from s2snow.compute_SOD_SMOD import compute_SOD_SMOD
-from s2snow.lis_constant import TMP_DIR, OUTPUT_DATES_FILE, DOI_URL
+from s2snow.lis_constant import (DOI_URL, GDAL_OPT, GDAL_OPT_1B, LABEL_CLOUD,
+                                 LABEL_NO_DATA, LABEL_NO_DATA_OLD, LABEL_NO_SNOW,
+                                 LABEL_SNOW, OUTPUT_DATES_FILE, TMP_DIR)
 from s2snow.lis_exception import NoProductMatchingSynthesis
 from s2snow.otb_wrappers import band_math, super_impose, band_mathX, gap_filling, get_app_output
 from s2snow.snow_product import SnowProduct
 from s2snow.utils import datetime_to_str
 from s2snow.utils import write_list_to_file, read_list_from_file
 
-# Build gdal option to generate maks of 1 byte using otb extended filename
-# syntaxx
 from importlib.metadata import version
 
-GDAL_OPT = "?&gdal:co:NBITS=1&gdal:co:COMPRESS=DEFLATE"
-
-LABEL_NO_SNOW = "0"
-LABEL_SNOW = "100"
-LABEL_CLOUD = "205"
-LABEL_NO_DATA = "255"
-LABEL_NO_DATA_OLD = "254"
-
-
 def compute_snow_synthesis(config, output_dir, h2_chain_version=None, product_counter=None):
+    # Set maximum ITK threads
+    if config.nb_threads:
+        os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(config.nb_threads)
+    logging.debug("threads number : " + str(config.nb_threads))
+
     tmp_dir = os.path.join(output_dir, TMP_DIR)
 
     # manage synthesis name
@@ -69,7 +65,7 @@ def compute_snow_synthesis(config, output_dir, h2_chain_version=None, product_co
         synthesis_name = synthesis_name + "_" + product_counter
 
     logging.info("Load S2 snow products.")
-    product_dict = load_snow_products(config.date_start, config.date_stop, config.date_margin,
+    product_dict, margin_info_dict = load_snow_products(config.date_start, config.date_stop, config.date_margin,
                                       config.input_products_list, config.tile_id, tmp_dir)
 
     # Exiting with error if none of the input products were loaded
@@ -126,8 +122,7 @@ def compute_snow_synthesis(config, output_dir, h2_chain_version=None, product_co
     multitemp_cloud_vrt = build_cloud_mask_vrt(tmp_dir, binary_cloud_mask_list)
 
     logging.info("Compute cloud occurence")
-    compute_CCD(tmp_dir, binary_cloud_mask_list, multitemp_cloud_vrt, config.synthesis_id,
-                config.ram)
+    compute_CCD(tmp_dir, binary_cloud_mask_list, multitemp_cloud_vrt, config.synthesis_id, config.ram)
 
     # ----------------------------------------------------------------------------------------
     # Compute Snow Coverage Duration "SCD" and multitemp_snow_mask
@@ -135,10 +130,12 @@ def compute_snow_synthesis(config, output_dir, h2_chain_version=None, product_co
     logging.info("Compute snow vrt")
     multitemp_snow_vrt = build_snow_mask_vrt(tmp_dir, binary_snow_mask_list)
     multitemp_snow100 = build_multitemp_snow100(tmp_dir, multitemp_snow_vrt, config.ram)
+
     logging.info("Compute gapfilled timeserie")
     gapfilled_timeserie = build_gapfilled_timeserie(tmp_dir, multitemp_snow100, multitemp_cloud_vrt,
                                                     input_dates_file_path, output_dates_filename,
                                                     config.synthesis_id, config.ram)
+
     logging.info("Compute snow coveration duration")
     compute_SCD(output_dir, output_dates, gapfilled_timeserie, synthesis_name, config.ram)
 
@@ -148,7 +145,9 @@ def compute_snow_synthesis(config, output_dir, h2_chain_version=None, product_co
 
     # run compute NOBS
     logging.info("Compute NOBS")
-    compute_NOBS(multitemp_cloud_vrt, synthesis_name=synthesis_name, output_dir=output_dir)
+    compute_NOBS(
+        multitemp_cloud_vrt, margin_info_dict, synthesis_name=synthesis_name, output_dir=output_dir
+    )
 
     # run compute NSP
     logging.info("Compute NSP")
@@ -184,14 +183,12 @@ def build_multitemp_snow100(tmp_dir, multitemp_snow_vrt, ram):
     
     # multiply by 100 for the temporal interpolation
     logging.debug("Scale by 100 multitemp snow mask vrt")
-    bandMathXApp = band_mathX([multitemp_snow_vrt],
-                              multitemp_snow100,
-                              "im1 mlt 100",
-                              ram,
-                              ImagePixelType_uint8)
+    band_mathX([multitemp_snow_vrt],
+               f"{multitemp_snow100}?{GDAL_OPT}",
+               "im1 mlt 100",
+               ram,
+               ImagePixelType_uint8)
     
-    bandMathXApp.ExecuteAndWriteOutput()
-    bandMathXApp = None
     return multitemp_snow100
 
 
@@ -226,16 +223,14 @@ def build_gapfilled_timeserie(tmp_dir, multitemp_snow100, multitemp_cloud_vrt, i
     logging.debug("ram : %s", str(ram))
 
     multitemp_snow100_gapfilled = op.join(tmp_dir, "multitemp_snow100_gapfilled.tif")
-    gapfilled_timeserie = op.join(tmp_dir, "DAILY_SNOW_MASKS_" + synthesis_id + ".tif")
 
     app_gap_filling = gap_filling(multitemp_snow100,
                                   multitemp_cloud_vrt,
-                                  multitemp_snow100_gapfilled + "?&gdal:co:COMPRESS=DEFLATE",
+                                  f"{multitemp_snow100_gapfilled}?{GDAL_OPT}",
                                   input_dates_filename,
                                   output_dates_filename,
                                   ram,
                                   ImagePixelType_uint8)
-    app_gap_filling
 
     # @TODO the mode is for now forced to DEBUG in order to generate img on disk
     # img_in = get_app_output(app_gap_filling, "out", mode)
@@ -244,15 +239,16 @@ def build_gapfilled_timeserie(tmp_dir, multitemp_snow100, multitemp_cloud_vrt, i
     # app_gap_filling = None
     img_in = get_app_output(app_gap_filling, "out", "DEBUG")
     app_gap_filling = None
+
     # threshold to 0 or 1
-    logging.info("Round to binary series of snow occurrence")
-    bandMathXApp = band_mathX([img_in],
-                              gapfilled_timeserie + GDAL_OPT,
-                              "(im1 mlt 2) dv 100",
-                              ram,
-                              ImagePixelType_uint8)
-    bandMathXApp.ExecuteAndWriteOutput()
-    bandMathXApp = None
+    logging.info("Round to binary series of snow occurrence")
+    gapfilled_timeserie = op.join(tmp_dir, "DAILY_SNOW_MASKS_" + synthesis_id + ".tif")
+
+    band_mathX([img_in],
+               f"{gapfilled_timeserie}?{GDAL_OPT_1B}",
+               "im1 mlt 0.02",
+               ram,
+               ImagePixelType_uint8)
 
     return gapfilled_timeserie
 
@@ -271,7 +267,9 @@ def compute_SCD(output_dir, output_dates, gapfilled_timeserie, synthesis_name, r
     logging.debug("Bande index: %s", str(band_index))
     expression = "+".join(["im1b" + str(i) for i in band_index])
     logging.debug("expression: %s", expression)
-    band_math([gapfilled_timeserie], snow_coverage_duration + "?&gdal:co:COMPRESS=DEFLATE", expression, ram, ImagePixelType_uint16)
+    band_math([gapfilled_timeserie],
+              f"{snow_coverage_duration}?{GDAL_OPT}",
+              expression, ram, ImagePixelType_uint16)
     return snow_coverage_duration
 
 
@@ -306,10 +304,10 @@ def compute_CCD(tmp_dir, binary_cloud_mask_list, multitemp_cloud_vrt, synthesis_
     expression = "+".join(["im1b" + str(i) for i in band_index])
     logging.debug("expression : %s", expression)
     band_math([multitemp_cloud_vrt],
-                            ccd_file_path,
-                            expression,
-                            ram,
-                            ImagePixelType_uint16)
+              f"{ccd_file_path}?{GDAL_OPT}",
+              expression,
+              ram,
+              ImagePixelType_uint16)
 
     return ccd_file_path
 
@@ -321,9 +319,8 @@ def convert_snow_masks_into_binary_cloud_masks(path_out, ram, product_dict):
     logging.debug("ram : %s", str(ram))
 
     # convert the snow masks into binary cloud masks
-    expression = "im1b1==" + LABEL_CLOUD + "?1:(im1b1==" + LABEL_NO_DATA + "?1:(im1b1==" + LABEL_NO_DATA_OLD + "?1:0))"
-    binary_cloud_mask_list = convert_mask_list(path_out, product_dict, expression, "cloud", ram,
-                                               mask_format=GDAL_OPT)
+    expression = f"im1b1=={LABEL_CLOUD}?1:(im1b1=={LABEL_NO_DATA}?1:(im1b1=={LABEL_NO_DATA_OLD}?1:0))"
+    binary_cloud_mask_list = convert_mask_list(path_out, product_dict, expression, "cloud", ram)
     logging.debug("Binary cloud mask list:")
     logging.debug(binary_cloud_mask_list)
     return binary_cloud_mask_list
@@ -337,9 +334,8 @@ def convert_snow_masks_into_binary_snow_masks(path_out, ram, product_dict):
 
     # convert the snow masks into binary snow masks
     #expression = "(im1b1==" + LABEL_SNOW + ")?1:0"
-    expression = "(im1b1>0 and im1b1<=" + LABEL_SNOW + ")?1:0"
-    binary_snow_mask_list = convert_mask_list(path_out, product_dict, expression, "snow", ram,
-                                              mask_format=GDAL_OPT)
+    expression = f"(im1b1>0 and im1b1<={LABEL_SNOW})?1:0"
+    binary_snow_mask_list = convert_mask_list(path_out, product_dict, expression, "snow", ram)
     logging.debug("Binary snow mask list:")
     logging.debug(binary_snow_mask_list)
     return binary_snow_mask_list
@@ -398,7 +394,7 @@ def load_densification_products(date_margin, date_start, date_stop, densificatio
     logging.debug("ram : %s", str(ram))
 
     # load densification snow products
-    densification_product_dict = load_snow_products(date_start, date_stop, date_margin, densification_products_list, None, path_tmp)
+    densification_product_dict, margin_densification_dict = load_snow_products(date_start, date_stop, date_margin, densification_products_list, None, path_tmp)
     logging.debug("Densification product dict:")
     logging.debug(densification_product_dict)
 
@@ -414,15 +410,13 @@ def load_densification_products(date_margin, date_start, date_stop, densificatio
                 reprojected_mask = op.join(path_tmp,
                                            densifier_product.product_name + "_reprojected.tif")
                 if not os.path.exists(reprojected_mask):
-                    super_impose_app = super_impose(s2_footprint_ref,
-                                                    original_mask,
-                                                    reprojected_mask,
-                                                    "nn",
-                                                    int(LABEL_NO_DATA),
-                                                    ram,
-                                                    ImagePixelType_uint8)
-                    super_impose_app.ExecuteAndWriteOutput()
-                    super_impose_app = None
+                    super_impose(s2_footprint_ref, original_mask,
+                                 f"{reprojected_mask}?{GDAL_OPT}",
+                                 "nn",
+                                 int(LABEL_NO_DATA),
+                                 ram,
+                                 ImagePixelType_uint8)
+
                 densifier_product.snow_mask = reprojected_mask
                 logging.debug(densifier_product.snow_mask)
 
@@ -447,6 +441,10 @@ def load_snow_products(date_start, date_stop, date_margin, snow_products_list, t
 
     # init result snow product dict
     s2_snow_products = {}
+    s2_snow_products_in_margin = {
+        'margin_before': 0,
+        'margin_after': 0
+    }
     search_start_date = date_start - date_margin
     search_stop_date = date_stop + date_margin
 
@@ -462,6 +460,8 @@ def load_snow_products(date_start, date_stop, date_margin, snow_products_list, t
 
         acquisition_day = datetime.strftime(product.acquisition_date, "%Y%m%d")
         should_be_used_in_synthesis = True
+        is_in_margin_before = False
+        is_in_margin_after = False
         if search_start_date > product.acquisition_date or \
                 search_stop_date < product.acquisition_date:
             should_be_used_in_synthesis = False
@@ -472,6 +472,16 @@ def load_snow_products(date_start, date_stop, date_margin, snow_products_list, t
             should_be_used_in_synthesis = False
             logging.warning("Discarding product : {}, because product tile : {} does not correspond to "
                             "synthesis tile : {}".format(str(product_path), str(product.tile_id), str(tile_id)))
+        if should_be_used_in_synthesis and date_start > product.acquisition_date:
+            is_in_margin_before = True
+            logging.warning("Product : {}, with acquisition date : {} is in the margin "
+                            "period : {} - {}".format(str(product_path), str(product.acquisition_date),
+                                                      str(search_start_date), str(date_start)))
+        if should_be_used_in_synthesis and date_stop < product.acquisition_date:
+            is_in_margin_after = True
+            logging.warning("Product : {}, with acquisition date : {} is in the margin "
+                            "period : {} - {}".format(str(product_path), str(product.acquisition_date),
+                                                      str(date_stop), str(search_stop_date)))
 
         if should_be_used_in_synthesis:
             if acquisition_day not in list(s2_snow_products.keys()):
@@ -480,13 +490,19 @@ def load_snow_products(date_start, date_stop, date_margin, snow_products_list, t
             else:
                 s2_snow_products[acquisition_day].append(product)
             logging.info("Keeping product : %s",str(product))
+            if is_in_margin_before and not is_in_margin_after:
+                s2_snow_products_in_margin['margin_before'] = s2_snow_products_in_margin['margin_before'] + 1
+            elif is_in_margin_after and not is_in_margin_before:
+                s2_snow_products_in_margin['margin_after'] = s2_snow_products_in_margin['margin_after'] + 1
+            else:
+                continue
 
     logging.debug("S2 snow products dictionary:")
 
-    return s2_snow_products
+    return s2_snow_products, s2_snow_products_in_margin
 
 
-def convert_mask_list(path_tmp, product_dict, expression, type_name, ram, mask_format=""):
+def convert_mask_list(path_tmp, product_dict, expression, type_name, ram):
     logging.debug("convert_mask_list")
     logging.debug("path_tmp : %s", path_tmp)
     logging.debug("product_dict : ")
@@ -495,30 +511,30 @@ def convert_mask_list(path_tmp, product_dict, expression, type_name, ram, mask_f
     logging.debug("expression : %s", expression)
     logging.debug("type_name : %s", type_name)
     logging.debug("ram : %s", str(ram))
-    logging.debug("mask_format : %s", mask_format)
 
     binary_mask_list = []
     for mask_date in sorted(product_dict):
-        binary_mask = op.join(path_tmp,
-                              mask_date + "_" + type_name + "_binary.tif")
+        binary_mask = op.join(path_tmp, f"{mask_date}_{type_name}_binary.tif")
         binary_mask = extract_binary_mask(product_dict[mask_date],
                                           binary_mask,
                                           expression,
-                                          ram,
-                                          mask_format)
+                                          ram)
         binary_mask_list.append(binary_mask)
     return binary_mask_list
 
 
-def extract_binary_mask(mask_in, mask_out, expression, ram, mask_format=""):
+def extract_binary_mask(mask_in, mask_out, expression, ram):
     logging.debug("extract_binary_mask")
     logging.debug("mask_in : %s", mask_in)
     logging.debug("mask_out : %s", mask_out)
     logging.debug("expression : %s", expression)
     logging.debug("ram : %s", str(ram))
-    logging.debug("mask_format : %s", mask_format)
 
-    band_math([mask_in], mask_out + mask_format, expression, ram, ImagePixelType_uint8)
+    band_math([mask_in],
+              f"{mask_out}?{GDAL_OPT_1B}",
+              expression,
+              ram,
+              ImagePixelType_uint8)
     return mask_out
 
 
@@ -547,10 +563,13 @@ def merge_masks_at_same_date(snow_product_list, merged_snow_product, threshold=1
     #   we expect to have first the main input products
     #   and then the densification products
     img_index = list(range(1, len(snow_product_list) + 1))
-    expression_merging = "".join(["(im" + str(i) + "b1<=" + str(threshold) + "?im" + str(i) + "b1:" for i in img_index])
-    expression_merging += "im" + str(img_index[-1]) + "b1"
-    expression_merging += "".join([")" for i in img_index])
+    expression_merging = "".join([f"(im{i}b1 <= {threshold}?im{i}b1:" for i in img_index])
+    expression_merging += f"im{img_index[-1]}b1" + ")"*len(img_index)
 
     img_list = [i.get_snow_mask() for i in snow_product_list]
-    band_math(img_list, merged_snow_product, expression_merging, ram, ImagePixelType_uint8)
+    band_math(img_list,
+              f"{merged_snow_product}?{GDAL_OPT}",
+              expression_merging,
+              ram,
+              ImagePixelType_uint8)
 
diff --git a/python/setup.py.in b/python/setup.py.in
index 173fc48dff2db3224713a8e04404a6c6b897c914..b43acab0b9dd23b11eb55f97634aa099a6cfdf6c 100755
--- a/python/setup.py.in
+++ b/python/setup.py.in
@@ -19,9 +19,9 @@
 #
 from distutils.core import setup
 
-VERSION = '1.11.0'
+VERSION = '1.12.0'
 
 setup(name='s2snow',
       version=VERSION,
       package_dir={ '': '${CMAKE_CURRENT_SOURCE_DIR}' },
-      packages=['s2snow'])
+      packages=['s2snow'])
\ No newline at end of file
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 169cefc0fcf5edf9118c23f4a447dde066761ae6..d898157bc3e03d4400169e75521a09bfbe4f470c 100755
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -74,7 +74,7 @@ add_test(NAME fsc_config_unitest
 
 #add_test(NAME snow_synthesis_unitest
 #  COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/snow_synthesis_test.py
-#  "${OUTPUT_TEST_SYNTHESIS}"
+#  "${UNIT_TEST}/"
 #  "${OUTPUT_TEST_UNITEST}/snow_synthesis"
 #    )
 
@@ -185,13 +185,13 @@ add_test(NAME l8_shaded_snow_test
   -i ${DATA_TEST}/Landsat/LANDSAT8-OLITIRS-XS_20200325-103612-548_L2A_T31TCH_C_V3-1
   -d /work/datalake/static_aux/MNT/COP-DEM_GLO-30-DGED_extracted/dem_cubic.vrt
   -o ${OUTPUT_TEST}/l8_shaded_snow
-  -V 1.11.0
+  -V 1.12.0
   )
 
 add_test(NAME l8_shaded_snow_compare_pass1_test
   COMMAND gdalcompare.py
-  ${BASELINE}/l8_test/shaded_snow/LIS_L8-SNOW-MSK_V3-1_T31TCH_1.11.0.tif
-  ${OUTPUT_TEST}/l8_shaded_snow/LIS_L8-SNOW-MSK_V3-1_T31TCH_1.11.0.tif
+  ${BASELINE}/l8_test/shaded_snow/LIS_L8-SNOW-MSK_V3-1_T31TCH_1.12.0.tif
+  ${OUTPUT_TEST}/l8_shaded_snow/LIS_L8-SNOW-MSK_V3-1_T31TCH_1.12.0.tif
   )
 set_tests_properties(l8_shaded_snow_compare_pass1_test PROPERTIES DEPENDS l8_shaded_snow_test)
 
@@ -295,6 +295,41 @@ add_test(NAME s2_hysope2
   -o ${OUTPUT_TEST}/hysope2
   )
 
+add_test(NAME s2_hysope2_compare_pass1_test
+  COMMAND gdalcompare.py
+  ${BASELINE}/hysope2_test/pass1.tif
+  ${OUTPUT_TEST}/hysope2/tmp/snow_pass1.tif
+  )
+set_tests_properties(s2_hysope2_compare_pass1_test PROPERTIES DEPENDS s2_test)
+
+add_test(NAME s2_hysope2_compare_pass2_test
+  COMMAND gdalcompare.py
+  ${BASELINE}/hysope2_test/pass2.tif
+  ${OUTPUT_TEST}/hysope2/tmp/snow_pass2.tif
+  )
+set_tests_properties(s2_hysope2_compare_pass2_test PROPERTIES DEPENDS s2_test)
+
+add_test(NAME s2_hysope2_compare_pass3_test
+  COMMAND gdalcompare.py
+  ${BASELINE}/hysope2_test/pass3.tif
+  ${OUTPUT_TEST}/hysope2/tmp/snow_pass3.tif
+  )
+set_tests_properties(s2_hysope2_compare_pass3_test PROPERTIES DEPENDS s2_test)
+
+add_test(NAME s2_hysope2_compare_snow_all_test
+  COMMAND gdalcompare.py
+  ${BASELINE}/hysope2_test/snow_all.tif
+  ${OUTPUT_TEST}/hysope2/tmp/LIS_SNOW_ALL.TIF
+  )
+set_tests_properties(s2_hysope2_compare_snow_all_test PROPERTIES DEPENDS s2_test)
+
+add_test(NAME s2_hysope2_compare_final_mask_test
+  COMMAND gdalcompare.py
+  ${BASELINE}/hysope2_test/final_mask.tif
+  ${OUTPUT_TEST}/hysope2/tmp/LIS_SEB.TIF
+  )
+set_tests_properties(s2_hysope2_compare_final_mask_test PROPERTIES DEPENDS s2_test)
+
 add_test(NAME dem_builder_test
   COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_SOURCE_DIR}/python/s2snow/dem_builder.py
   "${DATA_TEST}/SRTM/sud_ouest.vrt"
@@ -321,13 +356,13 @@ add_test(NAME shaded_snow_test
     -d ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/Leman_dem_merged.tif
     -s ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/hillshade_mask.tif
     -o ${OUTPUT_TEST}/s2/shaded_snow
-    -V 1.11.0
+    -V 1.12.0
     )
 
 add_test(NAME shaded_compare_test
   COMMAND gdalcompare.py
-  ${BASELINE}/shaded_snow_test/shaded_snow/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
-  ${OUTPUT_TEST}/s2/shaded_snow/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
+  ${BASELINE}/shaded_snow_test/shaded_snow/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
+  ${OUTPUT_TEST}/s2/shaded_snow/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
   )
 set_tests_properties(shaded_compare_test PROPERTIES DEPENDS shaded_snow_test)
 
@@ -338,13 +373,13 @@ add_test(NAME shaded_snow_no_relief_shadow_mask_test
     -i ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0
     -d ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/Leman_dem_merged.tif
     -o ${OUTPUT_TEST}/s2/shaded_snow_no_relief_shadow_mask
-    -V 1.11.0
+    -V 1.12.0
     )
 
 add_test(NAME shaded_snow_no_relief_shadow_mask_compare_test
   COMMAND gdalcompare.py
-  ${BASELINE}/shaded_snow_test/shaded_snow_no_relief_shadow_mask/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
-  ${OUTPUT_TEST}/s2/shaded_snow_no_relief_shadow_mask/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
+  ${BASELINE}/shaded_snow_test/shaded_snow_no_relief_shadow_mask/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
+  ${OUTPUT_TEST}/s2/shaded_snow_no_relief_shadow_mask/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
   )
 set_tests_properties(shaded_snow_no_relief_shadow_mask_compare_test PROPERTIES DEPENDS shaded_snowshaded_snow_no_relief_shadow_mask_test_no_relief_shadow_mask)
 
@@ -355,17 +390,16 @@ add_test(NAME shaded_snow_detect_shaded_snow_false_test
     -i ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0
     -d ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/Leman_dem_merged.tif
     -o ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_false
-    -V 1.11.0
+    -V 1.12.0
     )
 
 add_test(NAME shaded_snow_detect_shaded_snow_false_compare_test
   COMMAND gdalcompare.py
-  ${BASELINE}/shaded_snow_test/shaded_snow_detect_shaded_snow_false/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
-  ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_false/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
+  ${BASELINE}/shaded_snow_test/shaded_snow_detect_shaded_snow_false/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
+  ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_false/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
   )
 set_tests_properties(shaded_snow_detect_shaded_snow_false_compare_test PROPERTIES DEPENDS shaded_snow_detect_shaded_snow_false_test)
 
-
 add_test(NAME shaded_snow_detect_shaded_snow_rastertools
   COMMAND ${PYTHON_EXECUTABLE}
     ${CMAKE_BINARY_DIR}/app/let_it_snow_fsc.py
@@ -373,16 +407,33 @@ add_test(NAME shaded_snow_detect_shaded_snow_rastertools
     -i ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0
     -d ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/Leman_dem_merged.tif
     -o ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_rastertools
-    -V 1.11.0
+    -V 1.12.0
     )
     
 add_test(NAME shaded_snow_detect_shaded_snow_rastertools_compare_test
   COMMAND gdalcompare.py
-  ${BASELINE}/shaded_snow_test/shaded_snow_detect_shaded_snow_rastertools/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
-  ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_rastertools/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.11.0.tif
+  ${BASELINE}/shaded_snow_test/shaded_snow_detect_shaded_snow_rastertools/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
+  ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_rastertools/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
   )
 set_tests_properties(shaded_snow_detect_shaded_snow_rastertools_compare_test PROPERTIES DEPENDS shaded_snow_detect_shaded_snow_rastertools)
 
+add_test(NAME shaded_snow_detect_shaded_snow_combined_hillshade
+  COMMAND ${PYTHON_EXECUTABLE}
+    ${CMAKE_BINARY_DIR}/app/let_it_snow_fsc.py
+    -c ${DATA_TEST}/S2/shaded_snow/global_parameters_shaded_snow_combined_hillshade.json
+    -i ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0
+    -d ${DATA_TEST}/S2/SENTINEL2B_20210124-103807-070_L2A_T31TGM_C_V1-0/Leman_dem_merged.tif
+    -o ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_combined_hillshade
+    -V 1.12.0
+    )
+
+add_test(NAME shaded_snow_detect_shaded_snow_combined_hillshade_compare_test
+  COMMAND gdalcompare.py
+  ${BASELINE}/shaded_snow_test/shaded_snow_detect_shaded_snow_combined_hillshade/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
+  ${OUTPUT_TEST}/s2/shaded_snow_detect_shaded_snow_combined_hillshade/LIS_S2-SNOW-FSC-TOC_T31TGM_20210124T103807_1.12.0.tif
+  )
+set_tests_properties(shaded_snow_detect_shaded_snow_combined_hillshade_compare_test PROPERTIES DEPENDS shaded_snow_detect_shaded_snow_combined_hillshade)
+
 # -------------------------
 # Synthesis
 # -------------------------
@@ -394,14 +445,14 @@ add_test(NAME snow_synthesis_test
   -d ${DATA_TEST}/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20180131-103619-890_L2A_T31TCH_D_V1-9
   -c ${DATA_TEST}/SYNTHESIS/synthesis_configuration.json
   -j ${DATA_TEST}/SYNTHESIS/synthesis_launch.json
-  -V 1.11.0
+  -V 1.12.0
   -o ${OUTPUT_TEST}/SNOW_SYNTHESIS
   )
 
 add_test(NAME snow_synthesis_compare_scd_test
   COMMAND gdalcompare.py
-  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.11.0_1.tif"
-  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.11.0_1.tif"
+  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.12.0_1.tif"
+  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.12.0_1.tif"
   )
 set_tests_properties(snow_synthesis_compare_scd_test PROPERTIES DEPENDS snow_synthesis_test)
 
@@ -414,25 +465,32 @@ set_tests_properties(snow_synthesis_compare_ccd_test PROPERTIES DEPENDS snow_syn
 
 add_test(NAME snow_synthesis_compare_nobs_test
   COMMAND gdalcompare.py
-  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.11.0_1.tif"
-  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.11.0_1.tif"
+  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.12.0_1.tif"
+  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.12.0_1.tif"
   )
 set_tests_properties(snow_synthesis_compare_nobs_test PROPERTIES DEPENDS snow_synthesis_test)
 
 add_test(NAME snow_synthesis_compare_sod_test
   COMMAND gdalcompare.py
-  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.11.0_1.tif"
-  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.11.0_1.tif"
+  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.12.0_1.tif"
+  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.12.0_1.tif"
   )
 set_tests_properties(snow_synthesis_compare_sod_test PROPERTIES DEPENDS snow_synthesis_test)
 
 add_test(NAME snow_synthesis_compare_smod_test
   COMMAND gdalcompare.py
-  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.11.0_1.tif"
-  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.11.0_1.tif"
+  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.12.0_1.tif"
+  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.12.0_1.tif"
   )
 set_tests_properties(snow_synthesis_compare_smod_test PROPERTIES DEPENDS snow_synthesis_test)
 
+add_test(NAME snow_synthesis_compare_nsp_test
+  COMMAND gdalcompare.py
+  "${BASELINE}/snow_synthesis_test/LIS_S2L8-SNOW-NSP_T31TCH_20180101_20180131_1.12.0_1.tif"
+  "${OUTPUT_TEST}/SNOW_SYNTHESIS/LIS_S2L8-SNOW-NSP_T31TCH_20180101_20180131_1.12.0_1.tif"
+  )
+set_tests_properties(snow_synthesis_compare_nsp_test PROPERTIES DEPENDS snow_synthesis_test)
+
 if(NOT GITLAB_CI_BUILD MATCHES "true")
   add_test(NAME snow_synthesis_muscate_test
     COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/let_it_snow_synthesis.py
@@ -464,12 +522,12 @@ add_test(NAME snow_synthesis_without_densification_test
 
 if(NOT GITLAB_CI_BUILD MATCHES "true")
   # ----------------------------------
-  # Synthesis from 1.7 snow product
+  # Synthesis from 1.12.0 snow product
   # ----------------------------------
   add_test(NAME synthesis_from_l2A_S2_20180101
     COMMAND ${PYTHON_EXECUTABLE}
     ${CMAKE_BINARY_DIR}/app/let_it_snow_fsc.py
-    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration.json
+    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration_shaded_snow_with_rastertools.json
     -i ${DATA_TEST}/L2A_PRODUCTS/SENTINEL2A_20180101-105435-457_L2A_T31TCH_C_V2-2
     -d "/work/datalake/static_aux/MNT/Copernicus_DSM/world.vrt"
     -t "/work/datalake/static_aux/TreeCoverDensity/31TCH/TCD_31TCH.tif"
@@ -480,7 +538,7 @@ if(NOT GITLAB_CI_BUILD MATCHES "true")
   add_test(NAME synthesis_from_l2A_S2_20180131
     COMMAND ${PYTHON_EXECUTABLE}
     ${CMAKE_BINARY_DIR}/app/let_it_snow_fsc.py
-    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration.json
+    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration_shaded_snow_with_rastertools.json
     -i ${DATA_TEST}/L2A_PRODUCTS/SENTINEL2A_20180131-105416-437_L2A_T31TCH_C_V2-2
     -d "/work/datalake/static_aux/MNT/Copernicus_DSM/world.vrt"
     -t "/work/datalake/static_aux/TreeCoverDensity/31TCH/TCD_31TCH.tif"
@@ -491,7 +549,7 @@ if(NOT GITLAB_CI_BUILD MATCHES "true")
   add_test(NAME synthesis_from_l2A_L8_20180115
     COMMAND ${PYTHON_EXECUTABLE}
     ${CMAKE_BINARY_DIR}/app/let_it_snow_fsc.py
-    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration.json
+    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration_shaded_snow_with_rastertools.json
     -i ${DATA_TEST}/L2A_PRODUCTS/LANDSAT8-OLITIRS-XS_20180115-103629-617_L2A_T31TCH_D_V1-9
     -d "/work/datalake/static_aux/MNT/Copernicus_DSM/world.vrt"
     -t "/work/datalake/static_aux/TreeCoverDensity/31TCH/TCD_31TCH.tif"
@@ -502,7 +560,7 @@ if(NOT GITLAB_CI_BUILD MATCHES "true")
   add_test(NAME synthesis_from_l2A_L8_20180131
     COMMAND ${PYTHON_EXECUTABLE}
     ${CMAKE_BINARY_DIR}/app/let_it_snow_fsc.py
-    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration.json
+    -c ${DATA_TEST}/L2A_PRODUCTS/lis_configuration_shaded_snow_with_rastertools.json
     -i ${DATA_TEST}/L2A_PRODUCTS/LANDSAT8-OLITIRS-XS_20180131-103619-890_L2A_T31TCH_D_V1-9
     -d "/work/datalake/static_aux/MNT/Copernicus_DSM/world.vrt"
     -t "/work/datalake/static_aux/TreeCoverDensity/31TCH/TCD_31TCH.tif"
@@ -526,6 +584,48 @@ if(NOT GITLAB_CI_BUILD MATCHES "true")
   set_tests_properties(snow_synthesis_from_last_version_test PROPERTIES DEPENDS synthesis_from_l2A_S2_20180131)
   set_tests_properties(snow_synthesis_from_last_version_test PROPERTIES DEPENDS synthesis_from_l2A_S2_20180101)
 
+  add_test(NAME snow_synthesis_from_last_version_test_compare_scd_test
+  COMMAND gdalcompare.py
+  "${BASELINE}/snow_synthesis_from_last_version_test/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.12.0_1.tif"
+  "${OUTPUT_TEST}/SNOW_SYNTHESIS_FROM_LAST_VERSION/LIS_S2L8-SNOW-SCD_T31TCH_20180101_20180131_1.12.0_1.tif"
+  )
+  set_tests_properties(snow_synthesis_from_last_version_test_compare_scd_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_from_last_version_test_compare_ccd_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_from_last_version_test/tmp/CLOUD_OCCURENCE_T31TCH_20180101_20180131.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_FROM_LAST_VERSION/tmp/CLOUD_OCCURENCE_T31TCH_20180101_20180131.tif"
+    )
+  set_tests_properties(snow_synthesis_from_last_version_test_compare_ccd_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_from_last_version_test_compare_nobs_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_from_last_version_test/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.12.0_1.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_FROM_LAST_VERSION/LIS_S2L8-SNOW-NOBS_T31TCH_20180101_20180131_1.12.0_1.tif"
+    )
+  set_tests_properties(snow_synthesis_from_last_version_test_compare_nobs_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_from_last_version_test_compare_sod_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_from_last_version_test/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.12.0_1.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_FROM_LAST_VERSION/LIS_S2L8-SNOW-SOD_T31TCH_20180101_20180131_1.12.0_1.tif"
+    )
+  set_tests_properties(snow_synthesis_from_last_version_test_compare_sod_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_from_last_version_test_compare_smod_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_from_last_version_test/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.12.0_1.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_FROM_LAST_VERSION/LIS_S2L8-SNOW-SMOD_T31TCH_20180101_20180131_1.12.0_1.tif"
+    )
+  set_tests_properties(snow_synthesis_from_last_version_test_compare_smod_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_from_last_version_test_compare_nsp_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_from_last_version_test/LIS_S2L8-SNOW-NSP_T31TCH_20180101_20180131_1.12.0_1.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_FROM_LAST_VERSION/LIS_S2L8-SNOW-NSP_T31TCH_20180101_20180131_1.12.0_1.tif"
+    )
+  set_tests_properties(snow_synthesis_from_last_version_test_compare_nsp_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
   # ----------------------------------
   # Synthesis with zip files
   # ----------------------------------
@@ -580,8 +680,51 @@ if(NOT GITLAB_CI_BUILD MATCHES "true")
     -c ${DATA_TEST}/SYNTHESIS/synthesis_h2_configuration.json
     -o ${OUTPUT_TEST}/SNOW_SYNTHESIS_WITH_H2_SNOW_PRODUCTS
     )
+
+  add_test(NAME snow_synthesis_with_h2_snow_products_compare_scd_test
+  COMMAND gdalcompare.py
+  "${BASELINE}/snow_synthesis_h2_test/LIS_S2-SNOW-SCD_T32TLQ_20220121_20220219_1.12.0.tif"
+  "${OUTPUT_TEST}/SNOW_SYNTHESIS_WITH_H2_SNOW_PRODUCTS/LIS_S2-SNOW-SCD_T32TLQ_20220121_20220219_1.12.0.tif"
+  )
+  set_tests_properties(snow_synthesis_with_h2_snow_products_compare_scd_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_with_h2_snow_products_compare_ccd_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_h2_test/tmp/CLOUD_OCCURENCE_T32TLQ_20220121_20220219.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_WITH_H2_SNOW_PRODUCTS/tmp/CLOUD_OCCURENCE_T32TLQ_20220121_20220219.tif"
+    )
+  set_tests_properties(snow_synthesis_with_h2_snow_products_compare_ccd_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_with_h2_snow_products_compare_nobs_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_h2_test/LIS_S2-SNOW-NOBS_T32TLQ_20220121_20220219_1.12.0.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_WITH_H2_SNOW_PRODUCTS/LIS_S2-SNOW-NOBS_T32TLQ_20220121_20220219_1.12.0.tif"
+    )
+  set_tests_properties(snow_synthesis_with_h2_snow_products_compare_nobs_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_with_h2_snow_products_compare_sod_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_h2_test/LIS_S2-SNOW-SOD_T32TLQ_20220121_20220219_1.12.0.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_WITH_H2_SNOW_PRODUCTS/LIS_S2-SNOW-SOD_T32TLQ_20220121_20220219_1.12.0.tif"
+    )
+  set_tests_properties(snow_synthesis_with_h2_snow_products_compare_sod_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_with_h2_snow_products_compare_smod_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_h2_test/LIS_S2-SNOW-SMOD_T32TLQ_20220121_20220219_1.12.0.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_WITH_H2_SNOW_PRODUCTS/LIS_S2-SNOW-SMOD_T32TLQ_20220121_20220219_1.12.0.tif"
+    )
+  set_tests_properties(snow_synthesis_with_h2_snow_products_compare_smod_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
+
+  add_test(NAME snow_synthesis_with_h2_snow_products_compare_nsp_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_synthesis_h2_test/LIS_S2-SNOW-NSP_T32TLQ_20220121_20220219_1.12.0.tif"
+    "${OUTPUT_TEST}/SNOW_SYNTHESIS_WITH_H2_SNOW_PRODUCTS/LIS_S2-SNOW-NSP_T32TLQ_20220121_20220219_1.12.0.tif"
+    )
+  set_tests_properties(snow_synthesis_with_h2_snow_products_compare_nsp_test PROPERTIES DEPENDS snow_synthesis_from_last_version_test)
 endif()
 
+
 ADD_EXECUTABLE(histo_utils_snowline_internal_test histo_utils_snowline_internal_test.cxx)
 TARGET_LINK_LIBRARIES(histo_utils_snowline_internal_test histo_utils)
 
diff --git a/test/histo_utils_snow_fraction_test.cxx b/test/histo_utils_snow_fraction_test.cxx
index 979f1b68c7254504d3c1a49a604d93560f116c50..2cff619c1b234521aebcb1bf9ff10912cdc6c842 100755
--- a/test/histo_utils_snow_fraction_test.cxx
+++ b/test/histo_utils_snow_fraction_test.cxx
@@ -23,7 +23,7 @@
 #include "itkImageRandomNonRepeatingIteratorWithIndex.h"
 #include <iostream>
 
-typedef itk::VectorImage< short, 2> ImageType;
+typedef otb::VectorImage< short, 2> ImageType;
 const unsigned int MeasurementVectorSize = 1; // 3D vectors
 
 void CreateImage(ImageType::Pointer image, const int nbSamples, const int pixValue);
diff --git a/test/snow_detector_test.py b/test/snow_detector_test.py
index c7192a1187ae43fa9ade2574aeeebc80d95f4df0..5e0a3366d6e80309c19b75b4f0ebfe4f82548c18 100755
--- a/test/snow_detector_test.py
+++ b/test/snow_detector_test.py
@@ -76,8 +76,7 @@ class MyTestCase(unittest.TestCase):
     def test_compute_snow_pass1(self):
         img_vrt = self.unit_test + "lis.vrt"
         all_cloud_mask_file = self.unit_test + "all_cloud_mask.tif"
-        ndsi_formula = "(im1b" + str(N_GREEN) + "-im1b" + str(N_SWIR) + \
-                       ")/(im1b" + str(N_GREEN) + "+im1b" + str(N_SWIR) + ")"
+        ndsi_formula = f"(im1b{N_GREEN}-im1b{N_SWIR})/(im1b{N_GREEN}+im1b{N_SWIR})"
         ndsi_pass1 = 0.4
         red_pass1 = 200
         pass_file_1 = compute_snow_pass1(img_vrt, all_cloud_mask_file,
@@ -101,8 +100,7 @@ class MyTestCase(unittest.TestCase):
         dem_path = "/work/datalake/static_aux/MNT/Copernicus_DSM/world_absolute_paths.vrt"
         dem = manage_dem(img_vrt, dem_path, output_dir, True)
         cloud_refine_file = self.unit_test + "cloud_refine.tif"
-        ndsi_formula = "(im1b" + str(N_GREEN) + "-im1b" + str(N_SWIR) + \
-                       ")/(im1b" + str(N_GREEN) + "+im1b" + str(N_SWIR) + ")"
+        ndsi_formula = f"(im1b{N_GREEN}-im1b{N_SWIR})/(im1b{N_GREEN}+im1b{N_SWIR})"
         ndsi_pass2 = 0.15
         red_pass2 = 40
         zs = -1000
diff --git a/test/snow_synthesis_test.py b/test/snow_synthesis_test.py
index 888bf1e63e6129fe956a31ef332bfc84aa9daa72..e709800a4ec153890fea2f45b7b32d82e2cb1a5e 100755
--- a/test/snow_synthesis_test.py
+++ b/test/snow_synthesis_test.py
@@ -10,9 +10,8 @@ from s2snow.compute_NSP import compute_NSP
 
 class MyTestCase(unittest.TestCase):
 
-    def __init__(self, testname, data_test, unit_test, output_dir):
+    def __init__(self, testname, unit_test, output_dir):
         super(MyTestCase, self).__init__(testname)
-        self.data_test = data_test
         self.unit_test = unit_test + "synthesis_test/"
         self.output_dir = output_dir
         if not os.path.exists(self.output_dir):
@@ -52,10 +51,11 @@ class MyTestCase(unittest.TestCase):
         input_dates_filename = os.path.join(self.unit_test,"input_dates.txt")
         output_dates_filename = os.path.join(self.unit_test,"output_dates.txt")
         gapfilled = build_gapfilled_timeserie(self.output_dir, multitemp_snow100, multitemp_cloud_vrt, input_dates_filename,
-                              output_dates_filename, synthesis_id, 512)
+                                              output_dates_filename, synthesis_id, 512)
         self.assertTrue("DAILY_SNOW_MASKS" in gapfilled)
         self.assertTrue(self.output_dir in gapfilled)
         self.assertTrue(os.path.exists(gapfilled))
+
         
     def test_compute_scd(self):
         synthesis_id="LIS_S2L8-SNOW-{}_T31TCH_20180101_20180131_1.7.0.tif"
@@ -99,16 +99,15 @@ class MyTestCase(unittest.TestCase):
         
         
 if __name__ == '__main__':
-    data_test = sys.argv[1]
-    unit_test = sys.argv[2]
-    output_dir = sys.argv[3]
+    unit_test = sys.argv[1]
+    output_dir = sys.argv[2]
 
     test_loader = unittest.TestLoader()
     test_names = test_loader.getTestCaseNames(MyTestCase)
 
     suite = unittest.TestSuite()
     for test_name in test_names:
-        suite.addTest(MyTestCase(test_name, data_test, unit_test, output_dir))
+        suite.addTest(MyTestCase(test_name, unit_test, output_dir))
 
     result = unittest.TextTestRunner().run(suite)
     sys.exit(not result.wasSuccessful())