From ac8b4745ebecf5264b4e866f0ab4ec3f19c7763b Mon Sep 17 00:00:00 2001
From: Aurore Dupuis <aurore.dupuis@gmail.com>
Date: Sun, 3 May 2020 22:32:54 +0200
Subject: [PATCH 01/34] #36 - Move the SOD, SMOD and NOBS computation into the
 Python part. Include the processing in run_snow_annual_map.py

---
 CHANGELOG.md                                  |   36 +-
 CMakeLists.txt                                |   20 +
 CONTRIBUTING.md                               |  104 +
 LICENSE                                       |  204 +-
 README.md                                     |   83 +-
 analysis/readme.md                            |    1 +
 analysis/run_snowcover.py                     |  181 ++
 analysis/snow_annual_map.py                   |  396 +++
 .../snow_annual_map_analysis_schema.json      |   60 +-
 .../snow_annual_map_evaluation.py             |  163 +-
 analysis/snowcover.py                         | 2541 +++++++++++++++++
 app/CMakeLists.txt                            |   22 +
 app/build_json.py                             |  410 +--
 app/build_snow_annual_map_json.py             |  142 +
 app/run_cloud_removal.py                      |   41 +-
 app/run_snow_annual_map.py                    |  244 +-
 app/run_snow_detector.py                      |   35 +-
 doc/atbd/ATBD_CES-Neige.tex                   |  934 +++---
 doc/atbd/snow_annual_map_schema.json          |  112 +
 ...{schema.json => snow_detector_schema.json} |   75 +-
 doc/cla/ccla-en.doc                           |  Bin 0 -> 33792 bytes
 doc/cla/icla-en.doc                           |  Bin 0 -> 30208 bytes
 doc/snow_annual_map.md                        |   20 +-
 doc/tutorials/prepare_snow_annual_map_data.md |    2 +-
 hpc/LIS_SEB_style_OTB.txt                     |   20 +
 hpc/batch_compute_NOBS.sh                     |   40 +
 hpc/batch_compute_PSL.sh                      |   40 +
 hpc/batch_compute_SOD_SMOD.sh                 |   48 +
 hpc/compute_PSL.py                            |   70 +
 hpc/makeTCDforLIS.sh                          |   34 +
 ...igureTile_lis_Sentinel2_cluster_muscate.sh |   20 +
 hpc/old/run_cloud_removal_cluster.sh          |  122 -
 hpc/old/run_lis_cluster.sh                    |  118 -
 hpc/old/run_lis_s2_cluster.sh                 |  154 -
 hpc/param20190612T31TCH.json                  |   67 +
 hpc/prepare_data_for_snow_annual_map.py       |  588 ++--
 hpc/prepare_data_for_snow_monthly_map.py      |  309 ++
 ...e_lis_Sentinel2_cluster_muscate_anytile.sh |   20 +
 ...ntinel2_cluster_muscate_anytile_anydate.sh |   20 +
 hpc/runTile_lis_Sentinel2_datalake_anytile.sh |   20 +
 ..._lis_Sentinel2_datalake_anytile_anydate.sh |   20 +
 hpc/run_lis_from_filelist.sh                  |   20 +
 hpc/run_snow_annual_map.sh                    |   20 +
 legacy/app/runBatch_Landsat8.sh               |   57 -
 legacy/app/runBatch_Take5.sh                  |   54 -
 legacy/app/runLis.sh                          |   16 -
 legacy/app/runLis_griz.sh                     |   15 -
 .../config/param_cloudremoval_template.json   |   23 -
 legacy/config/param_extract_maroc.json        |   29 -
 .../config/param_extract_maroc_cluster.json   |   29 -
 .../config/param_full_Landsat8_template.json  |   45 -
 legacy/config/param_full_Take5_template.json  |   33 -
 legacy/config/param_full_maroc.json           |   29 -
 legacy/config/param_s2.json                   |   29 -
 legacy/config/param_test.json                 |   33 -
 .../param_test_preprocessing_landsat.json     |   32 -
 legacy/config/param_test_s2.json              |   46 -
 legacy/config/s2_n2a_format.txt               |   35 -
 legacy/test/histo_utils_test.cxx              |   34 -
 legacy/utils/projette_mnt_Landsat8_full.sh    |   44 -
 legacy/utils/projette_mnt_Take5_full.sh       |   44 -
 python/CMakeLists.txt                         |   20 +
 python/s2snow/app_wrappers.py                 |   86 +-
 python/s2snow/cloud_builder.py                |   25 +-
 python/s2snow/cloud_removal.py                |   49 +-
 python/s2snow/compute_NOBS.py                 |  103 +
 python/s2snow/compute_SOD_SMOD.py             |  121 +
 python/s2snow/dem_builder.py                  |   31 +-
 python/s2snow/findRefCandidates.py            |   29 +-
 python/s2snow/snow_annual_map.py              |  624 ++--
 python/s2snow/snow_detector.py                |  198 +-
 python/s2snow/snow_product_parser.py          |   39 +-
 python/s2snow/utils.py                        |   86 +-
 python/s2snow/version.py                      |   24 +-
 python/setup.py                               |   22 +
 python/setup.py.in                            |   19 +
 src/CMakeLists.txt                            |   20 +
 src/ComputeCloudMask.cxx                      |   20 +-
 src/ComputeContours.cxx                       |   25 +-
 src/ComputeNbPixels.cxx                       |   20 +-
 src/ComputeSnowLine.cxx                       |   26 +-
 src/ComputeSnowMask.cxx                       |   20 +-
 src/histo_utils.cxx                           |   34 +-
 src/histo_utils.h                             |   34 +-
 src/itkNarySnowMaskImageFilter.h              |   34 +-
 src/itkUnaryCloudMaskImageFilter.h            |   19 +
 ...treamingHistogramMaskedVectorImageFilter.h |   34 +-
 ...eamingHistogramMaskedVectorImageFilter.txx |   31 +-
 ...ector.qml => LIS.1.2_SEB_style_vector.qml} |    0
 styles/LIS_FSC_style_raster.qml               |   24 +
 styles/LIS_NDSI_style_raster.qml              |   24 +
 ..._style_v2.qml => LIS_SEB_style_raster.qml} |    0
 styles/LIS_SEB_style_raster_qgis_3.4.qml      |   38 +
 styles/LIS_SEB_style_vector.qml               |  331 +++
 styles/LIS_SEB_style_vector_qgis_3.4.qml      |  182 ++
 styles/readme.md                              |   15 +
 test/CMakeLists.txt                           |   85 +-
 test/cloud_removal_step3_test.py              |   19 +
 test/cloud_removal_step4_test.py              |   19 +
 test/histo_utils_snow_fraction_test.cxx       |   34 +-
 test/histo_utils_snowline_internal_test.cxx   |   34 +-
 test/histo_utils_snowline_test.cxx            |   34 +-
 test/itkUnaryCloudMaskImageFilterTest.cxx     |   34 +-
 test/otbapp_ComputeCloudMask_test.py          |   19 +
 test/otbapp_ComputeSnowMask_test.py           |   19 +
 test/snow_annual_map_test.py                  |   24 +-
 utils/get_altitude_tiles.sh                   |   20 +
 utils/profiling_pass1.5.py                    |   24 +-
 utils/snow_line.py                            |   40 +-
 109 files changed, 8220 insertions(+), 2795 deletions(-)
 create mode 100644 CONTRIBUTING.md
 create mode 100644 analysis/readme.md
 create mode 100644 analysis/run_snowcover.py
 create mode 100644 analysis/snow_annual_map.py
 rename doc/snow_annual_map_schema.json => analysis/snow_annual_map_analysis_schema.json (72%)
 rename {python/s2snow => analysis}/snow_annual_map_evaluation.py (79%)
 create mode 100644 analysis/snowcover.py
 create mode 100644 app/build_snow_annual_map_json.py
 create mode 100644 doc/atbd/snow_annual_map_schema.json
 rename doc/atbd/{schema.json => snow_detector_schema.json} (84%)
 create mode 100644 doc/cla/ccla-en.doc
 create mode 100644 doc/cla/icla-en.doc
 create mode 100644 hpc/batch_compute_NOBS.sh
 create mode 100644 hpc/batch_compute_PSL.sh
 create mode 100644 hpc/batch_compute_SOD_SMOD.sh
 create mode 100644 hpc/compute_PSL.py
 create mode 100644 hpc/makeTCDforLIS.sh
 delete mode 100644 hpc/old/run_cloud_removal_cluster.sh
 delete mode 100644 hpc/old/run_lis_cluster.sh
 delete mode 100644 hpc/old/run_lis_s2_cluster.sh
 create mode 100644 hpc/param20190612T31TCH.json
 create mode 100644 hpc/prepare_data_for_snow_monthly_map.py
 delete mode 100755 legacy/app/runBatch_Landsat8.sh
 delete mode 100755 legacy/app/runBatch_Take5.sh
 delete mode 100755 legacy/app/runLis.sh
 delete mode 100755 legacy/app/runLis_griz.sh
 delete mode 100644 legacy/config/param_cloudremoval_template.json
 delete mode 100644 legacy/config/param_extract_maroc.json
 delete mode 100644 legacy/config/param_extract_maroc_cluster.json
 delete mode 100644 legacy/config/param_full_Landsat8_template.json
 delete mode 100644 legacy/config/param_full_Take5_template.json
 delete mode 100644 legacy/config/param_full_maroc.json
 delete mode 100644 legacy/config/param_s2.json
 delete mode 100644 legacy/config/param_test.json
 delete mode 100644 legacy/config/param_test_preprocessing_landsat.json
 delete mode 100644 legacy/config/param_test_s2.json
 delete mode 100644 legacy/config/s2_n2a_format.txt
 delete mode 100644 legacy/test/histo_utils_test.cxx
 delete mode 100644 legacy/utils/projette_mnt_Landsat8_full.sh
 delete mode 100644 legacy/utils/projette_mnt_Take5_full.sh
 create mode 100644 python/s2snow/compute_NOBS.py
 create mode 100644 python/s2snow/compute_SOD_SMOD.py
 rename styles/{LIS_SEB_style_v2_vector.qml => LIS.1.2_SEB_style_vector.qml} (100%)
 create mode 100644 styles/LIS_FSC_style_raster.qml
 create mode 100644 styles/LIS_NDSI_style_raster.qml
 rename styles/{LIS_SEB_style_v2.qml => LIS_SEB_style_raster.qml} (100%)
 create mode 100755 styles/LIS_SEB_style_raster_qgis_3.4.qml
 create mode 100644 styles/LIS_SEB_style_vector.qml
 create mode 100644 styles/LIS_SEB_style_vector_qgis_3.4.qml
 create mode 100644 styles/readme.md

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d5742791..5abbcf10 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,40 @@
 # Change Log
 All notable changes to Let It Snow (LIS) will be documented in this file.
 
+
+## [1.6] - 2020-04
+
+### Added
+- New snow FSC product (parameters json schema modification: fsc / cosims_mode)
+- New feature allowing data masking (parameters json schema modification: water_mask)
+- Adapt the snow synthesis processing to take THEIA L2B snow products as inputs (from the datalake)
+- Add app/build_snow_annual_map_json.py, which creates the params.json for snow_annual_map
+- Rename Snow_Occurence to SCD (Snow Coverage Duration) (#41)
+- Add metadata generation for snow_annual_map (#39)
+- Change nodata from 254 to 255 (#29)
+- Add SMOD, SOD and NOBS syntheses
+- Migration to Python 3
+- Migration to OTB 7
+- Rename the "DN" shapefile column to "SEB" (Snow Extent Binary) (#10)
+- Change license from AGPL to Apache v2
+- Add the synthesis description to README.md
+- Add CLA documents in doc/cla
+- Add new LIS style files
+
+### Fixed
+- Inaccurate temporal interpolation (#48)
+- Fix error on write call (Python 2-to-3 problem) (#41)
+- Fix links in CONTRIBUTING.md
+- Update file headers (add the license header)
+- Fix the consistency between build_json.py and schema.json
+- Fix LaTeX errors in the ATBD
+- Fix cases where TCD > 100
+
+### Removed
+- COMP file is no longer generated (unused, and representing more than 50% of the product size) (#37)
+- Remove old/ directory
+- Remove legacy/ subdirectories except /doc
+
 ## [1.5] - 2019-01-11
 
 ### Added
@@ -9,7 +43,7 @@ All notable changes to Let It Snow (LIS) will be documented in this file.
     - python/s2snow/snow_annual_map.py, the core of the annual map processing
     - python/s2snow/snow_annual_map_evaluation.py, provide the possibility to compare with other snow products and modis snow annual map
     - python/s2snow/snow_product_parser.py, class to handle the supported type of snow products
-    - doc/snow_annual_map_schema.json, parameters descriptions
+    - doc/atbd/snow_annual_map_schema.json, parameters descriptions
     - hpc/prepare_data_for_snow_annual_map.py, preprocessing script on CNES HPC, 
     - doc/tutorials/prepare_snow_annual_map_data.md, tutorial
 - Provided new data pack for tests "Data-LIS-1.5"
diff --git a/CMakeLists.txt b/CMakeLists.txt
index dfe6b105..9450ce33 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,3 +1,23 @@
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 PROJECT(lis)
 
 CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..dec62ed6
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,104 @@
+# How to contribute to let-it-snow (LIS) ?
+
+Thank you for taking the time to contribute to LIS! This document will guide you
+through the workflow and best practices you need to know to send your
+contribution.
+
+There are many ways to contribute to LIS:
+
+* [Reporting a bug](#reporting-bugs)
+* [Making a feature request](#feature-requests-and-discussions)
+* [Contributing code (C++, Python, CMake, etc.)](#code-contribution)
+
+Our main workflow uses GitLab for source control, issues and task tracking. We
+use a self-hosted GitLab instance:
+
+[`https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow`](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow)
+
+## Reporting bugs
+
+If you have found a bug, you can first [search the existing issues](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/issues?label_name%5B%5D=bug)
+to see if it has already been reported.
+
+If it's a new bug, please [open a new issue on GitLab](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/issues/new).
+The 'Bug' issue template will help you provide all important information and
+help fixing the bug quicker. Remember to add as much information as possible!
+
+## Feature requests and discussions
+
+Feature requests are welcome! Generally you are welcome to simply [open an issue](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/issues)
+and discuss your idea there.
+
+## Documentation improvements
+
+
+## Code contribution
+
+The LIS workflow is based on GitLab [Merge Requests](https://docs.gitlab.com/ee/gitlab-basics/add-merge-request.html).
+Clone the repository, create a feature branch, commit your changes, push the
+feature branch to a fork (or the main repository if you are a core developer),
+then send a merge request. Pushing directly to develop without review must be avoided.
+
+
+### Commit message
+
+On your feature branch, write a good [commit message](https://xkcd.com/1296/):
+short and descriptive. If fixing an issue or bug, put the issue number in the
+commit message so that GitLab can [cross-link it](https://docs.gitlab.com/ce/user/project/issues/crosslinking_issues.html).
+You can prefix your commit message with an indicating flag (DOC, BUG, PKG,
+TEST, SuperBuild, etc.).
+
+Standard prefixes for LIS commit messages:
+
+    BUG: Fix for runtime crash or incorrect result
+    COMP: Compiler error or warning fix
+    DOC: Documentation change
+    ENH: New functionality
+    PERF: Performance improvement
+    STYLE: No logic impact (indentation, comments)
+    WIP: Work In Progress not ready for merge
+
+For example, here are some good commit messages:
+
+    BUG: #1701 Warn users if parameter string is unset
+    DOC: Fix typo in Monteverdi French translation
+    COMP: Allow GeoTIFF and TIFF to be disabled when no 3rd party drags them
+
+### Merge request
+
+Your contribution is ready to be added to the main LIS repository? Send a Merge
+Request against the `develop` branch on GitLab using the merge request
+template. The merge request will then be discussed by the community and the core
+LIS team.
+
+* Merge requests cannot be merged until all discussions have been resolved (this is enforced by GitLab)
+* The merger is responsible for checking that the branch is up-to-date with develop
+
+### Contribution license agreement
+
+LIS requires that contributors sign a [Contributor License
+Agreement](https://en.wikipedia.org/wiki/Contributor_License_Agreement). The
+purpose of this CLA is to ensure that the project has the necessary ownership
+of, or grants of rights over, all contributions, allowing it to distribute
+them under the chosen license.
+
+To accept your contribution, we need you to complete, sign and email to *cla [at]
+orfeo-toolbox [dot] org* an [Individual Contributor Licensing
+Agreement](doc/cla/icla-en.doc) (ICLA) form and a
+[Corporate Contributor Licensing
+Agreement](doc/cla/ccla-en.doc) (CCLA) form if you are
+contributing on behalf of your company or another entity which retains copyright
+for your contribution.
+
+The copyright owner (or owner's agent) must be mentioned in headers of all
+modified source files.
+
+## GitLab guidelines
+
+In order to organize the issues in our GitLab instance, we use both labels and
+milestones.
+
+The [milestones](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/milestones) should be used to track in which release a feature is merged.
+GitLab can then provide a summary of all features and bugs added to a given release
+version.
+
diff --git a/LICENSE b/LICENSE
index 71390ef3..7a4a3ea2 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,2 +1,202 @@
-This is free software under the GNU Affero General Public License v3.0. See
-http://www.gnu.org/licenses/agpl.html for details.
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
\ No newline at end of file
diff --git a/README.md b/README.md
index 2e01426d..c01638c0 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 # Let-it-snow
 ## Synopsis
 
-This code implements the snow cover extent detection algorithm LIS (Let It Snow) for Sentinel-2, Landsat-8 and SPOT4-Take5 data.
+This code implements the snow cover extent detection algorithm LIS (Let It Snow) for Sentinel-2, Landsat-8 and SPOT4-Take5 data. It also implements different temporal syntheses based on time series of snow products. 
 
 The algorithm documentation with examples is available here:
 
@@ -11,13 +11,17 @@ Access to Theia Snow data collection:
 
 * [![DOI:10.24400/329360/f7q52mnk](https://zenodo.org/badge/DOI/10.24400/329360/f7q52mnk.svg)](http://doi.org/10.24400/329360/f7q52mnk)
 
-To read more about the "Centre d'Expertise Scientifique surface enneigée" (in French):
+How to cite:
 
-* [Bulletin THEIA](https://www.theia-land.fr/sites/default/files/imce/BulletinTHEIA3.pdf#page=10)
+*  Gascoin, S., Grizonnet, M., Bouchet, M., Salgues, G., and Hagolle, O.: Theia Snow collection: high-resolution operational snow cover maps from Sentinel-2 and Landsat-8 data, Earth Syst. Sci. Data, 11, 493–514, [https://doi.org/10.5194/essd-11-493-2019](https://doi.org/10.5194/essd-11-493-2019), 2019.
 
-The input files are Sentinel-2 or Landsat-8 level-2A products from the [Theia Land Data Centre](https://theia.cnes.fr/) or [SPOT-4/5 Take 5 level-2A products](https://spot-take5.org) and a Digital Terrain Model (DTM) like SRTM for instance reprojected at the same resolution as the input image.
+
+The input files are Sentinel-2 or Landsat-8 level-2A products from the [Theia Land Data Centre](https://theia.cnes.fr/) or [SPOT-4/5 Take 5 level-2A products](https://spot-take5.org) and a Digital Terrain Model (DTM). The output is a Level-2B snow product.
+
+The syntheses are temporally aggregated (level-3A) products derived from individual snow products after gap filling. The three products are the snow cover duration, the snow disappearance date and the snow appearance date. They are typically computed over a hydrological year (more details: [Snow cover duration map](doc/snow_annual_map.md)).
 
 ## Usage
+### Snow detector
 
 Run the Python script run_snow_detector.py with a JSON configuration file as its only argument:
 
@@ -26,16 +30,24 @@ python run_snow_detector.py param.json
 ```
 The snow detection is performed in the Python script [run_snow_detector.py](app/run_snow_detector.py).
 
-All the parameters of the algorithm, paths to input and output data are stored in the json file. See the provided example [param_test_s2_template.json](tes/param_test_s2_template.json) file for an example.
-
-Moreover The JSON schema is available in the [Algorithm Theoretical Basis Documentation](doc/atbd/ATBD_CES-Neige.tex) and gives more information about the roles of these parameters.
+All the algorithm parameters and the paths to input and output data are stored in the JSON file. The JSON schema (snow_detector_schema.json) is available in the [Algorithm Theoretical Basis Documentation](doc/atbd/ATBD_CES-Neige.tex) and gives more information about the role of these parameters.
 
 NB: To build DEM data, download the SRTM files corresponding to the study area and build the .vrt using gdalbuildvrt. Edit the config.json file to activate preprocessing: set "preprocessing" to true and set the vrt path.
 
+### Snow syntheses
+
+Run the Python script run_snow_annual_map.py with a JSON configuration file as its only argument:
+```bash
+python run_snow_annual_map.py param.json
+```
+The snow syntheses are performed in the Python script [run_snow_annual_map.py](app/run_snow_annual_map.py).
+
+All the algorithm parameters and the paths to input and output data are stored in the JSON file. The JSON schema (snow_annual_map_schema.json) and its description are available in the [readme](doc/snow_annual_map.md).
 
 ## Products format
 
-* COMPO: Raster image showing the outlines of the cloud (including cloud shadow) and snow masks drawn on the RGB composition of the L2A image (bands SWIR/Red/Green).
+### Snow product
+
 * SNOW_ALL: Binary mask of snow and clouds.
   * 1st bit: Snow mask after pass1
   * 2nd bit: Snow mask after pass2
@@ -52,13 +64,46 @@ pixel_value & 00000101
   * 0: No-snow
   * 100: Snow
   * 205: Cloud including cloud shadow
-  * 254: No data
-* SEB_VEC: Vector image of the snow mask and cloud mask. Two fields of information are embedded in this product. DN (for Data Neige) and type.
-  * DN field :
+  * 255: No data
+* SEB_VEC: Vector image of the snow mask and cloud mask. Two fields of information are embedded in this product: SEB (for Snow Extent Binary) and type.
+  * SEB field:
      * 0: No-snow
      * 100: Snow
      * 205: Cloud including cloud shadow
-     * 254: No data
+     * 255: No data
+     
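+For example, a minimal sketch of reading the SNOW_ALL bit mask with rasterio (a listed dependency; the product path is hypothetical):
+
+```python
+import rasterio
+
+with rasterio.open("SNOW_ALL.tif") as src:  # hypothetical product path
+    snow_all = src.read(1)
+
+# test individual bits of the mask, as in pixel_value & 00000101
+pass1_snow = (snow_all & 0b01) > 0  # 1st bit: snow mask after pass1
+pass2_snow = (snow_all & 0b10) > 0  # 2nd bit: snow mask after pass2
+```
+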
+### Snow syntheses 
+
+Each product is computed for a given tile [TILE\_ID] and a given period from [DATE\_START] to [DATE_STOP]. Products are identified by a tag according to the following naming convention: [TILE\_ID]\_[DATE\_START]\_[DATE_STOP]
+
+For example: **T31TCH\_20170901\_20180831**
+
+LIS generates the following files:
+- Raster: **DAILY\_SNOW\_MASKS\_<*tag*>.tif**, the snow time series interpolated on a daily basis (one image with one band per day). Each band is coded as follows (the interpolation removes any clouds or nodata):
+	- 0: No-snow
+	- 1: Snow
+
+- Raster: **SCD\_<*tag*>.tif**, the snow cover duration map (SCD), pixel values within [0-number of days] corresponding to the number of snow days.
+
+- Raster: **CLOUD\_OCCURENCE\_<*tag*>.tif**, the cloud/nodata annual map image, pixel values within [0-1] corresponding to the cloud or nodata occurrence in the non-interpolated time series.
+
+- Raster: **SMOD\_<*tag*>.tif**, the date of snow disappearance (Snow Melt-Out Date), defined as the last date of the longest snow period. The dates are given as the number of days since the first day of the synthesis (see the conversion sketch after this list).
+
+- Raster: **SOD\_<*tag*>.tif**, the date of snow appearance (Snow Onset Date), defined as the first date of the longest snow period. The dates are given as the number of days since the first day of the synthesis.
+
+- Raster: **NOBS\_<*tag*>.tif**, the number of clear observations used to compute the SCD, SMOD and SOD syntheses.
+
+The output directory will also contain the following files:
+
+- Text file: **input_dates.txt**, the list of observation dates in the non-interpolated time series
+- Text file: **output_dates.txt**, the list of interpolated dates
+
+- JSON file: **param.json**, the configuration file used for the product generation (optional)
+
+- LOG file: **stdout.log**, the log file for the standard output generated during processing (optional)
+
+- LOG file: **stderr.log**, the log file for the error output generated during processing (optional)
+
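+Since SMOD and SOD are day offsets, converting a pixel value back to a calendar date only requires the start date of the synthesis. A minimal sketch (the pixel value is hypothetical):
+
+```python
+from datetime import datetime, timedelta
+
+date_start = datetime(2017, 9, 1)  # first day of the T31TCH_20170901_20180831 synthesis
+smod = 245                         # hypothetical SMOD pixel value, in days
+
+melt_out = date_start + timedelta(days=smod)
+print(melt_out.strftime("%Y-%m-%d"))  # 2018-05-04
+```
+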
 
 ## Data set example
 
@@ -77,13 +122,14 @@ LIS processing chain uses CMake (http://www.cmake.org) for building from source.
 Following is a summary of the required dependencies:
 
 * GDAL >=2.0
-* OTB >= 6.2
-* Python interpreter >= 2.7
-* Python libs >= 2.7
+* OTB >= 7.0
+* Python interpreter >= 3.6
+* Python libs >= 3.6
 * Python packages:
 * numpy
 * lxml
 * matplotlib
+* rasterio
 
 GDAL itself depends on a number of other libraries provided by most major operating systems and also depends on the non standard GEOS and Proj libraries. GDAL- Python bindings are also required
 
@@ -142,7 +188,7 @@ Update environment variables for LIS. Make sure that OTB and other dependencies
 export PATH=/your/install/directory/bin:/your/install/directory/app:$PATH
 export LD_LIBRARY_PATH=/your/install/directory/lib:$LD_LIBRARY_PATH
 export OTB_APPLICATION_PATH=/your/install/directory/lib:$OTB_APPLICATION_PATH
-export PYTHONPATH=/your/install/directory/lib:/your/install/directory/lib/python2.7/site-packages:$PYTHONPATH
+export PYTHONPATH=/your/install/directory/lib:/your/install/directory/lib/python3.7/site-packages:$PYTHONPATH
 ```
 let-it-snow is now installed.
 
@@ -162,9 +208,8 @@ Do not modify these folders.
 
 ## Contributors
 
-Manuel Grizonnet (CNES), Simon Gascoin (CNRS/CESBIO), Tristan Klempka (CNES), Germain Salgues (Magellium)
+Manuel Grizonnet (CNES), Simon Gascoin (CNRS/CESBIO), Germain Salgues (Magellium), Aurore Dupuis (CNES), Rémi Jugier (Magellium)
 
 ## License
 
-This is free software under the GNU Affero General Public License v3.0. See
-http://www.gnu.org/licenses/agpl.html for details.
+This software is licensed under the Apache License v2.0. See https://www.apache.org/licenses/LICENSE-2.0.txt
diff --git a/analysis/readme.md b/analysis/readme.md
new file mode 100644
index 00000000..f601134b
--- /dev/null
+++ b/analysis/readme.md
@@ -0,0 +1 @@
+This folder contains the code to perform the calibration and evaluation of FSC products.
diff --git a/analysis/run_snowcover.py b/analysis/run_snowcover.py
new file mode 100644
index 00000000..bea21581
--- /dev/null
+++ b/analysis/run_snowcover.py
@@ -0,0 +1,181 @@
+import sys
+import os
+import errno
+import re
+
+import snowcover
+
+
+#initialize
+SNW = snowcover.snowcover()  
+
+
+
+#CALIBRATION PLEIADES
+
+nameCAL = "PLEIADES_CAL"
+sourceCAL = "PLEIADES"
+dateDebut = "2015-01-01"
+dateFin = "2020-01-01"
+SNWCAL = [1]
+NSNWCAL = [2]
+OK = SNW.makeDataSet(dateDebut=dateDebut,dateFin=dateFin,source=sourceCAL,dirName=nameCAL,SNWval=SNWCAL,NSNWval=NSNWCAL)
+OK = SNW.createQuickLooks(nameCAL)
+OK = SNW.PlotPeriode(nameCAL,sourceCAL)
+OK = SNW.PlotEachDates(nameCAL,sourceCAL)
+a,b,rmse = SNW.calibrateModel(nameCAL,sourceCAL,0.4)
+print(nameCAL,"a = ",a,"b = ",b,"rmse = ",rmse)
+
+
+
+
+#EVALUATION IZAS
+
+nameVAL = "IZAS_VAL"
+sourceVAL = "IZAS"
+SNWVAL = [1]
+NSNWVAL = [0]
+epsgVAL = "25830"
+tileVAL = ["T30TYN"]
+OK = SNW.makeDataSet(dateDebut=dateDebut,dateFin=dateFin,source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,epsgFSC = epsgVAL,tiles = tileVAL)
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+
+
+#EVALUATION SPOT
+
+nameVAL = "SPOT67_20160808_VAL"
+sourceVAL = "SPOT67"
+SNWVAL = [2]
+NSNWVAL = [1]
+NDVAL = [0]
+epsgVAL = ""
+tileVAL = ["T32TLS","T32TLR","T32TLQ","T31TGK","T31TGL","T31TGM"]
+OK = SNW.makeDataSet(dateDebut="2016-08-08",source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,NDval=NDVAL,epsgFSC = epsgVAL,tiles = tileVAL,selection = "closest")
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+nameVAL = "SPOT67_20170311_VAL"
+sourceVAL = "SPOT67"
+SNWVAL = [2]
+NSNWVAL = [1]
+NDVAL = [0]
+epsgVAL = ""
+tileVAL = ["T32TLS","T32TLR","T32TLQ","T31TGK","T31TGL","T31TGM"]
+OK = SNW.makeDataSet(dateDebut="2017-03-11",source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,NDval=NDVAL,epsgFSC = epsgVAL,tiles = tileVAL,selection = "closest")
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+nameVAL = "SPOT67_20161012_VAL"
+sourceVAL = "SPOT67"
+SNWVAL = [2]
+NSNWVAL = [1]
+NDVAL = [0]
+epsgVAL = ""
+tileVAL = ["T32TLS","T32TLR","T32TLQ","T31TGK","T31TGL","T31TGM"]
+OK = SNW.makeDataSet(dateDebut="2016-10-12",source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,NDval=NDVAL,epsgFSC = epsgVAL,tiles = tileVAL,selection = "closest")
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+
+nameVAL = "SPOT67_20161203_VAL"
+sourceVAL = "SPOT67"
+SNWVAL = [2]
+NSNWVAL = [1]
+NDVAL = [0]
+epsgVAL = ""
+tileVAL = ["T32TLS","T32TLR","T32TLQ","T31TGK","T31TGL","T31TGM"]
+OK = SNW.makeDataSet(dateDebut="2016-12-03",source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,NDval=NDVAL,epsgFSC = epsgVAL,tiles = tileVAL,selection = "cleanest")
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+nameVAL = "SPOT67_20161217_VAL"
+sourceVAL = "SPOT67"
+SNWVAL = [2]
+NSNWVAL = [1]
+NDVAL = [0]
+epsgVAL = ""
+tileVAL = ["T32TLS","T32TLR","T32TLQ","T31TGK","T31TGL","T31TGM"]
+OK = SNW.makeDataSet(dateDebut="2016-12-17",source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,NDval=NDVAL,epsgFSC = epsgVAL,tiles = tileVAL,selection = "closest")
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+
+nameVAL = "DISCHMEX_VAL"
+sourceVAL = "DISCHMEX"
+SNWVAL = [1]
+NSNWVAL = [0]
+NDVAL = []
+epsgVAL = "21781"
+tileVAL = []
+OK = SNW.makeDataSet(dateDebut=dateDebut,dateFin=dateFin,source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,NDval=NDVAL,epsgFSC = epsgVAL,tiles = tileVAL)
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+nameVAL = "AUSTRIA_VAL"
+sourceVAL = "AUSTRIA"
+SNWVAL = [1]
+NSNWVAL = [0]
+NDVAL = []
+epsgVAL = "31254"
+tileVAL = []
+OK = SNW.makeDataSet(dateDebut=dateDebut,dateFin=dateFin,source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,NDval=NDVAL,epsgFSC = epsgVAL,tiles = tileVAL)
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+rmse = SNW.evaluateModel(nameCAL,[nameVAL],sourceCAL,[sourceVAL],a,b)
+print(nameVAL,"rmse = ",rmse)
+
+
+
+#ODK
+nameVAL = "ODK_VAL"
+sourceVAL = "odk_all.txt"
+OK = SNW.processODK(nameVAL,sourceVAL,True,True)
+rmse = SNW.evaluateWithODK(nameCAL,sourceCAL,nameVAL,a,b)
+
+
+evalDirNames = []
+evalSources = []
+
+#EVALUATION CAMSNOW
+nameVAL = "CAMSNOW_VAL"
+sourceVAL = "CAMSNOW"
+SNWVAL = []
+NSNWVAL = []
+FSCVAL = True
+epsgVAL = "2154"
+sampling = "near"
+
+OK = SNW.makeDataSet(dateDebut=dateDebut,dateFin=dateFin,source=sourceVAL,dirName=nameVAL,SNWval=SNWVAL,NSNWval=NSNWVAL,epsgFSC = epsgVAL,isFSC = FSCVAL,resampling = sampling)
+OK = SNW.createQuickLooks(nameVAL)
+OK = SNW.PlotPeriode(nameVAL,sourceVAL)
+
+evalDirNames = ["CAMSNOW_VAL","IZAS_VAL"]
+evalSources = ["CAMSNOW","IZAS"]
+
+
+OK = SNW.timeLapseEvalModel(nameCAL,evalDirNames,sourceCAL,evalSources,a,b)
+print(OK)
diff --git a/analysis/snow_annual_map.py b/analysis/snow_annual_map.py
new file mode 100644
index 00000000..e6cd10f0
--- /dev/null
+++ b/analysis/snow_annual_map.py
@@ -0,0 +1,396 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import os.path as op
+from os.path import basename
+import shutil
+import logging
+import multiprocessing
+from xml.dom import minidom
+from datetime import timedelta
+
+from lxml import etree
+
+import gdal
+from gdalconst import GA_ReadOnly
+
+# OTB Applications
+import otbApplication as otb
+
+# Import python decorators for the different needed OTB applications
+from s2snow.app_wrappers import band_math, get_app_output, super_impose, band_mathX, gap_filling
+
+from s2snow.utils import str_to_datetime, datetime_to_str
+from s2snow.utils import write_list_to_file, read_list_from_file
+from s2snow.snow_product_parser import load_snow_product
+
+# Build the gdal option to generate 1-bit masks using the otb extended filename
+# syntax
+GDAL_OPT = "?&gdal:co:NBITS=1&gdal:co:COMPRESS=DEFLATE"
+
+
+def parse_xml(filepath):
+    """ Parse an xml file to return the zs value of a snow product
+    """
+    logging.debug("Parsing " + filepath)
+    xmldoc = minidom.parse(filepath)
+    group = xmldoc.getElementsByTagName('Global_Index_List')[0]
+    zs = group.getElementsByTagName("QUALITY_INDEX")[0].firstChild.data
+    return zs
+
+
+def merge_masks_at_same_date(snow_product_list, merged_snow_product, threshold=100, ram=None):
+    """ This function implement the fusion of multiple snow mask
+
+    Keyword arguments:
+    snow_product_list -- the input mask list
+    merged_snow_product -- the output filepath
+    threshold -- pixel values <= threshold are considered valid
+    ram -- the ram limitation (not mandatory)
+    """
+    logging.info("Merging products into " + merged_snow_product)
+
+    # the merging is performed according to the following selection:
+    #   if img1 <= threshold use img1 data
+    #   else if img2 <= threshold use img2 data
+    #   else if imgN <= threshold use imgN data
+    # the order of the images in the input list is important:
+    #   we expect to have first the main input products
+    #   and then the densification products
+    img_index = list(range(1, len(snow_product_list) + 1))
+    expression_merging = "".join(["(im" + str(i) + "b1<=" + str(threshold) + "?im" + str(i) + "b1:" for i in img_index])
+    expression_merging += "im" + str(img_index[-1]) + "b1"
+    expression_merging += "".join([")" for i in img_index])
+
+    img_list = [i.get_snow_mask() for i in snow_product_list]
+    bandMathApp = band_math(img_list,
+                            merged_snow_product,
+                            expression_merging,
+                            ram,
+                            otb.ImagePixelType_uint8)
+    bandMathApp.ExecuteAndWriteOutput()
+    bandMathApp = None
+
+
+""" This module provide the implementation of the snow annual map """
+
+
+class snow_annual_map():
+    def __init__(self, params):
+        logging.info("Init snow_multitemp")
+
+        self.tile_id = params.get("tile_id")
+        self.date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
+        self.date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
+        self.date_margin = timedelta(days=params.get("date_margin", 0))
+        self.output_dates_filename = params.get("output_dates_filename", None)
+        self.mode = params.get("mode", "RUNTIME")
+
+        # Compute an id like T31TCH_20170831_20180901 to label the map
+        self.processing_id = str(self.tile_id + "_" + \
+                                 datetime_to_str(self.date_start) + "_" + \
+                                 datetime_to_str(self.date_stop))
+
+        # Retrieve the input_products_list
+        self.input_path_list = params.get("input_products_list", [])
+
+        # @TODO an available path_tmp must be provided or the TMPDIR environment variable must be available
+        self.path_tmp = str(params.get("path_tmp", os.environ.get('TMPDIR')))
+        if not os.path.exists(self.path_tmp):
+            logging.error(self.path_tmp + ", the target does not exist and can't be used for processing")
+
+        self.path_out = op.join(str(params.get("path_out")), self.processing_id)
+
+        if not os.path.exists(self.path_out):
+            os.mkdir(self.path_out)
+
+        self.ram = params.get("ram", 512)
+        self.nbThreads = params.get("nbThreads", None)
+
+        self.use_densification = params.get("use_densification", False)
+        if self.use_densification:
+            self.densification_path_list = params.get("densification_products_list", [])
+
+        # Define label for output snow product (cf snow product format)
+        self.label_no_snow = "0"
+        self.label_snow = "100"
+        self.label_cloud = "205"
+        self.label_no_data = "255"
+        self.label_no_data_old = "254"
+
+        # Build useful paths
+        self.input_dates_filename = op.join(self.path_tmp, "input_dates.txt")
+        if not self.output_dates_filename:
+            self.output_dates_filename = op.join(self.path_tmp, "output_dates.txt")
+        self.multitemp_snow_vrt = op.join(self.path_tmp, "multitemp_snow_mask.vrt")
+        self.multitemp_cloud_vrt = op.join(self.path_tmp, "multitemp_cloud_mask.vrt")
+        self.gapfilled_timeserie = op.join(self.path_tmp, "DAILY_SNOW_MASKS_" + self.processing_id + ".tif")
+        self.annual_snow_map = op.join(self.path_tmp, "SCD_" + self.processing_id + ".tif")
+        self.cloud_occurence_img = op.join(self.path_tmp, "CLOUD_OCCURENCE_" + self.processing_id + ".tif")
+        self.metadata_path = op.join(self.path_out, "LIS_METADATA.XML")
+
+    def run(self):
+        logging.info("Run snow_annual_map")
+
+        # Set maximum ITK threads
+        if self.nbThreads:
+            os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(self.nbThreads)
+
+        # search matching snow product
+        self.product_dict = self.load_products(self.input_path_list, self.tile_id, None)
+        logging.debug("Product dictionnary:")
+        logging.debug(self.product_dict)
+
+        # Exiting with error if none of the input products were loaded
+        if not self.product_dict:
+            logging.error("Empty product list!")
+            return
+
+        # Load the products used to densify the time series
+        if self.use_densification:
+            # load densification snow products
+            densification_product_dict = self.load_products(self.densification_path_list, None, None)
+            logging.info("Densification product dict:")
+            logging.info(densification_product_dict)
+
+            # Get the footprint of the first snow product
+            s2_footprint_ref = self.product_dict[list(self.product_dict.keys())[0]][0].get_snow_mask()
+
+            if densification_product_dict:
+                # Reproject the densification products on S2 tile before going further
+                for densifier_product_key in list(densification_product_dict.keys()):
+                    for densifier_product in densification_product_dict[densifier_product_key]:
+                        original_mask = densifier_product.get_snow_mask()
+                        reprojected_mask = op.join(self.path_tmp,
+                                                   densifier_product.product_name + "_reprojected.tif")
+                        if not os.path.exists(reprojected_mask):
+                            super_impose_app = super_impose(s2_footprint_ref,
+                                                            original_mask,
+                                                            reprojected_mask,
+                                                            "nn",
+                                                            int(self.label_no_data),
+                                                            self.ram,
+                                                            otb.ImagePixelType_uint8)
+                            super_impose_app.ExecuteAndWriteOutput()
+                            super_impose_app = None
+                        densifier_product.snow_mask = reprojected_mask
+                        logging.debug(densifier_product.snow_mask)
+
+                    # Add the products to extend the self.product_dict
+                    if densifier_product_key in list(self.product_dict.keys()):
+                        self.product_dict[densifier_product_key].extend(
+                            densification_product_dict[densifier_product_key])
+                    else:
+                        self.product_dict[densifier_product_key] = densification_product_dict[densifier_product_key]
+            else:
+                logging.warning("No Densifying candidate product found!")
+
+        # re-order products according to acquisition date
+        input_dates = sorted(self.product_dict.keys())
+        write_list_to_file(self.input_dates_filename, input_dates)
+
+        # compute or retrieve the output dates
+        output_dates = []
+        if op.exists(self.output_dates_filename):
+            output_dates = read_list_from_file(self.output_dates_filename)
+        else:
+            tmp_date = self.date_start
+            while tmp_date <= self.date_stop:
+                output_dates.append(datetime_to_str(tmp_date))
+                tmp_date += timedelta(days=1)
+            write_list_to_file(self.output_dates_filename, output_dates)
+
+        shutil.copy2(self.input_dates_filename, self.path_out)
+        shutil.copy2(self.output_dates_filename, self.path_out)
+
+        # merge products at the same date
+        self.resulting_snow_mask_dict = {}
+        for key in list(self.product_dict.keys()):
+            if len(self.product_dict[key]) > 1:
+                merged_mask = op.join(self.path_tmp, key + "_merged_snow_product.tif")
+                merge_masks_at_same_date(self.product_dict[key],
+                                         merged_mask,
+                                         self.label_snow,
+                                         self.ram)
+                self.resulting_snow_mask_dict[key] = merged_mask
+            else:
+                self.resulting_snow_mask_dict[key] = self.product_dict[key][0].get_snow_mask()
+
+        # convert the snow masks into binary snow masks
+        expression = "(im1b1==" + self.label_snow + ")?1:0"
+        self.binary_snowmask_list = self.convert_mask_list(expression, "snow", GDAL_OPT)
+        logging.debug("Binary snow mask list:")
+        logging.debug(self.binary_snowmask_list)
+
+        # convert the snow masks into binary cloud masks
+        expression = "im1b1==" + self.label_cloud + "?1:(im1b1==" + self.label_no_data + "?1:(im1b1==" + self.label_no_data_old + "?1:0))"
+        self.binary_cloudmask_list = self.convert_mask_list(expression, "cloud", GDAL_OPT)
+        logging.debug("Binary cloud mask list:")
+        logging.debug(self.binary_cloudmask_list)
+
+        # build cloud mask vrt
+        logging.info("Building multitemp cloud mask vrt")
+        logging.info("cloud vrt: " + self.multitemp_cloud_vrt)
+        gdal.BuildVRT(self.multitemp_cloud_vrt,
+                      self.binary_cloudmask_list,
+                      separate=True,
+                      srcNodata='None')
+        shutil.copy2(self.multitemp_cloud_vrt, self.path_out)
+
+        # generate the summary map
+        band_index = list(range(1, len(self.binary_cloudmask_list) + 1))
+        expression = "+".join(["im1b" + str(i) for i in band_index])
+
+        bandMathApp = band_math([self.multitemp_cloud_vrt],
+                                self.cloud_occurence_img,
+                                expression,
+                                self.ram,
+                                otb.ImagePixelType_uint16)
+        bandMathApp.ExecuteAndWriteOutput()
+        bandMathApp = None
+
+        logging.info("Copying outputs from tmp to output folder")
+        shutil.copy2(self.cloud_occurence_img, self.path_out)
+
+        # build snow mask vrt
+        logging.info("Building multitemp snow mask vrt")
+        logging.info("snow vrt: " + self.multitemp_snow_vrt)
+        gdal.BuildVRT(self.multitemp_snow_vrt,
+                      self.binary_snowmask_list,
+                      separate=True,
+                      srcNodata='None')
+
+        # multiply by 100 for the temporal interpolation
+        logging.info("Scale by 100 multitemp snow mask vrt")
+        multitemp_snow100 = op.join(self.path_tmp, "multitemp_snow100.tif")
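+        # the binary {0,1} snow values become {0,100} so that the gap filler
+        # interpolates percentages in [0,100]; they are rounded back to {0,1}
+        # after gap filling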
+        bandMathXApp = band_mathX([self.multitemp_snow_vrt],
+                                  multitemp_snow100,
+                                  "im1 mlt 100",
+                                  self.ram,
+                                  otb.ImagePixelType_uint8)
+        bandMathXApp.ExecuteAndWriteOutput()
+        bandMathXApp = None
+
+        # gap-fill the snow time series
+        multitemp_snow100_gapfilled = op.join(self.path_tmp, "multitemp_snow100_gapfilled.tif")
+        app_gap_filling = gap_filling(multitemp_snow100,
+                                      self.multitemp_cloud_vrt,
+                                      multitemp_snow100_gapfilled + "?&gdal:co:COMPRESS=DEFLATE",
+                                      self.input_dates_filename,
+                                      self.output_dates_filename,
+                                      self.ram,
+                                      otb.ImagePixelType_uint8)
+
+        # @TODO the mode is forced to DEBUG for now, in order to generate the image on disk
+        # img_in = get_app_output(app_gap_filling, "out", self.mode)
+        # if self.mode == "DEBUG":
+        # shutil.copy2(self.gapfilled_timeserie, self.path_out)
+        # app_gap_filling = None
+
+        img_in = get_app_output(app_gap_filling, "out", "DEBUG")
+        app_gap_filling = None
+
+        # threshold to 0 or 1
+        logging.info("Round to binary series of snow occurrence")
+        bandMathXApp = band_mathX([img_in],
+                                  self.gapfilled_timeserie + GDAL_OPT,
+                                  "(im1 mlt 2) dv 100",
+                                  self.ram,
+                                  otb.ImagePixelType_uint8)
+        bandMathXApp.ExecuteAndWriteOutput()
+        bandMathXApp = None
+
+        # generate the annual map
+        band_index = list(range(1, len(output_dates) + 1))
+        expression = "+".join(["im1b" + str(i) for i in band_index])
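+        # summing the daily binary snow bands gives, per pixel, the number of snow days over the period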
+
+        bandMathApp = band_math([self.gapfilled_timeserie],
+                                self.annual_snow_map,
+                                expression,
+                                self.ram,
+                                otb.ImagePixelType_uint16)
+        bandMathApp.ExecuteAndWriteOutput()
+        bandMathApp = None
+
+        logging.info("Moving outputs from tmp to output folder")
+        shutil.copy2(self.annual_snow_map, self.path_out)
+        shutil.copy2(self.gapfilled_timeserie, self.path_out)
+        os.remove(self.annual_snow_map)
+        os.remove(self.gapfilled_timeserie)
+
+        logging.info("End of snow_annual_map")
+
+        if self.mode == "DEBUG":
+            dest_debug_dir = op.join(self.path_out, "tmpdir")
+            if op.exists(dest_debug_dir):
+                shutil.rmtree(dest_debug_dir)
+            shutil.copytree(self.path_tmp, dest_debug_dir)
+
+    def load_products(self, snow_products_list, tile_id=None, product_type=None):
+        logging.info("Parsing provided snow products list")
+        product_dict = {}
+        search_start_date = self.date_start - self.date_margin
+        search_stop_date = self.date_stop + self.date_margin
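+        # products up to date_margin days outside [date_start, date_stop] are kept to stabilise the interpolation at the period edges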
+        for product_path in snow_products_list:
+            try:
+                product = load_snow_product(str(product_path))
+                logging.info(str(product))
+                current_day = datetime_to_str(product.acquisition_date)
+                test_result = True
+                if search_start_date > product.acquisition_date or \
+                        search_stop_date < product.acquisition_date:
+                    test_result = False
+                if (tile_id is not None) and (tile_id not in product.tile_id):
+                    test_result = False
+                if (product_type is not None) and (product_type not in product.platform):
+                    test_result = False
+                if test_result:
+                    if current_day not in list(product_dict.keys()):
+                        product_dict[current_day] = [product]
+                    else:
+                        product_dict[current_day].append(product)
+                    logging.info("Keeping: " + str(product))
+                else:
+                    logging.warning("Discarding: " + str(product))
+            except Exception:
+                logging.error("Unable to load product :" + product_path)
+        return product_dict
+
+    def convert_mask_list(self, expression, type_name, mask_format=""):
+        binary_mask_list = []
+        for mask_date in sorted(self.resulting_snow_mask_dict):
+            binary_mask = op.join(self.path_tmp,
+                                  mask_date + "_" + type_name + "_binary.tif")
+            binary_mask = self.extract_binary_mask(self.resulting_snow_mask_dict[mask_date],
+                                                   binary_mask,
+                                                   expression,
+                                                   mask_format)
+            binary_mask_list.append(binary_mask)
+        return binary_mask_list
+
+    def extract_binary_mask(self, mask_in, mask_out, expression, mask_format=""):
+        bandMathApp = band_math([mask_in],
+                                mask_out + mask_format,
+                                expression,
+                                self.ram,
+                                otb.ImagePixelType_uint8)
+        bandMathApp.ExecuteAndWriteOutput()
+        return mask_out
diff --git a/doc/snow_annual_map_schema.json b/analysis/snow_annual_map_analysis_schema.json
similarity index 72%
rename from doc/snow_annual_map_schema.json
rename to analysis/snow_annual_map_analysis_schema.json
index b0c483b5..289310d6 100644
--- a/doc/snow_annual_map_schema.json
+++ b/analysis/snow_annual_map_analysis_schema.json
@@ -4,127 +4,145 @@
     "properties": {
         "log": {
             "default": true,
-            "description": "Log output and error to files (std***.log).",
+            "description": "Log output and error to files (std***.log). (mandatory)",
             "id": "log",
             "title": "The Log schema.",
             "type": "boolean"
         },
+        "log_stdout": {
+            "description": "Log output (std***.log). (optional, if not set, default is path_out/log_stdout.log)",
+            "id": "log_stdout",
+            "title": "The log_stdout schema.",
+            "type": "string"
+        },
+        "log_stderr": {
+            "description": "Log error (std***.log). (optional, if not set, default is path_out/log_sterr.log)",
+            "id": "log_stderr",
+            "title": "The log_stderr schema.",
+            "type": "string"
+        },
         "mode": {
-            "deafult":"RUNTIME",
-            "description": "The processing mode to use, RUNTIME to obtain only output products faster, DEBUG to obtain all intermediate files",
+            "default":"RUNTIME",
+            "description": "The processing mode to use, RUNTIME to obtain only output products faster, DEBUG to obtain all intermediate files (optional)",
             "id": "mode",
             "title": "The Mode schema.",
             "type": "string"
         },
         "tile_id": {
-            "description": "The identifier of the tile corresponding to the input input_products_list products",
+            "description": "The identifier of the tile corresponding to the input input_products_list products (mandatory)",
             "id": "tile_id",
             "title": "The Tile_id schema.",
             "type": "string"
         },
         "input_products_list": {
             "default": [],
-            "description": "The input products list, containing the paths of homogeneous snow products only on tile_id at same resolution and size",
+            "description": "The input products list, containing the paths of homogeneous snow products only on tile_id at same resolution and size (optional)",
             "id": "input_products_list",
             "title": "The input_products_list schema.",
             "type": "list"
         },
         "path_tmp": {
             "default":"",
-            "description": "The path where to store temporary files, else the application try to retrive $TMPDIR in env",
+            "description": "The path where to store temporary files, else the application try to retrive $TMPDIR in env (optional)",
             "id": "path_tmp",
             "title": "The Path_tmp schema.",
             "type": "string"
         },
         "use_densification": {
             "default":"false",
-            "description": "Activate the densification using snow products from heterogeneous sensors",
+            "description": "Activate the densification using snow products from heterogeneous sensors (optional)",
             "id": "use_densification",
             "title": "The Use_densification schema.",
             "type": "boolean"
         },
         "densification_products_list": {
             "default": [],
-            "description": "The densification list, containing the paths of heterogenous snow products from heterogeneous sensors",
+            "description": "The densification list, containing the paths of heterogenous snow products from heterogeneous sensors (optional)",
             "id": "densification_products_list",
             "title": "The densification_products_list schema.",
             "type": "list"
         },
         "path_out": {
-            "description": "Path to output directory.",
+            "description": "Path to output directory. (mandatory)",
             "id": "path_out",
             "title": "The Path_out schema.",
             "type": "string"
         },
         "date_start": {
-            "description": "Start of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY)",
+            "description": "Start of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY) (mandatory)",
             "id": "date_start",
             "title": "The Date_start schema.",
             "type": "string"
         },
         "date_stop": {
-            "description": "Stop of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY)",
+            "description": "Stop of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY) (mandatory)",
             "id": "date_stop",
             "title": "The Date_stop schema.",
             "type": "string"
         },
         "date_margin": {
             "default": 15,
-            "description": "The margin ouside the date range to use for better interpolation results (in days)",
+            "description": "The margin ouside the date range to use for better interpolation results (in days) (optional)",
             "id": "date_margin",
             "title": "The Date_margin schema.",
             "type": "string"
         },
         "ram": {
             "default": 4096,
-            "description": "Maximum number of RAM memory used by the program.",
+            "description": "Maximum number of RAM memory used by the program. (optional)",
             "id": "ram",
             "title": "The Ram schema.",
             "type": "integer"
         },
         "nb_threads": {
             "default": 1,
-            "description": "Maximum number of threads use by the program.",
+            "description": "Maximum number of threads use by the program. (optional)",
             "id": "nb_threads",
             "title": "The Nb_threads schema.",
             "type": "integer"
         },
-        "comments": "the following parameters concerns only the snow_annual_map_evaluation",
+        "output_dates_filename": {
+            "description": "Path to output_dates, containing all dates you want in the output. (optional, by default step between two dates is one day)",
+            "id": "output_dates_filename",
+            "title": "The output_dates_filename schema.",
+            "type": "string"
+        },
+        "comments": "the following parameters concerns only the snow_annual_map_evaluation (do not use them in operation)",
         "run_comparison_evaluation": {
             "default":"false",
-            "description": "Activate the one to one comparison using snow products from heterogeneous sensors",
+            "description": "Activate the one to one comparison using snow products from heterogeneous sensors (optional)",
             "id": "run_comparison_evaluation",
             "title": "The run_comparison_evaluation schema.",
             "type": "boolean"
         },
         "comparison_products_list": {
             "default": [],
-            "description": "The comparison list, containing the paths of heterogenous snow products to compare with daily interpolation",
+            "description": "The comparison list, containing the paths of heterogenous snow products to compare with daily interpolation (optional)",
             "id": "comparison_products_list",
             "title": "The comparison_products_list schema.",
             "type": "list"
         },
         "run_modis_comparison": {
             "default":"false",
-            "description": "Activate the comparison between annual map and modis snow annual map",
+            "description": "Activate the comparison between annual map and modis snow annual map (optional)",
             "id": "run_modis_comparison",
             "title": "The run_modis_comparison schema.",
             "type": "boolean"
         },
         "modis_snow_map": {
-            "description": "The path to the modis daily snow masks (one file with one band per day)",
+            "description": "The path to the modis daily snow masks (one file with one band per day) (mandatory if run_modis_comparison=true)",
             "id": "modis_snow_map",
             "title": "The modis_snow_map schema.",
             "type": "string"
         },
         "modis_snow_map_dates": {
-            "description": "The dates corresponding to the bands of the modis_snow_map",
+            "description": "The dates corresponding to the bands of the modis_snow_map (mandatory if run_modis_comparison=true)",
             "id": "modis_snow_map_dates",
             "title": "The modis_snow_map_dates schema.",
             "type": "string"
         },
         "dem": {
-            "description": "The dem to use during modis comparison, to generate snow per altitude slices",
+            "description": "The dem to use during modis comparison, to generate snow per altitude slices (mandatory)",
             "id": "dem",
             "title": "The dem schema.",
             "type": "string"
diff --git a/python/s2snow/snow_annual_map_evaluation.py b/analysis/snow_annual_map_evaluation.py
similarity index 79%
rename from python/s2snow/snow_annual_map_evaluation.py
rename to analysis/snow_annual_map_evaluation.py
index 8015abe7..0018e90c 100644
--- a/python/s2snow/snow_annual_map_evaluation.py
+++ b/analysis/snow_annual_map_evaluation.py
@@ -1,20 +1,25 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-#=========================================================================
 #
-#  Program:   lis
-#  Language:  Python
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
 #
-#  Copyright (c) Germain Salgues
-#  Copyright (c) Manuel Grizonnet
+# This file is part of Let-it-snow (LIS)
 #
-#  See lis-copyright.txt for details.
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
 #
-#  This software is distributed WITHOUT ANY WARRANTY; without even
-#  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-#  PURPOSE.  See the above copyright notices for more information.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
-#=========================================================================
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 import os
 import os.path as op
 import shutil
@@ -28,12 +33,12 @@ import gdalconst
 import otbApplication as otb
 
 # Import python decorators for the different needed OTB applications
+from analysis.snow_annual_map import merge_masks_at_same_date
 from s2snow.app_wrappers import band_math, super_impose, confusion_matrix
 from s2snow.utils import get_raster_as_array, apply_color_table
 from s2snow.utils import str_to_datetime, datetime_to_str
 from s2snow.utils import write_list_to_file, read_list_from_file
-from s2snow.snow_annual_map import snow_annual_map
-
+from analysis.snow_annual_map import snow_annual_map
 
 # Build gdal option to generate masks of 1 byte using otb extended filename
 # syntax
@@ -43,6 +48,7 @@ GDAL_OPT = "?&gdal:co:NBITS=1&gdal:co:COMPRESS=DEFLATE"
 # syntax
 GDAL_OPT_2B = "?&gdal:co:NBITS=2&gdal:co:COMPRESS=DEFLATE"
 
+
 def get_raster_extent_as_poly(raster1):
     """ Return the extent of the input raster as polygon
     """
@@ -70,6 +76,7 @@ def get_raster_extent_as_poly(raster1):
 
     return poly, srs1
 
+
 def write_poly_to_shapefile(poly, shapefile_name, srs=None):
     """ Write a polygon geometry to shapefile
     """
@@ -93,27 +100,29 @@ def write_poly_to_shapefile(poly, shapefile_name, srs=None):
     layer.CreateFeature(feat)
     ds = layer = feat = geom = None
 
+
 def get_raster_intersection(raster1, raster2):
     """ Compute the intersection of 2 raters
     Return the instersection polygon and the associated projection
     """
     poly1, srs1 = get_raster_extent_as_poly(raster1)
-    print "poly1", poly1
+    print("poly1", poly1)
 
     poly2, srs2 = get_raster_extent_as_poly(raster2)
-    print "poly2", poly2
+    print("poly2", poly2)
 
     # convert poly2 into poly1 ProjectionRef
     transform = osr.CoordinateTransformation(srs2, srs1)
     poly2.Transform(transform)
-    print "poly2 transformed", poly2
+    print("poly2 transformed", poly2)
 
     intersection = poly2.Intersection(poly1)
-    print "intersection", intersection
+    print("intersection", intersection)
 
-    #return also the srs in which is expressed the intersection
+    # also return the srs in which the intersection is expressed
     return intersection, srs1
 
+
 class snow_annual_map_evaluation(snow_annual_map):
     def __init__(self, params):
         self.params = params
@@ -158,8 +167,8 @@ class snow_annual_map_evaluation(snow_annual_map):
         write_list_to_file(self.comparison_dates_filename, comparison_input_dates)
 
         # load required product
-        self.resulting_snow_mask_dict={}
-        for key in self.product_dict.keys():
+        self.resulting_snow_mask_dict = {}
+        for key in list(self.product_dict.keys()):
             comparison_tag = key + "_comparison"
             if len(self.product_dict[key]) > 1:
                 merged_mask = op.join(self.path_tmp, comparison_tag + "_merged_snow_product.tif")
@@ -169,8 +178,8 @@ class snow_annual_map_evaluation(snow_annual_map):
                 self.resulting_snow_mask_dict[comparison_tag] = self.product_dict[key][0].get_snow_mask()
 
         # convert the snow masks into binary snow masks
-        expression = "im1b1=="+self.label_cloud+"?2:(im1b1=="+self.label_no_data+"?2:" \
-                        + "(im1b1==" + self.label_snow + ")?1:0)"
+        expression = "im1b1==" + self.label_cloud + "?2:(im1b1==" + self.label_no_data + "?2:" \
+                     + "(im1b1==" + self.label_snow + ")?1:0)"
         self.binary_snowmask_list = self.convert_mask_list(expression, "snow_eval")
         logging.debug("Binary snow mask list:")
         logging.debug(self.binary_snowmask_list)
@@ -182,7 +191,7 @@ class snow_annual_map_evaluation(snow_annual_map):
             for comparison_index, comparison_date in enumerate(comparison_input_dates):
                 if ts_date in comparison_date:
                     pair_dict[comparison_date] = (ts_index, comparison_index)
-        print pair_dict
+        print(pair_dict)
 
         # project the snow masks onto the same foot print
         self.binary_snowmask_list_reprojected = []
@@ -191,7 +200,7 @@ class snow_annual_map_evaluation(snow_annual_map):
             if not os.path.exists(mask_out):
                 super_impose_app = super_impose(self.annual_snow_map,
                                                 mask_in,
-                                                mask_out+GDAL_OPT_2B,
+                                                mask_out + GDAL_OPT_2B,
                                                 "linear",
                                                 2,
                                                 self.ram,
@@ -202,7 +211,7 @@ class snow_annual_map_evaluation(snow_annual_map):
 
         # compare the two snow masks
         comparision_list = []
-        for comparison_date in pair_dict.keys():
+        for comparison_date in list(pair_dict.keys()):
             s2_index, comparison_index = pair_dict[comparison_date]
 
             path_extracted = op.join(self.path_tmp, "gapfilled_s2_" + comparison_date + ".tif")
@@ -212,7 +221,7 @@ class snow_annual_map_evaluation(snow_annual_map):
                 format='GTiff',
                 outputType=gdal.GDT_Byte,
                 noData=None,
-                bandList=[s2_index+1])
+                bandList=[s2_index + 1])
 
             expression = "im2b1==2?254:(2*im2b1+im1b1)"
             img_out = op.join(self.path_tmp, "comparision_" + comparison_date + ".tif")
@@ -230,7 +239,7 @@ class snow_annual_map_evaluation(snow_annual_map):
             apply_color_table(img_out, self.colorTable)
             shutil.copy2(img_out, self.path_out)
 
-            out = op.join(self.path_tmp, "confusion_matrix_"+ comparison_date + ".csv")
+            out = op.join(self.path_tmp, "confusion_matrix_" + comparison_date + ".csv")
             confusionMatrixApp = confusion_matrix(path_extracted,
                                                   self.binary_snowmask_list_reprojected[comparison_index],
                                                   out,
@@ -254,8 +263,8 @@ class snow_annual_map_evaluation(snow_annual_map):
 
         shutil.copy2(montage, self.path_out)
 
-        #if self.mode == "DEBUG":
-            #shutil.copytree(self.path_tmp, op.join(self.path_out, "tmpdir"))
+        # if self.mode == "DEBUG":
+        # shutil.copytree(self.path_tmp, op.join(self.path_out, "tmpdir"))
 
         logging.info("End snow_annual_map_evaluation")
 
@@ -279,7 +288,7 @@ class snow_annual_map_evaluation(snow_annual_map):
                 modis_stop_index = i
 
         # generate the summary map
-        band_index = range(modis_start_index+1, modis_stop_index+2)
+        band_index = list(range(modis_start_index + 1, modis_stop_index + 2))
         expression = "+".join(["(im1b" + str(i) + "==200?1:0)" for i in band_index])
 
         if not op.exists(self.modis_annual_snow_map):
@@ -354,29 +363,29 @@ class snow_annual_map_evaluation(snow_annual_map):
                              "intersection")
 
         # The following approach use super impose to project MODIS onto S2 data
-        #for interp_method in ["linear"]:
-            #modis_reprojected_snow_map = self.annual_snow_map.replace(".tif", \
-                                            #"_reprojected_"+interp_method+".tif")
-            #super_impose_app = super_impose(self.annual_snow_map,
-                                            #self.modis_annual_snow_map,
-                                            #modis_reprojected_snow_map,
-                                            #interp_method,
-                                            #-1,
-                                            #self.ram,
-                                            #otb.ImagePixelType_int16)
-            #super_impose_app.ExecuteAndWriteOutput()
-            #super_impose_app = None
-            #shutil.copy2(modis_reprojected_snow_map, self.path_out)
-
-            #compute_annual_stats(self.annual_snow_map,
-                                 #self.dem,
-                                 #modis_reprojected_snow_map,
-                                 #self.dem,
-                                 #self.path_out,
-                                 #"superimpose")
-
-        #if self.mode == "DEBUG":
-            #shutil.copytree(self.path_tmp, op.join(self.path_out, "tmpdir"))
+        # for interp_method in ["linear"]:
+        # modis_reprojected_snow_map = self.annual_snow_map.replace(".tif", \
+        # "_reprojected_"+interp_method+".tif")
+        # super_impose_app = super_impose(self.annual_snow_map,
+        # self.modis_annual_snow_map,
+        # modis_reprojected_snow_map,
+        # interp_method,
+        # -1,
+        # self.ram,
+        # otb.ImagePixelType_int16)
+        # super_impose_app.ExecuteAndWriteOutput()
+        # super_impose_app = None
+        # shutil.copy2(modis_reprojected_snow_map, self.path_out)
+
+        # compute_annual_stats(self.annual_snow_map,
+        # self.dem,
+        # modis_reprojected_snow_map,
+        # self.dem,
+        # self.path_out,
+        # "superimpose")
+
+        # if self.mode == "DEBUG":
+        # shutil.copytree(self.path_tmp, op.join(self.path_out, "tmpdir"))
 
 
 def compute_annual_stats(s2, dem_s2, modis, dem_modis, outputDir, suffix):
@@ -406,7 +415,7 @@ def compute_annual_stats(s2, dem_s2, modis, dem_modis, outputDir, suffix):
     for alt_range in altitudes:
         logging.debug("Altitude stats for " + str(alt_range[0]) + "m - " + str(alt_range[1]) + "m")
 
-        labels.append("["+str(alt_range[0])+"-"+str(alt_range[1])+"m[")
+        labels.append("[" + str(alt_range[0]) + "-" + str(alt_range[1]) + "m[")
 
         indexes_s2 = np.where(s2_mask & (alt_range[0] <= dem_s2_array) \
                               & (dem_s2_array < alt_range[1]))
@@ -443,37 +452,39 @@ def compute_annual_stats(s2, dem_s2, modis, dem_modis, outputDir, suffix):
     # display
     # plt.show()
 
+
 ###############################################################
 #   Main Test
 ###############################################################
 def main():
-    params = {"tile_id":"T31TCH",
-              "date_start":"01/09/2015",
-              "date_stop":"31/08/2016",
-              "date_margin":15,
-              "mode":"DEBUG",
-              "input_products_list":[],
-              "path_tmp":os.environ.get('TMPDIR'),
-              "path_out":"/home/qt/salguesg/scratch/workdir",
-              "ram":4096,
-              "nbThreads":8,
-              "use_densification":False,
-              "densification_products_list":[],
-              "data_availability_check":False,
-              "run_comparison_evaluation":True,
-              "comparison_products_list":[],
-              "run_modis_comparison":True,
-              "modis_snow_map":"/home/qt/salguesg/scratch/workdir/MODIS/Pirineos_gapfilled.tif",
-              "modis_snow_map_dates":"/home/qt/salguesg/scratch/workdir/MODIS/Pirineos_gapfilled_dates.csv",
-              "dem":"/work/OT/siaa/Theia/Neige/DEM/S2__TEST_AUX_REFDE2_T31TCH_0001.DBL.DIR/S2__TEST_AUX_REFDE2_T31TCH_0001_ALT_R2.TIF"}
+    params = {"tile_id": "T31TCH",
+              "date_start": "01/09/2015",
+              "date_stop": "31/08/2016",
+              "date_margin": 15,
+              "mode": "DEBUG",
+              "input_products_list": [],
+              "path_tmp": os.environ.get('TMPDIR'),
+              "path_out": "/home/qt/salguesg/scratch/workdir",
+              "ram": 4096,
+              "nbThreads": 8,
+              "use_densification": False,
+              "densification_products_list": [],
+              "data_availability_check": False,
+              "run_comparison_evaluation": True,
+              "comparison_products_list": [],
+              "run_modis_comparison": True,
+              "modis_snow_map": "/home/qt/salguesg/scratch/workdir/MODIS/Pirineos_gapfilled.tif",
+              "modis_snow_map_dates": "/home/qt/salguesg/scratch/workdir/MODIS/Pirineos_gapfilled_dates.csv",
+              "dem": "/work/OT/siaa/Theia/Neige/DEM/S2__TEST_AUX_REFDE2_T31TCH_0001.DBL.DIR/S2__TEST_AUX_REFDE2_T31TCH_0001_ALT_R2.TIF"}
 
     snow_annual_map_evaluation_app = snow_annual_map_evaluation(params)
     snow_annual_map_evaluation_app.run()
-    #snow_annual_map_evaluation_app.run_evaluation()
-    #snow_annual_map_evaluation_app.compare_modis()
+    # snow_annual_map_evaluation_app.run_evaluation()
+    # snow_annual_map_evaluation_app.compare_modis()
+
 
 if __name__ == '__main__':
     # Set logging level and format.
-    logging.basicConfig(level=logging.DEBUG, format=\
-                        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+    logging.basicConfig(level=logging.DEBUG,
+                        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
     main()
diff --git a/analysis/snowcover.py b/analysis/snowcover.py
new file mode 100644
index 00000000..c4341a52
--- /dev/null
+++ b/analysis/snowcover.py
@@ -0,0 +1,2541 @@
+import sys
+import os
+import errno
+import re
+from datetime import datetime, timedelta, date
+from osgeo import osr, gdal
+from osgeo.gdalnumeric import *
+from osgeo.gdalconst import *
+import numpy as np
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import xml.etree.ElementTree as ET
+from xml.dom import minidom
+import scipy.optimize as opti
+from scipy.stats import mstats
+import shutil
+import matplotlib.gridspec as gridspec
+from matplotlib.colors import LogNorm
+from pyproj import Proj, transform
+import glob
+import random
+import pandas as pd
+from sklearn import datasets, linear_model
+from sklearn.model_selection import train_test_split
+from sklearn.metrics import mean_squared_error
+from math import sqrt
+import math
+from matplotlib.ticker import PercentFormatter
+
+
+class snowcover:
+    def __init__(self):
+
+        self.nb_shift_days = 4
+        self.path_palettes = "/work/OT/siaa/Theia/Neige/CoSIMS/zacharie/snowcover/palettes"
+        self.path_outputs = "/work/OT/siaa/Theia/Neige/CoSIMS/zacharie/snowcover/OUTPUTS"
+        self.path_THEIA = "/work/OT/siaa/Theia/Neige/CoSIMS/zacharie/snowcover/INPUTS"
+        self.path_LIS = "/work/OT/siaa/Theia/Neige/PRODUITS_NEIGE_LIS_develop_1.5"
+        self.date_format = "%Y-%m-%d"
+        self.max_accuracy = 5
+        self.f_tree = "/work/OT/siaa/Theia/Neige/CoSIMS/data/tree_cover_density/original_tiling/TCD_2015_020m_eu_03035_d05_full.tif"
+        # default L2A selection method ("closest" or "cleanest"); selectProducts falls back on it
+        self.selection = "closest"
+
+    # Scan the directory d and return the files whose names contain a date between dateDebut - decal and dateFin + decal (in days)
+    def getListDateDecal(self,dateDebut,dateFin,d,decal):
+        lo=[]
+        li = []
+        
+        try:
+            li = os.listdir(d)
+        except OSError as exc:  # Python >2.5
+            if exc.errno == errno.EACCES:
+                return lo
+            else:
+                raise       
+        
+        for i in sorted(li):
+            date = self.getDateFromStr(i) 
+            if date == '' : continue
+            if (date >= self.getDateFromStr(dateDebut) - timedelta(days = decal)) and (date <= self.getDateFromStr(dateFin) + timedelta(days = decal)) :
+                lo.append(os.path.join(d,i))
+        return lo
+
+    #Create a directory with path dos
+    def mkdir_p(self,dos):
+        try:
+            os.makedirs(dos)
+        except OSError as exc:  # Python >2.5
+            if exc.errno == errno.EEXIST and os.path.isdir(dos):
+                pass
+            else:
+                raise
+
+
+
+    #Extract a date from a string
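+    # e.g. getDateFromStr("SENTINEL2A_20180116-105427") returns date(2018, 1, 16); returns '' if no date is found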
+    def getDateFromStr(self,N):
+        sepList = ["","-","_","/"]
+        date = ''
+        for s in sepList :
+            found = re.search('\d{4}'+ s +'\d{2}'+ s +'\d{2}', N)
+            if found != None :
+               date = datetime.strptime(found.group(0), '%Y'+ s +'%m'+ s +'%d').date()
+               break
+        return date
+    
+
+        
+    #Extract a Tile number from a string
+    def getTileFromStr(self,N):
+
+        tile = ''
+        found = re.search('T' + '\d{2}' +'\w{3}', N)
+        if found != None : tile = found.group(0)
+           
+        return tile
+        
+    # Extract an EPSG number from a string
+    def getEpsgFromStr(self,N):
+        
+        epsg = ''
+        found = re.search('\d{5}', N)
+        if found != None : epsg = found.group(0)
+           
+        return str(epsg)
+
+    #Get the coordinates of the overlapping region between two rasters G1 and G2
+    # The coordinates are expressed in the projection of G2
+    def getOverlapCoords(self,G1,G2):
+        
+        epsg1 = (gdal.Info(G1, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+        epsg2 = (gdal.Info(G2, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+        
+        GT1 = G1.GetGeoTransform()
+        minx1 = GT1[0]
+        maxy1 = GT1[3]
+        maxx1 = minx1 + GT1[1] * G1.RasterXSize
+        miny1 = maxy1 + GT1[5] * G1.RasterYSize
+        
+        GT2 = G2.GetGeoTransform()
+        minx2 = GT2[0]
+        maxy2 = GT2[3]
+        maxx2 = minx2 + GT2[1] * G2.RasterXSize
+        miny2 = maxy2 + GT2[5] * G2.RasterYSize
+        
+        if epsg1 not in epsg2 :
+            minx1 , miny1 = self.reproject(epsg1,epsg2,minx1,miny1)
+            maxx1 , maxy1 = self.reproject(epsg1,epsg2,maxx1,maxy1)
+        
+        
+        minx3 = max(minx1,minx2)
+        maxy3 = min(maxy1,maxy2)
+        maxx3 = min(maxx1,maxx2)
+        miny3 = max(miny1,miny2)   
+        
+        # if there is no intersection 
+        if (minx3 > maxx3 or miny3 > maxy3) : 
+            return None,None,None,None
+        
+        return minx3, maxy3, maxx3, miny3
+        
+        
+    #Check if two rasters G1 and G2 are overlapping
+    def isOverlapping(self,G1,G2):
+        
+        epsg1 = (gdal.Info(G1, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+        epsg2 = (gdal.Info(G2, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+        
+        GT1 = G1.GetGeoTransform()
+        minx1 = GT1[0]
+        maxy1 = GT1[3]
+        maxx1 = minx1 + GT1[1] * G1.RasterXSize
+        miny1 = maxy1 + GT1[5] * G1.RasterYSize
+        
+        GT2 = G2.GetGeoTransform()
+        minx2 = GT2[0]
+        maxy2 = GT2[3]
+        maxx2 = minx2 + GT2[1] * G2.RasterXSize
+        miny2 = maxy2 + GT2[5] * G2.RasterYSize
+        
+        if epsg1 not in epsg2 :
+            minx1 , miny1 = self.reproject(epsg1,epsg2,minx1,miny1)
+            maxx1 , maxy1 = self.reproject(epsg1,epsg2,maxx1,maxy1)
+        
+        minx3 = max(minx1,minx2)
+        maxy3 = min(maxy1,maxy2)
+        maxx3 = min(maxx1,maxx2)
+        miny3 = max(miny1,miny2)   
+        
+        # no intersection 
+        if (minx3 > maxx3 or miny3 > maxy3) : 
+            return False
+        else:
+            return True
+        
+
+
+    # Check if the raster Gsmall is entirely contained within the raster Gbig
+    def isInside(self,Gbig,Gsmall):
+        
+        epsgbig = (gdal.Info(Gbig, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+        epsgsmall = (gdal.Info(Gsmall, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+        
+
+        
+        GTbig = Gbig.GetGeoTransform()
+        minxbig = GTbig[0]
+        maxybig = GTbig[3]
+        maxxbig = minxbig + GTbig[1] * Gbig.RasterXSize
+        minybig = maxybig + GTbig[5] * Gbig.RasterYSize
+        
+        GTsmall = Gsmall.GetGeoTransform()
+        minxsmall = GTsmall[0]
+        maxysmall = GTsmall[3]
+        maxxsmall = minxsmall + GTsmall[1] * Gsmall.RasterXSize
+        minysmall = maxysmall + GTsmall[5] * Gsmall.RasterYSize
+        
+        if epsgbig not in epsgsmall :
+            minxsmall , minysmall = self.reproject(epsgsmall,epsgbig,minxsmall,minysmall)
+            maxxsmall , maxysmall = self.reproject(epsgsmall,epsgbig,maxxsmall,maxysmall)
+        
+        if minxbig <= minxsmall and maxxbig >= maxxsmall and minybig <= minysmall and maxybig >=maxysmall :
+            return True
+        else :
+            return False
+
+    
+    #Reproject coordinates x1 and y1 from inEPSG to outEPSG
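+    # e.g. reproject("4326", "32631", 1.45, 43.6) converts WGS84 lon/lat to UTM zone 31N easting/northing (illustrative values)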
+    def reproject(self,inEPSG,outEPSG,x1,y1):
+        
+        inProj = Proj(init='EPSG:' + inEPSG)
+        outProj = Proj(init='EPSG:'+ outEPSG)
+        x2,y2 = transform(inProj,outProj,x1,y1)
+        
+        return x2, y2
+    
+    
+
+        
+
+    #Prepare the FSC and NDSI datasets necessary for calibration and validation
+    #dirName (string) : name of the directory which will contain the resulting dataset (FSC and NDSI tif files) (self.path_outputs/dirName) (optional)
+    #dateDebut, dateFin ("YYYY-MM-DD") : The tif files are made from rasters dating between dateDebut - self.nb_shift_days and dateFin + self.nb_shift_days.
+    # If dateFin is empty, the period consists of the single date dateDebut
+    #source (string) : name of the raster source (PLEIADES, SPOT, IZAS,etc...) 
+    #epsgFSC (string) : Imposed projection number of the FSC raster. (optional)
+    #resampling (string) : raster resampling method ("average","near",etc...) (optional)
+    #isFSC (bool) : indication if the snow rasters already have FSC values (optional)
+    #SNWval (list of int) : value(s) of snow pixels in the snow raster (optional)
+    #NSNWval (list of int) : value(s) of no-snow pixels in the snow raster (optional)
+    #NDval (list of int) : value(s) of no-data pixels in the snow raster(optional)
+    #tiles (list of string) : tile numbers of the L2A products overlapping the snow rasters. If empty, makeDataSets will search them itself. (optional)
+    #selection (string) : method of L2A products selection (closest or cleanest) (optional)
+    #return True or False to indicate success
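+    # Hypothetical call (illustrative values, not from the source):
+    #   sc = snowcover()
+    #   sc.makeDataSet(dateDebut = "2018-01-16", source = "PLEIADES", tiles = ["T31TCH"])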
+    def makeDataSet(self,dirName = "",dateDebut = "",dateFin = "",source = "",epsgFSC = "",resampling = "average",isFSC = False,SNWval = [1],NSNWval = [0],NDval = [], tiles = [],selection = "closest"):
+        
+
+        #parameters check
+        if dateFin == "": 
+            dateFin = dateDebut
+
+        if self.getDateFromStr(dateDebut) == '' or self.getDateFromStr(dateFin) == '':
+            print("ERROR makeDataSet : error in input date")
+            return False
+        if source == "" :
+            print("ERROR makeDataSet : source must be specified")
+            return False
+
+        if dirName == "":
+            dirName = "DATASETS_" + source + "_" + dateDebut
+            if dateDebut != dateFin : dirName = dirName + "_" + dateFin
+
+
+
+        #search overlapping tiles
+        if tiles == [] :
+            print("Searching for overlapping tiles")
+            tiles = self.searchTiles(source = source,epsgFSC = epsgFSC)
+            if tiles == []:
+                print("ERROR makeDataSet : no tiles found")
+                return False
+        print("tiles: ",tiles)
+
+
+        #select snow and L2A products
+        print("Selecting FSC and L2A products")
+        list_products = self.selectProducts(dateDebut = dateDebut,dateFin = dateFin,source = source, epsgFSC = epsgFSC,tiles = tiles,selection = selection)
+        if list_products == {}:
+            print("ERROR selectProducts : no products found")
+            return False
+
+        #display selected products on terminal 
+        print("nb of FSC products = " + str(len(list_products)))
+        for FSC_date in list_products :
+            f_FSC = list_products[FSC_date][0]
+            l_L2A = list_products[FSC_date][1]
+            print("DATE = " + str(FSC_date) + "\n     FSC = " + str(f_FSC) + "\n     nb of L2A tiles = " + str(len(l_L2A)) )
+            for tile , epsgL2A, L2A  in l_L2A : 
+                print("     " + tile + " : " + L2A)
+
+        #produce FSC and NDSI datasets
+        success = self.products2DataSets(dirName = dirName,dateDebut = dateDebut,dateFin = dateFin,source = source,epsgFSC = epsgFSC,list_products = list_products,resampling = resampling,isFSC = isFSC,SNWval = SNWval,NSNWval = NSNWval,NDval = NDval)
+
+        return success
+
+
+
+
+    #produce a list of the Sentinel-2 tiles overlapping with the snow rasters of a source. (ex: T31TCH for PLEIADES)
+    #source (string) : source of the snow rasters (PLEIADES,IZAS,etc...)
+    #epsgFSC (string) : Imposed epsg projection number (optional)
+    #return a list of tiles names
+    def searchTiles(self,source = "",epsgFSC = ""):
+
+        tiles_overlap = []
+
+
+        if source == "" :
+            print("ERROR searchTiles: source must be specified")
+            return tiles_overlap
+
+        
+
+        #For each FSC raster we look for the overlapping S2 tiles
+        path_FSC_dir = os.path.join(self.path_THEIA,"FSC",source)
+        pxs = []
+        for FSC_product in os.listdir(path_FSC_dir):
+            f_FSC = os.path.join(path_FSC_dir,FSC_product)
+            g_FSC = gdal.Open(f_FSC)
+            px = g_FSC.GetGeoTransform()[0]
+            if px not in pxs :
+                print("\n")
+                print("Check tiles for FSC file",FSC_product)
+                pxs.append(px)
+                
+                # we set the EPSG if necessary
+                if epsgFSC != "" :
+                    g_FSC = gdal.Warp('',g_FSC,format= 'MEM',srcSRS="EPSG:" + epsgFSC)
+
+                # we check each S2 tiles for overlaps
+                for tile in os.listdir(self.path_LIS) :
+                    if not os.path.isdir(os.path.join(self.path_LIS,tile)) : continue
+                    print ("Check tile : " + tile)
+                    try:
+                        L2A_product = os.listdir(os.path.join(self.path_LIS,tile))[-1]
+                    except OSError as exc:  # Python >2.5
+                        if exc.errno == errno.EACCES:
+                            continue
+                        else:
+                            raise   
+                    print("Check overlapping with L2A file " + L2A_product)
+                    f_L2A = os.path.join(self.path_LIS,tile,L2A_product,"swir_band_extracted.tif")
+                    g_L2A = gdal.Open(f_L2A)
+                    if self.isOverlapping(g_L2A,g_FSC) :
+                        print("Overlap present")
+                        if tile not in tiles_overlap :
+                            tiles_overlap.append(tile)
+                print("\n")
+        return tiles_overlap
+                
+
+
+
+
+
+    # Select a list of snow rasters and their corresponding L2A products
+    #dateDebut, dateFin ("YYYY-MM-DD") : The datasets are made from rasters dating between dateDebut - self.nb_shift_days and dateFin + self.nb_shift_days.
+    # If dateFin is empty, the period consists of the single date dateDebut
+    #source (string) : name of the raster source (PLEIADES, SPOT, IZAS,etc...) 
+    #epsgFSC (string) : Imposed projection number of the FSC raster. (optional)
+    #tiles (list of string) : tile numbers of the L2A products overlapping the snow rasters. If empty, makeDataSets will search them itself. (optional)
+    #selection (string) : method of L2A products selection (closest or cleanest) (optional)
+    #return a dictionary in the format list_products[date] = [path of a snow raster,[tile of a L2A product,epsg of a L2A product, path of a L2A product]]
+    def selectProducts(self,dateDebut = "",dateFin = "",source = "", epsgFSC = "",tiles = [],selection = ""):
+
+        list_products = {}
+
+        #parameters check
+        if dateFin == "": dateFin = dateDebut
+        
+        if selection == "":
+            selection = self.selection
+
+        if self.getDateFromStr(dateDebut) == '' or self.getDateFromStr(dateFin) == '':
+            print("ERROR selectProducts : error in input date")
+            return list_products
+        if source == "" :
+            print("ERROR selectProducts : source must be specified")
+            return list_products
+
+        # We create a list of the FSC products (with paths)
+        
+        path_FSC_dir = os.path.join(self.path_THEIA,"FSC",source)
+        list_FSC_products = []
+        if os.path.isdir(path_FSC_dir):
+            list_FSC_products = self.getListDateDecal(dateDebut,dateFin,path_FSC_dir,0)
+        if list_FSC_products == [] :
+            print ("ERROR selectProducts : No FSC product found for source " + source + " in directory " + path_FSC_dir)
+            return list_products
+            
+            
+        
+        for tile in tiles :
+                
+            print("Check tile : " + tile)
+            path_L2A_dir = os.path.join(self.path_LIS,tile)
+            list_L2A_products = []
+            if os.path.isdir(path_L2A_dir):
+                list_L2A_products = self.getListDateDecal(dateDebut,dateFin,path_L2A_dir,self.nb_shift_days)
+            if list_L2A_products == [] :
+                print ("No L2A product found for tile " + tile + " in directory " + path_L2A_dir)
+                        
+
+            L2A_product = glob.glob(os.path.join(self.path_LIS,tile,'*SENTINEL*'))[0]
+            f_tile = os.path.join(L2A_product,"LIS_PRODUCTS","LIS_SEB.TIF")
+            g_tile = gdal.Open(f_tile)
+            
+            
+            for f_FSC in list_FSC_products :
+                
+                
+                g_FSC = gdal.Open(f_FSC)
+
+                if epsgFSC != "" :
+                    g_FSC = gdal.Warp('',g_FSC,format= 'MEM',srcSRS="EPSG:" + epsgFSC )                
+    
+                dateFSC = self.getDateFromStr(f_FSC)
+                minx, maxy, maxx, miny = self.getOverlapCoords(g_FSC,g_tile)  
+                if minx is None and maxy is None: continue
+                
+                
+                epsgL2A = ""
+                L2A = ""
+                ind = self.nb_shift_days + 1
+                NDR1 = 100
+                NDR2 = 100
+                for L2A_product in list_L2A_products:
+                    if "SENTINEL" not in L2A_product : continue
+                    dateL2A = self.getDateFromStr(L2A_product)
+                    lag = dateL2A - dateFSC
+                    if abs(lag.days) >  self.nb_shift_days : continue
+                    
+                    f_L2A = os.path.join(L2A_product,"LIS_PRODUCTS","LIS_SEB.TIF")
+                    g_L2A = gdal.Translate('',f_L2A,format= 'MEM',projWin = [minx, maxy, maxx, miny]) 
+                    bandL2A = BandReadAsArray(g_L2A.GetRasterBand(1))
+                    
+
+                    
+                    NDR2 = (float(len(bandL2A[bandL2A == 205]) + len(bandL2A[bandL2A == 254])) / float(np.size(bandL2A))) * 100
+                    
+                                
+                    check = None
+                    
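+                    # "cleanest" keeps the candidate with the lowest no-data ratio (ties broken by the
+                    # smallest day lag); any other selection simply keeps the candidate closest in time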
+                    if "cleanest" in selection :   
+                        check =  NDR2 < 100 and ((abs(NDR2 - NDR1) < 0.0001 and abs(lag.days) < ind) or (NDR1 - NDR2 >= 0.0001))
+                    else :
+                        check = abs(lag.days) < ind
+                    
+                    if check :
+                        #print("\n")
+                        print("date FSC",dateFSC,"date L2A",dateL2A)
+                        print("NoDataRatio1 = ",NDR1,"NoDataRatio2 = ",NDR2,"lag = ",lag.days)
+                        #print("\n")
+                        ind = abs(lag.days)
+                        L2A = L2A_product
+                        NDR1 = NDR2
+                        epsgL2A = (gdal.Info(g_L2A, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+                    else : print("date rejetee")
+                        
+                        
+                if L2A == "" : continue
+                
+                print("Chosen L2A : " + L2A)
+                
+                if dateFSC not in list_products :
+                    list_products[dateFSC] = [f_FSC,[]]
+                list_products[dateFSC][1].append([tile,epsgL2A,L2A])
+                
+                
+        return list_products
+
+
+
+
+
+    # Produce FSC and NDSI datasets.
+    #dirName (string) : name of the directory containing the datasets (self.path_outputs/dirName) (optional)
+    #dateDebut, dateFin ("YYYY-MM-DD") : The datasets are made from rasters dating between dateDebut - self.nb_shift_days and dateFin + self.nb_shift_days.
+    # If dateFin is empty, the period consists of the single date dateDebut
+    #source (string) : name of the raster source (PLEIADES, SPOT, IZAS,etc...) 
+    #epsgFSC (string) : Imposed projection number of the FSC raster. (optional)
+    #resampling (string) : raster resampling method ("average","near",etc...) (optional)
+    #isFSC (bool) : indication if the snow rasters already have FSC values (optional)
+    #SNWval (list of int) : value(s) of snow pixels in the snow raster (optional)
+    #NSNWval (list of int) : value(s) of no-snow pixels in the snow raster (optional)
+    #NDval (list of int) : value(s) of no-data pixels in the snow raster(optional)
+    #list_products : dictionary in the format list_products[date] = [path of a snow raster,[tile of a L2A product,epsg of a L2A product, path of a L2A product]]
+    def products2DataSets(self,dirName = "",dateDebut = "",dateFin = "",source = "",epsgFSC = "",list_products = {},resampling = "average",isFSC = False,SNWval = [1],NSNWval = [0],NDval = []):
+        
+        
+
+        #parameters check
+        if dateFin == "": 
+            dateFin = dateDebut
+
+        if self.getDateFromStr(dateDebut) == '' or self.getDateFromStr(dateFin) == '':
+            print("ERROR products2DataSets : error in input date")
+            return False
+        if source == "" :
+            print("ERROR products2DataSets : source must be specified")
+            return False
+
+        if list_products == {} : 
+            print("ERROR products2DataSets : no products specified")
+            return False
+
+        if dirName == "":
+            dirName = "DATASETS_" + source + "_" + dateDebut
+            if dateDebut != dateFin : dirName = dirName + "_" + dateFin
+        
+        
+
+        
+        dataSetDir = os.path.join(self.path_outputs,dirName)
+        shutil.rmtree(dataSetDir,ignore_errors=True)
+        self.mkdir_p(dataSetDir)
+        nb_results = 0
+    
+        # DEFINITION OF THE ANALYSIS PERIOD ##########################################
+        # We take a start date and an end date
+        print("\nAnalysis period: " + dateDebut + " - " + dateFin)
+
+    
+        # FOR EACH DATE: ##########################################
+        for dateFSC in list_products :
+
+            nd = 100000
+            dir_tifs_date = os.path.join(dataSetDir,"TIFS",dateFSC.strftime(self.date_format))
+            self.mkdir_p(dir_tifs_date)
+       
+        
+            f_FSC = list_products[dateFSC][0]
+            l_L2A = list_products[dateFSC][1]
+            
+            print("\nCalcul pour : " + dateFSC.strftime("%Y/%m/%d"))
+            
+            
+            
+            # we get the FSC projection system
+           
+            if epsgFSC == "" :
+                epsgFSC = (gdal.Info(f_FSC, format='json')['coordinateSystem']['wkt'].rsplit('"EPSG","', 1)[-1].split('"')[0])
+           
+            
+            
+
+            
+            # Open, convert and resample the FSC
+
+            print("\nConverting FSC values")
+            # remap the FSC values
+            g_FSC_o = None
+            g_FSC_o = gdal.Warp('',f_FSC,format= 'MEM',outputType = gdal.GDT_Float32)
+            
+            g_FSC_o = gdal.Warp('',g_FSC_o,format= 'MEM', dstNodata = 9999)
+            g_FSC_o = gdal.Translate('',g_FSC_o,format= 'MEM',noData = nd)
+
+
+
+            
+            
+            
+            #g_FSC_o = gdal.Translate('',g_FSC_o,format= 'MEM',noData = None)
+            a_FSC = BandReadAsArray(g_FSC_o.GetRasterBand(1))
+            print("nodata999",str(len(a_FSC[a_FSC == 9999])))
+            print("nodataNAN",str(len(a_FSC[np.isnan(a_FSC)])))
+            if len(NDval) > 0 :
+                for nData in NDval :
+                    cond = np.where((a_FSC == nData) | (np.isnan(a_FSC)))
+                    a_FSC[cond] = 9999
+            if len(NSNWval) > 0 :
+                for noSnow in NSNWval :
+                    a_FSC[a_FSC == noSnow] = 0
+            if len(SNWval) > 0 :
+                if isFSC == False :
+                    for snow in SNWval :
+                        a_FSC[a_FSC == snow] = 1
+            g_FSC_o.GetRasterBand(1).WriteArray(a_FSC)
+            a_FSC = None
+            
+            gdal.Translate(os.path.join(dir_tifs_date,"INPUT_FSC.tif"),g_FSC_o,format= 'GTiff',noData = 9999)
+            
+
+            
+            print("\nTraitement des tuiles")
+            
+           
+  
+            l_g_FSC = {}
+            
+
+            
+            # Prepare a reprojected FSC for each projection
+            for tile , epsgS2 , L2A_product in  l_L2A : 
+                
+                if epsgS2 not in l_g_FSC :
+                    g_FSC = gdal.Warp('',g_FSC_o,format= 'MEM',srcSRS="EPSG:" + epsgFSC,dstSRS="EPSG:" + epsgS2,resampleAlg=resampling,xRes= 20,yRes= 20)
+                    a_FSC = BandReadAsArray(g_FSC.GetRasterBand(1))
+                    a_FSC[np.isnan(a_FSC)] = 9999
+                    a_FSC[a_FSC > 1] = 9999
+                    g_FSC.GetRasterBand(1).WriteArray(a_FSC)
+                    a_FSC = None
+                    #g_FSC = gdal.Warp('',g_FSC,format= 'MEM',dstNodata = 9999)
+                    l_g_FSC[epsgS2] = g_FSC
+                    a_FSC = BandReadAsArray(g_FSC.GetRasterBand(1))
+                    print("resnodata999",str(len(a_FSC[a_FSC == 9999])))
+                    print("resnodataNAN",str(len(a_FSC[np.isnan(a_FSC)])))
+                    gdal.Translate(os.path.join(dir_tifs_date,"RESAMPLED_FSC_EPSG-"+epsgS2+".tif"),g_FSC,format= 'GTiff',noData = 9999)
+                    
+                    g_FSC = None
+                    
+                    
+
+
+            
+            for tile , epsgS2 , L2A_product in  sorted(l_L2A,key=lambda l:l[1]) : 
+                
+                
+                # We look for the red, green & swir bands tiff files + mask
+                f_green = ""
+                f_swir = ""
+                f_red = ""
+                f_MSK = ""
+                f_compo = ""
+    
+        
+                for f in os.listdir(L2A_product) :
+                    if ("green_band_resampled.tif" in f) :
+                        f_green = os.path.join(L2A_product,f)
+                    elif ("red_band_resampled.tif" in f) :
+                        f_red = os.path.join(L2A_product,f)
+                    elif ("swir_band_extracted.tif" in f) :
+                        f_swir = os.path.join(L2A_product,f)
+                    elif ("LIS_PRODUCTS" in f) :
+                        if os.path.isfile(os.path.join(L2A_product,f,"LIS_SEB.TIF")):
+                            f_msk = os.path.join(L2A_product,f,"LIS_SEB.TIF")
+                        if os.path.isfile(os.path.join(L2A_product,f,"LIS_COMPO.TIF")):
+                            f_compo = os.path.join(L2A_product,f,"LIS_COMPO.TIF")
+
+    
+        
+                #If there is a file missing, we skip to the next tile
+                if f_green == "" or f_red == "" or f_swir == "" or f_msk == "": continue
+                
+                
+                # Compute the overlap coordinates in the L2A projection
+                print("\nComputing overlap coordinates")
+                g_msk = gdal.Open(f_msk)
+                minx, maxy, maxx, miny = self.getOverlapCoords(l_g_FSC[epsgS2],g_msk)
+                
+                
+                # clip the L2A files
+                # clip the mask
+                print("\nClipping the mask")
+                g_msk= gdal.Translate(os.path.join(dir_tifs_date,"INPUT_SEB_" + tile + "_EPSG-" + epsgS2 + ".tif"),g_msk,format= 'GTiff',projWin = [minx, maxy, maxx, miny])
+                print("\nClipping the composite")
+                gdal.Translate(os.path.join(dir_tifs_date,"INPUT_COMPO_" + tile + "_EPSG-" + epsgS2 + ".tif"),f_compo,format= 'GTiff',projWin = [minx, maxy, maxx, miny])
+
+                # Load and crop the green band
+                print("\nCropping green band")
+                g_green = gdal.Translate('',f_green,format= 'MEM',projWin = [minx, maxy, maxx, miny])
+                # Load and crop the red band
+                print("\nCropping red band")
+                g_red = gdal.Translate('',f_red,format= 'MEM',projWin = [minx, maxy, maxx, miny])
+                # Load and crop the SWIR band
+                print("\nCropping SWIR band")
+                g_swir = gdal.Translate('',f_swir,format= 'MEM',projWin = [minx, maxy, maxx, miny])
+
+                # Crop a copy of the FSC
+                g_FSC_c = gdal.Translate('',l_g_FSC[epsgS2],format= 'MEM',projWin = [minx, maxy, maxx, miny])
+
+                # Produce an output raster with the same footprint
+                raster = g_FSC_c
+
+                # Compute the NDSI over the overlap
+                print("\nComputing NDSI")
+                bandV = BandReadAsArray(g_green.GetRasterBand(1))
+                g_green = None
+                bandIR = BandReadAsArray(g_swir.GetRasterBand(1))
+                g_swir = None
+                bandR = BandReadAsArray(g_red.GetRasterBand(1))
+                g_red = None           
+                # Extract the snow mask band
+                MSK = BandReadAsArray(g_msk.GetRasterBand(1))
+                g_msk = None
+                # Extract the FSC band
+                FSC = BandReadAsArray(g_FSC_c.GetRasterBand(1))
+                
+
+                # NDSI = (green - SWIR) / (green + SWIR)
+                a = (bandV - bandIR).astype(float)
+                b = (bandV + bandIR).astype(float)
+                NDSI = a/b
+
+                # Replace unusable pixels with nodata
+                cond1 = np.where(MSK != 100)
+                NDSI[cond1] = 9999
+                FSC[cond1] = 9999
+                MSK = None
+                cond2 = np.where((FSC > 1) | np.isnan(FSC) | np.isinf(FSC))
+                NDSI[cond2] = 9999
+                FSC[cond2] = 9999  
+                cond3 = np.where(np.isnan(NDSI) | np.isinf(NDSI))
+                FSC[cond3] = 9999  
+                NDSI[cond3] = 9999
+
+                cond5 = np.where((NDSI < 0) | (NDSI > 1))
+                FSC[cond5] = 9999 
+                NDSI[cond5] = 9999            
+
+                raster.GetRasterBand(1).WriteArray(NDSI)
+
+                gdal.Translate(os.path.join(dir_tifs_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsgS2 + ".tif"),raster,format= 'GTiff',noData = 9999)
+                raster.GetRasterBand(1).WriteArray(FSC)
+
+                gdal.Translate(os.path.join(dir_tifs_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsgS2 + ".tif"),raster,format= 'GTiff',noData = 9999)
+
+                # Mark the area already consumed by this tile as nodata in
+                # every per-projection FSC raster
+                for proj in l_g_FSC:
+
+                    g_FSC_m = gdal.Warp('',g_FSC_c,format= 'MEM',dstSRS="EPSG:" + proj,xRes= 20,yRes= 20)
+                    NODATA = BandReadAsArray(g_FSC_m.GetRasterBand(1))
+                    #NODATA[np.isnan(NODATA)] = 9999
+                    #NODATA[NODATA != 9999] = 9999
+                    condnd = np.where((NODATA != nd) & (~np.isnan(NODATA)))
+                    NODATA[condnd] = 9999
+                    g_FSC_m.GetRasterBand(1).WriteArray(NODATA)
+                    g_FSC = gdal.Warp('',[l_g_FSC[proj],g_FSC_m],format= 'MEM')
+                    l_g_FSC[proj] = g_FSC
+                    a_FSC = None
+                    g_FSC = None
+                
+
+            nb_results += 1
+    
+    
+        
+        if nb_results > 0:
+            print("\nNumber of processed dates: " + str(nb_results))
+            return True
+        else:
+            print("\nERROR products2DataSets : No date processed")
+            return False
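+
+    # Illustrative sketch (our naming, not called by the pipeline): the NDSI
+    # computation above casts the green and SWIR bands to float and lets any
+    # invalid division produce nan/inf, which is then masked to 9999.
+    # An equivalent, more explicit variant could look like this:
+    @staticmethod
+    def _ndsi_sketch(band_green, band_swir, nodata=9999):
+        import numpy as np
+        green = band_green.astype(float)
+        swir = band_swir.astype(float)
+        with np.errstate(divide='ignore', invalid='ignore'):
+            ndsi = (green - swir) / (green + swir)
+            # Flag nan/inf and values outside the physically meaningful
+            # [0, 1] range as nodata, mirroring cond2/cond3/cond5 above
+            bad = ~np.isfinite(ndsi) | (ndsi < 0) | (ndsi > 1)
+        ndsi[bad] = nodata
+        return ndsi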
+                
+                
+                
+
+    #From the FSC and NDSI datasets found in dirName, calibrate the a and b parameters of the FSC = 0.5*tanh(a*NDSI+b)+0.5 model
+    #The datasets are split into a training set and a testing set
+    #The calibration is made with the training set and validated with the testing set
+    #dirName (string) : name of the directory containing the datasets (self.path_outputs/dirName)
+    #source (string) : name of the FSC dataset source (PLEIADES, SPOT, IZAS, etc.)
+    #percTest (float) : fraction of the datasets used for the validation of the calibrated model
+    #return the a and b parameters and the rmse of the validation
+    def calibrateModel(self,dirName,source,percTest):
+
+        dataSetDir = os.path.join(self.path_outputs,dirName)
+        path_tifs = os.path.join(dataSetDir,"TIFS")
+        path_cal = os.path.join(dataSetDir,"CALIBRATION")
+        
+
+
+        sorted_dates = sorted(os.listdir(path_tifs))
+        dateDebut = sorted_dates[0]
+        dateFin = sorted_dates[-1]
+        
+        NDSIALL = []
+        FSCALL = []
+
+        
+        path_cal_date = os.path.join(path_cal,dateDebut + "_" + dateFin)
+        shutil.rmtree(path_cal_date, ignore_errors=True)
+        self.mkdir_p(path_cal_date)
+
+        f= open(os.path.join(path_cal_date,source + "_CALIBRATION_RESULTS.txt"),"w")
+        f.write("\nDates :")
+        nb_dates = 0
+        
+            
+            
+        for d in sorted(os.listdir(path_tifs)):
+            date = self.getDateFromStr(d)
+            if date == '' : continue
+            print(date)
+            path_tifs_date = os.path.join(path_tifs,d)
+            
+            
+            epsgs = {}
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                if epsg == '': continue
+                if epsg not in epsgs :
+                    epsgs[epsg] = []
+                    
+            tiles = []
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                tile = self.getTileFromStr(tif)
+                if epsg == '' or tile == '': continue
+                if tile not in epsgs[epsg]:
+                    epsgs[epsg].append(tile)      
+                    
+            
+            
+            for epsg in epsgs :
+                for tile in epsgs[epsg]:
+                    g_FSC = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    FSCALL.append(BandReadAsArray(g_FSC.GetRasterBand(1)).flatten())
+                    g_NDSI = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    NDSIALL.append(BandReadAsArray(g_NDSI.GetRasterBand(1)).flatten())
+                    
+            f.write("\n      " + d)
+            nb_dates += 1
+        
+        print("Eliminate Nodata pixels")
+        NDSIALL = np.hstack(NDSIALL)
+        FSCALL = np.hstack(FSCALL)  
+        cond1 = np.where((FSCALL != 9999) & (~np.isnan(FSCALL)) & (~np.isinf(FSCALL)))
+        NDSIALL = NDSIALL[cond1]
+        FSCALL = FSCALL[cond1]
+        
+        cond2 = np.where( (NDSIALL != 9999) & (~np.isnan(NDSIALL)) & (~np.isinf(NDSIALL)))
+        FSCALL = FSCALL[cond2]
+        NDSIALL = NDSIALL[cond2]
+        
+
+            
+        if len(FSCALL) < 2 : 
+            f.close()
+            shutil.rmtree(path_cal_date, ignore_errors=True)
+            print("ERROR calibrateModel : dataSet too small")
+            return 0,0,0
+
+
+        NDSI_train, NDSI_test, FSC_train, FSC_test = train_test_split(NDSIALL, FSCALL, test_size=percTest)
+
+        
+        #CALIBRATION
+        print("CALIBRATION")
+        fun = lambda x: sqrt(mean_squared_error(0.5*np.tanh(x[0]*NDSI_train+x[1])+0.5,FSC_train))
+        
+        model = opti.minimize(fun,(3.0,-1.0),method = 'Nelder-Mead')
+
+        a = model.x[0]
+        b = model.x[1]
+        success = model.success
+        rmse_cal = model.fun
+        print("CALIBRATION SUCCESS : ",success)
+        print("CALIBRATION RMSE : ",rmse_cal)
+        
+        
+
+        # Plot figure with subplots 
+        fig = plt.figure()
+        st = fig.suptitle(source + " : CALIBRATION FOR THE PERIOD " + dateDebut + " - " + dateFin)
+        # set up subplot grid
+        gridspec.GridSpec(2,2)
+        
+        # 2D histogram of the calibration
+        ax = plt.subplot2grid((2,2), (0,0))
+        
+        plt.title("CALIBRATION WITH THE TRAINING SET")
+        plt.ylabel('Training FSC',size = 10)
+        plt.xlabel('Training NDSI',size = 10)
+        plt.hist2d(NDSI_train,FSC_train,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'), norm=LogNorm())
+        
+        n = np.arange(min(NDSI_train),1.01,0.01)
+        
+        line = 0.5*np.tanh(a*n+b) +  0.5
+
+        plt.plot(n, line, 'r', label='FSC=0.5*tanh(a*NDSI+b)+0.5\na={:.2f} b={:.2f}\nRMSE={:.2f}'.format(a,b,rmse_cal))
+        plt.legend(fontsize=10,loc='upper left')
+
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+
+
+
+        # VALIDATION
+        
+        # prediction of FSC from testing NDSI
+        FSC_pred = 0.5*np.tanh(a*NDSI_test+b) +  0.5
+
+        # error
+        er_FSC = FSC_pred - FSC_test
+
+        # absolute error
+        abs_er_FSC = abs(er_FSC)
+
+        # mean error
+        m_er_FSC = np.mean(er_FSC)
+
+        # absolute mean error
+        abs_m_er_FSC = np.mean(abs_er_FSC)
+
+        #root mean square error
+        rmse_FSC = sqrt(mean_squared_error(FSC_pred,FSC_test))
+
+        #correlation
+        corr_FSC = mstats.pearsonr(FSC_pred,FSC_test)[0]
+
+        #standard deviation
+        stde_FSC = np.std(er_FSC)
+
+
+        # correlation, mean error, standard deviation, rmse
+
+        # 2D histogram of the validation
+        ax = plt.subplot2grid((2,2), (0,1))
+        
+        plt.title("VALIDATION WITH THE TESTING SET")
+        plt.ylabel('predicted FSC',size = 10)
+        plt.xlabel('testing FSC',size = 10)
+        plt.hist2d(FSC_test,FSC_pred,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+        slope, intercept, r_value, p_value, std_err = mstats.linregress(FSC_test,FSC_pred) 
+        n = np.array([min(FSC_test),1.0])
+        line = slope * n + intercept
+
+        plt.plot(n, line, 'b', label='y = {:.2f}x + {:.2f}\ncorr={:.2f} rmse={:.2f}'.format(slope,intercept,corr_FSC,rmse_FSC))
+        plt.plot(n, n, 'g', label='y = 1.0x + 0.0')
+
+        plt.legend(fontsize=10,loc='upper left')
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+
+        # 1D histogram of the residuals
+        ax = plt.subplot2grid((2,2), (1,0),rowspan=1, colspan=2)
+        plt.title("FSC RESIDUALS")
+        plt.ylabel('amount of data points',size = 10)
+        plt.xlabel('FSC pred - test',size = 10)
+        xticks = np.arange(-1.0, 1.1, 0.1)
+        plt.xticks(xticks)
+        plt.hist(er_FSC,bins=40,weights=np.ones(len(er_FSC)) / len(er_FSC))
+        plt.gca().yaxis.set_major_formatter(PercentFormatter(1))
+        plt.grid(True) 
+
+
+        # fit subplots & save fig
+        fig.tight_layout()
+        fig.set_size_inches(w=16,h=10)
+        st.set_y(0.95)
+        fig.subplots_adjust(top=0.85)
+        fig.savefig(os.path.join(path_cal_date,'PLOT_CAL_' + source + '_' + dateDebut + "_" + dateFin + '.png'))
+        plt.close(fig)
+
+
+
+        f.write("\n")
+        f.write("\nCALIBRATION" )
+        f.write("\n  Number of 20x20m data points : " + str(len(NDSI_train)))
+        f.write("\n lin. reg. NDSI on FSC : 0.5*tanh(a*NDSI+b)+0.5 : a = " + str(a) + " ; b = " + str(b))
+        f.write("\n root mean square err. : " + str(rmse_cal))
+
+            
+        f.write("\n")
+        
+        f.write("\nVALIDATION" )
+        f.write("\n  Number of 20x20m data points : " + str(len(NDSI_test)))
+        f.write("\n  corr. coef. : " + str(corr_FSC))
+        f.write("\n  std. err. : " + str(stde_FSC))
+        f.write("\n  mean err. : " + str(m_er_FSC))
+        f.write("\n  abs. mean err. : " + str(abs_m_er_FSC))
+        f.write("\n  root mean square err. : " + str(rmse_FSC))
+
+        f.close()
+
+
+        return a,b,rmse_FSC
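+
+    # Minimal self-contained sketch of the calibration step above, run on
+    # synthetic data (hypothetical values; the real method uses the NDSI/FSC
+    # GeoTIFF stacks): fit a and b of FSC = 0.5*tanh(a*NDSI+b)+0.5 by
+    # minimizing the training RMSE with Nelder-Mead, as opti.minimize does above.
+    @staticmethod
+    def _calibration_sketch():
+        import numpy as np
+        import scipy.optimize as opti
+        from math import sqrt
+        from sklearn.metrics import mean_squared_error
+        rng = np.random.default_rng(0)
+        ndsi = rng.uniform(0.0, 1.0, 1000)
+        noise = rng.normal(0.0, 0.05, 1000)
+        fsc = np.clip(0.5 * np.tanh(2.5 * ndsi - 1.0) + 0.5 + noise, 0.0, 1.0)
+        # Same cost function as in calibrateModel: RMSE of the model on the training set
+        fun = lambda x: sqrt(mean_squared_error(0.5 * np.tanh(x[0] * ndsi + x[1]) + 0.5, fsc))
+        model = opti.minimize(fun, (3.0, -1.0), method='Nelder-Mead')
+        return model.x[0], model.x[1], model.fun  # a, b and the training RMSE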
+
+
+
+
+    #With the FSC and NDSI datasets found in evalDirNames, evaluate the a and b parameters of the FSC = 0.5*tanh(a*NDSI+b)+0.5 model
+    #calDirName (string) : name of the directory containing the datasets used for the calibration (self.path_outputs/dirName)
+    #calSource (string) : name of the FSC dataset source (PLEIADES, SPOT, IZAS, etc.) used for the calibration
+    #evalDirNames (list of strings) : names of the directories containing the datasets used for the evaluation (self.path_outputs/dirName)
+    #evalSources (list of strings) : names of the FSC dataset sources (PLEIADES, SPOT, IZAS, etc.) used for the evaluation
+    #a,b (float) : parameters of the FSC = 0.5*tanh(a*NDSI+b)+0.5 model
+    #return the rmse of the evaluation
+    def evaluateModel(self,calDirName,evalDirNames,calSource,evalSources,a,b):
+        
+
+        calDataSetDir = os.path.join(self.path_outputs,calDirName)
+        path_eval = os.path.join(calDataSetDir,"EVALUATION")
+        title = "EVAL_" + calSource + "_WITH"
+        
+
+
+        NDSI_test = []
+        FSC_test = []
+        
+        title2 = "EVAL"
+        for evalDirName in evalDirNames :
+            title2 = title2 + "_" + evalDirName 
+        
+        for evalSource in evalSources :
+            title = title + "_" + evalSource 
+
+        path_eval_dir = os.path.join(path_eval,title2)
+        shutil.rmtree(path_eval_dir, ignore_errors=True)
+
+        self.mkdir_p(path_eval_dir)
+
+        f= open(os.path.join(path_eval_dir,title + ".txt"),"w")
+        f.write("\nCalibration dataset :" + calDirName)
+        f.write("\nModel : FSC = 0.5*tanh(a*NDSI+b) +  0.5 with :")
+        f.write("\n        a = " + str(a) + " b = " + str(b))
+        f.write("\nEvaluation dataSets :")
+
+
+
+
+
+        for evalDirName in evalDirNames :
+
+            f.write("\n     "+ evalDirName)
+
+            evalDataSetDir = os.path.join(self.path_outputs,evalDirName)
+            path_tifs = os.path.join(evalDataSetDir,"TIFS")
+        
+
+
+            
+            for d in sorted(os.listdir(path_tifs)):
+                date = self.getDateFromStr(d)
+                if date == '' : continue
+                print(date)
+                path_tifs_date = os.path.join(path_tifs,d)
+                
+                
+                epsgs = {}
+                for tif in os.listdir(path_tifs_date) :
+                    epsg = self.getEpsgFromStr(tif)
+                    if epsg == '': continue
+                    if epsg not in epsgs :
+                        epsgs[epsg] = []
+                    
+                tiles = []
+                for tif in os.listdir(path_tifs_date) :
+                    epsg = self.getEpsgFromStr(tif)
+                    tile = self.getTileFromStr(tif)
+                    if epsg == '' or tile == '': continue
+                    if tile not in epsgs[epsg]:
+                        epsgs[epsg].append(tile)      
+                    
+            
+            
+                for epsg in epsgs :
+                    for tile in epsgs[epsg]:
+                        g_FSC = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                        FSC_test.append(BandReadAsArray(g_FSC.GetRasterBand(1)).flatten())
+                        g_NDSI = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                        NDSI_test.append(BandReadAsArray(g_NDSI.GetRasterBand(1)).flatten())
+                    
+                
+        
+        print("Eliminate Nodata pixels")
+        NDSI_test = np.hstack(NDSI_test)
+        FSC_test = np.hstack(FSC_test)  
+        cond1 = np.where((FSC_test != 9999) & (~np.isnan(FSC_test)) & (~np.isinf(FSC_test)))
+        NDSI_test = NDSI_test[cond1]
+        FSC_test = FSC_test[cond1]
+        
+        cond2 = np.where( (NDSI_test != 9999) & (~np.isnan(NDSI_test)) & (~np.isinf(NDSI_test)))
+        FSC_test = FSC_test[cond2]
+        NDSI_test = NDSI_test[cond2]
+        
+
+            
+        if len(FSC_test) < 2 : 
+            f.close()
+            shutil.rmtree(path_eval_dir, ignore_errors=True)
+            print("ERROR evaluateModel : dataSet too small")
+            return 0
+
+
+
+        # VALIDATION
+        
+        # prediction of FSC from testing NDSI
+        FSC_pred =  0.5*np.tanh(a*NDSI_test+b) +  0.5
+
+        # error
+        er_FSC = FSC_pred - FSC_test
+
+        # absolute error
+        abs_er_FSC = abs(er_FSC)
+
+        # mean error
+        m_er_FSC = np.mean(er_FSC)
+
+        # absolute mean error
+        abs_m_er_FSC = np.mean(abs_er_FSC)
+
+        #root mean square error
+        rmse_FSC = sqrt(mean_squared_error(FSC_pred,FSC_test))
+
+        #correlation
+        corr_FSC = mstats.pearsonr(FSC_pred,FSC_test)[0]
+
+        #standard deviation
+        stde_FSC = np.std(er_FSC)
+
+
+        # Plot figure with subplots 
+        fig = plt.figure()
+        st = fig.suptitle(title)
+        # set up subplot grid
+        gridspec.GridSpec(2,2)
+
+
+
+        # 2D histogram of FSC vs NDSI
+        ax = plt.subplot2grid((2,2), (0,0))
+        plt.ylabel('testing FSC',size = 10)
+        plt.xlabel('testing NDSI',size = 10)
+        plt.hist2d(NDSI_test,FSC_test,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+        n = np.arange(min(NDSI_test),1.01,0.01)
+        line = 0.5*np.tanh(a*n+b) +  0.5
+        plt.plot(n, line, 'r', label='Predicted FSC')
+        plt.legend(fontsize=10,loc='upper left')
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio) 
+
+        # 2D histogram of the validation
+        ax = plt.subplot2grid((2,2), (0,1))
+        plt.ylabel('predicted FSC',size = 10)
+        plt.xlabel('testing FSC',size = 10)
+        plt.hist2d(FSC_test,FSC_pred,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+        slope, intercept, r_value, p_value, std_err = mstats.linregress(FSC_test,FSC_pred) 
+        n = np.array([min(FSC_test),1.0])
+        line = slope * n + intercept
+        plt.plot(n, line, 'b', label='y = {:.2f}x + {:.2f}\ncorr={:.2f} rmse={:.2f}'.format(slope,intercept,corr_FSC,rmse_FSC))
+        plt.plot(n, n, 'g', label='y = 1.0x + 0.0')
+        plt.legend(fontsize=10,loc='upper left')
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+
+
+        # 1D histogram of the residuals
+        ax = plt.subplot2grid((2,2), (1,0),rowspan=1, colspan=2)
+        plt.title("FSC RESIDUALS")
+        plt.ylabel('amount of data points',size = 10)
+        plt.xlabel('FSC pred - test',size = 10)
+        xticks = np.arange(-1.0, 1.1, 0.1)
+        plt.xticks(xticks)
+        plt.hist(er_FSC,bins=40,weights=np.ones(len(er_FSC)) / len(er_FSC))
+        plt.gca().yaxis.set_major_formatter(PercentFormatter(1))
+        plt.grid(True) 
+
+
+        # fit subplots & save fig
+        fig.tight_layout()
+        fig.set_size_inches(w=16,h=10)
+        st.set_y(0.95)
+        fig.subplots_adjust(top=0.85)
+        fig.savefig(os.path.join(path_eval_dir,'PLOT_' + title + '.png'))
+        plt.close(fig)
+
+
+            
+        f.write("\n")
+        
+        f.write("\nEVALUATION" )
+        f.write("\n  Number of 20x20m data points : " + str(len(NDSI_test)))
+        f.write("\n  corr. coef. : " + str(corr_FSC))
+        f.write("\n  std. err. : " + str(stde_FSC))
+        f.write("\n  mean err. : " + str(m_er_FSC))
+        f.write("\n  abs. mean err. : " + str(abs_m_er_FSC))
+        f.write("\n  root mean square err. : " + str(rmse_FSC))
+
+        f.close()
+
+
+        return rmse_FSC
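+
+    # Sketch grouping the error metrics computed above into one helper
+    # (illustrative only; the names are ours, not part of the pipeline):
+    @staticmethod
+    def _error_metrics_sketch(fsc_pred, fsc_test):
+        import numpy as np
+        from math import sqrt
+        from sklearn.metrics import mean_squared_error
+        from scipy.stats import mstats
+        err = fsc_pred - fsc_test
+        return {
+            'mean err': np.mean(err),                        # bias of the prediction
+            'abs mean err': np.mean(np.abs(err)),            # mean absolute error
+            'rmse': sqrt(mean_squared_error(fsc_pred, fsc_test)),
+            'corr': mstats.pearsonr(fsc_pred, fsc_test)[0],  # Pearson correlation
+            'std err': np.std(err),                          # spread of the residuals
+        }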
+
+
+    
+    
+    
+    #Plots all the FSC datasets vs NDSI datasets found in dirName (one png for the whole period)
+    #dirName (string) : name of the directory containing the datasets (self.path_outputs/dirName)
+    #source (string) : name of the FSC dataset source (PLEIADES, SPOT, IZAS, etc.)
+    #return a success bool
+    def PlotPeriode(self,dirName,source):
+        
+        dataSetDir = os.path.join(self.path_outputs,dirName)
+        path_tifs = os.path.join(dataSetDir,"TIFS")
+        path_plots = os.path.join(dataSetDir,"PLOTS")
+        
+        print("Start plotting the periode")
+        NDSIALL = []
+        FSCALL = []
+        NDSIALL2 = []
+        FSCALL2 = []
+
+        sorted_dates = sorted(os.listdir(path_tifs))
+        dateDebut = sorted_dates[0]
+        dateFin = sorted_dates[-1]
+        
+        path_plots_date = os.path.join(path_plots,dateDebut + "_" + dateFin)
+        self.mkdir_p(path_plots_date)
+        
+        f= open(os.path.join(path_plots_date,"INFO.txt"),"w")
+        f.write("\nDates :")
+        nb_dates = 0
+        
+        
+
+            
+            
+        for d in sorted_dates:
+            date = self.getDateFromStr(d)
+            if date == '' : continue
+            print(date)
+            path_tifs_date = os.path.join(path_tifs,d)
+            
+            
+            epsgs = {}
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                if epsg == '': continue
+                if epsg not in epsgs :
+                    epsgs[epsg] = []
+                    
+            tiles = []
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                tile = self.getTileFromStr(tif)
+                if epsg == '' or tile == '': continue
+                if tile not in epsgs[epsg]:
+                    epsgs[epsg].append(tile)      
+                    
+            
+            
+            for epsg in epsgs :
+                for tile in epsgs[epsg]:
+                    g_FSC = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    FSCALL.append(BandReadAsArray(g_FSC.GetRasterBand(1)).flatten())
+                    g_NDSI = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    NDSIALL.append(BandReadAsArray(g_NDSI.GetRasterBand(1)).flatten())
+                    
+            f.write("\n      " + d)
+            nb_dates += 1
+        
+        print("Eliminate Nodata pixels")
+        NDSIALL = np.hstack(NDSIALL)
+        FSCALL = np.hstack(FSCALL)  
+        cond1 = np.where((FSCALL != 9999) & (~np.isnan(FSCALL)) & (~np.isinf(FSCALL)))
+        NDSIALL = NDSIALL[cond1]
+        FSCALL = FSCALL[cond1]
+        
+        cond2 = np.where( (NDSIALL != 9999) & (~np.isnan(NDSIALL)) & (~np.isinf(NDSIALL)))
+        FSCALL = FSCALL[cond2]
+        NDSIALL = NDSIALL[cond2]
+        
+        cond3 = np.where((FSCALL != 0) & (FSCALL != 1))
+        NDSIALL2 = NDSIALL[cond3]
+        FSCALL2 = FSCALL[cond3]
+            
+        if len(FSCALL2) < 2 : 
+            f.close()
+            shutil.rmtree(path_plots_date, ignore_errors=True)
+            return False
+        f.write("\nNumber of dates : " + str(nb_dates))
+            
+
+        print("Create plots")
+        minNDSI = min(NDSIALL)
+        list_FSC_box = [FSCALL[np.where((NDSIALL >= 0.8) & (NDSIALL <= 1))]]
+        list_labels_box = ["[ 0.8\n1 ]"]
+        b = 0.8
+        while minNDSI < b : 
+            a = b - 0.2
+            list_FSC_box.insert(0,FSCALL[np.where((NDSIALL >= a) & (NDSIALL < b))])
+            list_labels_box.insert(0,"[ "+ "{0:.1f}".format(a) +"\n"+ "{0:.1f}".format(b) +" [")
+            b = b - 0.2
+            
+
+        minNDSI2 = min(NDSIALL2)
+        list_FSC_box2 = [FSCALL2[np.where((NDSIALL2 >= 0.8) & (NDSIALL2 <= 1))]]
+        list_labels_box2 = ["[ 0.8\n1 ]"]
+        b = 0.8
+        while minNDSI2 < b : 
+            a = b - 0.2
+            list_FSC_box2.insert(0,FSCALL2[np.where((NDSIALL2 >= a) & (NDSIALL2 < b))])
+            list_labels_box2.insert(0,"[ "+ "{0:.1f}".format(a) +"\n"+ "{0:.1f}".format(b) +" [")
+            b = b - 0.2      
+
+        
+        
+
+        
+        # Plot figure with subplots 
+        fig = plt.figure()
+        st = fig.suptitle(source + " : FSC / NDSI FOR THE PERIOD " + dateDebut + " - " + dateFin)
+        # set up subplot grid
+        gridspec.GridSpec(2,3)
+        
+        # 2D histogram including FSC = 0 and FSC = 1
+        ax = plt.subplot2grid((2,3), (0,2))
+        
+        
+        plt.ylabel('0 <= FSC <= 1')
+        plt.xlabel('NDSI')
+        plt.hist2d(NDSIALL,FSCALL,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'), norm=LogNorm())
+        
+        if NDSIALL.size > 0 and FSCALL.size > 0 :
+            slopeA, interceptA, r_valueA, p_valueA, std_errA = mstats.linregress(NDSIALL,FSCALL) 
+            slopeB, interceptB, r_valueB, p_valueB, std_errB = mstats.linregress(FSCALL,NDSIALL) 
+            n = np.array([minNDSI,1.0])
+            lineA = slopeA*n+interceptA
+            lineB = (n-interceptB)/slopeB
+            plt.plot(n, lineA, 'g', label='MA: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(slopeA,interceptA,r_valueA,std_errA))
+            plt.plot(n, lineB, 'r', label='MB: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(1/slopeB,-interceptB/slopeB,r_valueB,std_errB))
+            plt.legend(fontsize=6,loc='upper left')
+
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+        
+        # 2D histogram excluding FSC = 0 and FSC = 1
+        ax = plt.subplot2grid((2,3), (1,2))
+        
+        
+        plt.ylabel('0 < FSC < 1')
+        plt.xlabel('NDSI')
+        plt.hist2d(NDSIALL2,FSCALL2,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+        if NDSIALL2.size > 0 and FSCALL2.size > 0 :
+            slopeA2, interceptA2, r_valueA2, p_valueA2, std_errA2 = mstats.linregress(NDSIALL2,FSCALL2) 
+            slopeB2, interceptB2, r_valueB2, p_valueB2, std_errB2 = mstats.linregress(FSCALL2,NDSIALL2) 
+            n = np.array([minNDSI2,1.0])
+            lineA = slopeA2*n+interceptA2
+            lineB = (n-interceptB2)/slopeB2
+            plt.plot(n, lineA, 'g', label='MA: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(slopeA2,interceptA2,r_valueA2,std_errA2))
+            plt.plot(n, lineB, 'r', label='MB: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(1/slopeB2,-interceptB2/slopeB2,r_valueB2,std_errB2))
+            plt.legend(fontsize=6,loc='upper left')
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+
+
+
+        # boxplot including FSC = 0 and FSC = 1
+        ax = plt.subplot2grid((2,3), (0,0),rowspan=1, colspan=2)
+        plt.title('ANALYSIS WITH 0 <= FSC <= 1')
+        plt.ylabel('0 <= FSC <= 1')
+        plt.xlabel('NDSI')
+        plt.boxplot(list_FSC_box,labels = list_labels_box)
+        
+
+        
+        # boxplot excluding FSC = 0 and FSC = 1
+        ax = plt.subplot2grid((2,3), (1,0),rowspan=1, colspan=2)
+        plt.title('ANALYSIS WITH 0 < FSC < 1')
+        plt.ylabel('0 < FSC < 1')
+        plt.xlabel('NDSI')
+        plt.boxplot(list_FSC_box2,labels = list_labels_box2)
+    
+        # fit subplots & save fig
+        fig.tight_layout()
+        fig.set_size_inches(w=16,h=10)
+        st.set_y(0.95)
+        fig.subplots_adjust(top=0.85)
+        fig.savefig(os.path.join(path_plots_date,'PLOT_FSC_NDSI_' + source + '_' + dateDebut + "_" + dateFin + '.png'))
+        plt.close(fig)
+        
+        
+        f.write("\nFor  0 <= FSC <= 1 : " )
+        f.write("\n  Number of data points : " + str(len(NDSIALL)))
+        if NDSIALL.size > 0 and FSCALL.size > 0 :
+            f.write("\n lin. reg. FSC on NDSI (MA): FSC = aNDSI + b : a = " + str(slopeA) + " ; b = " + str(interceptA))
+            f.write("\n  std. err. (MA): " + str(std_errA))
+            f.write("\n lin. reg. NDSI on FSC (MB): FSC = aNDSI + b : a = " + str(1/slopeB) + " ; b = " + str(-interceptB/slopeB))
+            f.write("\n  std. err. (MB): " + str(std_errB))
+            f.write("\n  corr. coef. : " + str(r_valueA))
+            
+        
+        f.write("\nFor  0 < FSC < 1 : " )
+        f.write("\n  Number of data points : " + str(len(NDSIALL2)))
+        if NDSIALL2.size > 0 and FSCALL2.size > 0 :
+            f.write("\n lin. reg. FSC on NDSI (MA): FSC = aNDSI + b : a = " + str(slopeA2) + " ; b = " + str(interceptA2))
+            f.write("\n  std. err. (MA): " + str(std_errA2))
+            f.write("\n lin. reg. NDSI on FSC (MB): FSC = aNDSI + b : a = " + str(1/slopeB2) + " ; b = " + str(-interceptB2/slopeB2))
+            f.write("\n  std. err. (MB): " + str(std_errB2))
+            f.write("\n  corr. coef. : " + str(r_valueA2))
+        f.close()
+    
+        print ("\n plotting finished")
+        NDSI = None
+        FSC = None
+        NDSIALL = None
+        FSCALL = None
+        NDSIALL2 = None
+        FSCALL2 = None
+        
+        return True
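+
+    # Sketch of the NDSI binning used for the boxplots above (illustrative
+    # helper; PlotPeriode builds the same lists inline, on 1-D numpy arrays
+    # of valid pixels): one box for NDSI in [0.8, 1], then 0.2-wide bins
+    # walking down to the minimum NDSI.
+    @staticmethod
+    def _ndsi_boxes_sketch(ndsi, fsc):
+        boxes = [fsc[(ndsi >= 0.8) & (ndsi <= 1)]]
+        labels = ["[ 0.8\n1 ]"]
+        b = 0.8
+        while ndsi.min() < b :
+            a = b - 0.2
+            boxes.insert(0, fsc[(ndsi >= a) & (ndsi < b)])
+            labels.insert(0, "[ " + "{0:.1f}".format(a) + "\n" + "{0:.1f}".format(b) + " [")
+            b = b - 0.2
+        return boxes, labels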
+        
+
+
+        
+
+    #For each available date, plots the FSC vs NDSI datasets (one png file per date)
+    #dirName (string) : name of the directory containing the datasets (self.path_outputs/dirName) 
+    #source (string) : name of the FSC dataset source (PLEIADES, SPOT, IZAS,etc...) 
+    #return a success bool
+    def PlotEachDates(self,dirName,source):
+
+        print("Start plotting each date")
+        dataSetDir = os.path.join(self.path_outputs,dirName)
+        path_tifs = os.path.join(dataSetDir,"TIFS")
+        path_plots = os.path.join(dataSetDir,"PLOTS")
+            
+            
+        for d in sorted(os.listdir(path_tifs)):
+            date = self.getDateFromStr(d)
+            if date == '' : continue
+            print(date)
+            path_tifs_date = os.path.join(path_tifs,d)
+            path_plots_date = os.path.join(path_plots,d)
+            self.mkdir_p(path_plots_date)
+            FSC = []
+            NDSI = []  
+            
+            epsgs = {}
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                if epsg == '': continue
+                if epsg not in epsgs :
+                    epsgs[epsg] = []
+                    
+            tiles = []
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                tile = self.getTileFromStr(tif)
+                if epsg == '' or tile == '': continue
+                if tile not in epsgs[epsg]:
+                    epsgs[epsg].append(tile)  
+                    
+                    
+            
+            for epsg in epsgs :
+                for tile in epsgs[epsg]:
+                    g_FSC = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    FSC.append(BandReadAsArray(g_FSC.GetRasterBand(1)).flatten())
+                    g_NDSI = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    NDSI.append(BandReadAsArray(g_NDSI.GetRasterBand(1)).flatten())
+                    
+                    
+
+
+        
+            print("Eliminate Nodata pixels")
+            NDSI = np.hstack(NDSI)
+            FSC = np.hstack(FSC)  
+            cond1 = np.where((FSC != 9999) & (~np.isnan(FSC)) & (~np.isinf(FSC)))
+            NDSI = NDSI[cond1]
+            FSC = FSC[cond1]
+            
+            cond2 = np.where( (NDSI != 9999) & (~np.isnan(NDSI)) & (~np.isinf(NDSI)))
+            FSC = FSC[cond2]
+            NDSI = NDSI[cond2]
+            
+            cond3 = np.where((FSC != 0) & (FSC != 1))
+            NDSI2 = NDSI[cond3]
+            FSC2 = FSC[cond3]            
+            if len(FSC2) < 2 : 
+                print("Not enough available pixels")
+                continue
+            
+
+            f = open(os.path.join(path_plots_date,"INFO.txt"),"w")
+            f.write("\nDate : " + d)
+            f.write("\nProjections of FSC inputs : ")
+            for epsg in epsgs :  f.write("\n                   " + epsg)
+            
+            
+            
+            
+            minNDSI = min(NDSI)
+            list_FSC_box = [FSC[np.where((NDSI >= 0.8) & (NDSI <= 1))]]
+            list_labels_box = ["[ 0.8\n1 ]"]
+            b = 0.8
+            while minNDSI < b : 
+                a = b - 0.2
+                list_FSC_box.insert(0,FSC[np.where((NDSI >= a) & (NDSI < b))])
+                list_labels_box.insert(0,"[ "+ "{0:.1f}".format(a) +"\n"+ "{0:.1f}".format(b) +" [")
+                b = b - 0.2
+                
+
+            minNDSI2 = min(NDSI2)
+            list_FSC_box2 = [FSC2[np.where((NDSI2 >= 0.8) & (NDSI2 <= 1))]]
+            list_labels_box2 = ["[ 0.8\n1 ]"]
+            b = 0.8
+            while minNDSI2 < b : 
+                a = b - 0.2
+                list_FSC_box2.insert(0,FSC2[np.where((NDSI2 >= a) & (NDSI2 < b))])
+                list_labels_box2.insert(0,"[ "+ "{0:.1f}".format(a) +"\n"+ "{0:.1f}".format(b) +" [")
+                b = b - 0.2         
+            
+            
+
+            
+            # Plot figure with subplots 
+            fig = plt.figure()
+            st = fig.suptitle(source + " : FSC / NDSI FOR " + date.strftime("%Y/%m/%d"))
+            gridspec.GridSpec(2,3)
+        
+            # 2D histogram including FSC = 0 and FSC = 1
+            ax = plt.subplot2grid((2,3), (0,2))
+            slopeA, interceptA, r_valueA, p_valueA, std_errA = mstats.linregress(NDSI,FSC) 
+            slopeB, interceptB, r_valueB, p_valueB, std_errB = mstats.linregress(FSC,NDSI)
+            
+            plt.ylabel('0 <= FSC <= 1')
+            plt.xlabel('NDSI')
+            plt.hist2d(NDSI,FSC,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'), norm=LogNorm())
+            n = np.array([minNDSI,1.0])
+            lineA = slopeA*n+interceptA
+            lineB = (n-interceptB)/slopeB
+            plt.plot(n, lineA, 'g', label='MA: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(slopeA,interceptA,r_valueA,std_errA))
+            plt.plot(n, lineB, 'r', label='MB: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(1/slopeB,-interceptB/slopeB,r_valueB,std_errB))
+            plt.legend(fontsize=6,loc='upper left')
+            plt.colorbar()
+            ratio = 1
+            xleft, xright = ax.get_xlim()
+            ybottom, ytop = ax.get_ylim()
+            ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+            
+            # 2D histogram excluding FSC = 0 and FSC = 1
+            ax = plt.subplot2grid((2,3), (1,2))
+            slopeA2, interceptA2, r_valueA2, p_valueA2, std_errA2 = mstats.linregress(NDSI2,FSC2) 
+            slopeB2, interceptB2, r_valueB2, p_valueB2, std_errB2 = mstats.linregress(FSC2,NDSI2) 
+            
+            plt.ylabel('0 < FSC < 1')
+            plt.xlabel('NDSI')
+            plt.hist2d(NDSI2,FSC2,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+            n = np.array([minNDSI2,1.0])
+            lineA = slopeA2*n+interceptA2
+            lineB = (n-interceptB2)/slopeB2
+            plt.plot(n, lineA, 'g', label='MA: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(slopeA2,interceptA2,r_valueA2,std_errA2))
+            plt.plot(n, lineB, 'r', label='MB: a={:.2f} b={:.2f}\ncorr={:.2f} std_err={:.3f}'.format(1/slopeB2,-interceptB2/slopeB2,r_valueB2,std_errB2))
+            
+            
+            plt.legend(fontsize=6,loc='upper left')
+            plt.colorbar()
+            ratio = 1
+            xleft, xright = ax.get_xlim()
+            ybottom, ytop = ax.get_ylim()
+            ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+        
+        
+        
+            # boxplot including FSC = 0 and FSC = 1
+            ax = plt.subplot2grid((2,3), (0,0),rowspan=1, colspan=2)
+            plt.title('ANALYSIS WITH 0 <= FSC <= 1')
+            plt.ylabel('0 <= FSC <= 1')
+            plt.xlabel('NDSI')
+            plt.boxplot(list_FSC_box,labels = list_labels_box)
+            
+        
+            
+            # boxplot excluding FSC = 0 and FSC = 1
+            ax = plt.subplot2grid((2,3), (1,0),rowspan=1, colspan=2)
+            plt.title('ANALYSIS WITH 0 < FSC < 1')
+            plt.ylabel('0 < FSC < 1')
+            plt.xlabel('NDSI')
+            plt.boxplot(list_FSC_box2,labels = list_labels_box2)
+        
+            # fit subplots & save fig
+            fig.tight_layout()
+            
+            fig.set_size_inches(w=16,h=10)
+            st.set_y(0.95)
+            fig.subplots_adjust(top=0.85)
+            fig.savefig(os.path.join(path_plots_date,'PLOT_FSC_NDSI_'  + source + '_' + date.strftime(self.date_format) + '.png'))
+            plt.close(fig)
+            
+            
+
+            f.write("\nFor  0 <= FSC <= 1 : " )
+            f.write("\n  Number of data points : " + str(len(NDSI)))
+            if NDSI.size > 0 and FSC.size > 0 :
+                f.write("\n lin. reg. FSC on NDSI (MA): FSC = aNDSI + b : a = " + str(slopeA) + " ; b = " + str(interceptA))
+                f.write("\n  std. err. (MA): " + str(std_errA))
+                f.write("\n lin. reg. NDSI on FSC (MB): FSC = aNDSI + b : a = " + str(1/slopeB) + " ; b = " + str(-interceptB/slopeB))
+                f.write("\n  std. err. (MB): " + str(std_errB))
+                f.write("\n  corr. coef. : " + str(r_valueA))
+                
+            
+            f.write("\nFor  0 < FSC < 1 : " )
+            f.write("\n  Number of data points : " + str(len(NDSI2)))
+            if NDSI2.size > 0 and FSC2.size > 0 :
+                f.write("\n lin. reg. FSC on NDSI (MA): FSC = aNDSI + b : a = " + str(slopeA2) + " ; b = " + str(interceptA2))
+                f.write("\n  std. err. (MA): " + str(std_errA2))
+                f.write("\n lin. reg. NDSI on FSC (MB): FSC = aNDSI + b : a = " + str(1/slopeB2) + " ; b = " + str(-interceptB2/slopeB2))
+                f.write("\n  std. err. (MB): " + str(std_errB2))
+                f.write("\n  corr. coef. : " + str(r_valueA2))
+            f.close()
+            
+            
+        print ("\n plotting finished")
+        NDSI = None
+        FSC = None
+        NDSI2 = None
+        FSC2 = None
+        
+        return True
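+
+    # Sketch of the two regression lines drawn in the 2D histograms above
+    # (illustrative; names are ours): MA regresses FSC on NDSI directly, MB
+    # regresses NDSI on FSC and is inverted back to the FSC = a*NDSI + b form
+    # so both lines can be drawn in the same axes.
+    @staticmethod
+    def _ma_mb_sketch(ndsi, fsc):
+        from scipy.stats import mstats
+        slopeA, interceptA = mstats.linregress(ndsi, fsc)[:2]  # MA: FSC = a*NDSI + b
+        slopeB, interceptB = mstats.linregress(fsc, ndsi)[:2]  # MB: NDSI = a*FSC + b
+        # MB inverted: FSC = (NDSI - interceptB) / slopeB
+        return (slopeA, interceptA), (1 / slopeB, -interceptB / slopeB)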
+    
+
+
+
+
+
+
+  
+
+
+    #For each available date, create a quicklook (png) of each tif file in the dataset
+    #dirName (string) : name of the directory containing the tifs files (self.path_outputs/dirName) 
+    #return a success bool
+    def createQuickLooks(self,dirName):
+        
+        
+        p_cmp = os.path.join(self.path_palettes,"palette_cmp.txt")
+        p_fsc = os.path.join(self.path_palettes,"palette_FSC.txt")
+        
+        dataSetDir = os.path.join(self.path_outputs,dirName)
+        path_tifs = os.path.join(dataSetDir,"TIFS")
+        path_qckls = os.path.join(dataSetDir,"QUICKLOOKS")
+
+        nb_dates = 0
+        for date in sorted(os.listdir(path_tifs)):
+            print(date)
+            path_tifs_date = os.path.join(path_tifs,date)
+            path_qckls_date = os.path.join(path_qckls,date)
+            self.mkdir_p(path_qckls_date)
+            
+            #we get a list of tiles for each epsg
+            epsgs = {}
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                if epsg == '': continue
+                if epsg not in epsgs :
+                    epsgs[epsg] = []
+                    
+            tiles = []
+            for tif in os.listdir(path_tifs_date) :
+                epsg = self.getEpsgFromStr(tif)
+                tile = self.getTileFromStr(tif)
+                if epsg == '' or tile == '': continue
+                if tile not in epsgs[epsg]:
+                    epsgs[epsg].append(tile)  
+                    
+           
+            #create input FSC quicklook
+            f_FSC_i = os.path.join(path_tifs_date,"INPUT_FSC.tif")
+            os.system("gdaldem color-relief " + f_FSC_i + " " + p_fsc + " " + os.path.join(path_qckls_date,"INPUT_FSC.tif"))
+            gdal.Translate(os.path.join(path_qckls_date,"INPUT_FSC.png"),os.path.join(path_qckls_date,"INPUT_FSC.tif"),format= 'PNG', width=800,outputType = gdal.GDT_Byte)
+            os.remove(os.path.join(path_qckls_date,"INPUT_FSC.tif"))
+            #for each epsg 
+            for epsg in epsgs:
+                
+                #create resampled FSC quicklook for each projection
+                f_FSC_r = os.path.join(path_tifs_date,"RESAMPLED_FSC_EPSG-" + epsg + ".tif")
+                os.system("gdaldem color-relief " + f_FSC_r + " " + p_fsc + " " + os.path.join(path_qckls_date,"RESAMPLED_FSC_EPSG-" + epsg + ".tif"))
+                gdal.Translate(os.path.join(path_qckls_date,"RESAMPLED_FSC_EPSG-" + epsg + ".png"),os.path.join(path_qckls_date,"RESAMPLED_FSC_EPSG-" + epsg + ".tif"),format= 'PNG', width=800,outputType = gdal.GDT_Byte)
+                os.remove(os.path.join(path_qckls_date,"RESAMPLED_FSC_EPSG-" + epsg + ".tif"))
+
+                
+                for tile in epsgs[epsg]:
+                    
+                    #create compo quicklook
+                    f_COMPO = os.path.join(path_tifs_date,"INPUT_COMPO_"+tile+"_EPSG-"+epsg+".tif")
+                    gdal.Translate(os.path.join(path_qckls_date,"INPUT_COMPO_"+tile+"_EPSG-"+epsg+".png"),f_COMPO,format= 'PNG', width=800,outputType = gdal.GDT_Byte)
+                    
+
+                    
+                    #create FSC output quicklook
+                    f_FSC = os.path.join(path_tifs_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif")
+                    os.system("gdaldem color-relief " + f_FSC + " " + p_fsc + " " + os.path.join(path_qckls_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    gdal.Translate(os.path.join(path_qckls_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".png"),os.path.join(path_qckls_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"),format= 'PNG', width=800,outputType = gdal.GDT_Byte)
+                    os.remove(os.path.join(path_qckls_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    
+                    #create NDSI output quicklook
+                    f_NDSI = os.path.join(path_tifs_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif")
+                    os.system("gdaldem color-relief " + f_NDSI + " " + p_fsc + " " + os.path.join(path_qckls_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    gdal.Translate(os.path.join(path_qckls_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".png"),os.path.join(path_qckls_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"),format= 'PNG', width=800,outputType = gdal.GDT_Byte)
+                    os.remove(os.path.join(path_qckls_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    
+                    
+                    
+                    #create snow difference quicklook
+                    
+                    
+                    g_FSC = gdal.Open(f_FSC_r)
+                    g_SEB = gdal.Open(os.path.join(path_tifs_date,"INPUT_SEB_"+tile+"_EPSG-"+epsg+".tif"))
+                    
+                    minx, maxy, maxx, miny = self.getOverlapCoords(g_FSC,g_SEB)
+                    g_FSC = gdal.Translate('',g_FSC,format= 'MEM',projWin = [minx, maxy, maxx, miny]) 
+                    g_SEB = gdal.Translate('',g_SEB,format= 'MEM',projWin = [minx, maxy, maxx, miny],outputType = gdal.GDT_Float32) 
+                    g_CMP = g_FSC
+                    #values in FSC : [0-1] for snow (and no snow), 9999 for nodata
+                    #values in SEB : 100 for snow, 0 for no snow, 205 for cloud, 254 for nodata
+
+                    SEB = BandReadAsArray(g_SEB.GetRasterBand(1))  
+                    cond = np.where((SEB != 100) & (SEB != 0))
+                    SEB[cond] = np.nan
+                    cond = np.where(SEB == 100)
+                    SEB[cond] = 1
+
+
+                    
+                    #values in FSC : [0-1] for snow (and no snow), 9999 for nodata
+                    #values in SEB : 1 for snow, 0 for no snow, nan for nodata
+                    
+
+                    FSC = BandReadAsArray(g_CMP.GetRasterBand(1))
+
+                    cond = np.where((FSC > 0) & (FSC <= 1))
+                    FSC[cond] = 2
+                    FSC[FSC == 9999] = np.nan
+
+                    #values in FSC : 2 for snow, 0 for no snow, nan for nodata
+                    #values in SEB : 1 for snow, 0 for no snow, nan for nodata
+                    
+                    CMP = (SEB + FSC)
+                    
+
+                    g_CMP.GetRasterBand(1).WriteArray(CMP)
+                    gdal.Translate(os.path.join(path_tifs_date,"SNOW_DIFF_tile-" + tile + "_EPSG-" + epsg + ".tif"),g_CMP,format= 'GTiff',noData = 9999)
+                    os.system("gdaldem color-relief " + os.path.join(path_tifs_date,"SNOW_DIFF_tile-" + tile + "_EPSG-" + epsg + ".tif") + " " + p_cmp + " " + os.path.join(path_qckls_date,"SNOW_DIFF_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    gdal.Translate(os.path.join(path_qckls_date,"SNOW_DIFF_tile-" + tile + "_EPSG-" + epsg + ".png"),os.path.join(path_qckls_date,"SNOW_DIFF_tile-" + tile + "_EPSG-" + epsg + ".tif"),format= 'PNG', width=800,outputType = gdal.GDT_Byte)
+                    os.remove(os.path.join(path_qckls_date,"SNOW_DIFF_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                    
+                    
+                   
+                    
+                    
+                    
+
+        
+        return True
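+
+    # Sketch of the snow-difference encoding above (illustrative): SEB is
+    # recoded to 1 = snow, 0 = no snow, nan = cloud/nodata and FSC to
+    # 2 = snow, 0 = no snow, nan = nodata, so that SEB + FSC reads as
+    # 0 = no snow in both, 1 = snow in SEB only, 2 = snow in FSC only,
+    # 3 = snow in both, nan = no comparison possible.
+    @staticmethod
+    def _snow_diff_sketch(seb, fsc):
+        import numpy as np
+        seb = seb.astype(float)
+        seb[(seb != 100) & (seb != 0)] = np.nan
+        seb[seb == 100] = 1
+        fsc = fsc.astype(float)
+        fsc[(fsc > 0) & (fsc <= 1)] = 2
+        fsc[fsc == 9999] = np.nan
+        return seb + fsc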
+
+
+
+
+
+        
+    
+    #For each evaluation dataset, compare the date-averaged FSC predicted by the model with the date-averaged observed FSC over time
+    #calDirName (string) : name of the directory containing the datasets used for the calibration (self.path_outputs/dirName)
+    #calSource (string) : name of the FSC dataset source (PLEIADES, SPOT, IZAS, etc.) used for the calibration
+    #evalDirNames (list of strings) : names of the directories containing the datasets used for the evaluation (self.path_outputs/dirName)
+    #evalSources (list of strings) : names of the FSC dataset sources (PLEIADES, SPOT, IZAS, etc.) used for the evaluation
+    #a,b (float) : parameters of the FSC = 0.5*tanh(a*NDSI+b)+0.5 model
+    #return a success bool
+    def timeLapseEvalModel(self,calDirName,evalDirNames,calSource,evalSources,a,b):
+        
+
+        calDataSetDir = os.path.join(self.path_outputs,calDirName)
+        path_eval = os.path.join(calDataSetDir,"EVALUATION")
+        title = "TIMELAPSE_" + calSource + "_WITH"
+        
+
+
+
+        
+        title2 = "TIMELAPSE"
+        for evalDirName in evalDirNames :
+            title2 = title2 + "_" + evalDirName 
+        
+        for evalSource in evalSources :
+            title = title + "_" + evalSource 
+
+        path_eval_dir = os.path.join(path_eval,title2)
+        shutil.rmtree(path_eval_dir, ignore_errors=True)
+
+        self.mkdir_p(path_eval_dir)
+
+        f= open(os.path.join(path_eval_dir,title + ".txt"),"w")
+        f.write("\nCalibration dataset :" + calDirName)
+        f.write("\nModel : FSC = 0.5*tanh(a*NDSI+b) +  0.5 with :")
+        f.write("\n        a = " + str(a) + " b = " + str(b))
+        
+
+        f.write("\n")
+        
+        f.write("\nEVALUATION" )
+
+        # Plot figure with subplots 
+        fig = plt.figure()
+        st = fig.suptitle(title)
+        # set up subplot grid
+        gridspec.GridSpec(1,2)
+        # prepare for evaluation scatterplot
+        ax = plt.subplot2grid((1,2), (0,0),rowspan=1, colspan=2)
+        
+        plt.ylabel('FSC predicted - evaluation',size = 10)
+        plt.xlabel('date',size = 10)
+        
+
+  
+        k = 0
+        for evalDirName in evalDirNames :
+            evalSource = evalSources[k]
+            k = k + 1
+
+            nb_pixel_total = 0
+
+            evalDataSetDir = os.path.join(self.path_outputs,evalDirName)
+            path_tifs = os.path.join(evalDataSetDir,"TIFS")
+
+            NDSI_avg_test = []
+            FSC_avg_test = []
+            days = []
+            
+            for d in sorted(os.listdir(path_tifs)):
+                date = self.getDateFromStr(d)
+                if date == '' : continue
+                print(date)
+                path_tifs_date = os.path.join(path_tifs,d)
+                
+                
+                epsgs = {}
+                for tif in os.listdir(path_tifs_date) :
+                    epsg = self.getEpsgFromStr(tif)
+                    if epsg == '': continue
+                    if epsg not in epsgs :
+                        epsgs[epsg] = []
+                    
+                tiles = []
+                for tif in os.listdir(path_tifs_date) :
+                    epsg = self.getEpsgFromStr(tif)
+                    tile = self.getTileFromStr(tif)
+                    if epsg == '' or tile == '': continue
+                    if tile not in epsgs[epsg]:
+                        epsgs[epsg].append(tile)      
+                    
+                FSC_d = []
+                NDSI_d = []
+            
+                for epsg in epsgs :
+                    for tile in epsgs[epsg]:
+                        g_FSC = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_FSC_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                        FSC_d.append(BandReadAsArray(g_FSC.GetRasterBand(1)).flatten())
+                        g_NDSI = gdal.Open(os.path.join(path_tifs_date,"OUTPUT_NDSI_tile-" + tile + "_EPSG-" + epsg + ".tif"))
+                        NDSI_d.append(BandReadAsArray(g_NDSI.GetRasterBand(1)).flatten())
+                    
+                
+        
+        
+                NDSI_d = np.hstack(NDSI_d)
+                FSC_d = np.hstack(FSC_d)  
+                cond1 = np.where((FSC_d != 9999) & (~np.isnan(FSC_d)) & (~np.isinf(FSC_d)))
+                NDSI_d = NDSI_d[cond1]
+                FSC_d = FSC_d[cond1]
+                cond2 = np.where( (NDSI_d != 9999) & (~np.isnan(NDSI_d)) & (~np.isinf(NDSI_d)))
+                FSC_d = FSC_d[cond2]
+                NDSI_d = NDSI_d[cond2]
+                if len(FSC_d) == 0 : continue
+                
+                nb_pixel_total = nb_pixel_total + len(FSC_d)
+
+                FSC_avg = np.average(FSC_d)
+                NDSI_avg = np.average(NDSI_d)
+        
+
+                NDSI_avg_test.append(NDSI_avg)
+                FSC_avg_test.append(FSC_avg)
+                days.append(date)
+
+            NDSI_avg_test = np.hstack(NDSI_avg_test)
+            FSC_avg_test = np.hstack(FSC_avg_test)
+            days = np.hstack(days)
+                
+        
+
+
+
+            # VALIDATION
+        
+            # prediction of FSC from testing NDSI
+            FSC_avg_pred = 0.5*np.tanh(a*NDSI_avg_test+b) +  0.5
+
+            # error
+            er_FSC = FSC_avg_pred - FSC_avg_test
+
+            # absolute error
+            abs_er_FSC = abs(er_FSC)
+
+            # mean error
+            m_er_FSC = np.mean(er_FSC)
+
+            # absolute mean error
+            abs_m_er_FSC = np.mean(abs_er_FSC)
+
+            #root mean square error
+            rmse_FSC = sqrt(mean_squared_error(FSC_avg_pred,FSC_avg_test))
+
+            #correlation
+            corr_FSC = mstats.pearsonr(FSC_avg_pred,FSC_avg_test)[0]
+
+            #standard deviation
+            stde_FSC = np.std(er_FSC)
+
+
+            plt.scatter(days,er_FSC, label='{:s}; rmse : {:.2f}'.format(evalSource,rmse_FSC))
+            plt.legend(fontsize=10,loc='upper left')
+
+
+
+
+            f.write("\n")  
+            f.write("\nEvaluation dataSet :" + evalDirName)
+            f.write("\n  Number of datess : " + str(len(NDSI_avg_test)))
+            f.write("\n  Total number of 20x20m pixels : " + str(nb_pixel_total))
+            f.write("\n  Number of 20x20m pixels per date : " + str(nb_pixel_total/len(NDSI_avg_test)))
+            f.write("\n  Covered surface per date (m2) : " + str(20*20*nb_pixel_total/len(NDSI_avg_test)))
+            f.write("\n  corr. coef. : " + str(corr_FSC))
+            f.write("\n  std. err. (MB): " + str(stde_FSC))
+            f.write("\n  mean err. : " + str(m_er_FSC))
+            f.write("\n  abs. mean err. : " + str(abs_m_er_FSC))
+            f.write("\n  root mean square err. : " + str(rmse_FSC))
+
+        
+ 
+
+        
+        # fit subplots & save fig
+        fig.tight_layout()
+        fig.set_size_inches(w=16,h=10)
+        st.set_y(0.95)
+        fig.subplots_adjust(top=0.85)
+        fig.savefig(os.path.join(path_eval_dir,'PLOT_' + title + '.png'))
+        plt.close(fig)
+
+        #close txt file
+        f.close()
+
+
+        return True
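+
+    # Sketch of the per-date residual computed above (illustrative; names are
+    # ours): for each date the valid pixels are averaged, the model is applied
+    # to the mean NDSI and the residual against the mean observed FSC is the
+    # value that gets scattered over time.
+    @staticmethod
+    def _date_residual_sketch(ndsi_by_date, fsc_by_date, a, b):
+        import numpy as np
+        residuals = {}
+        for date, ndsi in ndsi_by_date.items():
+            pred = 0.5 * np.tanh(a * np.average(ndsi) + b) + 0.5
+            residuals[date] = pred - np.average(fsc_by_date[date])
+        return residuals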
+
+
+
+
+
+
+
+    #Filter the input ODK points and match them with a LIS product
+    #dirName : directory which will contain the new list of ODK points
+    #source : text file containing the input ODK points
+    #accFilter (bool) : whether to eliminate ODK points above a given accuracy threshold
+    #snwFilter (bool) : whether to eliminate ODK points outside the LIS snow mask
+    def processODK(self,dirName,source,accFilter,snwFilter):
+
+
+
+        dataSetDir = os.path.join(self.path_outputs,dirName)
+        odkList = os.path.join(dataSetDir,"ODKLIST.txt")
+        odkInfos = os.path.join(dataSetDir,"ODKINFOS.txt")
+
+
+        dirODK = "/work/OT/siaa/Theia/Neige/CoSIMS/zacharie/snowcover/INPUTS/FSC/ODK"
+        inputODK = os.path.join(dirODK,source)
+
+        if "ODK" not in  dataSetDir:
+            print("ERROR processODK : 'ODK' needs to be in the name")
+            return False
+        shutil.rmtree(dataSetDir, ignore_errors=True)
+
+        self.mkdir_p(dataSetDir)
+
+
+        dict_tile = {}
+        dict_products = {}
+        dict_FSC = {}
+
+
+        print("####################################################")
+        print("Recuperation of ODK data")
+        #on recupere les donnees odk
+        with open(inputODK, "r") as ODK :
+            line = ODK.readline()
+            line = ODK.readline()
+            while line :
+                point = line.split()
+                date = point[0]
+                latitude = point[1]
+                longitude = point[2]
+                accuracy = point[3]
+                fsc = point[4]
+                if accFilter and float(accuracy) > self.max_accuracy :
+                    line = ODK.readline()
+                    continue
+                    
+                if date not in dict_FSC :
+                    dict_FSC[date] = []
+                dict_FSC[date].append([latitude,longitude,fsc,accuracy])
+                    
+                line = ODK.readline()
+                    
+
+
+        print("####################################################")
+        print("Search of tiles and L2A rasters")
+        #on trouve les tuiles et rasters correspondants
+        for date in dict_FSC :
+            print("check date: ",date)
+            list_points = dict_FSC[date]
+            dateFSC = self.getDateFromStr(date)
+
+
+            for point in list_points :
+                
+                lat = point[0]
+                lon = point[1]
+                fsc = point[2]
+                acc = point[3]
+                print(" check point : ",lat,lon)
+
+
+            
+                decalP = self.nb_shift_days + 1    
+                tileP = ""
+                p_L2AP = ""   
+                for tile in os.listdir(self.path_LIS) :
+
+
+                    path_tile = os.path.join(self.path_LIS,tile)
+                    if not os.path.isdir(path_tile): continue
+                    try:
+                        L2A_product = os.listdir(path_tile)[-1]
+                    except OSError as exc:
+                        if exc.errno == errno.EACCES:  # skip unreadable tile directories
+                            continue
+                        else:
+                            raise
+                    L2A_product = os.path.join(path_tile,L2A_product)
+                    f_L2A = os.path.join(L2A_product,"LIS_PRODUCTS","LIS_SEB.TIF")
+                
+
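+                    # Probe the tile's LIS_SEB raster at the point's WGS84 lon/lat;
+                    # gdallocationinfo prints nothing when the point falls outside
+                    # the raster, which triggers the ValueError handled below.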
+                    pixel = os.popen('gdallocationinfo -valonly -wgs84 %s %s %s' % (f_L2A, lon, lat)).read()
+
+                    try:
+                        int(pixel)
+                    except ValueError:
+                        continue
+
+                    L2A_products = glob.glob(os.path.join(path_tile,'*SENTINEL*'))
+
+
+
+                    for L2A_product in L2A_products :
+                        dateL2A = self.getDateFromStr(L2A_product)
+                        decal = dateL2A - dateFSC
+                        if abs(decal.days) >= decalP : continue
+
+                        f_L2A = os.path.join(L2A_product,"LIS_PRODUCTS","LIS_SEB.TIF")
+                        pixel = int(os.popen('gdallocationinfo -valonly -wgs84 %s %s %s' % (f_L2A, lon, lat)).read())
+
+                        if snwFilter and pixel != 100: continue  # 100 = snow in LIS_SEB
+
+                        decalP = abs(decal.days)
+                        p_L2AP = L2A_product
+                        tileP = tile
+
+                if p_L2AP == "":
+                    print("  point rejete")
+                    continue
+                else :
+                    print("  point accepte")
+
+                f_L2AP = os.path.basename(p_L2AP)
+
+                # check whether the pixel falls in a tree-covered area
+                forest = int(os.popen('gdallocationinfo -valonly -wgs84 %s %s %s' % (self.f_tree, lon, lat)).read())
+
+
+                if dateFSC not in dict_products:
+                    dict_products[dateFSC] = []
+                dict_products[dateFSC].append([lat, lon, fsc, acc, decalP, p_L2AP, f_L2AP, tileP, forest])
+
+        f_odkList= open(odkList,"w")
+        f_odkInfos= open(odkInfos,"w")
+        #on affiche le dict
+        print("####################################################")
+        print("\n")
+        nb_points = 0
+        f_odkList.write("date lat lon fsc acc decal L2A tile forest")
+        for date in dict_products :
+            print(date)
+            
+            for point in dict_products[date]:
+                print("TILE : ", point[7])
+                if point[7] not in dict_tile:
+                    dict_tile[point[7]] = [1, 0]
+                else:
+                    dict_tile[point[7]][0] += 1
+                if point[8] > 0:
+                    dict_tile[point[7]][1] += 1
+                print ("L2A product : ",point[6])
+                print("lat = ",point[0],"lon = ",point[1],"fsc = ",point[2],"acc = ",point[3],"decal = ",point[4],"forest value = ",point[8])
+                f_odkList.write("\n"+date.strftime(self.date_format)+" "+str(point[0])+" "+str(point[1])+" "+str(point[2])+" "+str(point[3])+" "+str(point[4])+" "+point[5]+" "+point[7]+" "+str(point[8]))
+                nb_points += 1
+            print("\n")
+        print("nb of points = ",nb_points)
+
+        # print the number of points per tile
+        for tile in dict_tile :
+            line = "TILE : " + tile + " ; NB of points : " + str(dict_tile[tile][0]) + " ; NB of points in forest : " + str(dict_tile[tile][1])
+            print(line)
+            f_odkInfos.write("\n" + line)
+
+        f_odkInfos.write("\nTOTAL NB OF POINTS : " + str(nb_points))
+
+        f_odkList.close()
+        f_odkInfos.close()
+
+        return True
+
+
+
+    # With the list of ODK points and their matching LIS products found in evalDirName,
+    # evaluate the a and b parameters of the FSC = 0.5*tanh(a*NDSI+b) + 0.5 model.
+    # calDirName (string) : name of the directory containing the datasets used for the calibration (self.path_outputs/dirName)
+    # calSource (string) : name of the FSC dataset source (PLEIADES, SPOT, IZAS, etc.) used for the calibration
+    # evalDirName (string) : name of the directory containing the ODK list used for the evaluation (self.path_outputs/dirName)
+    # a, b (float) : parameters of the FSC = 0.5*tanh(a*NDSI+b) + 0.5 model
+    # returns the RMSE of the TOC FSC evaluation
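+    # Worked sketch of the model with illustrative values only: for a = 3.0,
+    # b = -1.0 and NDSI = 0.6, FSC = 0.5*tanh(3.0*0.6 - 1.0) + 0.5 ≈ 0.83.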
+    def evaluateWithODK(self,calDirName,calSource,evalDirName,a,b):
+
+
+        calDataSetDir = os.path.join(self.path_outputs,calDirName)
+        path_eval = os.path.join(calDataSetDir,"EVALUATION")
+        title = "EVAL_" + calSource + "_WITH_ODK"
+        
+        odkPoints = os.path.join(self.path_outputs,evalDirName,"ODKLIST.txt")
+
+
+        path_eval_dir = os.path.join(path_eval,title)
+        shutil.rmtree(path_eval_dir, ignore_errors=True)
+
+        self.mkdir_p(path_eval_dir)
+
+        f = open(os.path.join(path_eval_dir, title + ".txt"), "w")
+        f.write("\nCalibration dataset : " + calDirName + " from " + calSource)
+        f.write("\nModel : FSC = 0.5*tanh(a*NDSI+b) + 0.5")
+        f.write("\n        a = " + str(a) + " b = " + str(b))
+        f.write("\nEvaluation dataset : \n" + evalDirName)
+
+        dict_FSC = {}
+        dict_products = {}
+
+        print("####################################################")
+        print("Recuperation of ODK data")
+        #on recupere les donnees odk
+        with open(odkPoints, "r") as ODK :
+            line = ODK.readline()
+            line = ODK.readline()
+            while line :
+                point = line.split()
+                date = point[0]
+                latitude = point[1]
+                longitude = point[2]
+                fsc = float(point[3])
+                L2A_product = point[6]
+                tcd = float(point[8])
+
+                if date not in dict_products:
+                    dict_products[date] = []
+                dict_products[date].append([latitude, longitude, fsc, tcd, L2A_product])
+
+                line = ODK.readline()
+
+        # compare the ODK points with the corresponding L2A rasters
+        list_NDSI = []
+        list_FSC = []
+        list_TCD = []
+        for date in dict_products :
+            for point in dict_products[date] :
+
+                lat = point[0]
+                lon = point[1]
+                fsc = point[2]
+                tcd = point[3]
+                L2A_product = point[4]
+
+            
+                # Look for the green, red and SWIR band TIFF files
+                f_green = ""
+                f_swir = ""
+                f_red = ""
+
+
+                for fp in os.listdir(L2A_product) :
+                    if ("green_band_resampled.tif" in fp) :
+                        f_green = os.path.join(L2A_product,fp)
+                    elif ("red_band_resampled.tif" in fp) :
+                        f_red = os.path.join(L2A_product,fp)
+                    elif ("swir_band_extracted.tif" in fp) :
+                        f_swir = os.path.join(L2A_product,fp)
+
+
+
+                # If any band file is missing, skip to the next point
+                if f_green == "" or f_red == "" or f_swir == "":
+                    continue
+                
+
+                # Get the pixel value from each band to compute an NDSI value
+                try:
+                    green = float(os.popen('gdallocationinfo -valonly -wgs84 %s %s %s' % (f_green, lon, lat)).read())
+                    red = float(os.popen('gdallocationinfo -valonly -wgs84 %s %s %s' % (f_red, lon, lat)).read())
+                    swir = float(os.popen('gdallocationinfo -valonly -wgs84 %s %s %s' % (f_swir, lon, lat)).read())
+                except ValueError:
+                    # empty or non-numeric answer: the point is outside the raster
+                    continue
+
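+                # NDSI contrasts the green and SWIR reflectances; the guard just
+                # below catches the degenerate case green + swir == 0.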
+                NDSI = (green - swir)/(green + swir)
+
+
+                
+                if np.isnan(NDSI) or np.isinf(NDSI): continue
+                
+                list_NDSI.append(NDSI)
+                list_FSC.append(fsc)
+                list_TCD.append(tcd)
+
+            
+
+        # print the assembled lists
+        print("####################################################")
+        print("\nODK POINTS:")
+
+        for i in range(len(list_NDSI)):
+            print("NDSI = ",list_NDSI[i],"FSC = ",list_FSC[i],"TCD = ",list_TCD[i])
+
+        print("####################################################")
+        print("Calculation of NDSI-FSC relation and model evaluation")
+
+        # compute and display the FSC-NDSI relation and the evaluation of parameters a and b
+
+        NDSI_test = np.asarray(list_NDSI)
+        FSC_test = np.asarray(list_FSC)
+        TCD = np.asarray(list_TCD)
+
+
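+        # TOC = top of canopy, OG = on ground: the OG estimate divides the TOC
+        # FSC by the non-forested fraction (1 - TCD/100), clamped to 1; the *_t
+        # arrays keep only the tree-covered pixels (TCD > 0).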
+        TOC_FSC_pred = 0.5*np.tanh(a*NDSI_test+b) +  0.5
+        OG_FSC_pred = TOC_FSC_pred/((100.0 - TCD)/100.0)
+        OG_FSC_pred[OG_FSC_pred > 1] = 1
+        OG_FSC_pred[np.isinf(OG_FSC_pred)] = 1
+        TCD_t = TCD[TCD > 0]
+        FSC_test_t = FSC_test[TCD > 0]
+        OG_FSC_pred_t = OG_FSC_pred[TCD > 0]
+
+        #TOC
+        # error
+        TOC_er_FSC = TOC_FSC_pred - FSC_test
+        # absolute error
+        TOC_abs_er_FSC = abs(TOC_er_FSC)
+        # mean error
+        TOC_m_er_FSC = np.mean(TOC_er_FSC)
+        # absolute mean error
+        TOC_abs_m_er_FSC = np.mean(TOC_abs_er_FSC)
+        #root mean square error
+        TOC_rmse_FSC = sqrt(mean_squared_error(TOC_FSC_pred,FSC_test))
+        #correlation
+        TOC_corr_FSC = mstats.pearsonr(TOC_FSC_pred,FSC_test)[0]
+        #standard deviation
+        TOC_stde_FSC = np.std(TOC_er_FSC)
+     
+        #OG
+        # error
+        OG_er_FSC = OG_FSC_pred - FSC_test
+        # absolute error
+        OG_abs_er_FSC = abs(OG_er_FSC)
+        # mean error
+        OG_m_er_FSC = np.mean(OG_er_FSC)
+        # absolute mean error
+        OG_abs_m_er_FSC = np.mean(OG_abs_er_FSC)
+        #root mean square error
+        OG_rmse_FSC = sqrt(mean_squared_error(OG_FSC_pred,FSC_test))
+        #correlation
+        OG_corr_FSC = mstats.pearsonr(OG_FSC_pred,FSC_test)[0]
+        #standard deviation
+        OG_stde_FSC = np.std(OG_er_FSC)
+
+
+
+        #OG Tree Only
+        # error
+        OG_er_FSC_t = OG_FSC_pred_t - FSC_test_t
+        # absolute error
+        OG_abs_er_FSC_t = abs(OG_er_FSC_t)
+        # mean error
+        OG_m_er_FSC_t = np.mean(OG_er_FSC_t)
+        # absolute mean error
+        OG_abs_m_er_FSC_t = np.mean(OG_abs_er_FSC_t)
+        #root mean square error
+        OG_rmse_FSC_t = sqrt(mean_squared_error(OG_FSC_pred_t,FSC_test_t))
+        #correlation
+        OG_corr_FSC_t = mstats.pearsonr(OG_FSC_pred_t,FSC_test_t)[0]
+        #standard deviation
+        OG_stde_FSC_t = np.std(OG_er_FSC_t)
+
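+        # Build 0.2-wide NDSI bins from the smallest observed NDSI up to 1,
+        # feeding the FSC boxplots drawn below.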
+        minNDSI = min(NDSI_test)
+        list_FSC_box = [FSC_test[np.where((NDSI_test >= 0.8) & (NDSI_test <= 1))]]
+        list_labels_box = ["[ 0.8\n1 ]"]
+        j = 0.8
+        while minNDSI < j : 
+            i = j - 0.2
+            list_FSC_box.insert(0,FSC_test[np.where((NDSI_test >= i) & (NDSI_test < j))])
+            list_labels_box.insert(0,"[ "+ "{0:.1f}".format(i) +"\n"+ "{0:.1f}".format(j) +" [")
+            j = j - 0.2
+            
+        # Plot figure with subplots 
+        fig = plt.figure()
+        st = fig.suptitle("ODK FSC / NDSI")
+        gridspec.GridSpec(1,3)
+
+        # 2D histogram of FSC vs NDSI
+        ax = plt.subplot2grid((1,3), (0,2))
+        plt.title('ODK FSC/NDSI')
+        plt.ylabel('testing FSC')
+        plt.xlabel('testing NDSI')
+        plt.hist2d(NDSI_test,FSC_test,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+        n = np.arange(min(NDSI_test),1.01,0.01)
+        line = 0.5*np.tanh(a*n+b) +  0.5
+        plt.plot(n, line, 'r', label='Predicted TOC FSC')
+        plt.legend(fontsize=6,loc='upper left')
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio) 
+
+        # boxplot with FSC = 0 and FSC = 1
+        ax = plt.subplot2grid((1,3), (0,0),rowspan=1, colspan=2)
+        plt.title('ODK FSC/NDSI')
+        plt.ylabel('0 <= FSC <= 1')
+        plt.xlabel('NDSI')
+        plt.boxplot(list_FSC_box,labels = list_labels_box)
+
+
+        # fit subplots and save fig
+        fig.tight_layout()
+        fig.set_size_inches(w=16,h=10)
+        st.set_y(0.95)
+        fig.subplots_adjust(top=0.85)
+        fig.savefig(os.path.join(path_eval_dir, 'ODK_ANALYSIS.png'))
+        plt.close(fig)
+
+
+        # Plot figure with subplots 
+        fig = plt.figure()
+        st = fig.suptitle("ODK EVALUATION")
+        gridspec.GridSpec(2,3)
+
+        # 2D histogram for the TOC evaluation
+        ax = plt.subplot2grid((2,3), (0,2))
+
+        plt.title('TOC FSC EVALUATION')
+        plt.ylabel('predicted TOC FSC')
+        plt.xlabel('testing FSC')
+        plt.hist2d(FSC_test,TOC_FSC_pred,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+        slope, intercept, r_value, p_value, std_err = mstats.linregress(FSC_test,TOC_FSC_pred) 
+        n = np.array([min(FSC_test),1.0])
+        line = slope * n + intercept
+        plt.plot(n, line, 'b', label='y = {:.2f}x + {:.2f}\ncorr={:.2f} rmse={:.2f}'.format(slope,intercept,TOC_corr_FSC,TOC_rmse_FSC))
+        plt.plot(n, n, 'g', label='y = 1.0x + 0.0')
+        plt.legend(fontsize=6,loc='upper left')
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+
+
+        # 1D histogram of the TOC residuals
+        ax = plt.subplot2grid((2,3), (0,0),rowspan=1, colspan=2)
+        plt.title("TOC FSC RESIDUALS")
+        plt.ylabel('amount of data points')
+        plt.xlabel('FSC pred - test')
+        xticks = np.arange(-1.0, 1.1, 0.1)
+        plt.xticks(xticks)
+        plt.hist(TOC_er_FSC,bins=40,weights=np.ones(len(TOC_er_FSC)) / len(TOC_er_FSC))
+        plt.gca().yaxis.set_major_formatter(PercentFormatter(1))
+        plt.grid(True) 
+
+
+        # 2D histogram for the OG evaluation
+        ax = plt.subplot2grid((2,3), (1,2))
+
+        plt.title('OG FSC EVALUATION')
+        plt.ylabel('predicted OG FSC')
+        plt.xlabel('testing FSC')
+        plt.hist2d(FSC_test,OG_FSC_pred,bins=(40, 40), cmap=plt.cm.get_cmap('plasma'),norm=LogNorm())
+        slope, intercept, r_value, p_value, std_err = mstats.linregress(FSC_test,OG_FSC_pred) 
+        n = np.array([min(FSC_test),1.0])
+        line = slope * n + intercept
+        plt.plot(n, line, 'b', label='y = {:.2f}x + {:.2f}\ncorr={:.2f} rmse={:.2f}'.format(slope,intercept,OG_corr_FSC,OG_rmse_FSC))
+        plt.plot(n, n, 'g', label='y = 1.0x + 0.0')
+        plt.legend(fontsize=6,loc='upper left')
+        plt.colorbar()
+        ratio = 1
+        xleft, xright = ax.get_xlim()
+        ybottom, ytop = ax.get_ylim()
+        ax.set_aspect(abs((xright-xleft)/(ybottom-ytop))*ratio)  
+
+
+        # 1D histogram of the OG residuals
+        ax = plt.subplot2grid((2,3), (1,0),rowspan=1, colspan=2)
+        plt.title("OG FSC RESIDUALS")
+        plt.ylabel('amount of data points')
+        plt.xlabel('FSC pred - test')
+        xticks = np.arange(-1.0, 1.1, 0.1)
+        plt.xticks(xticks)
+        plt.hist(OG_er_FSC,bins=40,weights=np.ones(len(OG_er_FSC)) / len(OG_er_FSC))
+        plt.gca().yaxis.set_major_formatter(PercentFormatter(1))
+        plt.grid(True) 
+
+
+
+        # fit subplots and save fig
+        fig.tight_layout()
+        fig.set_size_inches(w=16,h=10)
+        st.set_y(0.95)
+        fig.subplots_adjust(top=0.85)
+        fig.savefig(os.path.join(path_eval_dir,title + '.png'))
+        plt.close(fig)
+
+        
+
+        f.write("\n")
+        f.write("\nEVALUATION OF TOC FSC" )
+        f.write("\n  Number of data points : " + str(len(FSC_test)))
+        f.write("\n  corr. coef. : " + str(TOC_corr_FSC))
+        f.write("\n  std. err. : " + str(TOC_stde_FSC))
+        f.write("\n  mean err. : " + str(TOC_m_er_FSC))
+        f.write("\n  abs. mean err. : " + str(TOC_abs_m_er_FSC))
+        f.write("\n  root mean square err. : " + str(TOC_rmse_FSC))
+        f.write("\n")
+        f.write("\nEVALUATION OF OG FSC" )
+        f.write("\n  Number of data points : " + str(len(FSC_test)))
+        f.write("\n  corr. coef. : " + str(OG_corr_FSC))
+        f.write("\n  std. err. : " + str(OG_stde_FSC))
+        f.write("\n  mean err. : " + str(OG_m_er_FSC))
+        f.write("\n  abs. mean err. : " + str(OG_abs_m_er_FSC))
+        f.write("\n  root mean square err. : " + str(OG_rmse_FSC))
+        f.write("\n")
+        f.write("\nEVALUATION OF OG FSC with only pixels with TCD > 0" )
+        f.write("\n  Number of data points : " + str(len(FSC_test_t)))
+        f.write("\n  corr. coef. : " + str(OG_corr_FSC_t))
+        f.write("\n  std. err. : " + str(OG_stde_FSC_t))
+        f.write("\n  mean err. : " + str(OG_m_er_FSC_t))
+        f.write("\n  abs. mean err. : " + str(OG_abs_m_er_FSC_t))
+        f.write("\n  root mean square err. : " + str(OG_rmse_FSC_t))
+
+        f.close()
+
+
+        return TOC_rmse_FSC
diff --git a/app/CMakeLists.txt b/app/CMakeLists.txt
index e16f4d99..f2bdf06e 100644
--- a/app/CMakeLists.txt
+++ b/app/CMakeLists.txt
@@ -1,9 +1,31 @@
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/run_snow_detector.py DESTINATION ${CMAKE_BINARY_DIR}/app)
 file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/run_cloud_removal.py DESTINATION ${CMAKE_BINARY_DIR}/app)
 file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/run_snow_annual_map.py DESTINATION ${CMAKE_BINARY_DIR}/app)
 file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/build_json.py DESTINATION ${CMAKE_BINARY_DIR}/app)
+file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/build_snow_annual_map_json.py DESTINATION ${CMAKE_BINARY_DIR}/app)
 
 install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/run_snow_detector.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
 install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/run_snow_annual_map.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
 install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/run_cloud_removal.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
 install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/build_json.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
+install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/build_snow_annual_map_json.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
diff --git a/app/build_json.py b/app/build_json.py
index 154b255b..6a762c13 100644
--- a/app/build_json.py
+++ b/app/build_json.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import os
 import re
@@ -10,174 +29,189 @@ import argparse
 import zipfile
 
 ### Configuration Template ###
-conf_template = {"general":{"pout":"",
-                            "nodata":-10000,
-                            "ram":1024,
-                            "nb_threads":1,
-                            "preprocessing":False,
-                            "log":True,
-                            "multi":1,
-                            "target_resolution":-1},
-                 "vector":{"generate_vector":True,
-                           "generate_intermediate_vectors":False,
-                           "use_gdal_trace_outline":True,
-                           "gdal_trace_outline_dp_toler":0,
-                           "gdal_trace_outline_min_area":0},
-                 "inputs":{"green_band":{"path": "",
+conf_template = {"general": {"pout": "",
+                             "nodata": -10000,
+                             "ram": 2048,
+                             "nb_threads": 1,
+                             "preprocessing": False,
+                             "log": True,
+                             "multi": 10,
+                             "target_resolution": -1},
+                 "vector": {"generate_vector": True,
+                            "generate_intermediate_vectors": False,
+                            "use_gdal_trace_outline": True,
+                            "gdal_trace_outline_dp_toler": 0,
+                            "gdal_trace_outline_min_area": 0
+                            },
+                 "inputs": {"green_band": {"path": "",
+                                           "noBand": 1},
+                            "red_band": {"path": "",
                                          "noBand": 1},
-                           "red_band":{"path": "",
-                                       "noBand": 1},
-                           "swir_band":{"path": "",
-                                        "noBand": 1},
-                           "dem":"",
-                           "cloud_mask":""},
-                 "snow":{"dz":100,
-                         "ndsi_pass1":0.4,
-                         "red_pass1":200,
-                         "ndsi_pass2":0.15,
-                         "red_pass2":40,
-                         "fsnow_lim":0.1,
-                         "fclear_lim":0.1,
-                         "fsnow_total_lim":0.001},
-                 "cloud":{"shadow_in_mask":64,
-                          "shadow_out_mask":128,
-                          "all_cloud_mask":1,
-                          "high_cloud_mask":32,
-                          "rf":12,
-                          "red_darkcloud":300,
-                          "red_backtocloud":100,
-                          "strict_cloud_mask":False,
-                          "rm_snow_inside_cloud":False,
-                          "rm_snow_inside_cloud_dilation_radius":1,
-                          "rm_snow_inside_cloud_threshold":0.85,
-                          "rm_snow_inside_cloud_min_area":5000}}
-
+                            "swir_band": {"path": "",
+                                          "noBand": 1},
+                            "dem": "",
+                            "cloud_mask": ""
+                            },
+                 "snow": {"dz": 100,
+                          "ndsi_pass1": 0.4,
+                          "red_pass1": 200,
+                          "ndsi_pass2": 0.15,
+                          "red_pass2": 40,
+                          "fsnow_lim": 0.1,
+                          "fclear_lim": 0.1,
+                          "fsnow_total_lim": 0.001},
+                 "cloud": {"shadow_in_mask": 32,
+                           "shadow_out_mask": 64,
+                           "all_cloud_mask": 1,
+                           "high_cloud_mask": 128,
+                           "rf": 12,
+                           "red_darkcloud": 300,
+                           "red_backtocloud": 100,
+                           "strict_cloud_mask": False,
+                           "rm_snow_inside_cloud": False,
+                           "rm_snow_inside_cloud_dilation_radius": 1,
+                           "rm_snow_inside_cloud_threshold": 0.85,
+                           "rm_snow_inside_cloud_min_area": 5000},
+                 "fsc": {
+                         "dofsc": False,
+                         "fscToc_Eq": "1.45*ndsi-0.01",
+                         "fscOg_Eq": "fscToc/(1-tcd)",
+                         "tcd": "",
+                         "cosims_mode": False
+                        },
+                 "water_mask": {
+                                "apply": False,
+                                "path": None,
+                                "raster_values": [1]
+                                }
+                 }
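+# The new "fsc" section drives the fractional snow cover computation (fscToc_Eq
+# maps NDSI to top-of-canopy FSC, fscOg_Eq derives the on-ground FSC from the
+# tree cover density raster "tcd"); the "water_mask" section optionally masks
+# water pixels.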
 
 ### Mission Specific Parameters ###
 
-MAJA_parameters = {"multi":10,
-                 "green_band":".*FRE_R1.DBL.TIF$",
-                 "green_bandNumber":2,
-                 "red_band":".*FRE_R1.DBL.TIF$",
-                 "red_bandNumber":3,
-                 "swir_band":".*FRE_R2.DBL.TIF$",
-                 "swir_bandNumber":5,
-                 "cloud_mask":".*CLD_R2.DBL.TIF$",
-                 "dem":".*ALT_R2\.TIF$",
-                 "shadow_in_mask":4,
-                 "shadow_out_mask":8,
-                 "all_cloud_mask":1,
-                 "high_cloud_mask":128,
-                 "rf":12}
-
-SEN2COR_parameters = {"mode":"sen2cor",
-                 "multi":10,
-                 "green_band":".*_B03_10m.jp2$",
-                 "green_bandNumber":1,
-                 "red_band":".*_B04_10m.jp2$",
-                 "red_bandNumber":1,
-                 "swir_band":".*_B11_20m.jp2$",
-                 "swir_bandNumber":1,
-                 "cloud_mask":".*_SCL_20m.jp2$",
-                 "dem":"",
-                 "shadow_in_mask":3,
-                 "shadow_out_mask":3,
-                 "all_cloud_mask":8,
-                 "high_cloud_mask":10,
-                 "rf":12}
-
-Take5_parameters = {"multi":1,
-                    "green_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
-                    "green_bandNumber":1,
-                    "red_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
-                    "red_bandNumber":2,
-                    "swir_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
-                    "swir_bandNumber":4,
-                    "cloud_mask":".*NUA.*\.TIF$",
-                    "div_mask":".*DIV.*\.TIF$",
-                    "div_slope_thres":8,
-                    "dem":".*\.tif",
-                    "shadow_in_mask":64,
-                    "shadow_out_mask":128,
-                    "all_cloud_mask":1,
-                    "high_cloud_mask":32,
-                    "rf":8}
-
-S2_parameters = {"multi":10,
-                 "green_band":".*FRE_B3.*\.tif$",
-                 "green_bandNumber":1,
-                 "red_band":".*FRE_B4.*\.tif$",
-                 "red_bandNumber":1,
-                 "swir_band":".*FRE_B11.*\.tif$",
-                 "swir_bandNumber":1,
-                 "cloud_mask":".*CLM_R2.*\.tif$",
-                 "dem":".*ALT_R2\.TIF$",
-                 "div_mask":".*MG2_R2.*\.tif$",
-                 "div_slope_thres":64,
-                 "shadow_in_mask":32,
-                 "shadow_out_mask":64,
-                 "all_cloud_mask":1,
-                 "high_cloud_mask":128,
-                 "rf":12}
-
-L8_parameters_new_format = {"multi":1,
-                 "green_band":".*FRE_B3.*\.tif$",
-                 "green_bandNumber":1,
-                 "red_band":".*FRE_B4.*\.tif$",
-                 "red_bandNumber":1,
-                 "swir_band":".*FRE_B6.*\.tif$",
-                 "swir_bandNumber":1,
-                 "cloud_mask":".*CLM_XS.*\.tif$",
-                 "dem":".*ALT_R2\.TIF$",
-                 "div_mask":".*MG2_XS.*\.tif$",
-                 "div_slope_thres":64,
-                 "shadow_in_mask":32,
-                 "shadow_out_mask":64,
-                 "all_cloud_mask":1,
-                 "high_cloud_mask":128,
-                 "rf":8}
-
-L8_parameters = {"multi":1,
-                 "green_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
-                 "green_bandNumber":3,
-                 "red_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
-                 "red_bandNumber":4,
-                 "swir_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
-                 "swir_bandNumber":6,
-                 "cloud_mask":".*NUA.*\.TIF$",
-                 "div_mask":".*DIV.*\.TIF$",
-                 "div_slope_thres":8,
-                 "dem":".*\.tif",
-                 "shadow_in_mask":64,
-                 "shadow_out_mask":128,
-                 "all_cloud_mask":1,
-                 "high_cloud_mask":32,
-                 "rf":8}
-
-LANDSAT8_LASRC_parameters = {"mode":"lasrc",
-                 "multi":10,
-                 "green_band":".*_sr_band3.tif$",
-                 "green_bandNumber":1,
-                 "red_band":".*_sr_band4.tif$",
-                 "red_bandNumber":1,
-                 "swir_band":".*_sr_band6.tif$",
-                 "swir_bandNumber":1,
-                 "cloud_mask":".*_pixel_qa.tif$",
-                 "dem":".*\.tif",
-                 "shadow_in_mask":8,
-                 "shadow_out_mask":8,
-                 "all_cloud_mask":224, # cloud with high confidence (32+(64+128)) 
-                 "high_cloud_mask":800, # cloud and high cloud with high confidence (32 + (512+256)) 
-                 "rf":8}
-
-mission_parameters = {"S2":S2_parameters,\
-                      "LANDSAT8":L8_parameters,\
-                      "LANDSAT8_new_format":L8_parameters_new_format,\
-                      "Take5":Take5_parameters,\
-                      "MAJA":MAJA_parameters,\
-                      "SEN2COR":SEN2COR_parameters,\
-                      "LANDSAT8_LASRC":LANDSAT8_LASRC_parameters
-                     }
+MAJA_parameters = {"multi": 10,
+                   "green_band": ".*FRE_R1.DBL.TIF$",
+                   "green_bandNumber": 2,
+                   "red_band": ".*FRE_R1.DBL.TIF$",
+                   "red_bandNumber": 3,
+                   "swir_band": ".*FRE_R2.DBL.TIF$",
+                   "swir_bandNumber": 5,
+                   "cloud_mask": ".*CLD_R2.DBL.TIF$",
+                   "dem": ".*ALT_R2\.TIF$",
+                   "shadow_in_mask": 4,
+                   "shadow_out_mask": 8,
+                   "all_cloud_mask": 1,
+                   "high_cloud_mask": 128,
+                   "rf": 12}
+
+SEN2COR_parameters = {"mode": "sen2cor",
+                      "multi": 10,
+                      "green_band": ".*_B03_10m.jp2$",
+                      "green_bandNumber": 1,
+                      "red_band": ".*_B04_10m.jp2$",
+                      "red_bandNumber": 1,
+                      "swir_band": ".*_B11_20m.jp2$",
+                      "swir_bandNumber": 1,
+                      "cloud_mask": ".*_SCL_20m.jp2$",
+                      "dem": "",
+                      "shadow_in_mask": 3,
+                      "shadow_out_mask": 3,
+                      "all_cloud_mask": 8,
+                      "high_cloud_mask": 10,
+                      "rf": 12}
+
+Take5_parameters = {"multi": 1,
+                    "green_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
+                    "green_bandNumber": 1,
+                    "red_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
+                    "red_bandNumber": 2,
+                    "swir_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
+                    "swir_bandNumber": 4,
+                    "cloud_mask": ".*NUA.*\.TIF$",
+                    "div_mask": ".*DIV.*\.TIF$",
+                    "div_slope_thres": 8,
+                    "dem": ".*\.tif",
+                    "shadow_in_mask": 64,
+                    "shadow_out_mask": 128,
+                    "all_cloud_mask": 1,
+                    "high_cloud_mask": 32,
+                    "rf": 8}
+
+S2_parameters = {"multi": 10,
+                 "green_band": ".*FRE_B3.*\.tif$",
+                 "green_bandNumber": 1,
+                 "red_band": ".*FRE_B4.*\.tif$",
+                 "red_bandNumber": 1,
+                 "swir_band": ".*FRE_B11.*\.tif$",
+                 "swir_bandNumber": 1,
+                 "cloud_mask": ".*CLM_R2.*\.tif$",
+                 "dem": ".*ALT_R2\.TIF$",
+                 "div_mask": ".*MG2_R2.*\.tif$",
+                 "div_slope_thres": 64,
+                 "shadow_in_mask": 32,
+                 "shadow_out_mask": 64,
+                 "all_cloud_mask": 1,
+                 "high_cloud_mask": 128,
+                 "rf": 12}
+
+L8_parameters_new_format = {"multi": 1,
+                            "green_band": ".*FRE_B3.*\.tif$",
+                            "green_bandNumber": 1,
+                            "red_band": ".*FRE_B4.*\.tif$",
+                            "red_bandNumber": 1,
+                            "swir_band": ".*FRE_B6.*\.tif$",
+                            "swir_bandNumber": 1,
+                            "cloud_mask": ".*CLM_XS.*\.tif$",
+                            "dem": ".*ALT_R2\.TIF$",
+                            "div_mask": ".*MG2_XS.*\.tif$",
+                            "div_slope_thres": 64,
+                            "shadow_in_mask": 32,
+                            "shadow_out_mask": 64,
+                            "all_cloud_mask": 1,
+                            "high_cloud_mask": 128,
+                            "rf": 8}
+
+L8_parameters = {"multi": 1,
+                 "green_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
+                 "green_bandNumber": 3,
+                 "red_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
+                 "red_bandNumber": 4,
+                 "swir_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
+                 "swir_bandNumber": 6,
+                 "cloud_mask": ".*NUA.*\.TIF$",
+                 "div_mask": ".*DIV.*\.TIF$",
+                 "div_slope_thres": 8,
+                 "dem": ".*\.tif",
+                 "shadow_in_mask": 64,
+                 "shadow_out_mask": 128,
+                 "all_cloud_mask": 1,
+                 "high_cloud_mask": 32,
+                 "rf": 8}
+
+LANDSAT8_LASRC_parameters = {"mode": "lasrc",
+                             "multi": 10,
+                             "green_band": ".*_sr_band3.tif$",
+                             "green_bandNumber": 1,
+                             "red_band": ".*_sr_band4.tif$",
+                             "red_bandNumber": 1,
+                             "swir_band": ".*_sr_band6.tif$",
+                             "swir_bandNumber": 1,
+                             "cloud_mask": ".*_pixel_qa.tif$",
+                             "dem": ".*\.tif",
+                             "shadow_in_mask": 8,
+                             "shadow_out_mask": 8,
+                             "all_cloud_mask": 224,  # cloud with high confidence (32+(64+128))
+                             "high_cloud_mask": 800,  # cloud and high cloud with high confidence (32 + (512+256))
+                             "rf": 8}
+
+mission_parameters = {"S2": S2_parameters, \
+                      "LANDSAT8": L8_parameters, \
+                      "LANDSAT8_new_format": L8_parameters_new_format, \
+                      "Take5": Take5_parameters, \
+                      "MAJA": MAJA_parameters, \
+                      "SEN2COR": SEN2COR_parameters, \
+                      "LANDSAT8_LASRC": LANDSAT8_LASRC_parameters
+                      }
+
 
 def str2bool(v):
     if v.lower() in ('yes', 'true', 't', 'y', '1'):
@@ -187,6 +221,7 @@ def str2bool(v):
     else:
         raise argparse.ArgumentTypeError('Boolean value expected.')
 
+
 def findFiles(folder, pattern):
     """ Search recursively into a folder to find a patern match
     """
@@ -195,7 +230,7 @@ def findFiles(folder, pattern):
         zfile = zipfile.ZipFile(folder)
         for filename in zfile.namelist():
             if re.match(pattern, filename):
-                matches.append("/vsizip/"+os.path.join(folder, filename))
+                matches.append("/vsizip/" + os.path.join(folder, filename))
     else:
         for root, dirs, files in os.walk(folder):
             for file in files:
@@ -203,6 +238,7 @@ def findFiles(folder, pattern):
                     matches.append(os.path.join(root, file))
     return matches
 
+
 def read_product(inputPath, mission):
     """ Read the content of the input product folder
     and load the data information required for snow detection.
@@ -240,16 +276,16 @@ def read_product(inputPath, mission):
         conf_json["cloud"]["high_cloud_mask"] = params["high_cloud_mask"]
         conf_json["cloud"]["rf"] = params["rf"]
 
-        #Check if an optional mode is provided in the mission configuration
+        # Check if an optional mode is provided in the mission configuration
         # Use in case of SEN2COR to handle differences between maja and sen2cor encoding
         if 'mode' in params:
             conf_json["general"]["mode"] = params["mode"]
- 
-        
+
         return conf_json
     else:
         logging.error(inputPath + " doesn't exist.")
 
+
 def main():
     # Parse arguments
     parser = argparse.ArgumentParser(description='This script is used to \
@@ -272,7 +308,6 @@ def main():
     group_general.add_argument("-multi", type=float)
     group_general.add_argument("-target_resolution", type=float)
 
-
     group_inputs = parser.add_argument_group('inputs', 'input files')
     group_inputs.add_argument("-dem", help="dem file path, to use for processing the input product")
     group_inputs.add_argument("-cloud_mask", help="cloud mask file path")
@@ -296,13 +331,25 @@ def main():
     group_cloud.add_argument("-red_backtocloud", type=int)
     group_cloud.add_argument("-strict_cloud_mask", type=str2bool, help="true/false")
 
+    group_fsc = parser.add_argument_group('fsc', 'fractional snow cover parameters')
+    group_fsc.add_argument("-fsc", type=str,
+                           help="path to tree cover density file, automatically activates sets fsc: dofsc to true")
+    group_fsc.add_argument("-cosims_mode", action='store_true', help="CoSIMS mode : Generate CoSIMS formatted outputs.")
+
+    group_water_mask = parser.add_argument_group('water_mask', 'water mask parameters')
+    group_water_mask.add_argument("-water_mask_path", type=str, help="Path to a raster or a shapefile")
+    group_water_mask.add_argument("-water_mask_raster_value", type=int, action='append',
+                                  help="If the input water_mask_path is a raster, you can specify all the values corresponding " + \
+                                       "to water which are to be masked by repeating this optional argument -water_mask_raster_value value1 -water_mask_raster_value value2 etc... " + \
+                                       "If no values are specified, 1 will be used by default.")
+
     args = parser.parse_args()
 
     inputPath = os.path.abspath(args.inputPath)
     outputPath = os.path.abspath(args.outputPath)
 
     sentinel2Acronyms = ['S2', 'SENTINEL2', 'S2A', 'S2B']
-    
+
     # Test if it is a MAJA output products (generated with MAJA processor version XX)
     # FIXME: This detection based on directory substring detection is very week and error prone
     # FIXME: use a factory and detect by using xml metadata
@@ -406,12 +453,33 @@ def main():
             logging.error("No DEM found!")
             return 1
 
+        if args.fsc:
+            jsonData["fsc"]["dofsc"] = True
+            jsonData["fsc"]["tcd"] = os.path.abspath(args.fsc)
+            jsonData["fsc"]["cosims_mode"] = args.cosims_mode
+        else:
+            jsonData["fsc"]["dofsc"] = False
+
+        if args.water_mask_path is not None:
+            jsonData["water_mask"]["apply"] = True
+            suffix = args.water_mask_path.split('.')[-1].lower()
+            if suffix not in ['shp', 'tif']:
+                raise IOError('Input water_mask_path must either be a GeoTIFF raster (.tif) or a shapefile (.shp)')
+            jsonData["water_mask"]["water_mask_path"] = os.path.abspath(args.water_mask_path)
+            if args.water_mask_raster_value is None:
+                jsonData["water_mask"]["water_mask_raster_values"] = [1]
+            else:
+                jsonData["water_mask"]["water_mask_raster_values"] = args.water_mask_raster_value
+        else:
+            jsonData["water_mask"]["apply"] = False
+
         jsonFile = open(os.path.join(outputPath, "param_test.json"), "w")
         jsonFile.write(json.dumps(jsonData, indent=4))
         jsonFile.close()
 
+
 if __name__ == "__main__":
     # Set logging level and format.
-    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=\
+    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format= \
         '%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
     main()
diff --git a/app/build_snow_annual_map_json.py b/app/build_snow_annual_map_json.py
new file mode 100644
index 00000000..056cbbd2
--- /dev/null
+++ b/app/build_snow_annual_map_json.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import json
+import logging
+import argparse
+import os
+import sys
+
+snow_annual_map_json_template = {
+    "log": True,
+    "mode": "RUNTIME",
+    "use_densification": False,
+    "date_margin": 15,
+    "ram": 4096,
+    "nb_threads": 6,
+}
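+# Defaults above can be overridden through the command-line options parsed in
+# main() below.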
+
+
+def main():
+    # Parse arguments
+    parser = argparse.ArgumentParser(description='This script is used to \
+                                generate the snow annual map configuration json file.')
+    parser.add_argument("tile_id", help="The identifier of the tile corresponding \
+                                             to the input_products_list (mandatory)")
+    parser.add_argument("input_products_list", help="Input products list, containing the paths to homogeneous \
+                                                         snow products only on tile_id at same resolution \
+                                                         and size (mandatory)")
+    parser.add_argument("path_out", help="Path to output directory. (mandatory)")
+    parser.add_argument("date_start", help="Start of the date range for which we want to generate \
+                                            the snow_annual_map (DD/MM/YYYY) (mandatory)")
+    parser.add_argument("date_stop", help="Stop of the date range for which we want to generate \
+                                        the snow_annual_map (DD/MM/YYYY) (mandatory)")
+    parser.add_argument("--disable_log", help="Disable logging", action="store_true")
+    parser.add_argument("--log_stdout", help="Log output (std***.log). (optional, default is path_out/log_stdout.log)")
+    parser.add_argument("--log_stderr", help="Log error (std***.log). (optional, default is path_out/log_sterr.log)")
+    parser.add_argument("--debug", help="Enable debug mode", action="store_true")
+    parser.add_argument("--path_tmp", help="Path where to store temporary files, (optional, default is path_out/tmp)")
+    parser.add_argument("--date_margin", help="The margin outside the date range to use for better interpolation results \
+                                            (in days) (optional)")
+    parser.add_argument("--use_densification", help="Activate the densification using snow products from \
+                                                   heterogeneous sensors", action="store_false")
+    parser.add_argument("--densification_products_list", help="The densification list, containing the paths to \
+                                                             heterogenous snow products from heterogeneous sensors \
+                                                             (optional)")
+    parser.add_argument("--ram", help="Maximum number of RAM memory used by the program. (optional)")
+    parser.add_argument("--nb_threads", help="Maximum number of threads use by the program. (optional)")
+    parser.add_argument("--output_dates_filename", help="Output dates list. (optional)")
+
+    args = parser.parse_args()
+
+    # Check mandatory arguments
+    if args.tile_id is None:
+        logging.error("tile_id is a mandatory argument")
+        show_help()
+    if args.input_products_list is None:
+        logging.error("input_products_list is a mandatory argument")
+        show_help()
+    if args.path_out is None:
+        logging.error("path_out is a mandatory argument")
+        show_help()
+    if args.date_start is None:
+        logging.error("date_start is a mandatory argument")
+        show_help()
+    if args.date_stop is None:
+        logging.error("date_stop is a mandatory argument")
+        show_help()
+    if args.use_densification and args.densification_products_list is None:
+        logging.error("densification_products_list is a mandatory argument when use_densification is true")
+        show_help()
+
+    # retrieve template
+    jsonData = snow_annual_map_json_template
+
+    # set mandatory arguments
+    jsonData["tile_id"] = args.tile_id
+    jsonData["input_products_list"] = args.input_products_list.replace("[","").replace("]","").split(',')
+    jsonData["date_start"] = args.date_start
+    jsonData["date_stop"] = args.date_stop
+    jsonData["path_out"] = args.path_out
+    path_out = os.path.abspath(args.path_out)
+    if not os.path.exists(path_out):
+        logging.info("Create directory " + path_out + "...")
+        os.makedirs(path_out)
+
+    # overwrite template default values
+    if args.disable_log:
+        jsonData["log"] = False
+    if args.log_stdout is not None:
+        jsonData["log_stdout"] = args.log_stdout
+    if args.log_stderr is not None:
+        jsonData["log_stderr"] = args.log_stderr
+    if args.debug:
+        jsonData["mode"] = "DEBUG"
+    if args.path_tmp is not None:
+        jsonData["path_tmp"] = args.path_tmp
+    if args.date_margin is not None:
+        jsonData["date_margin"] = int(args.date_margin)
+    if args.use_densification:
+        jsonData["use_densification"] = True
+    if args.densification_products_list is not None:
+        jsonData["densification_products_list"] = args.densification_products_list.replace("[","").replace("]","").split(',')
+    if args.ram is not None:
+        jsonData["ram"] = args.ram
+    if args.nb_threads is not None:
+        jsonData["nb_threads"] = args.nb_threads
+    if args.output_dates_filename is not None:
+        jsonData["output_dates_filename"] = args.output_dates_filename
+
+    jsonFile = open(os.path.join(path_out, "snow_annual_map.json"), "w")
+    jsonFile.write(json.dumps(jsonData, indent=4))
+    jsonFile.close()
+
+
+def show_help():
+    print("Usage: python build_snow_annual_map_json.py tile_id input_products_list path_out date_start date_stop [options]")
+    sys.exit(1)
+
+
+if __name__ == "__main__":
+    # Set logging level and format.
+    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format= \
+        '%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+    main()
diff --git a/app/run_cloud_removal.py b/app/run_cloud_removal.py
index 74f4f335..845146bc 100644
--- a/app/run_cloud_removal.py
+++ b/app/run_cloud_removal.py
@@ -1,21 +1,40 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys
 import os.path as op
 import json
+import logging
 from s2snow import cloud_removal
-
-VERSION="0.1"
+from s2snow.version import VERSION
 
 def show_help():
     """Show help of the run_cloud_removal script"""
-    print "Usage: python run_cloud_removal.py param.json"
-    print "python run_cloud_removal.py version to show version"
-    print "python run_cloud_removal.py help to show help"
+    print("Usage: python run_cloud_removal.py param.json")
+    print("python run_cloud_removal.py version to show version")
+    print("python run_cloud_removal.py help to show help")
 
 def show_version():
-    print VERSION
+    print(VERSION)
 
 #----------------- MAIN ---------------------------------------------------
 
@@ -35,8 +54,16 @@ def main(argv):
     if log:
         sys.stdout = open(op.join(pout, "stdout.log"), 'w')
         sys.stderr = open(op.join(pout, "stderr.log"), 'w')
-    
+
+    # Set logging level and format.
+    logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
+                        format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+    logging.info("Start run_cloud_removal.py")
+    logging.info("Input args = " + json_file)
+
+    # Run the cloud removal
     cloud_removal.run(data)
+    logging.info("End run_cloud_removal.py")
       
 if __name__ == "__main__":
     if len(sys.argv) != 2 :
diff --git a/app/run_snow_annual_map.py b/app/run_snow_annual_map.py
index 26a52c6f..975da68e 100644
--- a/app/run_snow_annual_map.py
+++ b/app/run_snow_annual_map.py
@@ -1,63 +1,241 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import shutil
 import sys
 import os.path as op
 import json
 import logging
-from s2snow import snow_annual_map_evaluation
+from datetime import timedelta
+
+from lxml import etree
 
-VERSION = "0.1.0"
+from s2snow.compute_NOBS import compute_NOBS
+from s2snow.compute_SOD_SMOD import compute_SOD_SMOD
+from s2snow.snow_annual_map import load_products, load_densification_products, \
+    compute_output_dates, merge_product_at_same_date, convert_snow_masks_into_binary_snow_masks, \
+    convert_snow_masks_into_binary_cloud_masks, compute_CCD, compute_SCD
+from s2snow.utils import str_to_datetime, datetime_to_str, write_list_to_file
+from s2snow.version import VERSION
 
 
 def show_help():
     """Show help of the run_snow_annual_map script"""
-    print "This script is used to run the snow annual map " \
-          + "module that compute snow coverage onto a given date range"
-    print "Usage: python run_snow_annual_map.py param.json"
-    print "python run_snow_annual_map.py version to show version"
-    print "python run_snow_annual_map.py help to show help"
+    print("This script is used to run the snow annual map " \
+          + "module that compute snow coverage onto a given date range")
+    print("Usage: python run_snow_annual_map.py param.json")
+    print("python run_snow_annual_map.py version to show version")
+    print("python run_snow_annual_map.py help to show help")
 
 
 def show_version():
-    print VERSION
-
-# ----------------- MAIN ---------------------------------------------------
+    print("LIS Version : {}".format(VERSION))
 
 
+# ----------------- MAIN ---------------------------------------------------
 def main(argv):
     """ main script of snow extraction procedure"""
-
     json_file = argv[1]
 
-    # Load json_file from json files
-    with open(json_file) as json_data_file:
-        data = json.load(json_data_file)
-
-    pout = data.get("path_out")
-    log = data.get("log", True)
-
-    if log:
-        sys.stdout = open(data.get('log_stdout', op.join(pout, "stdout.log")), 'w')
-        sys.stderr = open(data.get('log_stderr', op.join(pout, "stderr.log")), 'w')
-
     # Set logging level and format.
     logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
-        format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+                        format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
     logging.info("Start run_snow_annual_map.py")
-    logging.info("Input args = " + json_file)
+    logging.info("Input args = {}".format(json_file))
+
+    logging.info("Load parameters")
+    # ----------------------------------------------------------------------------------------
+    # Set parameters
+    # ----------------------------------------------------------------------------------------
+    with open(json_file) as json_data_file:
+        params = json.load(json_data_file)
+
+    mode = params.get("mode", "RUNTIME")
+    if mode == "DEBUG":
+        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, \
+                            format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+        logging.debug("Debug is enabled")
+
+    tile_id = params.get("tile_id")
+    date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
+    date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
+    date_margin = timedelta(days=params.get("date_margin", 0))
+    logging.debug("Date margin: {}".format(date_margin))
+    output_dir = params.get("path_out")
+    if not os.path.exists(output_dir):
+        logging.info("Create directory {} ...".format(output_dir))
+        os.makedirs(output_dir)
+
+    processing_id = str(tile_id + "_" + datetime_to_str(date_start) + "_" + datetime_to_str(date_stop))
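+    # processing_id concatenates the tile and the date range,
+    # e.g. "T31TCH_<date_start>_<date_stop>" with dates rendered by datetime_to_str.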
+    path_out = op.join(output_dir, processing_id)
+    logging.debug("Path_out is: {}", path_out)
+
+    path_tmp = str(params.get("path_tmp", os.path.join(path_out,"tmp")))
+    if not os.path.exists(path_tmp):
+        logging.info("Create directory {} ...".format(path_tmp))
+        os.makedirs(path_tmp)
+    logging.debug("Path_tmp is: {}".format(path_tmp))
+
+    input_products_list = params.get("input_products_list", [])
+    output_dates_file_path = params.get("output_dates_file_path")
+    if not output_dates_file_path:
+        output_dates_file_path = op.join(path_tmp, "output_dates.txt")
+
+    ram = params.get("ram", 4096)
+    logging.debug("Ram is: {}".format(ram))
+
+    # Set maximum ITK threads
+    os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(params.get("nb_threads", 1))
+
+    use_densification = params.get("use_densification", False)
+    if use_densification:
+        densification_path_list = params.get("densification_products_list", [])
+
+    log = params.get("log", True)
+    if log:
+        sys.stdout = open(params.get("log_stdout", op.join(path_out, "stdout.log")), 'w')
+        sys.stderr = open(params.get("log_stderr", op.join(path_out, "stderr.log")), 'w')
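+    # Note: logging was configured above with the original sys.stdout, so log
+    # records keep going to the console after this redirection; only bare
+    # print() calls and uncaught tracebacks end up in the log files.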
+
+    # ----------------------------------------------------------------------------------------
+    # Search snow products
+    # ----------------------------------------------------------------------------------------
+    logging.info("Search snow products")
+    product_dict = load_products(date_start, date_stop, date_margin, input_products_list, tile_id, None)
+
+    # Stop processing if none of the input products could be loaded
+    if not product_dict:
+        logging.error("Empty snow product list!")
+        return
+
+    # Load the additional products used to densify the time series
+    if use_densification:
+        load_densification_products(date_margin, date_start, date_stop, densification_path_list, path_tmp, product_dict,
+                                    ram)
+
+    # ----------------------------------------------------------------------------------------
+    # Sort products by acquisition date, retrieve input and output dates
+    # ----------------------------------------------------------------------------------------
+    logging.info("Sort products by acquisition date")
+    # re-order products according to acquisition date
+    input_dates_file_path = op.join(path_tmp, "input_dates.txt")
+
+    logging.info("Retrieve input dates")
+    input_dates = sorted(product_dict.keys())
+    write_list_to_file(input_dates_file_path, input_dates)
+
+    # compute or retrieve the output dates
+    logging.info("Retrieve output dates")
+    output_dates = compute_output_dates(date_start, date_stop, output_dates_file_path)
+
+    # ----------------------------------------------------------------------------------------
+    # Merge products at the same date
+    # ----------------------------------------------------------------------------------------
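+    # Merging handles the case where several products share an acquisition
+    # date (e.g. adjacent orbits over the same tile), so that the time series
+    # holds a single mask per date (assumption based on the function name).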
+    merged_product_dict = merge_product_at_same_date(path_tmp, product_dict, ram)
+
+    # ----------------------------------------------------------------------------------------
+    # Convert snow masks into binary masks
+    # ----------------------------------------------------------------------------------------
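+    # The LIS snow masks are coded 0 (no-snow), 100 (snow), 205 (cloud) and
+    # 254 (no-data); the two helpers below are expected to threshold them
+    # into 0/1 rasters for snow and cloud respectively.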
+    binary_snow_mask_list = convert_snow_masks_into_binary_snow_masks(path_tmp, ram, merged_product_dict)
+    binary_cloud_mask_list = convert_snow_masks_into_binary_cloud_masks(path_tmp, ram, merged_product_dict)
+
+    # ----------------------------------------------------------------------------------------
+    # Compute Cloud Coverage Duration "CLOUD_OCCURENCE" and multitemp_cloud_mask
+    # ----------------------------------------------------------------------------------------
+    logging.debug("Prepare call to compute_CCD")
+    cloud_occurence = op.join(path_tmp, "CLOUD_OCCURENCE_" + processing_id + ".tif")
+    multitemp_cloud_vrt = op.join(path_tmp, "multitemp_cloud_mask.vrt")
+
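+    # CLOUD_OCCURENCE presumably counts, per pixel, the number of cloudy
+    # observations, while the VRT stacks the binary cloud masks into a GDAL
+    # virtual multitemporal cube (assumption based on the output names).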
+    compute_CCD(binary_cloud_mask_list, cloud_occurence, multitemp_cloud_vrt, ram)
+
+    # ----------------------------------------------------------------------------------------
+    # Compute Snow Coverage Duration "SCD" and multitemp_snow_mask
+    # ----------------------------------------------------------------------------------------
+    logging.debug("Prepare call to compute_SCD")
+    multitemp_snow_vrt = op.join(path_tmp, "multitemp_snow_mask.vrt")
+    snow_coverage_duration = op.join(path_tmp, "SCD_" + processing_id + ".tif")
+    gapfilled_timeserie = op.join(path_tmp, "DAILY_SNOW_MASKS_" + processing_id + ".tif")
+    multitemp_snow100 = op.join(path_tmp, "multitemp_snow100.tif")
+    multitemp_snow100_gapfilled = op.join(path_tmp, "multitemp_snow100_gapfilled.tif")
+
+    compute_SCD(binary_snow_mask_list, multitemp_cloud_vrt, input_dates_file_path, output_dates_file_path, output_dates,
+                snow_coverage_duration, multitemp_snow_vrt, gapfilled_timeserie, multitemp_snow100,
+                multitemp_snow100_gapfilled, ram)
+
+    # run compute SOD_SMOD
+    logging.debug("Prepare call to compute_SOD_SMOD.py")
+    sod_file = op.join(path_tmp, "SOD_{}.tif".format(processing_id))
+    smod_file = op.join(path_tmp, "SMOD_{}.tif".format(processing_id))
+
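+    # SOD and SMOD are understood as the snow onset and snow melt-out dates,
+    # i.e. the bounds of the longest snow period in the gap-filled daily
+    # time series, expressed as indices into the output dates (assumption).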
+    compute_SOD_SMOD(gapfilled_timeserie, sod_file=sod_file, smod_file=smod_file)
+
+    # run compute NOBS
+    logging.debug("Prepare call to compute_NOBS.py")
+    nobs_output_file = op.join(path_tmp, "NOBS_{}.tif".format(processing_id))
+
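+    # NOBS is understood as the per-pixel number of clear (cloud-free)
+    # observations, derived from the multitemporal cloud mask (assumption).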
+    compute_NOBS(multitemp_cloud_vrt, output_file=nobs_output_file)
+
+    # create metadata
+    logging.info("Create metadata")
+    create_snow_annual_map_metadata(input_products_list + densification_path_list, path_out)
+
+    # Move outputs into path_out
+    logging.info("Move outputs into {}.".format(path_out))
+    shutil.copy2(input_dates_file_path, path_out)
+    shutil.copy2(output_dates_file_path, path_out)
+    shutil.copy2(multitemp_cloud_vrt, path_out)
+    shutil.copy2(cloud_occurence, path_out)
+    shutil.copy2(snow_coverage_duration, path_out)
+    shutil.copy2(gapfilled_timeserie, path_out)
+    shutil.copy2(nobs_output_file, path_out)
+    shutil.copy2(smod_file, path_out)
+    shutil.copy2(sod_file, path_out)
+
+    os.remove(input_dates_file_path)
+    os.remove(output_dates_file_path)
+    os.remove(multitemp_cloud_vrt)
+    os.remove(cloud_occurence)
+    os.remove(snow_coverage_duration)
+    os.remove(gapfilled_timeserie)
+    os.remove(nobs_output_file)
+    os.remove(smod_file)
+    os.remove(sod_file)
 
-    # Run the snow detector
-    snow_annual_map_evaluation_app = snow_annual_map_evaluation.snow_annual_map_evaluation(data)
-    snow_annual_map_evaluation_app.run()
+    logging.info("End run_snow_annual_map.py")
 
-    if data.get("run_comparison_evaluation", False):
-        snow_annual_map_evaluation_app.run_evaluation()
 
-    if data.get("run_modis_comparison", False):
-        snow_annual_map_evaluation_app.compare_modis()
+def create_snow_annual_map_metadata(product_list, path_out):
+    """Compute and create the content of the product metadata file."""
+    logging.info("Start metadata computation.")
+    metadata_path = op.join(path_out, "LIS_METADATA.XML")
+    logging.info("Metadata file: {}".format(metadata_path))
+    root = etree.Element("INPUTS_LIST")
+    for product_path in product_list:
+        logging.debug("Product path: {}".format(product_path))
+        product_name = op.basename(str(product_path))
+        etree.SubElement(root, "PRODUCT_NAME").text = product_name
+    et = etree.ElementTree(root)
+    et.write(metadata_path, pretty_print=True)
+    logging.info("End metadata computation.")
 
-    logging.info("End run_snow_annual_map.py")
 
 if __name__ == "__main__":
     if len(sys.argv) != 2:
diff --git a/app/run_snow_detector.py b/app/run_snow_detector.py
index a21450bf..570c456d 100644
--- a/app/run_snow_detector.py
+++ b/app/run_snow_detector.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys
 import os.path as op
@@ -11,15 +30,16 @@ from s2snow.version import VERSION
 
 def show_help():
     """Show help of the run_snow_detector script"""
-    print "This script is used to run the snow detector module that compute snow mask" \
-          + " using OTB applications on Spot/LandSat/Sentinel-2 products from theia platform"
-    print "Usage: python run_snow_detector.py param.json"
-    print "python run_snow_detector.py version to show version"
-    print "python run_snow_detector.py help to show help"
+    print("This script is used to run the snow detector module that compute snow mask" \
+          + " using OTB applications on Spot/LandSat/Sentinel-2 products from theia platform")
+    print("Usage: python run_snow_detector.py param.json")
+    print("python run_snow_detector.py version to show version")
+    print("python run_snow_detector.py help to show help")
 
 
 def show_version():
-    print VERSION
+    print(VERSION)
+
 
 # ----------------- MAIN ---------------------------------------------------
 
@@ -43,7 +63,7 @@ def main(argv):
 
     # Set logging level and format.
     logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
-        format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+                        format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
     logging.info("Start run_snow_detector.py")
     logging.info("Input args = " + json_file)
 
@@ -52,6 +72,7 @@ def main(argv):
     snow_detector_app.detect_snow(2)
     logging.info("End run_snow_detector.py")
 
+
 if __name__ == "__main__":
     if len(sys.argv) != 2:
         show_help()
diff --git a/doc/atbd/ATBD_CES-Neige.tex b/doc/atbd/ATBD_CES-Neige.tex
index 7a3159c0..da613be9 100644
--- a/doc/atbd/ATBD_CES-Neige.tex
+++ b/doc/atbd/ATBD_CES-Neige.tex
@@ -1,465 +1,469 @@
+% Created 2016-05-31 mar. 12:08
+\documentclass[a4paper]{article}
+\usepackage[utf8]{inputenc}
+\usepackage[T1]{fontenc}
+ \usepackage{lmodern}
+\usepackage{fixltx2e}
+\usepackage{graphicx}
+\usepackage{longtable}
+\usepackage{float}
+\usepackage{wrapfig}
+\usepackage{rotating}
+\usepackage[normalem]{ulem}
+\usepackage{amsmath}
+\usepackage{textcomp}
+\usepackage{marvosym}
+\usepackage{wasysym}
+\usepackage{amssymb}
+\usepackage{hyperref}
+\hypersetup{
+colorlinks=true,
+linkcolor=blue,
+pdfauthor=Simon Gascoin,
+pdftitle=Algorithm theoretical basis documentation for an operational snow cover product from Sentinel-2 and Landsat-8 data (Let-it-snow)}
+\tolerance=1000
+\usepackage{amsfonts,bm}
+\usepackage{color}
+\usepackage[usenames,dvipsnames]{xcolor}
+\usepackage[margin=2.5cm,a4paper]{geometry}
+\usepackage{enumitem}
+\usepackage[]{algorithm2e}
+\usepackage{fancyhdr}
+\usepackage{tabularx}
+%\usepackage{listings}
+\usepackage{minted}
+% \lstset{language=Matlab}
+
+\usepackage{tikz}
+\usetikzlibrary{shapes,arrows,decorations.markings,shapes,fit}
+
+
+\renewcommand{\maketitle}{}
+\date{\today}
+% \title{ATBD CES surface enneigée}
+% \hypersetup{
+%   pdfkeywords={},
+%   pdfsubject={},
+%   pdfcreator={Emacs 24.3.1 (Org mode 8.2.4)}}
+\begin{document}
+
+\maketitle
+\pagestyle{fancy}
+% \providecommand{\alert}[1]{\textbf{#1}}
+% \setlist[itemize,1]{label=$\diamond$}
+% \setlist[itemize,2]{label=$\ast$}
+% \setlist[itemize,3]{label=$\star$}
+% \setlist[itemize,4]{label=$\bullet$}
+% \setlist[itemize,5]{label=$\circ$}
+% \setlist[itemize,6]{label=$-$}
+% \setlist[itemize,7]{label=$\cdot$}
+% \setlist[itemize,8]{label=$\cdot$}
+% \setlist[itemize,9]{label=$\cdot$}
+% \renewlist{itemize}{itemize}{9}
+\lhead[]{\includegraphics[width=0.1\textwidth]{./images/logo_cesbio.png}}
+\rhead[]{\thepage}
+% \cfoot{\textcolor{PineGreen}{copyright?}}
+
+\input{./page_titre.tex}
+
+\begin{abstract}
+ 
+This document describes the algorithm of the Let-it-snow (LIS) processing chain to generate the snow cover extent product for the Theia land data center. The algorithm takes as input a Sentinel-2 or Landsat-8 image of surface reflectance corrected for atmospheric and slope effects, the associated cloud mask (level 2A product provided by Theia) and a digital elevation model. The output is a single-band raster at the same resolution as the input image giving the snow presence or absence, and a cloud mask. The output cloud mask is different from the input cloud mask because some pixels can be reclassified as snow or no-snow by the algorithm.
+
+The snow detection algorithm works in two passes: first the most evident snow cover is detected using a set of conservative thresholds, then these snow pixels are used to determine the lowest elevation of the snow cover. A second pass is performed for the pixels above this elevation with a new set of less conservative thresholds.
+
+The processing chain also generates a vectorized version of the snow mask after passes 1 and 2, and a color composite that is overlaid by these polygons. These secondary products are intended for expert validation purposes.
+
+\end{abstract}
+
+\newpage
+\tableofcontents
+\newpage
+% 
+\section{Introduction}\label{par:intro}
+
+\subsection{Motivation}
+
+The snow cover is a key factor of many ecological, climatological and hydrological processes in cold regions. The monitoring of the snow cover is of particular societal relevance in mountain regions since the seasonal snow melt modifies the soil moisture, groundwater recharge and river flow, often providing critical water resources to downstream areas\footnote{Barnett T. P., Adam J. C. and Lettenmaier D. P., Potential impacts of a warming climate on water availability in snow-dominated regions, Nature 438 (7066), 2005.}. 
+
+The snow cover is one of the 50 Essential Climate Variables (ECVs) that were defined by the Global Observing System for Climate (GCOS)\footnote{GCOS Essential Climate Variables  \url{http://www.wmo.int/pages/prog/gcos/index.php?name=EssentialClimateVariables}} in accordance with the Committee on Earth Observation Satellites (CEOS) agencies\footnote{Global Climate Observing System (GCOS) Implementation Plan \url{http://remotesensing.usgs.gov/ecv/document/gcos-138.pdf}} to support the work of the UNFCCC and the IPCC. 
+
+The \textit{snow cover extent} or \textit{snow cover area} is the extent of the snow cover on the land surface. A snow cover extent product is typically formatted as a georeferenced raster image whose pixel values indicate if snow is present or absent in the pixel.
+
+Other major satellite snow products include: (i) the snow cover fraction, (ii) the snow albedo and (iii) the snow water equivalent. The snow cover fraction and albedo are generated from optical observations, while the snow water equivalent is retrieved using passive or active microwave. The snow water equivalent is potentially the most useful product since it directly gives the amount of accumulated water (snow mass); however, current products are unsuitable to address user needs for many applications and places because they are available at coarse scale (25~km) with some limitations in the retrievals. This is due to the lack of observations in the wavelengths that are adapted to snow water equivalent sensing. As of today the snow cover extent product is still the most widely used for hydrological and climatological applications. This will certainly remain true for the next decade since there is no planned mission to retrieve the snow water equivalent or the snow depth at global scale\footnote{This was the objective of the CoreH2O mission but the project was not selected by the ESA for the Earth Explorer-7 program in 2015.}.
+
+Current snow cover area products are derived from low to mid-resolution optical observations (e.g. AVHRR, VEGETATION, MODIS) but their spatial resolution (1~km to 250~m) is too coarse for various applications, in particular in mountain regions where the topography causes large spatial variability of the snow cover at decametric scales. High resolution snow cover maps can be generated from Landsat images but the temporal revisit of 16~days is not sufficient for snow cover monitoring during the melt season. The ESA Sentinel-2 mission offers the unique opportunity to study the snow cover extent dynamics at 20 m resolution with a 5 day revisit time. If combined with Landsat the temporal resolution can be further increased. Both Sentinel-2 and Landsat missions are global missions that are expected to run over long periods, allowing the development of operational products and services\footnote{Drusch M., Del Bello U., Carlier S. et al., Sentinel-2: ESA's Optical High-Resolution Mission for GMES Operational Services, Remote Sensing of Environment 120, 2012.}.
+
+\subsection{Objective}
+
+The objective of this algorithm is to generate a snow cover extent product from Landsat-8 and Sentinel-2 images at high resolution (30~m for Landsat-8, 20~m for Sentinel-2). The main requirements are:
+\begin{itemize}
+ \item The algorithm should be efficient to allow the processing of large areas (10$^4$ km$^2$) with a reasonable computation cost. 
+ \item It should be robust to seasonal and spatial variability of the snow cover and land surface properties. 
+ \item It should maximize the number of pixels that are classified as snow or no-snow.
+ \item It is always preferable to falsely classify a pixel as cloud than to falsely classify it as snow or no-snow. 
+%  \item It should not depend on uncertain external data (like meteorological data) to limit the risk of a product discontinuation.
+\end{itemize}
+
+\subsection{Development}
+
+The algorithm prototype was developed by Simon Gascoin with insights from
+Olivier Hagolle in June 2015.  The snow detection function and a script to run
+this function with an example are given in appendices \ref{par:castest} and
+\ref{par:s2snow} as formatted documents that include the original Matlab code,
+comments, and output. The LIS chain was designed to work on any high resolution
+multi-spectral images from satellite sensors that include at least a channel in
+the visible spectrum and a channel near 1.5 µm (typically referred to as
+mid-infrared or ``MIR''). This initial code was ported to Python 2.7 and C++ by
+Manuel Grizonnet in order to make it scalable to large images using Orfeo
+Toolbox and GDAL.
+
+LIS currently supports SPOT-4, SPOT-5, Landsat-8 and
+Sentinel-2 level 2A products.
+
+The LIS code, installation documentation and configuration file examples are
+available in the Cesbio's gitlab:
+\url{http://tully.ups-tlse.fr/grizonnet/let-it-snow}.
+
+The list of all contributors is available in the LIS source in the file README.md.
+
+\subsection{Limitations}
+
+The product is based on optical observations; therefore it is not adapted to the detection of the snow cover:
+
+\begin{itemize}
+ \item in polar regions when illumination is insufficient;
+ \item in dense forest areas where the ground is obstructed by the canopy, like in evergreen conifer forests.
+\end{itemize}
+ 
+The algorithm may also fail to detect the snow cover in steep shaded slopes if the solar elevation is very low (typically below 20°). This can occur in mid-latitude areas in winter. In this case the slope correction in the L2A product is generally not applied, as indicated in the L2A mask.
+
+The algorithm can only reduce the number of cloud pixels from the original L2A cloud mask. If a cloud was not detected by the previous cloud mask algorithm (MACCS) then it can only be classified as snow or no-snow. 
+
+The algorithm output depends on the scale of the input because the snowline elevation is computed at the scale of the image. In the case of the level 2A products this is 110~km by 110~km. The underlying assumption is that a large altitudinal variation of the snowline elevation is not likely at such a scale. Our impression is that this assumption is supported by regional analyses of the snowline in many mountain ranges, but this could be further assessed using mid-resolution snow products\footnote{Gascoin, S., Hagolle, O., Huc, M., Jarlan, L., Dejoux, J.-F., Szczypta, C., Marti, R., and Sánchez, R.: A snow cover climatology for the Pyrenees from MODIS snow products, Hydrol. Earth Syst. Sci., 19, 2337-2351}\footnote{Krajčí P., Holko L. and Parajka J., Variability of snow line elevation, snow cover area and depletion in the main Slovak basins in winters 2001–2014, Journal of Hydrology and Hydromechanics 64(1), 2016}.
+
+\section{Algorithm}\label{par:algo}
+
+\subsection{Inputs}\label{par:inputs}
+\begin{itemize}
+ \item From a level 2A product:
+ \begin{itemize}
+  \item the cloud and cloud shadow mask (referred to as ``L2A cloud mask'' in the following),
+  \item the green, red and MIR bands from the flat surface reflectance product (Tab.~\ref{tab:bands}). These images are corrected for atmospheric and terrain slope effects. The slope correction is important in mountain regions since it enables the use of the same detection thresholds whatever the sun-slope geometry. 
+  \end{itemize}
+  \item A digital elevation model (DEM). The DEM is resampled from the SRTM seamless DEM\footnote{Jarvis A., H.I. Reuter, A.  Nelson, E. Guevara, 2008, Hole-filled  seamless SRTM data V4, International  Centre for Tropical  Agriculture (CIAT), available  from \url{http://srtm.csi.cgiar.org}.}. 
+  \item The parameters of the algorithm: \textcolor{red}{$r_f$, $d_z$, $r_B$, $r_D$, $n_1$, $n_2$, $r_1$, $r_2$, $f_s$, $f_t$} (written \textcolor{red}{in red} throughout the document).
+  
+\end{itemize}
+
+\begin{table}[h]
+\begin{center}
+\begin{tabular}{|l|lll|}
+\hline
+  & \multicolumn{3}{|c|}{Band}\\
+\hline
+Sensor & Green & Red & MIR\\
+\hline
+SPOT-4 HRV & 1 (20 m, 0.55 µm) & 2 (20 m, 0.65 µm) & 4 (20~m, 1.6 µm)\\
+SPOT-5 HRG & 1 (10 m, 0.55 µm) & 2 (10 m, 0.65 µm) & 4 (10~m, 1.6 µm)\\
+Sentinel-2 MSI & 2 (10 m, 0.56 µm) & 3 (10 m, 0.66 µm) & 5 (20 m, 1.6 µm)\\
+Landsat-8 OLI & 3 (30 m, 0.56 µm) & 4 (30 m, 0.65 µm) & 6 (30 m, 1.6 µm)\\
+\hline
+\end{tabular}
+\end{center}
+\caption{Index of the spectral band in the L2A flat surface reflectance products used by LIS. The spatial resolution and the wavelength of the band center are also indicated in parentheses.}
+\label{tab:bands}
+\end{table}
+
+\subsection{Outputs}\label{par:outputs}
+
+The main output is a raster image (*SEB.TIF) of the snow and cloud mask. It has the same projection and extent as the initial L2A product and the same resolution as the MIR band, i.e. 20~m for Sentinel-2 and SPOT-4, 30~m for Landsat-8 (Tab.~\ref{tab:bands}). It is coded as follows:
+\begin{itemize}
+ \item 0: no-snow
+ \item 100: snow
+ \item 205: cloud including cloud shadow
+ \item 254: no data
+\end{itemize}
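+
+For illustration, this coding can be exploited directly with the GDAL and
+NumPy Python packages; the sketch below (the file name is a placeholder)
+computes the snow and cloud fractions over the observed pixels:
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+import numpy as np
+from osgeo import gdal
+
+band = gdal.Open("SENTINEL2A_SEB.TIF").ReadAsArray()  # placeholder file name
+valid = band != 254                                   # drop no-data pixels
+snow_fraction = np.mean(band[valid] == 100)
+cloud_fraction = np.mean(band[valid] == 205)
+print("snow: {:.1%}, cloud: {:.1%}".format(snow_fraction, cloud_fraction))
+\end{minted}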
+
+The same data are made available as polygons (ESRI Shapefile format) of the cloud and snow cover extent (*SEB\_VEC*). Two fields of information are embedded in this file:
+\begin{itemize}
+  \item DN:
+  \begin{itemize}
+    \item 0: no-snow
+    \item 100: snow
+    \item 205: cloud including cloud shadow
+    \item 254: no data
+  \end{itemize}
+  \item field:
+  \begin{itemize}
+    \item no-snow
+    \item snow
+    \item cloud
+    \item no-data
+  \end{itemize}
+\end{itemize}
+
+The other output files are mainly useful for expert evaluation and troubleshooting:
+\begin{itemize}
+ \item an RGB color composite image of bands MIR/red/green also showing the snow and cloud mask boundaries (*COMPO.TIF);
+ \item a bit-encoded mask of snow and clouds (*SEB\_ALL.TIF), coded as follows (see the decoding sketch after this list):
+ \begin{itemize}
+ \item bit 1: snow (pass 1)
+ \item bit 2: snow (pass 2)
+ \item bit 3: clouds (pass 1)
+ \item bit 4: clouds (pass 2)
+ \item bit 5: clouds (initial all cloud)
+ \item bit 6: slope flag (optional bad slope correction flag)
+ \end{itemize}
+ \item a metadata file (*METADATA.XML).
+\end{itemize}
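+
+A decoding sketch for *SEB\_ALL.TIF (illustrative only; the file name is a
+placeholder and bit 1 is assumed to be the least significant bit):
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+from osgeo import gdal
+
+seb_all = gdal.Open("LIS_PRODUCT_SEB_ALL.TIF").ReadAsArray()
+snow_pass1  = (seb_all & 0b000001) > 0   # bit 1: snow (pass 1)
+snow_pass2  = (seb_all & 0b000010) > 0   # bit 2: snow (pass 2)
+cloud_pass1 = (seb_all & 0b000100) > 0   # bit 3: clouds (pass 1)
+cloud_pass2 = (seb_all & 0b001000) > 0   # bit 4: clouds (pass 2)
+cloud_all   = (seb_all & 0b010000) > 0   # bit 5: clouds (initial all cloud)
+slope_flag  = (seb_all & 0b100000) > 0   # bit 6: bad slope correction flag
+\end{minted}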
+
+\subsection{Pre-processing}
+
+In the case of Sentinel-2, the red and green bands are first resampled with the cubic method to a pixel size of 20~m by 20~m to match the resolution of the MIR band.
+
+The DEM is also resampled to the resolution of the target product (30~m or 20~m, see Sect.~\ref{par:outputs}) using the cubic spline method that is implemented in the GDAL library.
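+
+The same resampling operations can be reproduced with the GDAL Python
+bindings (a sketch with placeholder file names, not the LIS implementation
+itself; the Sentinel-2 case with a 20~m target grid is assumed):
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+from osgeo import gdal
+
+# green/red bands: cubic resampling from 10 m to the 20 m MIR grid
+gdal.Warp("red_20m.tif", "red_10m.tif",
+          xRes=20, yRes=20, resampleAlg="cubic")
+# DEM: cubic spline resampling to the target product resolution
+gdal.Warp("dem_20m.tif", "srtm_dem.tif",
+          xRes=20, yRes=20, resampleAlg="cubicspline")
+\end{minted}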
+
+\subsection{Snow detection}\label{par:snowdetec}
+
+The snow detection is based on the Normalized Difference Snow Index (NDSI) and the reflectance in the red band. The NDSI is defined as\footnote{Dozier, J.: Spectral signature of alpine snow cover from the Landsat Thematic Mapper, Remote sensing of Environment 28, 9–22, 1989}:
+
+\begin{equation}
+\mathrm{NDSI} = \frac{\rho_\mathrm{green}-\rho_\mathrm{MIR}}{\rho_\mathrm{green}+\rho_\mathrm{MIR}}
+\end{equation}
+where $\rho_\mathrm{green}$ (resp. $\rho_\mathrm{MIR}$) is the slope-corrected surface reflectance in the green band (resp. MIR at 1.6~$\mu$m). The NDSI is based on the fact that only snow surfaces are very bright in the visible but dark in the shortwave infrared. Some lake pixels may also have a high NDSI value so we add a criterion on the red reflectance to remove these. A pixel is classified as snow if the two following conditions are fulfilled:
+\begin{itemize}
+ \item $\mathrm{NDSI} > n_i$,
+ \item $\rho_\mathrm{red} > r_i$
+\end{itemize}
+where $n_i$ and $r_i$ are two parameters with $i=\{1,2\}$. Otherwise the pixel is marked as no-snow. 
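+
+In NumPy terms, the snow test reads as follows (a minimal sketch; the
+function name is hypothetical and \texttt{green}, \texttt{red} and
+\texttt{mir} are reflectance arrays between 0 and 1):
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+import numpy as np
+
+def snow_test(green, red, mir, ndsi_min, red_min):
+    """Pixel-wise snow test: NDSI and red reflectance thresholds."""
+    ndsi = (green - mir) / (green + mir)
+    return (ndsi > ndsi_min) & (red > red_min)
+
+# toy 2x2 reflectance arrays; pass 1 uses the defaults n1=0.4, r1=0.2
+green = np.array([[0.8, 0.1], [0.7, 0.2]])
+red = np.array([[0.7, 0.1], [0.6, 0.2]])
+mir = np.array([[0.1, 0.1], [0.1, 0.2]])
+print(snow_test(green, red, mir, 0.4, 0.2))
+\end{minted}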
+
+
+\subsection{Snowline elevation}
+
+The snow detection (Sect.~\ref{par:snowdetec}) is performed a first time using the thresholds \textcolor{red}{$n_1$} and \textcolor{red}{$r_1$}. These thresholds are set conservatively to minimize false snow detections. As a consequence, many snow covered areas are not detected. However, this pass 1 enables the estimation of a minimum snow cover elevation $z_s$. For that purpose the DEM is used to segment the image into elevation bands of height \textcolor{red}{$d_z$}. The fraction of the cloud-free area of each band that is covered by snow is computed. We find the lowest elevation band $b$ at which the snow cover fraction is greater than \textcolor{red}{$f_s$}. Then, $z_s$ is defined as the lower edge of the elevation band that is two elevation bands below band $b$. The snow cover fraction in each elevation band is determined using the pixels that are not marked as cloud in the pass 1 cloud mask (Sect.~\ref{par:cloud}). To ensure that $z_s$ is computed with a statistically significant sample of pixels, the snowline calculation is not activated if the total fraction of snow pixels in the image is lower than $f_t$. A detailed example of the determination of $z_s$ is given in appendix~\ref{par:castest}.
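+
+The determination of $z_s$ can be sketched as follows (illustrative only;
+the function name is hypothetical, \texttt{dem}, \texttt{snow} and
+\texttt{clear} are arrays of the same shape, and the additional check on
+the minimum clear fraction $fc_t$ is omitted):
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+import numpy as np
+
+def snowline_elevation(dem, snow, clear, dz=100.0, fsnow_lim=0.1):
+    """Lower edge of the band two bands below the lowest band whose
+    cloud-free snow fraction exceeds fsnow_lim."""
+    zmin = np.floor(dem.min() / dz) * dz
+    edges = np.arange(zmin, dem.max() + dz, dz)
+    for b in range(len(edges) - 1):
+        in_band = clear & (dem >= edges[b]) & (dem < edges[b + 1])
+        if in_band.sum() and snow[in_band].mean() > fsnow_lim:
+            return edges[max(b - 2, 0)]  # two bands below band b
+    return None
+\end{minted}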
+
+\subsection{Cloud mask processing}\label{par:cloud}
+
+The L2A cloud mask is conservative because it is computed at a coarser resolution and also because it is developed for a large range of applications. However, the detection of the snow cover is robust to a thin, transparent cloud cover. More importantly, the L2A cloud mask tends to falsely classify the edges of the snow cover as cloud. Hence, it is possible to recover many pixels from the L2A cloud mask and reclassify them as snow or no-snow. This step is important because it substantially increases the number of observations. A pixel from the L2A cloud mask cannot be reclassified as snow or no-snow if:
+
+\begin{itemize}
+ \item it is coded as ``cloud shadow'' in the L2A cloud mask. Note that this includes both shadows matched with a cloud detected in the image and shadows potentially cast by clouds lying outside the image;
+ \item or: it is coded as ``high altitude cloud'' (or ``cirrus'') in the L2A cloud mask;
+ \item or: it is not a ``dark'' cloud (see below).
+\end{itemize}
+
+The cloud shadows are excluded because the signal-to-noise ratio is too low in these areas. 
+
+The ``high clouds'' are excluded because they can have a spectral signature similar to that of the snow cover (high reflectance in the visible and low reflectance in the MIR). This type of cloud is only detected in Landsat-8 and Sentinel-2 images because its detection relies on the spectral band centered on the 1.38 µm wavelength\footnote{Hagolle, O., High cloud detection using the cirrus band of LANDSAT 8 or Sentinel-2, \url{http://www.cesbio.ups-tlse.fr/multitemp/?p=4109}}; therefore the test is not activated for SPOT images.
+
+We select only the ``dark clouds'' because the snow test is robust to the snow/cloud confusion in this case. The ``dark'' clouds are defined using a threshold in the red band after down-sampling the red band by a factor \textcolor{red}{$r_f$} using the bilinear method. This resampling is applied to smooth locally anomalous pixels\footnote{It was also inspired by the MACCS algorithm, which performs the cloud detection at 240~m for Landsat-8 L2A products.}. Therefore, if a (non-shadow, non-high-cloud) cloud pixel has a red reflectance at this coarser resolution that is lower than $r_D$ then it is temporarily removed from the cloud mask and proceeds to the snow test. The new cloud mask at this stage is the pass 1 cloud mask (Fig.~\ref{fig:flowchart}). 
+
+After the pass 1 and pass 2 snow tests, some pixels that were originally marked as cloud are still not classified as snow. These pixels are returned to the cloud mask if their reflectance in the red band is greater than $r_B$; otherwise they are classified as no-snow. Here the full resolution red band is used. The resulting cloud mask is the pass 2 cloud mask.
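+
+The two cloud-mask passes can be summarised with boolean array operations
+(a schematic sketch with hypothetical function names; all inputs are
+assumed to be arrays on the same grid, whereas the actual chain evaluates
+the darkness test on the down-sampled red band):
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+def pass1_cloud(l2a_cloud, shadow_or_high, red_coarse, r_D=0.3):
+    """Pass 1: keep shadows, high clouds and bright clouds as cloud;
+    dark cloud pixels are released to the snow tests."""
+    dark = red_coarse < r_D
+    return l2a_cloud & (shadow_or_high | ~dark)
+
+def pass2_cloud(was_cloud, snow, red, r_B=0.1):
+    """Pass 2: former cloud pixels not reclassified as snow return to
+    the cloud mask only if they are bright in the red band."""
+    return was_cloud & ~snow & (red > r_B)
+\end{minted}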
+
+\pagestyle{empty}
+
+% Define block styles
+\tikzstyle{decision} = [diamond, draw, fill=gray!20, 
+    text width=4.5em, text centered, inner sep=0pt]
+\tikzstyle{block} = [rectangle, draw, fill=gray!20, 
+    text width=5em, text centered, rounded corners, minimum height=4em]
+\tikzstyle{blockfinal} = [rectangle, draw, fill=green!20, 
+    text width=5em, text centered, rounded corners, minimum height=4em]
+\tikzstyle{blockend} = [ 
+    text width=5em, text centered, rounded corners, minimum height=4em]
+\tikzstyle{blockinput} = [rectangle, draw, fill=blue!20, 
+    text width=5em, text centered, rounded corners, minimum height=4em]
+\tikzstyle{line} = [draw, -latex']
+\tikzstyle{cloud} = [draw, ellipse, fill=red!20, node distance=3cm,
+    minimum height=2em]
+\tikzstyle{bigbox}=[inner sep=20pt]
+
+\begin{figure}[H]
+ 
+\begin{tikzpicture}[node distance = 3.5cm, auto]
+    % Place nodes
+    \node [blockinput, text width=13em] (Level 2A product) {Sentinel-2 L2A \begin{itemize}
+\item cloud mask 
+\item flat surface reflectances
+\end{itemize} };
+    \node [cloud, left of=Level 2A product, node distance = 6cm] (MAJA) {MAJA};
+%     \node [rectangle, draw, text at top, right of=Level 2A product, node distance = 6cm, minimum height=5em, text width=18em] (leg) {Legend};
+    \node [blockfinal, right of=Level 2A product, node distance = 6cm, minimum height=2em, text width=3em] (blockfinalleg) {Output};
+    \node [blockinput, left of=blockfinalleg, node distance = 1.5cm, minimum height=2em, text width=3em] (blockinputleg) {Input};
+    \node [block, right of=blockfinalleg, node distance = 1.9cm, minimum height=2em] (blockleg) {\textcolor{red}{Parameter}};
+    \node[bigbox, fit=(blockinputleg)(blockfinalleg)(blockleg)] (leg) {};
+    \node[below right] at (leg.north west) {Legend};
+    \node [blockinput, below of=MAJA] (DEM) {DEM};
+    \node [blockinput, above of=MAJA , node distance = 2cm] (L1C) {Sentinel-2 L1C};
+    \node [decision, below of=Level 2A product] (is cloud or shadow?) {Is high cloud or cloud shadow?};
+    \node [blockfinal, right of=is cloud or shadow?] (cloudfinal1) {Cloud (pass 1)};
+    \node [decision, below of=is cloud or shadow?] (snowtest1) {Is snow? \textcolor{red}{$n_1$}, \textcolor{red}{$r_1$}};
+    \node [blockfinal, left of=snowtest1] (pass1) {Snow\\(pass 1)};
+    \node [decision, below of=snowtest1] (snowlim) {Enough snow?\\ \textcolor{red}{$f_t$}};
+    \node [decision, below of=snowlim] (abovezs) {Is above snowline?};
+    \node [block, left of=abovezs] (zs) {Snowline elevation \textcolor{red}{$d_z$}, \textcolor{red}{$f_s$}};       
+    \node [decision, right of=abovezs] (snowtest2) {Is snow? \textcolor{red}{$n_2$}, \textcolor{red}{$r_2$}};
+    \node [blockfinal, right of=snowtest2] (pass2) {Snow\\(pass 2)};
+    \node [decision, below of=snowtest2] (wascloud) {Was cloud?};
+    \node [blockfinal, right of=snowtest1] (nosnow1) {No snow (pass 1)};
+    \node [blockfinal, right of=wascloud] (nosnow2) {No snow (pass 2)};
+	\node [blockend, right of=snowlim] (stop) {End of Processing};
+    \node [decision, below of=wascloud] (backtocloud) {Is dark?
+      \textcolor{red}{$r_B$}};
+    \node [blockfinal, left of=backtocloud] (cloudfinal) {Cloud (pass 2)};
+
+    % Draw edges
+    \path [line] (Level 2A product) -- (is cloud or shadow?);
+    \path [line,dashed] (MAJA) -- (Level 2A product);
+    \path [line,dashed] (L1C) -- (MAJA);
+    \path [line] (is cloud or shadow?) -- node[near start]{yes} (cloudfinal1);
+    \path [line] (is cloud or shadow?) -- node[near start]{no} (snowtest1);
+    \path [line] (snowtest1) -- node[near start]{yes} (pass1);
+    \path [line] (snowtest1) -- node[near start]{} (snowlim);
+    \path [line] (snowlim) -- node[near start]{yes} (abovezs);
+    \path [line] (snowlim) -- node[near start]{no} (stop);
+    \path [line] (snowtest2) -- node[near start]{yes} (pass2);
+    \path [line] (snowtest2) -- node[near start]{no} (wascloud);
+    \path [line,dashed] (pass1) -- (zs);
+    \path [line,dashed] (zs) -- (abovezs);
+    \path [line] (abovezs) |- node[near start]{no} (wascloud);
+    \path [line] (abovezs) -- node[near start]{yes} (snowtest2);
+    \path [line,dashed] (DEM) -- (MAJA);
+    \path [line,dashed] (DEM) |- (zs);
+    \path [line] (wascloud) -- node[near start]{no} (nosnow2);
+    \path [line] (wascloud) -- node[near start]{yes} (backtocloud);
+    \path [line] (backtocloud) -| node[near start]{yes} (nosnow2);
+    \path [line] (snowtest1) -- node[near start]{no} (nosnow1);
+    \path [line] (backtocloud) -- node[near start]{no} (cloudfinal);
+    
+\end{tikzpicture}
+\caption{Flowchart of the snow detection algorithm}
+\label{fig:flowchart}
+\end{figure}
+
+\subsection{Parameters description}\label{par:param}
+
+\subsubsection{Main algorithm parameters}\label{par:sciparam}
+
+The table below describes the main parameters of the algorithm:
+
+\begin{table}[!htbp]
+\begin{center}
+\begin{tabularx}{\textwidth}{|l X l l|}
+\hline
+Parameter & Description & Name in the configuration file & Default value\\
+\hline
+\textcolor{red}{$r_f$} & Resize factor to produce the down-sampled red band & \texttt{rf} & 8 for L8 (12 for S2) \\
+\textcolor{red}{$r_D$} & Maximum value of the down-sampled red band reflectance to define a dark cloud pixel & \texttt{rRed\_darkcloud} & 0.300 \\
+\textcolor{red}{$n_1$} & Minimum value of the NDSI for the pass 1 snow test & \texttt{ndsi\_pass1} & 0.400\\
+\textcolor{red}{$n_2$} & Minimum value of the NDSI for the pass 2 snow test & \texttt{ndsi\_pass2} & 0.150\\
+\textcolor{red}{$r_1$} & Minimum value of the red band reflectance for the pass 1 snow test  & \texttt{rRed\_pass1} & 0.200 \\
+\textcolor{red}{$r_2$} & Minimum value of the red band reflectance for the pass 2 snow test  & \texttt{rRed\_pass2} & 0.040 \\
+\textcolor{red}{$d_z$} & Size of the elevation bands in the DEM used to define $z_s$ (in meters) & \texttt{dz} & 100 \\
+\textcolor{red}{$f_s$} & Minimum snow fraction in an elevation band to define $z_s$ & \texttt{fsnow\_lim} & 0.100 \\
+\textcolor{red}{$fc_t$} & Minimum fraction of clear (snow or no-snow) pixels in an elevation band to define $z_s$ & \texttt{fclear\_lim} & 0.100 \\
+\textcolor{red}{$f_t$} & Minimum total snow fraction in the image to activate the pass 2 snow test & \texttt{fsnow\_total\_lim} & 0.001 \\
+\textcolor{red}{$r_B$} & Minimum value of the red band reflectance to return a non-snow pixel to the cloud mask & \texttt{rRed\_backtocloud} & 0.100 \\
+\hline
+\end{tabularx}
+\end{center}
+\caption{LIS algorithm parameters description and default values.}
+\label{tab:param}
+\end{table}
+
+The default values related to reflectance are given above as float values
+between 0 and 1. In the JSON configuration file, the reflectance thresholds
+follow the convention of milli-reflectance inputs (values between 0 and
+1000). Some products encode reflectance with another convention (float
+values between 0 and 1, or reflectance between 0 and 10000); to handle
+those cases, the parameter \texttt{multi} in the JSON configuration file
+allows the reflectance parameters to be scaled. For instance, for products
+with reflectance between 0 and 10000 you can use \texttt{multi}=10.
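+
+A worked example of this convention (illustrative arithmetic only, assuming
+that \texttt{multi} simply multiplies the JSON thresholds):
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+# rRed_darkcloud = 300 in the JSON file, i.e. a reflectance of 0.300
+json_value = 300
+multi = 10                        # product encoded between 0 and 10000
+threshold = json_value * multi    # 3000, i.e. 0.300 out of 10000
+\end{minted}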
+
+\newpage
+
+\subsubsection{JSON schema of configuration file}\label{par:jsonparam}
+
+The JSON schema below describes the parameter file format and provides a
+clear, human- and machine-readable documentation of all the algorithm
+parameters. The schema was generated on
+\href{https://jsonschema.net}{jsonschema.net} with the ``metadata'' and
+``relative id'' options.
+
+\inputminted[tabsize=2, fontsize=\tiny]{js}{snow_detector_schema.json}
+
+\section{Validation}\label{par:validation}
+
+Snow maps derived from Landsat data have generally been considered as ``ground truth'' to validate and calibrate lower resolution snow cover products\footnote{Hall, D.K., Riggs, G.A.: Accuracy assessment of the MODIS snow products, Hydrological Processes 21(12), 1534–1547, 2007}. There is no space-borne sensor with a MIR channel that provides higher resolution imagery than Sentinel-2 or Landsat-8, so it is difficult to conduct a quantitative assessment. Hence the validation of the algorithm and the adjustment of the parameters were primarily done by visual inspection of the snow cover mask boundaries on the color composite images in the Moroccan Atlas, the Pyrenees, and the French Alps. The first tests were done on subsets of SPOT-4 Take~5 images. The Python/C++ implementation of LIS allowed the processing and inspection of larger datasets, such as a series of 57 full Landsat-8 scenes over the Pyrenees available from THEIA.
+
+\begin{figure}[h]
+ \centering
+ \includegraphics[width=\textwidth]{./images/montage_L8CESneige.png}
+ % montage_L8CESneige.png: 1152x882 pixel, 72dpi, 40.64x31.11 cm, bb=0 0 1152 882
+ \caption{Color composites of the Landsat-8 tile D0005H0001 time series over the Pyrenees processed by LIS. The snow mask is drawn in magenta and the cloud mask in green. Each image is 110~km by 110~km.}
+ \label{fig:L8montage}
+\end{figure}
+
+The implementation of the Sentinel-2 configuration was tested on the Sentinel-2A image of 06-July-2015, tile 30TYN. The output snow mask was compared with an aerial photograph taken at a similar period of the year, available from the Institut National de l'Information Géographique et Forestière. The two images were not acquired in the same year, but the snow patterns at the end of the melt season tend to reproduce from one year to the next. The LIS snow mask matches the snow cover that is visible on the aerial photograph very well\footnote{Gascoin, S. First Sentinel-2 snow map \url{http://www.cesbio.ups-tlse.fr/multitemp/?p=7014}}.
+
+\begin{figure}[h]
+ \centering
+ \includegraphics[width=\textwidth]{./images/S2snow.png}
+ % S2snow.png: 1071x1048 pixel, 150dpi, 18.13x17.74 cm, bb=0 0 514 503
+ \caption{The Sentinel-2A image of 06-July-2015 (level 2A, tile 30TYN) and the snow mask generated by LIS. The snow mask is in magenta and the background image is a color composite RGB NIR/Red/Green. The inset is a zoom in the Vignemale area (Fig.~\ref{fig:S2snowzoom}).}
+ \label{fig:S2snow}
+\end{figure}
+
+\begin{figure}[h]
+ \centering
+ \includegraphics[width=\textwidth]{./images/Sentinel2_testmontage.png}
+ % Sentinel2_testmontage.png: 2014x811 pixel, 72dpi, 71.05x28.61 cm, bb=0 0 2014 811
+ \caption{The LIS snow mask from the Sentinel-2A image of 06-July-2015 (Fig.~\ref{fig:S2snow}) superposed on an aerial image taken in August 2013 and distributed by the Institut National de l'Information Géographique et Forestière.}
+ \label{fig:S2snowzoom}
+\end{figure}
+
+The output of LIS was also examined by comparing the output snow masks from two images acquired on the same day by two different sensors. The example shown in Fig.~\ref{fig:L8vsS4-23042013} illustrates that both snow masks are consistent, although further inspection revealed that the SPOT-4 snow mask tends to underestimate the snow cover area in this case. This is probably due to the lower radiometric resolution of the SPOT-4 sensor.
+
+\begin{figure}[h]
+ \centering
+ \includegraphics[width=\textwidth]{./images/L8vsS4-23042013_montage.png}
+ % L8vsS4-23042013_montage.png: 960x563 pixel, 96dpi, 25.40x14.89 cm, bb=0 0 720 422
+ \caption{Comparison of the output of LIS from a Landsat-8 and a SPOT-4 product acquired on the same day.}
+ \label{fig:L8vsS4-23042013}
+\end{figure}
+
+The output of LIS was also compared to the output of the fmask algorithm\footnote{Zhu, Z., Wang, S. and Woodcock, C.E., 2015. Improvement and expansion of the Fmask algorithm: cloud, cloud shadow, and snow detection for Landsats 4–7, 8, and Sentinel 2 images. Remote Sensing of Environment, 159, pp.269-277.} available in Google Earth Engine (Landsat TOA with Fmask collections). The snow masks are similar, because both detection methods are based on the NDSI. We found that fmask falsely detects water areas on snow-free shaded slopes (an example is given in Fig~\ref{fig:fmask}). The fmask cloud mask is also more conservative than that of LIS, which is expected given that fmask is a general-purpose algorithm like MACCS.
+
+\begin{figure}[h]
+ \centering
+ \includegraphics[width=\textwidth]{./images/fmask20140111.png}
+ % fmask20140111.png: 3507x2480 pixel, 300dpi, 29.69x21.00 cm, bb=0 0 842 595
+ \caption{Comparison of the outputs of the LIS and fmask algorithms on 2014-01-11 in the Pyrenees.}
+ \label{fig:fmask}
+\end{figure}
+
+
+\clearpage
+
+\section{Conclusion and perspectives}\label{par:conclu}
+
+The LIS processing chain is a robust and numerically efficient tool to generate a new, high-resolution snow cover product for the Theia Land data center. The LIS snow mask is an improvement over the L2A snow mask (Fig.~\ref{fig:L2AvsLIS}), but its accuracy is largely due to the quality of the slope-corrected L2A product. After the launch of Sentinel-2B the frequency of observations will increase. This will improve the atmospheric corrections by the multi-temporal algorithm MACCS and, more generally, all the products derived from the L2A product, including this snow cover extent product.
+
+\begin{figure}[h]
+ \centering
+ \includegraphics[width=\textwidth]{./images/Maroc_20130327_S4T5.png}
+ % Maroc_20130327_S4T5.png: 1234x856 pixel, 85dpi, 36.88x25.58 cm, bb=0 0 1045 725
+ \caption{Snow and cloud mask after processing by LIS (left) vs. the original L2A cloud and snow mask (right). Clouds are marked in green, cloud shadows in black, snow in magenta. Revisiting the cloud mask enables an increase in the area where snow/no-snow detection is performed.}
+ \label{fig:L2AvsLIS}
+\end{figure}
+
+
+In the meantime, we would like to further validate LIS using terrestrial time-lapse images in the next year. This could also allow a calibration of the parameters (e.g.\ $n_2$, $r_2$).
+
+A cloud-free snow cover extent product would facilitate the exploitation of the data by end-users. We plan to work on the development of a cloud-free snow cover product (i.e. a level 3 product). The cloud removal or ``gap-filling'' algorithm will rely on a series of spatio-temporal filters to reclassify the cloud pixels that are output by LIS. This type of gap-filling algorithm was already developed for MODIS snow cover products\footnote{Gascoin, S., Hagolle, O., Huc, M., Jarlan, L., Dejoux, J.-F., Szczypta, C., Marti, R., and Sánchez, R.: A snow cover climatology for the Pyrenees from MODIS snow products, Hydrol. Earth Syst. Sci., 19, 2337-2351, 2015.} but must be further assessed on Sentinel-2 time series. We also plan to evaluate the combination of Sentinel-2 and Landsat-8 snow maps to increase the number of observations used by the gap-filling algorithm.
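+
+As a toy illustration of the kind of temporal filter involved (not the
+planned implementation; the function name and coding are hypothetical),
+a cloud pixel can be reclassified when the same pixel has the same
+cloud-free class both before and after the gap:
+
+\begin{minted}[tabsize=2, fontsize=\small]{python}
+import numpy as np
+
+def temporal_fill(series):
+    """series: per-pixel 1-D time series coded 0 (no-snow), 100 (snow),
+    205 (cloud). Fill a cloudy date when its temporal neighbours agree."""
+    out = series.copy()
+    for t in range(1, len(series) - 1):
+        if series[t] == 205 and series[t - 1] == series[t + 1] != 205:
+            out[t] = series[t - 1]
+    return out
+
+print(temporal_fill(np.array([100, 205, 100, 205, 0])))  # [100 100 100 205 0]
+\end{minted}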
+
+\clearpage
+
+\appendix
+\input{include/castest_CESneige.tex}
+\input{include/S2snow.tex}
+\end{document}
diff --git a/doc/atbd/snow_annual_map_schema.json b/doc/atbd/snow_annual_map_schema.json
new file mode 100644
index 00000000..a66f4ab2
--- /dev/null
+++ b/doc/atbd/snow_annual_map_schema.json
@@ -0,0 +1,112 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "id": "snow_annual_map_params",
+    "properties": {
+        "log": {
+            "default": true,
+            "description": "Log output and error to files (std***.log). (mandatory)",
+            "id": "log",
+            "title": "The Log schema.",
+            "type": "boolean"
+        },
+        "log_stdout": {
+            "description": "Log output (std***.log). (optional, if not set, default is path_out/log_stdout.log)",
+            "id": "log_stdout",
+            "title": "The log_stdout schema.",
+            "type": "string"
+        },
+        "log_stderr": {
+            "description": "Log error (std***.log). (optional, if not set, default is path_out/log_sterr.log)",
+            "id": "log_stderr",
+            "title": "The log_stderr schema.",
+            "type": "string"
+        },
+        "mode": {
+            "default":"RUNTIME",
+            "description": "The processing mode to use, RUNTIME to obtain only output products faster, DEBUG to obtain all intermediate files (optional)",
+            "id": "mode",
+            "title": "The Mode schema.",
+            "type": "string"
+        },
+        "tile_id": {
+            "description": "The identifier of the tile corresponding to the input input_products_list products (mandatory)",
+            "id": "tile_id",
+            "title": "The Tile_id schema.",
+            "type": "string"
+        },
+        "input_products_list": {
+            "default": [],
+            "description": "The input products list, containing the paths of homogeneous snow products only on tile_id at same resolution and size (optional)",
+            "id": "input_products_list",
+            "title": "The input_products_list schema.",
+            "type": "list"
+        },
+        "path_tmp": {
+            "default":"",
+            "description": "The path where to store temporary files, else the application try to retrive $TMPDIR in env (optional)",
+            "id": "path_tmp",
+            "title": "The Path_tmp schema.",
+            "type": "string"
+        },
+        "use_densification": {
+            "default":"false",
+            "description": "Activate the densification using snow products from heterogeneous sensors (optional)",
+            "id": "use_densification",
+            "title": "The Use_densification schema.",
+            "type": "boolean"
+        },
+        "densification_products_list": {
+            "default": [],
+            "description": "The densification list, containing the paths of heterogenous snow products from heterogeneous sensors (optional)",
+            "id": "densification_products_list",
+            "title": "The densification_products_list schema.",
+            "type": "list"
+        },
+        "path_out": {
+            "description": "Path to output directory. (mandatory)",
+            "id": "path_out",
+            "title": "The Path_out schema.",
+            "type": "string"
+        },
+        "date_start": {
+            "description": "Start of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY) (mandatory)",
+            "id": "date_start",
+            "title": "The Date_start schema.",
+            "type": "string"
+        },
+        "date_stop": {
+            "description": "Stop of the date range for which we want to generate the snow_annual_map (DD/MM/YYYY) (mandatory)",
+            "id": "date_stop",
+            "title": "The Date_stop schema.",
+            "type": "string"
+        },
+        "date_margin": {
+            "default": 15,
+            "description": "The margin ouside the date range to use for better interpolation results (in days) (optional)",
+            "id": "date_margin",
+            "title": "The Date_margin schema.",
+            "type": "string"
+        },
+        "ram": {
+            "default": 4096,
+            "description": "Maximum number of RAM memory used by the program. (optional)",
+            "id": "ram",
+            "title": "The Ram schema.",
+            "type": "integer"
+        },
+        "nb_threads": {
+            "default": 1,
+            "description": "Maximum number of threads use by the program. (optional)",
+            "id": "nb_threads",
+            "title": "The Nb_threads schema.",
+            "type": "integer"
+        },
+        "output_dates_filename": {
+            "description": "Path to output_dates, containing all dates you want in the output. (optional, by default step between two dates is one day)",
+            "id": "output_dates_filename",
+            "title": "The output_dates_filename schema.",
+            "type": "string"
+        }
+    },
+    "type": "object"
+}
diff --git a/doc/atbd/schema.json b/doc/atbd/snow_detector_schema.json
similarity index 84%
rename from doc/atbd/schema.json
rename to doc/atbd/snow_detector_schema.json
index 29493dc7..b3272efe 100644
--- a/doc/atbd/schema.json
+++ b/doc/atbd/snow_detector_schema.json
@@ -83,7 +83,7 @@
                     "type": "float"
                 },
                 "rm_snow_inside_cloud_min_area": {
-                    "default": 25000,
+                    "default": 5000,
                     "description": "Minimum area (in pixels) for snow areas to execute the cold cloud removal. (experimental)",
                     "id": "rm_snow_inside_cloud_min_area",
                     "title": "The rm_snow_inside_cloud_min_area schema.",
@@ -137,12 +137,12 @@
                     "type": "boolean"
                 },
                 "ram": {
-                    "default": 1024,
+                    "default": 2048,
                     "description": "Maximum number of RAM memory used by the program.",
                     "id": "ram",
                     "title": "The Ram schema.",
                     "type": "integer"
-                }
+                },
                 "target_resolution": {
                     "default": -1,
                     "description": "Resolution of the output SNOW products in meter (automatically use the input product resolution in case target_resolution=-1)",
@@ -190,7 +190,8 @@
                     "id": "gdal_trace_outline_dp_toler",
                     "title": "The gdal_trace_outline_dp_toler schema.",
                     "type": "int"
-                },
+                }
+            },
             "type": "object"
         },
         "inputs": {
@@ -341,6 +342,72 @@
                 }
             },
             "type": "object"
+        },
+        "fsc": {
+            "id": "fsc",
+            "properties": {
+                "dofsc": {
+                    "id": "dofsc",
+                    "default": false,
+                    "description": "Compute fractional snow cover.",
+                    "title": "the fsc schema",
+                    "type": "boolean"
+                },
+                "fscToc_Eq": {
+                    "id": "fscToc_Eq",
+                    "default": "1.45*ndsi-0.01",
+                    "description": "FscToc equation.",
+                    "title": "the fscToc_Eq schema",
+                    "type": "string"
+                },
+                "fscOg_Eq": {
+                    "id": "fscOg_Eq",
+                    "default": "fscToc/(1-tcd)",
+                    "description": "FscOgequation.",
+                    "title": "the fscOg_Eq schema",
+                    "type": "string"
+                },
+                "tcd": {
+                    "id": "tcd",
+                    "default": "",
+                    "description": "Raster map of th tree cover density coded in percent (0-100). Specific to H&R Snow & Ice parameter (generate the snow cover fraction in the binary snow mask)",
+                    "title": "the tcd schema",
+                    "type": "string"
+                },
+                "cosims_mode": {
+                    "id": "cosims_mode",
+                    "default": false,
+                    "description": "Cosims mode activate.",
+                    "title": "the cosims_mode schema",
+                    "type": "boolean"
+                }
+            }
+        },
+        "water_mask": {
+            "id": "water_mask",
+            "properties": {
+                "apply": {
+                    "id": "apply",
+                    "default": false,
+                    "description": "Mask land water before snow processing.",
+                    "title": "the apply schema",
+                    "type": "boolean"
+                },
+                "path": {
+                    "id": "path",
+                    "default": "",
+                    "description": "Path to raster (must be geotiff file, ending with .tif) or shapefile (must be a shapefile, ending with .shp)",
+                    "title": "the path schema",
+                    "type": "string"
+                },
+                "raster_values": {
+                    "id": "raster_values",
+                    "default": [1],
+                    "description": "List of values from water mask raster that are considered land water and must therefore be masked before snow processing. This option is unused if path input is a shapefile",
+                    "title": "the raster_values schema",
+                    "type": "array"
+                }
+            }
         }
     },
     "type": "object"
diff --git a/doc/cla/ccla-en.doc b/doc/cla/ccla-en.doc
new file mode 100644
index 0000000000000000000000000000000000000000..e020068bc76d88449245b77bb69cbb2211a646ea
GIT binary patch
literal 33792
zcmca`Uhu)fjZzO8(10}qGsD0CoD6J8;*1Oo40bTS00RRP0|Ns{?BD<Y|6#)aN5PN^
zfxl4Kg5r>cfq{XQfdL%n><kPH91IK$oD2*MTnr2h+zbp1JPZsBAa#5U3=I4X3=9Gc
z3=Dz{3=BdH3=F~y3=AR+3=E<S3=Con3=HB73=9$s3=EPC3=C2X3=Gl?3=A?13=Fai
z3=DD%3=HxN3=9ek3=E143=B#P3=GN)3=Ap^3=FCa3=C=v3=HZF3=A3!3=EnK3=CQf
z3=G-~3=BF93=Fyq3=Db<3=H}V3=9Sg3=D=03=BpL3=GB$3=Ad=3=F0W3=C!r3=HNB
z3=9?w3=EbG3=CEb3=Gx`3=B353=FnVKiD%cFgQTj&>$F*R1iZxLq0<Zg93vqLmop3
zLlHwIEGrX5i!rb=AO{>Vnm}oY7<I&|8MPk~0>p$Fa`?l_Z;%)<`Jb18fnliUe^4C;
z@-Ha=J2EgZI599VI5RLXxIoJTHwFd<cLoLq4+aJXPpFtTltzT~@L(}A{QJ+$#Q@I3
zY@DDPoq^#1BoVoR%5UGy<f8oI{In8<@cg0_U6=gq(%jU%5^$a18w?V2$xjB?1R%Da
zOMWs~UeAKzi{++5h(Y*BMo2RPpAx)sLJZ0bTnvm1(cl^hqBMhnLEz(mCWZ`fUFOG-
z&rrmW%aF*B1FeskRhSc1m<?4Jg_qbch%B*Ta8zN?QekA!XK>PD@b_Y22=roMaP|^o
z$o1f1DfZxD3GiZIi1T7$Nc0k6i1%P*$@5_0DDhwc)sGMxv5{sB|KvaY{|K&)I2pnj
z!Wk?W{1{3Zav2O6s=#%ZFoVAr6GNaE69dEoCWd?sP;Ca%$H)k()iCve>N|c0e=k;s
zKrdE?I4@QtJ%Tv&fJ_4EVPgpNVq=K&VnfoSh(iykJ_YGvWC-+PWQg-(MABmj)+5Le
zz>v?7!;s04%#g~Uz)--D$WX+fz>vc5k|B*Dk)f2K1RN|@44ML;{yQ>)YGH1M0ER?{
zbg+6zSR^r|G8BQ^4zPfS1v)6e%@{%jKK&151l8!g44&W`3Zy=dAr)L_`7mTM<S}G}
z+a^w0Oa&PX4F4JIv>3g>VV}t81#Xe}f?Za~Pzts;1xzM0<TK<l<T9j!%}iv-WGG@t
z1-EIOy#(M9pXepPU<Ed-5bS<_24}4L6d0Vrc7WPQFn@sD4{=8$*d4+QAq=Su6$~X{
z`ysAKVkl+EVaQ=fWhiDqm?6U8@5Rm#=*7+ea!Z^SI|IlaZVdnA85tHaf=VZL1`mc*
zhD3%Gu>aE;7#MgNBpHO4d|(h+Qo+Ec!pI=O;P1u35a`9h;Or&C5a7kh5a-3kkmx1D
zV8Y-e05S*ETH=JeNrAzMA)g@yT>A?!NHGYdh%hoT2rpq^5Lwa#Zma1FEM(lo2x_yj
zFo43n7#zP$4AKlN3~^rE3=RxR0*nlY7(t~fJ2*TF7)ls28S)wO!0wY_5MELNb{~%l
zBZClwvzG*e83!XsYoeC~Lo5fQ8rX-I7(wknR&Z>CLa74WDwSmb>4Mng#vm`i$nXdp
z5^V600Hq61J5-K=S%ooCg^~9d2Lsa%!2)GQCI(@6jDl@s0q0_dBCvg*7(wYzg29!c
zf}wyR2V7btf@7%^nw~+aJB1;iAsL)|At|O9+~$>M&~;H@^gRY@A^&G!XaFU68Kl?~
zf!hm;MNrY9!=NC*$iTz|YRj@QxItTA5V!L(C@>(q8r0GS#kmj@sKv{TBkh3-P(=n%
zfhEG=2lkN?lOMQ+jz7JD+Vx7%7zVZ7v86eXe=Ot}7+4q>LO^U#od=@vVNe|c!=T;-
z2!nbPpzcJ06a#~lBLhR83u=oWBzH)Zfq~1Efg#9=fuX<!vlsAzg@Hkkm4QKum4QK7
zh=Czih=HL~h=IXSj)6fqLgc?_1BB#fVE6zjdDuRHQY#||0|PSy*bnd?E6jZ$TAqRN
z11L6>7}!66dLT?JtFE2y2Kg|Yht<)2sk0;lgA6aEr)vWa1yIjH?jxx8!@|J8!N|bC
z!NkD8@tuJ|fRUep>4O9#sB6UW0aQJJEMQ~-b(fewfcnb(3@jf&-5N#)&JQ31K-PdT
zW5fUdAk4(T1=a@&aZtd4)SqBrVA#Y05eJzCs(YOoxj%q1Mk7Nt69YpNm}~}<Ent#?
z@yGxF{~5s~6PRQMlPr#)1|J)UV6X?1jf|k)dlQ)53?}z83NQ#T{0I4qp@sST0Y-*a
zQ1~&lf!Xa~wljm$M^N*iAIzJ;4D#nhFnbc1&A=c4iZ77YK%GP|8*DfzfFR`wQUGy4
z0tnQf19_OJ0OEl3RYB@O{W>9Nz=2{L)IfD+;Qjz|-~mREKTd<~{?Ew4z`?-C1d1&N
z20^%^K#m8ca!}yL{rUg@-M|0;rI;BQR<SZLlz@sehW}s^qz2?8CRogY1Og!aWN_!$
zU2Fo_B9J0{ID{b+T=jqoeLsd&hH?f422e)~)JOppmZoA87(tH0X9h?vn4yv(mmvvU
z4Kpwps}X0rBSR5GCbT(}DK>$T5!8nV8HWdh3VcXa4JsNTg}MSmFu3y0WJqHGl@Iv+
zWJzS$xPvRwM21{&#gW3mz+i7eoWDR-GpNYU1iMOs0TQwd4AyMKg=RRkGKE#Dpd#Ls
z$S@6INMy(WHw_XQ7#M=pCNMI>5(ychn#hpH0J0r9raajufRZvE(;!ly=57f?J_9Ir
zAm)G?7G4bAU=dI^hgg$*81fl17&5^PEf<DVhB5|_d7$PSsG*k$>Kw5U=O)4?IWe#>
zFmm)tFfbfpVqnMs=c3FtF71XgEz3dv0t+zcurM&x$byP;P(jVY&^!S|ql$t=KswZ!
z8Bj`Ckk}|DJp_b6r84Q-@ajM-+dc>|aNPO-7q1qOB*`uTx5q#_89*Ie&@d&4BM5H3
zf;ugrTn1{~IWnY!oByfcQW-Q122uwaxO8K1We8&MV{l~fV{l<`Wr$)>VDM#dWC&si
zVen+|12@_;z~w2ZISlHIq%sJARfIAGF}N~>FoZC;GAMw%J)prgNM{LRI#tN&&~Xcj
z47+ghRprlJ=LCD27*;WOujB^B52!MKfF=%Vn1kx5Ooki=P#I$d9y&~7a9~Jg0CmZV
z8T1&^z}>l%^mJAP0|P?_LkU9(LjglEgB625gFZt!80s-3g8QY(3>gfm40_<UeL90a
zyw?gz0L2XY3_c8=49*O$41Ns346Y2i3`Puk;66H`aQ*im)G`DG7^+H8f%Twu{->Y*
z83L;(axiGCZ@vjizb~L;>LB?522k6bfdS-x(AXQO)e34U&w#2~108$c0Uduo0`BKB
zFkArlrx+OSK=r@;3mH=fC2*h2<kY<4R0YTMqSRFINDV{|H2&`98sz8b=i(Zr;OiI^
z;_0X0oKcinT#}iW3Q+?Z-_Of0%1zACOUW<E1&`B%d<N<%f*U|>d<7t1g4rMjbPS#0
zoRZ0t*%!e*zt1s>%EvjcfxHJ*fuF(^ejt09uAaR*M^9Q{@^lUcZJnR%K;v_)XkszY
zc?OV$APlN?L3%-90&4$(#6TEX9KVZ3mh79QjEuetoGc6sAB6TpybemH;6^Q|7skin
zoS2`Jm}dbIg{Xj%oGfAt5uo-Cln?5Oftbj1h9Eu+|NqYqp63A-Eg&|iI02=55F4fj
zMt6W2aiBgB0|Nt$1`FaKKx#lUP&lOF!bPl*nITZ?9yB9$j1@BT1XIw-!N6e0%)s!9
z6*2<_6DO5U<ACIDQuQ6@W?)ESVqnnZV_;aoz`#(#0hv`J)f||57Y@h_8;nmbEyTva
zU<Arq<Qg!l4{HcyaDYY)u}aZYkcr_YP0b%Qd^7|`LtwCn051bCgAaH%MHk%F*JUVX
z$YaO{4@m_u6fxv8WHF>NBr}vS@G=B5<TIo(lrWTohiE_@CwGP-@Sp%_{QziiAO+kZ
z0?oG-fZN)j!6J}4(9jI1TLl_;0?m1VIuD?^BG9lB{*IUeg9d{V17-(Ji9wSA<n|DT
z46qBb859^Q8S)uQ859`Oz;?s<AbUU^Gtj&XXpRTeQBwd9^(Ziabc5VSTnOkfD1c`r
z6u{<#<|aU9gZ!evkj#(+4i(T~SS5o3LkYOY0kH>UGh~(r6pp3fp*PTc87OoMz=KsK
z3?MzQxim<xL;>6>1%)a|KPY6&!7-Ep4P(^sgbc}n1{6UZuzZGG1_f}qfXs#1Q4AhX
zgScG*I`0J<+^J;H0h<V!*8qhOXy^<Sa}YBV8FIk0dXfsR3?&Su3?P4l><I?<tUxn7
zAQg}qEKoQrFyt|ShVMY(3Q8{uM5ZqVhFpeH21x9H;<OArPnHOF8)yy*(tRd9^*A&5
zFgP-3GJt%Z2TmQJ?kmW3pg02gQvsZYKz;%Zae~4@fuWcolOY}4sRf065;%T9Gj5=s
zBxpbln~Ol<2nqqv0Aex&$Yc;N9UK$r0}P-P0ve<PxdG%B^tcDbc@aY{cwP|XD$u-C
zIRhvKg3@dzSR7psL=-di7l89I$V5>3gTyK*b%1<~FMJchwv;oZg6Ak<I$$owZYm_k
zouTP#AX5`4=YZl4l*U0m1*I*}JX<k?H3KMLg32t=tR5ugg33IQ2q=C*Iw5n^kP-!y
z%RwbqI(U=;WELdPLrg?2XF%m9D2-v3caTsA>4n7)$QDrU1g*XTjZT1UDPRB%V1vre
zV(@qctb~KaEGS15fm0?Z^?}lJ8d6DvSw@0l3{>ubTm`ZjWFu<M2bHd{_*7y5xw?n}
zWIL!-2jy&#J)n{nmRgG$N*R(Ea>3;gwp@VlB_xCfJ{^GaB*ZO{6$zk0dQe<eflC*V
zk8zcw$bJRo9Z*_=lmoC71t|$3<pe0Vr!s)lfCdpE^T?G9snD5yhzkZsXn;}y$ej>3
zfyyP22q^7?YO#EV0)|TPfF>lhgVInQ*k7Ra9iVUkg_i=x2pTAbKvFSiWkx=O9=LY)
zWB{d5NG^huvY<2!sv$tB9%Ob7c;FSZk^z(#K=~0gkO(S&^TG8JC?rAUE+{`h+?NZU
zr3cvpDmOuH2l)n8UPIy=<PuOmEM@?e!ypz+9Vm7{t35z<8mR0mU`PSGF^vIMD?myx
zP_BUFI&Aq05_SV0pP*0&)gqv@4Qfq*%3D2#6oyg;P)P!Ezc7OWg9k$-LjZUn8#K!4
z2c8O20<VYy`5ELFTm!jE42%pS7{j}wpmhph8~|Rs0;(rKaRIU&6q6uVfhLYXWezAt
zk=IRt;t(`_l**6>UHbxxcTf!nDHlQJz~T~7dqPrRHaI^Ofa_C`Igqr9t-S|w3222+
z5km?CXiyqdjzY>S%<>JC8X@f!D+UE9jAx)N#P1$Z7^Z;pMKO3hTY<)|i3jIZnBR$|
zX&XX=*-fBzZ=kdZDl;La1*{%P1oxEan~p%U6`&PvpuCK*84`>1h$GM{BhU)A0`LSl
zsPsm-f_VEC7~B{V87de+dWqKs%168mk<fl0DD*+SNzl48P~8G4H9!;Apz;7zvVh7u
zNa+OPfl3@$`v?@Dpq4tQhJ&@cVRZ(mBmtQPs`n8s9FT5Mn+;++hy`j9gIbfI9t0?D
z!1RMkI#ACE<O)bB1gb+ptq@3W24N<)HV4Gb17FvG`W~P)a)=Oyg#f6o1cfJhe+1FC
zfV7N2wK6CSKw%Av1yId3@cu%i6p-7J88R6P7(h!%Kyd`B!5}#t)O$u9`vCRwK)qE&
zJVJa4X(JB2zd$V^NZM3@)+M0c52XDC>al}bKp?k6+Ib+=pca1#xJC!@V5t`rTaa7<
zO531x4H7|w8YF!}Yy#y1SXl@%8#R4H+JuJSUMy%q38>AL2kuM2asWhy5;R9EF@X9f
zpwT1+2AEh81EfDv4DPdoaxZAy0MtSRwenyo5tL)m+p(aQe-3!`0^~}N3Q$WD<V#Qs
z1f&zxh6nWwL9q^+=FSF>5<yA@SbGLrYY=%{29ozdDQRFvQ9!vAG(rH%j}W(jN-dBN
zQGEnz`{FK(5U~qODWLW`B>y7!AVK*A6k3ST1BELn=YU$HIp7vNq_qcWuOVbWH3q2N
z4$3W{)CTI!g5m|Hx|9J_;(}5vL?>viHmElW;UUaXU_gXE!WF1KhnWd+8>n>&>bZg9
z2;>4t`USN>LB0mnn;;c=4C&yUfGvzbDH0UUpw+t|H-qd2g*T|p4QXG3dMluCfs`E}
zc~Dy%v^E~p?gix^P^>^wJ;?V;3=p@0#<=nsau`4<AG9_dlsZA?XfR}gN3cL9L2Lr`
z=|Q0b(hVw!K)wNu?!dwkWCyZ1NFBtdpdJxy<N{_MvPy`nVCH~Q4QM<MG#&zS5h&zA
za-e=4sI~%)mmsGI$Y^3Yc=QuuIw*udv5XvZpqd&~l7K=IWExB*C@w)^2XZY$4pahU
zLVXYNGpKz58Uq34M^H}y62hR81XMbM(m7<b6I4q<+^WG~%3#2t3JyPrnV_YGpzs3O
z4DtuWb`TG=X8}~EgTfsYCZJRRNfDqCNsyjmaGHjg1Bw-x+d%0D<Tj8?Ky?Tx-+)Xk
z2ah3Qx)^)d!OA~eWhf|J5H9&ZsT&l6=zT9x`Gy+1$hjKiC(u|1DCIz056W90(QNQI
zAS9$gJxx$}fy6;B0o9hE_y?_>hlEctLk@VfG!tCbg5m%)CJrhoK)DTMI%sJkDBXbS
zOVIuYP>OZ}hbyQj39=P+#1WJ)^T1|;Y(;frIe07{H2RO*eW0)a*$Wz@2aR8Y*q~A!
z6w9C#14<8|*oMR%B9uTm4`d_A{uJ<D3s4OSDuqBYklX|^1LA*B4hP9Y(hSU9AiF`~
z1u_*@w}9*i)u@oL0htF1Z%C+t(gkdU4WtGXW{}(isbL3Gy9w6*1NjFMwxE&|<R6ev
z^T06*2|xTPF%7H|6q2CWfW#vx%s}Y^)TRdQODP5StU%!oD+NFy12G-Bi~@;5!i%^$
z9Z>lKvrCB~9GaRyZ47Fc@R%_R%FUoM736b}&p<RN1R&uGn!$mD1t>Q_=HNiJDyYSj
z3=T0+Tds&92|S+!N@tMJC<l+;fmXwTYC%Y;3#vUqH7aQNAt)unN&!$=3rdB9DOEvy
z0jeKCVFD^sKxqKf_5}H-1Ujz*@;js~hNLw}`UB-+NdAMgJV9eSpfV0JUI`hu29-K6
z|AJ~=Tww!A$JoLi)Z&DUA%apqs3ZaPd_eUE#6_UK0%#l`R0l!AA5<bkSfJ1b#UiNn
zUCdAkp6>#UNkVjjQUE9ggUS+6dI9ACP@4c$Um{wgsQN&D0o6{R`V`jE!ImQsDFos(
zP>luBmjfRE1f?{Pih=L9Ajb1x`4Y8V1*%m+wKgcOLF2oiwhXA;fy6r^1VOEPP)L`d
zwBkV_3d`jn|H9G<XlEXJ8p4-4KyCx&Bv9%DrBjev(7Y|E?+6<21kFUlYC=$K6fuBC
zFF|n&N#CGU329Hm+B}ffA;>47c}I}hkdz8aW1u<_)Sp7sFOV`0RwqE}O(h1%;(t(m
zg{|ER$@O{+O3?8>&>B|-2IM(>P$~e;*MiCkP;LX251?`oG|mzXp8J8V1^}fxQ2!ki
z4xlgv)m6leo511^q!X0R6|l_igJK5eH%KU@GC+Kc7*l|`U@)XuSgM1x2|#5Qs0|28
z3!qReL25sN$_7wN6|sf_l8!;SJd**E8X@rx@$W$R4zyeylyeYq4eBjIS~Q>@G$_S_
zN;FU`f#yx~7<9oqTOs)el!8H}GfY;O0o2w5l}w;M1V|KQb|tvh0<C+{1uq!~&6I)U
zK>Kh(;a3LUZ<)xD1nq@EN>B7YJ*eCTm9n6m2T6sXGzroVsxx5ihE#BE2@**KuUP=e
zLvkA^Pl2|zfbtc{&7gb+%Abf9FUU=x@(1J&P_BTq(jajLnwv^wfb@7U)j;|ou(}*%
z4yZ(jnFC4}pxgx#hov5fsi-MI0Xh}}>P6w|6@bbgm>D2DK;cc;e;}JdWeX(MKz2h)
zP*6Jo6uTfB9l_xRG7l8GWej=@MhuX;6gkv27;w)r(PmvfWL}CkW{Watf@d#5qbQK_
z2|bk|L@`?wpi&JKR-pO-J+;EhNLYz#3?2!?EPp|J<UwjdZ3SGVF=#J5s00L+j-dVr
zDD^_rgIoZrJ%~v;u-XfhYLeh9y<jCYDAyy;`hrpnD0jegfl?31oscpa!~)fCAW=|0
zf{eF;!UCcPWKt1$GZ{n$y6F%-pgaaz;RKsAhP3uUHY2BSWVb@fT#&Cpc7a?$Ts;J-
z>p=P-@rgOI2eNgv76SE12BH>1U3~~?(?H5p5DA)30ri?eaWt?slLoZz1Jx~%HaMs}
z2K5F&xe!v9LsC2<t$|8@&}bc`MFv`%2Fe|vv<u0@=p_y$jf2{epjITv&!Cn&D9=Os
zBB1gbBnoN^Luxz3m<FVNg5>Em@M=R)xdEytL9PIepo4TGvq2kqA*+B&z%?f<=V5EJ
zf@&*}d@%#UY)F{|Yh{B<JMzj>)Gz_n)1Ytx)l{H%1E@`E0xm&ec@$KyLuz@DZy+HF
z3Ug3Az)C|<Is&N!)ncg(>EKofq)rFb$e?xrWCjFOl0iZpQo12b1C=tMc1b=1B>X{c
zLez#J-{Nx*Xf6;`3WCB2RKp_N2b#kG)wH0}9MpFKt@wkiAcDjlDAqu_AT4{yI5YA(
zEl7<IYJH*hA0WNGU~nr8lp0|pZjKBJ44^(B$R5xJbPe#>2dHcZ&7^=zXGpC83QNe@
zDUdKgA74PEG*H<M3TsHJ2DulM$`C$8cLONhL&{#{QN>IK(Aq3S_(NO)sc9j82e|{(
za)Y#%A+~|?9K@#}KY?-wqF)Pg3BHhnjJleVkd_c}1*t_KF%F7Pd~Gy{UlD3S^&;wM
zW-@rD4mk`p7(n3<N^>9@QD!JGn1k1{g3JJ|mV&Lz1FaY#CPWZ<xft5s;bkyG2?0=x
z9b_-4g%2v%Ks{to$qRA=C>?=96%@aq90$q^knvuKTMD472toQFEnryw!yiVVk_coE
zC_h1F@gaE-l!icV0m&gsNk~W{(mf<!Lv-MG6>=^Gm8zgR4!;h>s03(zHLSM+^BX9)
zfXY8e*$asiSh#^=6IAMeN?MS;p$vK8J`E_QATzU|+y(0QKtdJL?*r{y1@$i=c?#5P
z0Qn!Z#vGJaAvPdZHXw2fq^5!8Q4lX3T!zEK3f-3was06YDt{nl0*C~y#t#OsgbxPK
zQ#mm>G6XYtG6aL|31{$R2x0JG@Mj2R2w_lQ2nTPL4gznW25p&kWdP~*X9xnX>-Gn4
zs|M-tWB{=ta&8QuF#$h@NCpK4Z}2v27qHo>;JIQ@P6oLblomj~0gVhoaz98EQg?yM
zWKd{=(i12QQ2RI_QBWTil&V1EG@w)p(g7)BK=B31K_GRYx($>%ApLbvc?&9iK(iSL
zJ3(!I&}=1W%o3s#A_p3!0{J<F!H2;W>;pfrFLl9wa$^Vr)9wte48Gu<$01-fAa)Rg
zGlK^-%p4h<7(BuIlS3FH!73oJ?*<NOKL${Ug2K&>0TQa9ay<YX>L9zF8A8G9-$CIW
zzyR8m9>Cxa-s21@^~}L}J{Y`q2UIhFQW7LhfZF4r5)0HChvX^Lk{HtJ0?oXD!Vwf|
zpt=L(b6CoUm;;Ixkc;t`bfB<?sE4#@Kw*w*A1HKTWzJw|kAli$Pzwc=CO~Bpq!j@w
z9Z+{);HnWoK8BQ;kQ9~1P>C^82})Uzwg7CsE2yOh8UqKd_5y`9r2YYg4TuLCT|$gJ
zgVHW2J|SvAJW!~DQaY%7g^ihj;s%t%L9qy$QHGRBp!O@Mjs=zWkTeaNDZ!qXAoVc*
zv38JeLFF^31OtT@$X%e&1F=E=hm3ziT9Y6@f=Xr3m?=aY<aUq_bhjf)K+rr2#2iqo
z0^(!PEHtS61f^iuDt%B%3~~o({|(5upb!JiZ-Yu_Q0OGWcQ=7t46_#$51{Y@jp*ST
z-35gWXf6N}j-cEF3RzJ8fQ`;VMo=N849MRgAK`AZfa(;GS)iIBg+UiQ5(8>ER)W{a
zMZ<U5fJ}juiKz^+3`*d!3tk2baK840_P0T41Ja`g<wlVIi@`NC$WNeAM^I`4r6EuV
zK=L{y4})fIL1Kd;zky;C)J6xzAY?We)LI6`5NKQxR`!7WhFYIO`~wO}NDT@q(?BIY
zB#nU58fKdrG$RA5ee)S0EpSly5#L6oO)C|Y9ziJzRtCa+1S(%Jdjsm=F=s+6RY9o~
z6z`xMGVpN>sgXeaI?$R6$oeTzzE%LIc;Z)u;#R4^-~ygMOeMlZggZdTbwPTCq^(ZH
z?GA+5*kh0S_5?%svw`YqPz^y$2;g=<vJA*C=or1H2WroO)I@{#Pl8e$<Nz{IFPyym
z2x>`#Qiu~cCeiaI$d|BM3uHc`ZUW^VQ0om;YC=i|P%Q*1S3xxes09S-_u$LHdJM7P
z`7@AzKzm|Ar4XpC3DOHHVVuCSAfJND7Eql8Dn&u2BfAPx`ha#YfW{m^wE$=|3o^@*
z2R>=C2;3J0=?C?xK<bfq-XcaNARz%7Z2`Fe<S$SU7!<OgwPv7_4%B-9m6M>l2vUN8
zS}34cfYm}E7lJ|tR*%3+Q&25Ho|}-vh?ju_944R;8|?d`N1XdWr<8CoFn|s-mf~Pw
z(BfcV(BWWU*u%lVu#ba*;Q$8%0~;p;gA6AFgB&LVLkbrILlqYT!zLaEh7&vt4D)yy
z7#8p{Ff8I_VA#dSz;J<&f#C!{1H(Oj28JjB28I{`28K8R28J1e3=Hc885oWUGB7+4
zWMFtC$iSc=#K52=#K52;#K2%8#K2%C#K2%7#K2%B#K2%9#K2%D#K7=Oh=JjS5Cg+2
zAqIvwLJSOL!VC-*!VC;m!VC;G!VC;-A`A>1A`A>%A`A>xA`A?(L>L(6h%hkB6JcOD
zC&Iv>C(6KJAj-gCB+9^GBFez9Pn3b-fG7h4=(JHgF$M+)F$M-FF$RWxVhjui#26S3
zi7_x75o2Irkzinul3-xCBf-G%NrHi4izEZXDM<!~JSheS&?%;0zrK9Q@RH%>S14eB
z&<tN+zI^%eIm7+u7oWdmIKuFi;p-7B_$$L#5Q6CW3QBVlxgMMx#U7j-0bWcDab8UD
zqoEn34ZK7dV6vv519K6Q{LEP(&1@Wup&X1%9E{AN9E{>P)QZ5wkxlmn`K3sTfnkCa
z1H&XK28Jn83=FHJ85j;pGcZh$XJD8l&%iK6o`FF`iGe{!iGksk5(C2rB?g8p6$XX|
z6$XYJH3o(zH3o)n>I@7&)EO9lsWUMAQD<OK(_moG&|qND(qLea(_~;U(PUuwqshP^
zrp3VEro+JCp~JwiLx+K3mktBN9vub-AzcOr5nTob30(#TDP0DJ3S9<<DqRMK8eIm4
zExHU0+jJQicIYxNY|vw1SfbCsuuGqTp~--OVU_^{!!9ERhCN0M4Eu~27}!i17&uHJ
zC&_Y{F)(nMF);9$F);kGWMKFQy$nLgnt?&Xnt|bpEd#?fTLy+3whRnSb_@(N>=+o*
z92gkt92gk-92giTI503wa$sPX;=sT#&4GbojsxWMT4>xsqZ|~83}0Ws;`159y=NDn
zeFeq*SB9^LK(ZhyNQ8s6fC-2hFTimx>!ih4kfEfc1d3*cVh_++&4@UbK#qUpxMSdF
z&H^3(h7$X@(~$_a`1b*w#w+8<z#!+yz@XyDz)<GMz)<1Hz)<DLz|i2xz;Mrzf#HE8
z1H&Un28I`o3=B?A3=A$#3=D2g3=AGl3=CdQ3=BR_3=Dow3=9EIkkflZoER9woER9g
zoER8#oERALoER9)oEaD_oEaFboEaEwoEaGGoEaD#oEaEGoEaFxoEaD*oEaFRTo@Q)
zTo@SQTo@P<To@RVTo@SATo@QKTp%X~LqZu8(jW{DSr7pU+OOac1%)lcV^E$LQJ51_
z4uo<rk{JJ5pp%!GpyfZP+=rF<ureK1euK(nP<ae1i(zFjtn7uAxv;VpR>uBBj)(8a
zocB06Z;@qQBXeFNbDkq}o+5Kl5&+88M)#4G-9_fyK;~RS=Ag)2L6*6M%mJOs2j`!I
zvlti{&LDG8Ogn`vgJ6RWtiyp>85p?K7`Q=)z~fK>I%XP%IpC@xXQG3}Ss55$7auT)
zgX%$89f(~2!RiHAodBx`U}Ze4YzLLquyPt!M#IWt4pT@ujB>K)Z-h4)7;vW!aKQ*u
z`WeOm(H}ts45K8X0ZBKvVNL<jC?{%N2Z_M&RTu+AUj`8{d=bU~(dR(~5+)*!!Es8i
zItkE;>##Z!RzJe(MD+R(R!_j{2Uxv8s6H6@_<x2Jqzq4xIVduZaLS-4L*YC?)^iV;
z13I1)&c6j`F)%QolmRGZ4NCEXtc93ZCar7$n<xP~Z68)&!|G~m^>i4tjz+JaQ=oNp
zmJ0(z4z!*Iog0Ze&c7jh>kBgH6Ef!mGUpvK=M6IF6*33KXwXTka7mOXLrDNAagGvY
zxN|A!M1Hu@$KWgm1_sa(t#JN6I12{@xg>_=U=Gmz6-<dLOz<)qBm)*@PE=uri(=CY
zQi%s+NgtwC3^oP~Tn3Et8=CYKHXL(Q+;EUxX~R-B@eSro$`h8dsZ7XYF$!Q{Fr1LV
zXaGL_)i{8GLBoiF&w#<fR_cQYF9QSmeF-|COWmMe-fqgk0KyTS-V7l82z1Eozlu3)
zZ*i80hR!<hF?wmw1&+M49{m?|yBty`&SRP2AmDO<Z=1wIR%Q*)&81Id-441fkQHT{
zUdU-}qS(m76v=Ty=+G>MmJ>HP6!Y&yZFzZgS#@9gw2L2Koc{gl-2S+C)$jN5oSmtB
z(sKXjtevIP-|r4B^)8tbR6KFf)^#Fl?bEFfeB(ctZJRj%+Hz~ntN)s=ZSVW>(qpO2
z)z#fW@`2B^wj5?j{kJa9%Hm4kD{iy)l`FHhoeH~rWo_1)S?ix&IAtRGAbh2Cfo{}e
zx6Nl$UsdmjJNkcJ?za2K-q@GMPTrQbKYF#A`ONH?5_k9D{3q`hrcRu1z2=+T_v5*z
zz8f7~w14fDrT-UQ|9zduD0#=xFUi7%zqd`?v2m|Zi~sbBsNau&wcVHhWIb2N^VhM0
zN%LCmkK4>sNO&n-7kFRnUW?S6^#3-;&j;(R+PvxPraKaL?$xsmZhR^2=sevfKDT4d
z&w1bOmdq?HUHZQG(8t15!*xb|p&z7W!^88VEzbTfuHcQzj4t)rC~hu1c~8ulgB9O)
z7w@rtnzPF=t5<w_+q!!P3c}azom6=Cuuyi){r36mCByaw%~5^0<GI204Do#{_T}Db
zJ}I&2S9|-4n-5J)k8jeonK$Q+i~R90+2F;G;tsFBa_oammFU}Kj+@K+ozL#M=X?Ly
z=juuK%_bJ6v_C%9@_e=5!`dCUUW)J)o+(!?{M9mB_4JpAI`dlQ|CbV8zVFX9i^zv7
zlyr*K5-i?v<rnN~=U2SB_2sb@&VF}ROFWMemOH-naYtq2W{W-2ywkV?-o4gvN&T^Z
z?%{{u4J;3)8@`W>+Bv_KefRtY0{!j#m#m-v!0S<PXZzXO|JpLo_gNg=T(Zf2eMf(M
zyhEx<S(y1kr7MxCb<$V04wf!l!Bp%1LUFG^y+hjLy4nqe6Rd8`=<m)7eEfZGecqQV
zCyw!7y>ZMdYX78vzproOJo<b_{I#X`|9vjpF*8{?^mTyg(sgCeTC6Oyoz~8Y`u5ss
z_SM@9OTMOM7(8FMgg-b_UBS2Z#`E{Whs$QPTR%Aen|)7%%iZ>e_jDv5*(#pA$Mf>7
z@1cx4%C&p{UAq5Tdu5#e&g<Tj<|kz})fp!1yp%Y(_3+-^Pv!C_EU9$<C~V_m-G3rL
zfATN6$$|GoPWeClwZp`Co_1&B|2HqwlRk)w&63!Ym~>%&^6D4KSAzDm)h|D<U(Ni)
z_1E4b-cyg>ONe~Jd$6FVL(_AD;N}yXW?1YyQxUdU#Ze(`>eJFaE^=a(mY;YkE4n0X
zpDs<Fab}_Z`3lWV4KX&azfOF<H||sZy}4KByWF3=Li^qBBl${lpWYh^FAow{|9|)1
zH4D%GtCS}_;j#O!rsU})$oP}nZ&LRDn9sZ3ySJ|Vy&|-Hw#KHbZt5p%1^+H7cX6l_
zc{u;I`t;M=|I6`4{9I?8J9q0R+i60Do`%J`+#h7bURys6z3V;gRSM6sfRk61pOo5q
z#a*_4cqgOM)m}mWCHqrXoomTA&ONwsc#gFDOm_`yx&ETNDn4#z%Yv?#oN@3wV)W~C
z)6>eoEJX&B_x-VX7QZsTQBIpNCGyg&qmMF=S)V$Rq;)UxKii)Ez7N+sKWiAq_b=Hu
z@rrfRws)>lL7A`fD#N}{Z#`0DH_t7sDbP^2aDCkZ;rCbX&%9Hc`E=bhp+HlMj=x_v
zdht*2`n9{g?!A(~^;ZF&;~g{jg;I{Z+44roJ+!Jz<~Tbili}8B&No)47v}cms7{wI
zRl8*9)AizX*j%NhyVmDiu1#V8oMHZXWmjcrFTYNB(fT-Md%v2n70<qBnTt;76ZE_!
ztbEnQ;Jz|bk^Zigj*q5i$9-41ll98awDkA$TQjS#{<o>@t11dS_x1ILAm3@MGY*%T
z{rP-;6;EW>^eYEWd|Nt?liAmrKPtJyPI%kOx=p=R=iI+c{?*YS)BEB4*WdP~_j-AP
zpI59t7k2FWmC~(`xldd_HNE8Kb1yA%_xakwJ~@AOJ)gDfXGxI#vUPIjgo|by$L1Ao
zdXkoI<EA__*)~Ptn8IbxgBd9s<tK!g>@R1(6xWsco-yU{{_^zCw_D@)&)>Up`R>VG
zB`IF{>a)!@rLtVQ>HoZ9mvP5mk-p_I7iDJWANzew_1O)lU-BIulb%m}?&C6PiW^G`
zuh=F2{dbl=yHONe{`3CKWXD%Om8VEg*dKU2e$L4vF7+Ghn(F`cOjex#FgZOT<#*A}
zKk9sHGjHS_DZ5<Po4Mb6$r*`G>FkqJqVIHGx>@U!SGxSf+ZU}@SQ=-{o0c~%`oh7p
z6JOuWywsO{D=0nsR@Q5^qg##`o6h~}zwXHOi<c$t$If`aU%KS7Rn?s7+QNI58r_Um
zjJ&7ybLaN3WtEjihi_}_|KPPz`1g(a?#HEb>fM)BEKivfe!i6Vld=8@EBzBv7pF;I
zuF3nncIE3e-!JLEaNks~pSYt^f9YkvE6o)WkM~5~veue;kMWJmOq*Atrz_S^uv;^o
z=hvJ*liTHUK7U`iZ0jQV)#3&h&iuS~jQgI|wS-h7aqf)Y(*<{aayqi>^tt_qd+g6w
zo!2#cyT-`!<Ne~>FRDX0%@%i<aqX?S`9RczacAUv(ZtP9CN8#FZ}Y66Y1Isy(^Zcm
zJbbhZEF2BeW-pbHK3OVf8UJqYX{nza_l|^BHt*9(Kcl_(>ZW7Sn)|;^^S1lEazaGT
z(M0dhI|KZ5;wJtmzqGo?o>TjRyLqj8?zgy!D~jIVoS}8P{Osoc?;?{Q#iu>W-(T$~
z@Ll*zw9==D+rOGV6}Hstz2CI+okMMCTFur~nQvcjFFmrQG=0e##g_|KKb8L)eJN0W
z`nj5y_xj$wI63!H^mVWJS`pI~PI*32O<JcDCC`*Lf4ObsFn^kzmB(Dimo2kPoA0c6
z{j{pq>(<N3MQi`@U%PsIsp;G8*M3iszI^bK(D}tPtv*(UWIy`2@T*PLB%%FRyd%rk
zCHU^@+WH|X@`&tam;996*si%Zj{l2Zv;DQjf4--F8{5`C-^EthWq9sLGiOoqtn&vu
ztv*(k9J1+ey7oCkO?A<{135OHHS^W<&%a>4(P=budADVlhy1l*wf;|)Mi(y$8QLrp
zo-Z#yTcR${*1jxIDa-m)fYbbKFKRTev2L0fA@)*9^UCbNu1_<PIaeid>ALn*a~t01
zP3CMe+^zFeL(RPPNygf`@M*7=D}<v{Rcj+xUq5NJH|pB!shb@2r~5vs*WS76fT6ed
zk@W{peNV{ywtwc2pV_?jb^nYXGN~@od!+q`f8RqpPQz~k-;+9io4LH(fAROz?4(aq
z?`g+B@Sj#}v0D47@?Q6*s_ApomM#CP_Qad{ac#flKf%XV-Jdg-=2RN~U;6l5^tp$@
zuYOI5sXKM&_guY+_a~pXHxmEfvj35{H;0qFOsL}j1!W%6Tg9)gn3{cs^Olun_{!ip
z>(!moRea9vQ!N+2n2_0NqjLCM^ZvGPpD$cJby&j7#kObuLqVHWeWzVYMNBPJX1-!L
z{_S){sO!wVlNtr5C3*x;owVwkN7pUY+E7a~#rR2AvtF(bD>yelerJ*WGW8SPlDRwY
z8}WP(xNf_O<ET-{)DO9#THjY1-O`_2(z7|`k>Rfiu5xJ;+GT!Cca|$UaoFdR!CV_3
zRsNe#61%LFCx<dW<`!yJ_}iP^X`|zK;mO0)t6%OP{%gZyRQchwt#jv&J2S3MV-i^y
zq5HVHU}}!K-0jYzWyhipN}Rrur>V*wee$B%*8WZGDT}R6ls|TiOv?OM6W1$y?eoq5
z-<PC(o+F(y?|ioAM!w1{>%S`&+%w8?TbL8TI%)m2(^W-V__8;s9PFF%eplaz{mE>z
zcBIbOVfw$Xa`ufq{rVjbE&qhcPENKs^R4X1^Vc0`+b!0bxop$l8Yh{2>!jJ+8?UEw
z_dI9%dgtAbPfKcD0`749kGZG1f9mpCR>f<q7T-TryCOTlCE%CKlekaTdDB)Vf4ndG
zUu50v-svlo?Ved2PZ5$!tQPt6x!*eUlm3i{IUPCidH0@OJ{j}cI<GUcekrJI^jYd6
zYVljw{lhd7tF4<(uAHoUqmP-h$xw8f&}p_mw^mLMJ(r(id0l7mxzsINdp5dkJz`rm
zapSG`e?rsNM4yTHTm4YxXRlQ^|Hpls<L(uI`st=Ofh~RU6>HZUI?Lw<O*tOp6ChfA
zE9kw<(sfopZS~YRj?LLz)AGMWYR9&?S98jrav0h_+S9qVXv)4Tmal)b#p~`b3_G>^
zT+zPU(}U{u?tj>MMfAGR$Itw?-p>jW-k&8}f4!vWwaP2^9lYBY&#sP{bZ7C3gNiR7
z%00MIxAXAYMQ7rED&}q4a{T)pubs#4D);U7Zkf@wne*qOJ6DUQ?rtjHS@U@FIybMT
zJu~{I@lE@-<Jwtw!LFrqqm{1jTw2r|?e;sh$aq&o)q>^GdgVJsuk3bayRe@z&opc4
z{%WU5cPD>J{x0(T_0sLtZtqH@m+Y2SVRQT?{r&%0XMWND{~4<yEHl^7`ez$4bIy<3
z|GE1%`GKw;h4z33HwC(O473)Q8uvegx}NBF$YS$7=>7-rh2h})<3TGRKx^Fm89=*w
zu&Ku<Cd9x5zSR$OQ3Nr!6T-{_&zphm_Xn@Wg^80&b1^W1@6HAJiCFtVmngt)sxV{#
zT^$CRp$FYQN~-OkyO^0kcLjpoK&<^Bx1I!D;tal|2ec{y=@JhTodEMc$ZilOMmy;4
z^#o920pIKiS``P{gAZDFQ3+lj0$EiJS^)@K8-d$Y4F)Fgef6Nr7Kydr6?A6|)PB&q
z3D7B#F5vy9pxb#tYs^45@!+--Sw@P1i2-zH56Es1MwY|H2Hl|zyEg@r>LI5)fL3dP
zPXEEB51Y6c10y`{iM1be_chEcRt9vRlNKKq(DV<o8-!uz;-f*g@)94y_)LPxf$W80
YhzNcX+5h-e5+@6@8$=VMV=&nd0P0JeVE_OC

literal 0
HcmV?d00001

diff --git a/doc/cla/icla-en.doc b/doc/cla/icla-en.doc
new file mode 100644
index 0000000000000000000000000000000000000000..a385eeffb2f074512c8fd20e9658e5fefe44e42e
GIT binary patch
literal 30208
zcmca`Uhu)fjZzO8(10}qGsD0CoD6J8;*1Oo4CXMt00RRP0|Ns{?BD<Y|6#)aN5PN^
zfxl4Kg5r>cfq{XQfdL%n><kPH91IK$oD2*MTnr2h+zbp1JPZsBAa#5U3=I4X3=9Gc
z3=Dz{3=BdH3=F~y3=AR+3=E<S3=Con3=HB73=9$s3=EPC3=C2X3=Gl?3=A?13=Fai
z3=DD%3=HxN3=9ek3=E143=B#P3=GN)3=Ap^3=FCa3=C=v3=HZF3=A3!3=EnK3=CQf
z3=G-~3=BF93=Fyq3=Db<3=H}V3=9Sg3=D=03=BpL3=GB$3=Ad=3=F0W3=C#acUmwo
zFjzv_P;ZV>Xdw{9kk63MP{N?V;L4E4P{L5ePzlS*MA2dltPIFT5~B%}hKNx|teR2#
z5g|ZKm?4Khto#Ov5tILU85kIbLjDKUQ?UGR#lXN|&A`B5!@$5`3oQ@q85kHG7#J8F
z85kIx7#J9wq2jI#3=D1z3=HlJ3=AGnF)t{M2<73!Vr2ODpP7pRoQK&sK{Yx9!vRPl
zas!p$zM08I`NjEZB?{sBMJc*2`Prqpsd**fI>9#>B<7Nz46X@4Y(1C!WU#!R1;ZE1
zO@$DH@R5v=W&}PZc;$o`lo_}f7#X6$H4;Q=1_Oh@$Nx+W8Q{9ik0GC-h#{9Dks$|K
zA2F*iC#o<TsxS&Kv0)HdV#DC5!l0$X$e_>Qq{ZOx#ljHi#lqn1CB~5J!NXGQ!NU^Z
z#lR5f#ln#2CBhKz!N`*5!NO7E!2+ruAvR(o%^3d4fBOFsTpMvRgfoOQSTOi8lrrQp
z7&26W>n>pie=jD6Krbc+hy_dx`5K_w45W{d5mc*T>I2nx{0#nHtPFu(tPF8ptVntU
zap(b=1k%IC5a`9m5a-2)q(>2l9#DM>(!<CQ=*7qo=f#Mm#}KSXkRgB}pCN}KlOdTQ
zl|g}_fFY5gh(UoNh2bSb8bcyODMJZ3SgaT{1wQ?EWCZ0xZiWDcM22**dPrC#F{CmS
zf!hwSfQJP-D8S7aLIpnk4`c+@=)4S`;2H{~K93<4Txa<(WHRJ2WP{r#PFhR_84L{n
z8SJzey})6g$Oy{0q71%ZmlZOUg6&NKlgSME47m)s45?r<6B#lYiWpMCZ5n4U0eHkG
zdI>OCfz2ueyPu!I8LK`824}Dxpf(cBA0YQb+|dYjhcH73Ln=cBLkZY^h%1s9N*Qt(
zau`w>iWv}Qh%oqju`>jEu`__&66eLv0CI;L!#{aOhDD5^(utkHgCUh6ks$@_|8xch
z23`h92H_<i7(|v-FtDjGGDtA^dvP!XdT}r~d&w{acyThsd2ul$ddV=DFgOW-%mKBQ
zIN@$mU~po{XGj6p{sIhA3_>X)jEoGzOBfhLmh^zzYWe~T88<P4+H5Qgpl~k+$1f9u
zGy@AmoEJBP1A~$PBf}v^P^roe4vzwc5{68Me1<%*`(zk|msEh=$D_i?AjIJ8CBb0E
z!3fft=q14r%fYAy_TeQ)Q2UP+9NVB!ssOi2Wf?%aAU3%%$O|wsJOYOV8$2XH=>pUa
zm1AI5VN6tE<UPj0z_dfKK$($=K^PvRU>jM$xtO5{Y~LqFQ2LW#aAl}qC}79|mllcO
zSSp34XHe=+VaR7l2IpQ#iYW%SdF2^&T@)C7kAYgq{}~t>KnY$3DfUF*_JU#&RCMSt
zC<rhzFfoDJvTO`)(AF2k?Ys;M49KnqwRAyoF2n?C@p9uxd!PbTkpWa-i7@zqeWb+X
z2X3L`Pj8@hy%IErL2Y+zX%6I{b&?DWERaeOTuXq&@L^CL0>hx*1PFtA6QJ(I8F2=N
zC~HIy03<d|h=C!?kbxn_8ns8D#mvCq&CI|M!py+%jF*8yfscW~hmV0_xg-OFaD>Qz
z(FO>~&%p2jQqr(}0Hsbw4h9Bh2C$pqJye)`K(ssq;|EaaD>1Nt0QESSSXNy--3{_l
zI1j6%`%-5~1_l{kNYB;=l-a;71-Xx)-VO@`0|z4m0|yfW1IKp;1_4HX2Br@ZjG(R%
z#|Kb#0J4CQ1=Kxa{s8J5^E0q~0Ci&+88|<H3;<aJ!i)|7|AR0S0~c5yD1<=)1yX;4
zfq`KY3q%}b7O1XuX5{_=$_R}N)l3WwO<=MaOtyeY2F4%%|NmzMlT2We8BDS`f*N>i
zAcDajOg1utdh1PKb~BjV&nUnk!0;dBFNPN8?*|weT0!B*&<19=gW1juN*_VZfqpP=
z0yD^;6T$3BU^WATzz0zC5#%=p20<_zY&a-@Ams;A0C7M92-JTAd6=jG;(+u`LFz&6
zI3Z}jfnpofAa!Qo{s3~|0Y;ENPJ`|K&&a{R!NAA_iY;VEfgBG?)u6zQ`}6<*yMO=x
zOEEJrtYT$gC;=5;4FAC-NDatCOt6>(2?Rj;#o*4cyVwM<MIc4^a0o*txT*ma@_r1d
z4CM?844{q|s8Iqc98JY0FoGO~&kT@UFheCnE<+Ny+GSucRwK@KM}{JXOlb2bQ)~hw
zBdE^~G7b+074VR%8B`QP3ULL7U~pxf$&kjtz(B}PmPCe)JGkOZWXJ_q6e$c04E83(
z`3qDPgNo=(u&WdpAR)`ZV9iEcXof>8Pgo@iD$-4f4AT&XM1~A-Qy`InfgxCJ0wW_V
zk&qFpi41uRAls2+%9CvZC@JGH4I%|<-j*=rGk{_TVh*Sw;l<z$76El@h&9QFA)g_G
zArsuNa$!hiC}RMb2WqZ?8g`is3=Ezu#JP#ENlpwb42&GT5)2GSm>3u`z_}=GjZ3?p
zOv`eRzrX?vOPCoL1SLVmIcNxpg`s%@h(;9!iGXydGc%x+upqHfOnL|ifl6i4wc*u)
zR<?Z*VBom(|1VxGAW4#40&ahSbTWWCw4h;05JwQ)Tm^MfK)DRmm~&)E2RHXq!KE^2
z*bAf%G*Icr;K~rh;K$&|;K$&?;K~ripuphE;K&ff5W?We;0JD`XMoF7Q1chm`AB6D
z0ILXP2x4$$2w@0eaAi;ccXL34X^_ql#B{9WbOr_n7pyWQ2wph(s`BTqbAmlh467Kt
zS8{{G8&o+zKobWwtU+~7CPNMbsLZee5B()EI4~qLfVx!040;S{;O^K-dO9nDfq@}|
zp@gA?p@5;7!HPkjL7$-<4D}ci!Tri)h75*O20d`wJe@%w-s^-!XfcC6gAaozgENCG
zgC9dMgDZnBgAs!sxQ|UJ6#xAPwe&y%hN==&06l1(|LLcHhQR8H91Pm(n{R^B;|u7R
zI7mK#0o3+pU;w!v)c66lLP0Iv8BjHApkwDdpyTI9!2MJPh6~{S5Cg*<sQ#CKAuVE1
zg7wKvPR%P$Rd7r%N=*fiv_RxQ<L7R!L4J;YF0N4uzK%g5o_-3>8AX}JC7F4t5H+Cj
z`Mmt1+{7Hcl>Cxh@HjijXP_~0P=lsHan=@)FF|Y&4Z>jdpBpD0&%Oxmx!vs6oO_J(
z8pwMfX@VG6_<`(Yx_b8N96f1)$<sL)v~_;21Eq6TG_e@yJORi;5C+wvAibb40k!8q
zVjzqxPS8=K3WyGYH%l2AeHA!a7#KbX?T6?BC3A2i7t|x;V{lH)&q>U)fQUj=KuJy(
zF@}g>D2D;kGXwFE=L|u782<mCAD$*bY*23vR7`=`Ff}l`1I&m6^@$i57+^G55C;KL
z1Db)tAq^KUVr5_ejSPcY`yloESRpf0Fa?zy3=DS63=FqeAu~`gaZ>3p4oK-hs=n>q
z3=Bz33=EQd3=9hx7#LDGAhT+ungdgB!U36KgYn6wnb;T@j6hkFTmwe+VGV%@4$!C}
zRw<eaGBMnwsrjRZkA}c#2n_ZR;AMb7AMlKdF1Y)z%TUaa$B+*mrqTentb-Xe!JV2S
zhJ1!BhE#@Rh7tzId<<ytq>LexA%&q7Jj4VVd;pF3Lpnd8`3+D<12iuL8dAdF2~%Lu
zU{GSf?3^iqZHKrsgdqbwERoHiz);DM&rr&sz>o&+7r^)+yFeW?&^!xhP6yOcQveV3
zC_w!JnyW(gi5`Ojc(y?SYz}D70b~Zq-3koJ3_0KdD$t-;C4&M(3AiT$F%M(|WcCIW
z9x(qGFo4W101rx)Fo5*H=Exwu6$NnT6Ew5}(x1Yhz)%hj^$c+55<To7LvNsgLr}*o
zpCOk)0qlQ}xez;w!2@Ryw<|#Bvp|D5l?*yy6Cv{zppXC!ks*fj5*c#9vvraRt_&p%
zr3@f{gX{?g_qaeaHy{;|nJG|sDlp_RfQIQn9q4?9GzLUy27rAGOBV_ZxeTQYkWdH3
zRT+4`D-rA#bl-tyW<lmay2-Hc!At>=l;O<a!{EpONev*AKrs*M%7R=3iWiXI6u{{N
z<PXsBCCKjz48;tY4C&wwE-18<z;ObaJp=VBaiv#K*nxZvO5e#0Ad^A7ba3dS4<3M0
z1!w>c<OYyi(Bm5v&qWNm;5j~!t3Y#1<qV)y2TIMEU~zOk5K+vO4M|-f6G3SXk~%;s
z0_0<SVVnrIrJNxZJl_b@0dp~SQz5bJ3{6w?OhKUB0g5Y7ItKX+l%7CyWW@~D44}LR
zTCW6}k%Od8P+0{M0mUasCuDvZQaXTgGHCca9X!GSG7FNgAtoZ18KCkm4{Iuggfd7k
zEIvTCfN~&c<rHXS0%S`8186Y1fFYlu7(7-1DpNqY1mX@zx`gBfn0rC_3lyrLd0dcb
zARmE-VL&N1ADnwZ^O2yG3NjH?7J>3VL=2PyK(Uy|0178q3J0ZHNSuOvQp^BKxgZwE
zy&!d<0WZ*8Hz?<WQcD3t3OFaHF~IT>B+fx*KvE#Kya)+F&<sDQqyzat4_y9(LI9*o
zm_dQTgCUY306drm8kz9}4@WD3S7Cr`0-1no@JWe*kwFAwNJ<nmH_E`v5CEPn2c>3E
zP6OEu3O!H`1Pxt-;s_Lq$TPK|a03lsr!u5LXWKzx4a$d**aVpa3qM32hlFc3Ln=cm
zLjgF~gUrciC;*op5S_@W2jmjant~#R6b8^R8mP8XU{GLigW8EGJ*^lN7~<&&VJ-o!
zf&hg*%#V%?DGVv#VbEgms2lBF1#<~KeUIHPm`fm2NT2~=(7H!Z%z)wyyB^wz!dwEH
zs|T$EDF9C^fl2|8YiMI9c2i+4abrkiNCwxLxzJK!pj-kP$AzU@kQ+d8L;F;U96F#C
z843)%43W^5CMaY;ZCKFC7DVX_nj!<`e^5yRD%l{V5r_vWCqOMkP~8H{cc79R)*677
z%b=12RN{c@JIvY!VmqXc0oenp4M1rY;Q~;}25Q5BWU;kq5M~xLlrkhSfMOOj-ku7s
zJs@tTe?1Rs0fAQCAVL%rULb#h>K{<}p|^@aEiX`*fx-|}pF!&0RA}pdp!|V|2T=GY
zGh{LpFn}hdL2(4Cksx&{sGXk1P{L3S?oWVx2Wne_+R32y38be4i4RB%mHzPoYCS;G
zngX<50JXUwZ5B{l1=MN)xfRlu0;vY|5=y`|GKdF?8BnZ%QaUJxK%oKhAE?EK+zNrj
z6T~J^Y=cT+P*{P?hJ*twz3M@Gr-tCxGH6m7)KbX<_m@!BC_(e65(B8O0O?o2#EPJO
zhGKBv0F)m=eH2i;3gjwSS_9=lQ0)$K1;mG-K2<rm-vDwYNFAsj0P+neRzVYK+2E81
z>J@-wL2VxeEd5v19v-Mq0_q)uQh*<{X94QXfI=9wdXn1xF;Ff8mF-FlxWX9}X4u09
z6r-Tl5-2=DeJD`OLflyn?kj_O;gHk~Y7K(I0HhLR7AOyZ(ga8cs1FV5UxL`6u!5Ni
zTJizP1E4qswNyc=2^12b&;^+T%2%K^3TmkcNj)GlK)u}x22iMjbmlRD%!auOWH-n?
z$T0@8ACxXZX$_KkKz_*qx9G5yi;#Q>NvEJP9n{C8f35`eav`Asaz7+2LG4vgNP*H~
z9=N9fQGwp}1^FK0Q&3t?1FHmu93)Oby%SIvfl?x*2M8L)0re{(F#t*Xpd1Y`9hBlh
zV*sH36-X2kJ_8XV8t`!#C5CWlN(7AyfYLf>O(iV9LVSkYFN63D=5tWm0p&SR-vAW4
zh?E9OKS~UksREQ&K&ys9p$IYyM1w*TQgeYuXCV0j)X#>DlY#n3pb{$?9QvTrw1^=I
zJXQv>0}`_3;3Wl+8W~h3LHZh?^~t3S5WUDTiKzb}d0`-80OD>?c!5FzRMLY|5U4*6
zDc>OO1^KoZ>~Ba)g`{3ko`>XCNNEr18-aS#3gCHtNZSNb6GHq4Dw%PG1tb?>3vEyt
z59#ZIVir_3fLarv`Uv78P<;$a8KBk&B)mZ*U=S9lgaE}GsKzU1s05E@f%?u6ouD`e
zrGHTB0i_U7Dg%}1pw=OxMnKgEDnCG@VW4peNN*J4KJ@&AND~mBf%>T+eL3KMIw);{
zR18FnLsB@zA0QutdizBTu<;H^`2uR8gJKsn@&FoP0hLpbm_~$IE(0i~<THRqjUjTd
zG6<HFK|X|~4A6>s^i+c{J%G#tr9Du31EoxmTF|&6s6_@E6#$JG7c)T0d{B8-!~p8!
zgJKhsqCx2p(y9cF%7Stlq<jGR1T=;SG8>XML8%H<vxC|Ti24UoCc^3rPz?rhg%Sf~
znI4FbyM3<5pagF3fYz=kFt{@mfk%2lDFzhp=;NfIvJO<Q2ZLMmh|yO_o&kjeD3(C=
z9dW%4h#eq(pmdF`9RrFNn9m?#l*$0{FQQ!z@*SiE0F?>!8gYiDE{NYjHV@raI3$fg
z+A%o{Dd5=wP{{-eebC5Z9)m7;9|<H^fYK$Xl!eLaGC)Q_i@>{VK%$^>tP)(`fMz3f
z!3(@Vqgo(2(5@FyXqAC?HzYD7f!p$sG7!Bj3@SZAWhE##K~ff|Jp{_-so)YG(VBzI
zJb=uF<a9`@0-_2e0vb_+m4P6ifpQRNu_-7Qf&2o>RiNCM4IZ-wrAv@oLFEm|eW1Al
zNdASCG@$X&L<WeRm}($%B#^Qz7wk?@i48Lclu|)?9ArK$4M9u=mFJlZkWvWbTS&SG
zwJUJ7qCxo_W(LR(P&`1&4t)Lt*$gU4Ah8Lu8&W2M>U~JqfN~;eZxzTqkW0!K^caj7
zAax~jd}uJ>9(Se9>N?0sEp5yeWzYoo`3TLBfKnx9ivd)Mfx-$@`lF{~ScwSfTN;CH
z!7OV*`_w=xK`jAXB`;_{8lpY~Etm$SCs26}vL94aC_sCq_|grmrUIpzB>3zqtUiT|
z(u2%`m<>uVpu7Ro1xh<0cS1^F5DQe7fkZ*M2(&~UG^z+P4dez;Iw}G$@P?>BHyxq}
zl*2%?k+7Ky(C8JUmw}wVk=+U@cR{`eg*wO;q|`Z(Fr=*3fw*8G+C{K1fw%xv>W$Vt
zpdKP9rIJ(kXh3T>P%Qz<OQ5+P(2Q6Kc&r4u?FCA2kW>RI!$JL4NM9GURsodKLFo{Z
ze?g@La#;z9D^U9n)cOOt6x7Cs)IA_kP{|7t#a+9BYNa&r96u=af@(sL8$e@AAbrSe
z(AFPND;bpUKytA3f~}<qs&zo}#S93uK`ku>crO>!>LagAL=6#8eG3W|P>lm>7eGow
z6L4t=sp}wPfROqc<R3^#g2EhB8o}niK&=%}ng`WKsSN4hb_S$&hLl&J9wMj&0+mXT
zP=}OM2-83%2B_r$>Jfn0AXg#c3gll;aNh?~8i7hNP)`h0>*q66f@45~p%}bE1Qds$
zS{PK{LPqdFJczBJ@(z?9KrsaAm*ZZqf$7I&@OUW5-yjzv<UyvwYHm>97ZxfYSK@9b
zfKma(H^hc9$Y+q$2?}da=wvd0_@GdQv~WOmAZQ*Bk^^9A5|oxfDIC-)2lXdG=?m7X
zfv5z95J*OW0kfwGG6iHaC>NrKImi!)aDey*<ZE<wka=&AEue94gsVZd9jHYE3IWi_
zBB&JsQUz(L5f_di_knUA{Y!n2?;*7?X<-2B&4N-NXm32EBm>pJpxgz@ouK|6D2+m5
z3(~hn76Yw*0oBl;JO!$s>F;k)DPc-N837t&hr}|dyd<VI1WE&tG96M1fzlKxq(Nx}
zl-ocfwxIqAC?A0GBP11r#{5B{2+Exx8c`Q0Fqk8)5Cf$;$m|y)uYhdCodXc1G^mUL
zl}0dMfKo1~jsm#^WGA9T1Gx*P3gmuVB>|{bgU!aFx(DP_M7aX;FZ#+4Pz?pD5kWKh
z5FdcXG(mL-XfzU(DnWg_6b4=JYA#US0}@9qTVbt2gdZV(0*#S@%24D`)Bw+Pz}y6K
zUplxq1?eH-AHf6V7EmaGR?>meH>A`8g%YHW0J$IJa*#NvgaWPT0F@)4J|d_O2&&H^
z>8BjLK@pOoAY~4s{(+Q(piw>0m>Q^@2l+S=X}%3+2eK(3J3y%h)XD(q1-0EEE(5s+
zGzJ6;OOP#j;CTW_J_VJC5Emg*DXv(Ctp+he$u}^2K|OkqX`qq;<O5KcLrMlny#=a&
zK{X?2g(b+HpcGsH9(4o7DPnGb7<Yi;0b~!T{(#J2L+k+g5M(w;4irC-lngNu(t`z!
zGJs+bR^sA!6)2a$bc1RjP(0w*f$#@tl_P9)0#+A*%1BUq2r_~IDh&|n4HQ?P*a5{i
z$lg$fJn$F>C_h0`Gw9q8P+ow9DrCG0w5tr%f&le&Ky43D3kB4I0i_mLu0YJ{gHi${
zq(JE&R04x?2WYec)K>?ED$KX&zJ!S5D+54jA5!{&Ql|oV{cA9IJ!&umXs*GD!I2@D
z!IL2voZG?~JQ+e5JQ(~LLK#9BK&?X1*2Ez2c16$zMOOxpUVnxl@H#es@Fqr(4o?OU
z8zSe%02)#7V~Au>VDJWSYIFgc4Vp!Vqy&(AL1_Wx8&InT;zp1tq%8|7ML?km@;N9(
zFiQnkz5$K4f>Jj~2dIStDzQM}2J$gTH^|SR)B$Q66v4+kL38H_J3)PT%(W4a+z0Y=
z2!juUE7%8qU|;HjL(h-FjUfo^J9h?G24C>bz!0!15Icy$nZW}ZYK{y}44&YX!66Kh
zU=@%Q<^~RHKL${kf<n%X0TQl`;FZvza0l7$%n%A*l?n>+00z(|&;SO1@E$}^UCGN}
z4$k$#;8i`KvtB@H36d&6=?GNQfogI{>H_6$NbLj)Gsp-aXoeLOE0A;!QjbXc5OYBB
z0&+3_8WI%R5cQCG2T-V^+6QV?!16c!TNa?Y5LBCj%0-Z0K{GlaH-gFoP~8X0<B$><
zS2+vvA*7T9m0ytc+Mv`4n)v~RAgF``r6th(11P_PN;*jC21=Ev^(ZJTKq(MZBOvC8
zK&cfJe~`2X;(=TUO530k6*evkiVIK~0<jYmJD`%2{&gxSJh0WKAaRh}K|VouJEF`3
z&2fUl5!C7d`3BUwK(04Qt$mSg1;qj=wD8TzpvN2{H-KttP&o;i_kfftptuD22UM<L
z@5zAb5KxSOY68?&5ol#uG<<D4$P`eI8x&`;3`z`;Tx<c(#jyN?2qjp4fz;xVGzV(E
zL)tYUGw3w~f|@2l=Wl^p5}-5;Nspj*O%e12BT!ldr2tT>g{Z(4yP!THD7HYU0h9wF
z<uoWKfx;4$PeG*^sFVlIR6t4=NPiO~52`yrYC+)ut4kmu1JVg;xr6Kn#WiT|6f{4F
z=<k3;L3ToNJ;;n=23QLT5-aGdfkCYl&>S5oMv+(Xk;_(KaDk3F5o01~E)Ik_z;k#Y
z)?nZJ19l&1mnsJX1L!b2HVy^`4h{weISvK}1r7!V9S#NtD-H$*8x97BRU8ZqYd9Df
z)^RW}Y~WyEc*nuO@C_u##lYai#lTR<!@$tO!@v;3%fMj8$G{N6$G{NA$G|X!kAYzt
z9|OY-J_d#rd<+b$_!t=0@G&r~<6~ghz{kK4!Oy@D#m~SH!_UBwz|X+&h@XMs2|okF
zGkyk!SNseNIRXp}c>)X!1p*8VB?1f#-vk&Keh4ry{1RYb_$R=?&>+ab&?Lyf&?3md
z&>_gcz$L`Mz$3)Kz$e7OASA@VFhz)g0d&ID3?T-FIYJB!a>5J@3c?HwD#8p5YQhW*
zX(9{^IU)=UdZG*rCZY@s-$WT0{)sX$ED&R0SR}^4utbc3VVM{M!x1qChGSw33@5}G
z7(l03eTD*t&kUbmK*8tF&lv7KyYLJo#PH(q3!L~c15^iyW_a;}K_b_KlcU&!lOw>3
zi6PF534SmjgS3H{C<9E^6m(n~LXw|33lxQH9E_nHj7%Ji%%L2NA~3ZmrijBt(M(|Q
z1^MNYI0M5oaR!DL;tUM0#2FZRBpDbcNis0ZlVV_4BE`U<Aj80*Cd0rWB*(xYA;-Wl
zL7stOk~{;$6nO@Qeew(p2jm$T4#_hxtW#iM*rLF|P@~Ae(4@$~a7LMd;hZu9gPjTk
zgM$hKgOdsa!weM$hFK~M41ZJ@82*8d-%@2@uux@Suu^4Uuu)}TFi~S*&{Ah$Fi>Y;
z_@U0g@K2qAflG^lfk%shflrHpK|z;+K~0x|K|+s#K|zm!!9b6J!AOsRA<dA1A;S=Q
zl9(w2L!2oCLxL#-Ly;K+LxmXwgOVi!gNh{sgPJ7+!v;$RhHaJ%3_GkC7<O4PFzm5n
zVAyBHz~E)gz~E!ez~E=izz}53zz|{$Ij!vLSB9@}zyRlcW_bPi-e-^;!`Jg*d8jf5
zh}c)CN{}eS5eDe)dQgnPF()Jjq4CC2?7_)`5^u6jT8sr5N=izgB*0MY0XoGG5&sg%
zX#+(cKXVr7U=hM8!vk~z8xvao2jzF<ybjB&uzU*3pRjxh%a5>p2+Mz<eD@jdR|W=#
z*OYQzA{+1=ne!Bx^B9?fa<ut<WSLvYoEyj-6q#$tGFOl}mykIqdO$~fz$H;+&cTHl
z7#L7wkR>2&(7_j2Fe?KCml^{%=xjPHvY=z<IN&NEXCd;S@nHv_GKhoX2vjzL%0y6k
zh+O8u$~#!O0L$~R{0_^%u)GV)qoDi<%Zn(d{QQ7;3qpQ_P!RG9go2QtAQXiB0HGk{
zI|v0K-#{n``3gcoNYEiI5EcUi!!tOGfq~%(GUpL8=K(V39x?}XObeWk5@RT7hR9@s
zk}^;d1KbWc14rzFVwhZvmUhHJCojUvN?7@bTqeTG!vJWR7-r4D5CJV0LFa>@#P4so
z%NZCLej;<eBXhnYb5IJ1kH|9bkvS+?8h0d|N7i!|nR6PMgRLw^mZuw=n}H#&g@GZq
zrSN}33j+f@U&3OZ19X}xQ=$qJiVRp3bcQipl#n?v^U!I2D+U__1}+0e`3+5a3LB0&
zDsDK)uC!q(oA?HECgllB*;FPRWH$<6U@)AJ!Dvtb3K!!51_liy20jA@2V1ERBD@R?
z=;vqZfDZKsDP>?_*lr5J5uM%)Ap8h)NYTHFIcslmmWYPVI`A=iY0m|Yyt5wt7j(NE
zQYOx0ncyJca)57}#6nhP4bRP`Ph{N=x-F0uWt(2eX>Fp|$ifuKaYE?OEQOX6H#ijY
z??i2Rd30HIU;DI+A77mQ{p#HQxOdg>_wk&aseICM|L3forPJT<4lVUAnG#eyanaUw
zB5Uo_tq*+TKbLKrIRDymYt5_wnyzi{`|;9asm#^Y-9hq!&$PB2W=Z|GF3-y1O5iJQ
zv-XuMv$mZIyL@GB)|y%CpIta*BKjbFrF4OA)MK~JXH#EQ?}$74e_igj`^Vncm&Q)s
zmbO27wVL_N?3faF_u%{|?-!;{oNvA6o80%~xu?Dx9bL44?Ukkf7hV5-oyRD7$I&mz
z!iB%LP290@uTYEs^opq8kAJn@m;YovSIG0%v4TnSTJ4Y9%u`5sDP0$MU+i9s)SUGH
zHpkBg>#f?n>FlOE5_ay@vkh*1DedSy-6lS_W6jTb-|m*oEG%96zWC6`!c@a`Mtz|l
zq-Ddy^P?@!{w}WIjmnHJ_1P$HE<AZp%$b7~-*y-8v3{Df%P^}~e0tludj|@_*X^BD
zc=oVRcFg_u`RgUa_65yReYoSf!SxLBeJl3m-f2E5vFKNO`-+<nO-zq((zTg4=Z%Z}
z@i5uo#gF0+ufKBagH4s_+hmTL%le(q?z!iC|Jdj1N%zer7N)d6KGyPlwco?q9k*VJ
z@D-jZS1tV2GF$cZmxns@TIT<k5?;RV&ozt4hbxqHiq#S<-f`s@>}uy%yt(z|u@%mK
zcUDV0j}ewTzV&fOW#eXxJ<`0>xC7q3)^JJvv48I2hu;k>52hQwkBr(mzm<LW`~?F2
z?faLkpZ~z?QE+Gb+1mfwGSBx}9Nb*8$$ouDe|)?{s!3Ux`9h^Dk*RgkSG5k7E?mJ>
z>;6J<uRy&++T*&~4TTe|Zp`TK&I)||eQtf;mn$cZ@n5}h%qwdDq<_D!Z{s}rd`A4W
zrT71RF5EFQSvmA|fa%h8WzSlyEVG@~&WZZ=+G+OH+Y3v+rezpBU$=xmI8$B0xAw;K
z_rizEX0%&BIRBe{PlL<d_J{X$Bp=x-p1jBN@~!Woj62G;d;eX!|5|%xod3@2-jn7h
zWi{0qChNSEIJx!k-rY~-@+T~*bp9x8<6_-^B0zugFS*Hq_e4(lKm4`B#CV=|XXF1j
zFVmAgh>FdU*prxaVSe)J7s*$G_O#V6Kd)cS{KWOw-Xq>qkKRj&e8PLMpr=FAbAsUJ
z6PspO>^oBtwphhcA#Li@(mgJ6VwIMkcq%KpBy689O`dUPq5k;_%}ot4Hm|=<e7-mC
zQ~tfVSLVCipS(i*-R>j#N^+mx8wxKE5?23z_ue%N&;P5GCq3b@`>v+s=_JVbliP1n
z_Wqd9yWYFEuKc|sw0yS4rmJr1Cu;@&E-80$s1tcO|F!z`)7$^c@kabyXPi5C>nGc3
zLWQ1&#k$-dWW-)uKMlR>J?&Kr&#{1$SCyZX+Iq!Zwtsjhqtex0LH{NDQ&*j9$v4hD
zxN&%nwEIkV4QsjnqPr?SZf47Zu9uu~@H=Ak>vPl7%D*f{29x*wv3VB1GQUwyn=vKw
z(yXJ8GLKoGI+CPyFY!Oyp8mcM*E>IJ7{>Q6**Ecub<?(Yu2Mmnuk$LyzE5vGQe!vI
zEvzZfP`7Y>-2&nFSMSffQ=9p8-87*<Q;UwjUp9L2Pw@J+yS?tclE3v=0iNR>Gx&v4
zj=b6OM#(+2s!QfLJ13Lj)@jZ+R;L%{_T{Kfmo8PiWa!iN;&j+trKP*p=UlE$VgH<A
z{&{6rWoR$IPI%G!IA(jlny?kmzGs<>PUsW#yd<oA)y3exGE<TMu9c3Drf0`}SGkk*
z%Fnd)_w!pbtFQjIsqCvN3Ox7q^@bqdX{<92mzn+fe0~*AWY_d72TpujI**gt*P1^n
zxx-F)+se94y;bMjzfAts(IC_N;r!R%_NDiFd4iu;tUecZ?E00`t&h1+Tt79v<mPiP
zEphkx+QL3Le|9~ewd-d|ko~fCa_5ALW*f)m6>fTxmTu#wJTuugMd6sjWzT~dDI4V{
zgqZ9vXTKEJmHD1A<?#OU^v}0j<M+?syK?#N$z3HWUis>?%{Ha7T)OH1ykeJe$6t}Y
z<uMm!X6GOKeN6S)4X0o79UhaOPkip<GHHq%OA4>pCI0<)mOi^t6kPuE{>)^@S3i}f
zNKe=wcszd2$s#WG8|#|t|Mg5(oc}O6Jt5_H(at~Wd}=dq<Q*xyT-Teq-+Rd!iB9S4
zlT)JabY8ky>yuZy{KVTAtyfqYXUv<HH!b?Y!Lt)z-_5+#mwhWJJ^EJGYqg_Wju@NH
z{p!E&$n}etCGN-0c)wq|<g!)Ooax%adzKpAj8=@ir}cB^_ONA@l}3keYwZ8vwNd!@
zjr#7#rE}`tmsKoJnG}A$l=qXd{s}An6H*taNnftX`@DAL>owmm>A!H_RIi`7qf&qA
zWxp%U6%vp4MBTF1nt6}$jmu1%SE8pY)=#ioGo9zxoIaD=<#RrNU%71SBKg(g1{co!
zympNHp4GL4R3mZjjNj7*cYbm@vg`D@{fB$(&sUw-HG8|p$nxX;;@dB(LpaSAcbIYQ
zt-1L?)Pr$n<a^P?%}*vSwpnlUte|Pt44czck0LyLv<oa84bo;Wm5@GJDrXu0ZtrQS
zpB(p&gjF{0(@8(0z4z*-W6_%XzfJSD`@C{OM9$Gf@6S5}{B+_b{wTk+y2qYV`+~c9
zt$OaaxQQ!@-rt;|b-MiQ=Kt>^lOM&WJ<8u-?I-YE_)E0Xr-<9Xnm!e_)a$+9wDX-q
zZD?A})>WBrUvDoxvZXYA$r;6$3sygs{~CQMP=5NknwR(b-n}?E_fqtAulQOK(-ls6
zK2c3trxPX5ls134ZR9Y2nw^!$T*sF!vrC)rta$yjs@Ch)%gIG+|M6eDdVHzr+wIqW
zPmsQR@RHE^#WSrwR)%Ch`nd3`P1Pi!{a3sr%hx6N?&{k5Au95S>}Hqzl-$^^xi^mg
zi(a$+wZ(tFr+ypT);{0GR@r5E?npCdQSz+w2Rp4kR+b#H>2JFBIYUi#(YymWHl8)}
z)%4H5V7}35G;?{kWtfNjwP3aWPnAX&F9{jiEEAqDFF#wNF3;A!EKn)S`c#0^{B194
zG_SF4ni(PXQc3g5?7*&1Gm<%1C2{Gx_Ed8l-snx{Y%<)f^Hf94y!A=O+Pd&*uazr=
zqf=FDBUfKPX|*@%+Uu#C9QCLBKB?E<x$1zSxA&3t2Ty%Z$osZ`=8vD*y!Lhfj2|+o
zF4B9X{fB?wLpx5xZvx+wI)0nEyxV{A_tWg8PgCz{$3O6&R&23a`>FC?_ok}pbJUhC
z|El)HoB45VzvVx{$5!2+GnVF58vbAU_+0e4hrzFYO^K;Hb?5h7y@~fHpSL#>|KGCz
zk+(O8le|o*;{OF@9@1OIudbMyeTDOum1g+L;5qBnozhi&&h1kz7r&U0*=eJ4_+0b;
zwr`&=Ts?JI!pp_BXZ}M$n^k?MT}nkvEmUT{VmSWobVaD^%)OHu1*au?1W%o`>YGQ`
zE!EmkOEbmzNmsL8t`93XH$Q%7k^M6D6Wx-zJMSCud=I#8yNcteQOMK}xuII$R~p^Y
zpIp+jIpvYzuL-VlX%pIIeoc3lD>`x5=aa!)8y{8vn@<wEtdu8*GC$@PYF7B$o84)n
z<9Ol8!_=!^?jQbZ!(&wW;k2!D=Z-rwu1;eTSs0=FxVm6!j=J3K&ZA|=q7F)&zLKY@
z${v04qS)5{P3$R)txl9bc8pBQ{8tm#D|_wp&Hvw*q<o$uoiguyw&q5@$}H=@D;C@{
z%5htm6TmuY{k79oMO*l?H>e!!oAG{E--rFlY_oQx&e&o4zprxkjXnMP9S<%4gvm}$
zwm9>x?8o!h9cSAu)|$C&)885=nSAS{+1nehr*ii^XZm{Q-H%U8YFz^EaQu(Cr@DXY
z@>y2JYpfREKUKRTJHRF2m&=p5Pu6+URwjSEFZo|&-R$1!E0gV>SsYIhl1r=>`SZEo
zI`otNjE6ZLIq`Y-o?Si}^V&MEGqZjvsBH9E>LP0KTi5-=G!d(<n@+Brtb3!6nX}1I
zbehm<wm-L4P7ghopJI7kXYskzEn9mwx@<jSTQzaxt@nRI)7C_viTGRnQ08Z^RX6{~
zeVgO%6@U8arZ<5teeo4**Bd&^=LStV9^(@rT6`<$z0A^eRzGd^)HsgK*<91|zeH-s
zwzyYw%Aax=+CSRUxwdG^zAKimf3(Hx?k@~GwfkJrzT4A->h<n_*m*_ty3ohZ{I}lE
z3KHI*C0c*Ir0BKEEB76|+ZWHSj+u04@rr|rFCWT1xKX$B@Y+RZ;(jXTZQ64D`yH>H
z$L=ck?e=b&(Y2ZL=b}4Ti>B^wD&AT1c=I|puckdS`lj(s`?llSS$Dy%rE{Z|uJ2r0
z)Ew>hJGIDoS47o<<<WZOJ4LVTc4oV<pE1ugYw7-Kr%87we@gx?^8EGE?bUAYN~D+U
zmR4bN{3ZSU|5;~#(f|J$t0F8j*U$QA8!>avkK6yb`!@N3uAYJRfCe`OI))y!{)Za(
ze}Oue=ywQV^F8Ry&jX;tZo&5lg64BUbF%&npcUoV)Z-HqVqgN_@&Gz06m%;SKDEel
z;E{8%{r=$fBFJ*2u(=qRz_((6{6wt%phGl4Cy#>cF=PN8B@fz<0=j($BtjbIV_;$c
zojnY)8-$6`4RY&F&?R2rL!Lo%aiBx&iP4Fv2IhZ|-5`vq0#n3}fq@|blvo%*cRGOP
zaY4H~K{NlA;B(j@>wZ8hhhS?(FwKC8XfQB=?<)jdzCo=0pmT^pmv(^6fb2O;VQ>NO
zfCAml1eu`($q|F47?>D9XFh}M24Q0KfbLL>2!^PHq<YYr&=T-{O8MYhsvvU2kYWss
z;PeNw8-$6`1G?)Crj8ZE=cL7l1vLGG>;_?&x%g;L1_tomLHN{=A_uY;hKbRK?0;g^
S6H)`S8$=UQ3z8p9_5%QBF0}Cg

literal 0
HcmV?d00001

diff --git a/doc/snow_annual_map.md b/doc/snow_annual_map.md
index eeeb3ae3..97b9a500 100644
--- a/doc/snow_annual_map.md
+++ b/doc/snow_annual_map.md
@@ -1,20 +1,20 @@
-# Snow Annual Map
+# Snow cover duration map
 
-This file describes the snow annual map generation algorithm.
+This file describes the algorithm to generate the snow cover duration map.
 
 
 ## Objectives:
 
-The main objectives of the snow annual map algorithm is to create a product providing the synthesis of the snow cover on an annual basis over mountain regions. This product relies on LIS snow products computed over different L2A optical products such as S2 or L8, and uses a multi-temporal approach.
+The objective of this algorithm is to provide the snow cover duration (SCD) at 20 m spatial resolution. It was designed to compute the SCD on a yearly basis, but it can also work over longer or shorter periods. This product relies on LIS snow products from Sentinel-2 or Landsat-8, and uses a multi-temporal approach.
 
-The approach is validated with Modis and Landsat 8 products.
+The approach is evaluated against a similar product derived from MODIS data.
 
 
 **General approach:**
 
-- First a time series is created from Let It Snow (LIS) snow cover products corresponding to the desired time range (a snow season goes from 01/09 of one year to 31/08 of the next one) 
-- Then, the time series is interpolated using the [otbImageTimeSeriesGapFilling](https://gitlab.orfeo-toolbox.org/jinglada/temporalgapfilling) application to obtain sampling on a daily basis.
-- Finally, the daily time-series are combined (temporal sommation) to obtain a mono-band image, representing the number of snow days. 
+- First a time series is created from let-it-snow (LIS) snow cover products corresponding to the desired time range (typically an annual SCD map is computed over the period 01-Sep of year N to 31-Aug of year N+1) 
+- Then, every pixel of this stack is linearly interpolated in the time dimension using the [otbImageTimeSeriesGapFilling](https://gitlab.orfeo-toolbox.org/jinglada/temporalgapfilling) application to obtain a gap-filled time series on a daily basis.
+- Finally, the number of snow days per pixel is computed by temporal aggregation of the daily gap-filled time series of snow cover maps (a minimal sketch of this step is given below).
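+
+The aggregation step is a per-pixel sum over the time dimension. As a minimal
+sketch, assuming the daily gap-filled snow masks are loaded in a NumPy array of
+shape (n_days, rows, cols) with 1 = snow and 0 = no snow (the array here is
+synthetic, not produced by LIS):
+
+    import numpy as np
+
+    # synthetic daily gap-filled snow masks: (n_days, rows, cols), values 0/1
+    daily_snow = np.random.randint(0, 2, size=(365, 100, 100), dtype=np.uint8)
+
+    # SCD map: number of snow days per pixel over the period
+    scd = daily_snow.sum(axis=0, dtype=np.uint16)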
 
 ## Detailed approach
 ### Data Collection
@@ -142,7 +142,7 @@ The snow annual map processing can be launch as follows:
 
     python run_snow_annual_map.py param.json
 
-Where 'param.json' is a configuration file containing the supported parameters according the json schema describes by [snow_annual\_map\_schema.json](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/doc/snow_annual_map_schema.json)
+Where 'param.json' is a configuration file containing the supported parameters, according to the JSON schema described in [snow_annual\_map\_schema.json](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/doc/atbd/snow_annual_map_schema.json)
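+
+A minimal illustrative 'param.json' (values adapted from the defaults in
+hpc/prepare_data_for_snow_annual_map.py; paths are placeholders and the exact
+key set should be checked against the schema):
+
+    {
+        "tile_id": "T32TPS",
+        "date_start": "01/09/2017",
+        "date_stop": "31/08/2018",
+        "date_margin": 15,
+        "input_products_list": [],
+        "path_out": "/path/to/output",
+        "ram": 8192,
+        "nbThreads": 6,
+        "use_densification": false,
+        "log": true
+    }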
 
 
 ## Product format
@@ -176,9 +176,9 @@ Note: In case of densification, the nodata value is set automatically to 0 causi
 ## Snow persistence maps production
 
 The current snow annual map production covers the Pyrenees and the Alps for three snow seasons (2015-2016, 2016-2017, 2017-2018), using S2 and L8 snow products [distributed by Theia](https://theia.cnes.fr/atdistrib/rocket/#/search?collection=Snow). When the snow products were not available through the web portal, they were generated from the L2A products using the LIS snow detector.
-A blog ticket showing the results of the production covering Pyrenees and Alps for the snow seasons 2016-2017 and 2017-2018 is available, and it also provides comparaison between the snow annual cover and the ski stations implentation.
+A blog post shows the results over the Pyrenees and the Alps for the snow seasons 2016-2017 and 2017-2018, including a comparison of the annual snow cover duration map with ski pistes.
 
-Link to the ticket blog:  [http://www.cesbio.ups-tlse.fr/multitemp/?p=14620](http://www.cesbio.ups-tlse.fr/multitemp/?p=14620).
+Link to the blog post:  [http://www.cesbio.ups-tlse.fr/multitemp/?p=14620](http://www.cesbio.ups-tlse.fr/multitemp/?p=14620).
 
 Link to the interactive map: [http://osr-cesbio.ups-tlse.fr/echangeswww/majadata/simon/snowMaps.html](http://osr-cesbio.ups-tlse.fr/echangeswww/majadata/simon/snowMaps.html).
 
diff --git a/doc/tutorials/prepare_snow_annual_map_data.md b/doc/tutorials/prepare_snow_annual_map_data.md
index 67e1a384..bac87b23 100644
--- a/doc/tutorials/prepare_snow_annual_map_data.md
+++ b/doc/tutorials/prepare_snow_annual_map_data.md
@@ -54,7 +54,7 @@ def main():
               "data_availability_check":False}
 ```
 
-These parameters are described in [snow_annual_map_schema.json](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/doc/snow_annual_map_schema.json)
+These parameters are described in [snow_annual_map_schema.json](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/doc/atbd/snow_annual_map_schema.json)
 and correspond to the parameters of the json file to provide to the application run_snow_annual_map.py.
 
 However, the last two parameters are specific to prepare_data_for_snow_annual_map.py:
diff --git a/hpc/LIS_SEB_style_OTB.txt b/hpc/LIS_SEB_style_OTB.txt
index 991435d9..b79d0a30 100644
--- a/hpc/LIS_SEB_style_OTB.txt
+++ b/hpc/LIS_SEB_style_OTB.txt
@@ -1,3 +1,23 @@
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 ##Color table used to produce the LIS SEB using the OTB ColorMapping application
 
 # cyan: snow
diff --git a/hpc/batch_compute_NOBS.sh b/hpc/batch_compute_NOBS.sh
new file mode 100644
index 00000000..0b49a28e
--- /dev/null
+++ b/hpc/batch_compute_NOBS.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#PBS -N TheiaNeige
+#PBS -J 1-191:1
+#PBS -l select=1:ncpus=1:mem=20000mb
+#PBS -l walltime=00:59:00
+#PBS -M gascoins@cesbio.cnes.fr
+#PBS -m e
+
+# Load LIS environment
+module load lis/develop
+
+# Load all the available product names from the tile directory
+pin=/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/S2_with_L8_Densification/
+inputFiles=($(find $pin -name multitemp_cloud_mask.vrt))
+
+# use the PBS_ARRAY_INDEX variable to distribute jobs in parallel (bash indexing is zero-based)
+i="${inputFiles[${PBS_ARRAY_INDEX} - 1]}"
+
+# run script
+python "${PBS_O_WORKDIR}"/compute_NOBS.py ${i}
diff --git a/hpc/batch_compute_PSL.sh b/hpc/batch_compute_PSL.sh
new file mode 100644
index 00000000..2c14df04
--- /dev/null
+++ b/hpc/batch_compute_PSL.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#PBS -N TheiaNeige
+#PBS -J 1-191:1
+#PBS -l select=1:ncpus=1:mem=20000mb
+#PBS -l walltime=00:59:00
+#PBS -M gascoins@cesbio.cnes.fr
+#PBS -m e
+
+# Load LIS environment
+module load lis/develop
+
+# Load all the available product names from the tile directory
+pin=/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/S2_with_L8_Densification/
+inputFiles=($(find $pin -maxdepth 3 -name multitemp_cloud_mask.vrt))
+
+# use the PBS_ARRAY_INDEX variable to distribute jobs in parallel (bash indexing is zero-based)
+i="${inputFiles[${PBS_ARRAY_INDEX} - 1]}"
+
+# run script
+python "${PBS_O_WORKDIR}"/compute_PSL.py ${i}
diff --git a/hpc/batch_compute_SOD_SMOD.sh b/hpc/batch_compute_SOD_SMOD.sh
new file mode 100644
index 00000000..c232dfe2
--- /dev/null
+++ b/hpc/batch_compute_SOD_SMOD.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#PBS -N TheiaNeige
+#PBS -J 1-63:1
+#PBS -l select=1:ncpus=1:mem=20000mb
+#PBS -l walltime=00:59:00
+#PBS -M gascoins@cesbio.cnes.fr
+#PBS -m e
+
+# Load LIS environment
+module load lis/develop
+
+# Stay in current directory
+cd "${PBS_O_WORKDIR}"
+
+# Load all the available product names from the tile directory
+pin=/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/S2_with_L8_Densification/
+inputFiles=($(find $pin -maxdepth 2 -name DAILY_SNOW_MASKS*tif))
+
+# Check array size 
+if [ ${#inputFiles[@]} != 63 ]; then
+  echo "PBS array size should be equal to ${#inputFiles[@]}"
+  exit 1
+fi
+# use the PBS_ARRAY_INDEX variable to distribute jobs in parallel (bash indexing is zero-based)
+i="${inputFiles[${PBS_ARRAY_INDEX} - 1]}"
+
+# run script
+python "${PBS_O_WORKDIR}"/compute_SOD_SMOD.py ${i}
diff --git a/hpc/compute_PSL.py b/hpc/compute_PSL.py
new file mode 100644
index 00000000..88440c83
--- /dev/null
+++ b/hpc/compute_PSL.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script computes the permanent snow area (aka "PSL") in CoSIMS from annual snow map outputs
+# The PSL of a given year is defined as the pixels flagged as snow in every clear-sky observation from May 01 to Sep 01 of that year
+# In HAL the dependencies are loaded with module load lis/develop
+# Author: Simon Gascoin
+
+import rasterio
+import numpy as np
+import os, sys
+
+# the input file is the multitemp cloud mask VRT; the matching input_dates.txt is read from its parent directory
+# Example: f="/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/S2_with_L8_Densification//T32TNS_20170901_20180831/tmpdir/multitemp_cloud_mask.vrt"
+
+f=sys.argv[1]
+
+fdir=os.path.split(os.path.split(f)[0])[0]
+fdates=fdir+os.sep+"input_dates.txt"
+d=np.loadtxt(fdates)
+# Build the date YYYY0501 (May 01) for the year of the last date in the series
+May01=np.round(d[-1]/10000)*10000+501
+# find index of closest date
+idx = (np.abs(d - May01)).argmin()
+
+srcObs=rasterio.open(f, 'r')
+srcSnow=rasterio.open(os.path.split(f)[0]+os.sep+"multitemp_snow_mask.vrt", 'r')
+n=srcObs.meta["count"]
+# read only from the date closest to May 01 up to Sep 01 (rasterio band indexes are 1-based, hence idx + 1)
+snow = srcSnow.read(list(range(idx + 1, n + 1)))
+obs = srcObs.read(list(range(idx + 1, n + 1)))
+# p is nonzero where the pixel was flagged as snow (snow mask) or not observed (cloud mask)
+p=snow+obs
+# PSL corresponds to pixels always flagged as snow or cloud 
+PSL=np.all(p,axis=0)
+
+# output file suffix
+outfile=os.path.split(fdir)[1]
+
+# export PSL...tif in the parent folder of the input file
+with rasterio.Env():
+    profile = srcObs.profile
+    profile.update(
+        dtype=rasterio.uint8,
+        driver='GTiff',
+        count=1)
+
+    with rasterio.open("{}/PSL_{}.tif".format(fdir,outfile), 'w', **profile) as dst:
+        dst.write(PSL.astype(rasterio.uint8), 1)
+
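+# usage (one file per call; batch_compute_PSL.sh runs this over all tiles as a PBS job array):
+#   python compute_PSL.py <snow_annual_map_dir>/<tile_period>/tmpdir/multitemp_cloud_mask.vrt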
diff --git a/hpc/makeTCDforLIS.sh b/hpc/makeTCDforLIS.sh
new file mode 100644
index 00000000..874126e3
--- /dev/null
+++ b/hpc/makeTCDforLIS.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# script to make the TCD input file for FSC in LIS
+# usage: sh makeTCDforLIS.sh T32TLS
+tile=$1
+echo "Prepare TCD to run LIS on tile ${tile}"
+tcdFolder="/work/OT/siaa/Theia/Neige/CoSIMS/data/TCD/"
+# prepare VRT from all TCD files (can be skipped when the VRT will be populated with all EEA tiles)
+gdalbuildvrt $tcdFolder/TCD_2015_020m_eu_03035_d05.vrt $tcdFolder/TCD_2015_020m_eu_03035_d05*/TCD_2015_020m_eu_03035_d05*.tif
+# use metadata from the DEM of the target tile to define the projection and extent
+dem="/work/OT/siaa/Theia/Neige/DEM/S2__TEST_AUX_REFDE2_${tile}_0001.DBL.DIR/S2__TEST_AUX_REFDE2_${tile}_0001_ALT_R2.TIF"
+proj=$(gdalsrsinfo -o proj4 $dem)
+gdalinfo $dem -json > tmp.json
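+# extract the extent (xmin ymin xmax ymax) from the DEM corner coordinates;
+# the one-liner below requires python2.7 (print statement); a hypothetical
+# Python 3 equivalent would be:
+#   python3 -c "import json; c = json.load(open('tmp.json'))['cornerCoordinates']; print('{} {} {} {}'.format(*(c['lowerLeft'] + c['upperRight'])))"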
+te=$(python2.7 -c "import json; print '{} {} {} {}'.format(*[item for sublist in [json.load(open('tmp.json'))['cornerCoordinates'][x] for x in ['lowerLeft','upperRight']] for item in sublist])")
+eval gdalwarp -te $te -t_srs "${proj}" -tr 20 20 -r cubic $tcdFolder/TCD_2015_020m_eu_03035_d05.vrt $tcdFolder/TCD_2015_R2_${tile}.TIF
diff --git a/hpc/makefigureTile_lis_Sentinel2_cluster_muscate.sh b/hpc/makefigureTile_lis_Sentinel2_cluster_muscate.sh
index 69f93f11..3f25b769 100644
--- a/hpc/makefigureTile_lis_Sentinel2_cluster_muscate.sh
+++ b/hpc/makefigureTile_lis_Sentinel2_cluster_muscate.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 #PBS -N TheiaNViz
 #PBS -j oe
 #PBS -l select=1:ncpus=4:mem=10gb
diff --git a/hpc/old/run_cloud_removal_cluster.sh b/hpc/old/run_cloud_removal_cluster.sh
deleted file mode 100644
index 0705a9ac..00000000
--- a/hpc/old/run_cloud_removal_cluster.sh
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/bin/bash
-#Script launching LIS cloud removal on linux3-ci 
-#  
-#Please setup USER CONFIG for your system before lauching this script
-######################USER CONFIG####################################
-#####################################################################
-#lis app
-lis_app=$HOME/lis/run_cloud_removal.py
-#json template
-lis_config=$HOME/lis/config/param_cloudremoval_template.json
-#path where pbs script will be generated 
-lis_job_script_PBS=$HOME/lis/pbs/lis_job_cr.pbs
-#path where config will be generated
-lis_config_list=$HOME/lis/config/config_list_cr.conf
-#pbs log
-lis_log=$HOME/lis/log
-#IO directories
-data_input=$DATACI/test_cloudremoval/input
-data_output=$DATACI/test_cloudremoval/output
-#tiles to compute
-tiles="N2A_EcrinsFranceD0000B0000"
-stats=false
-hsmin=2500
-hsmax=3700
-#####################################################################
-
-echo "Generating config list..."
-rm $lis_config_list
-tiles_nb=0
-for tile in $tiles
-do
-pimg=$data_input/$tile
-inputdem=$data_input/SRTM/$tile/$tile.tif
-
-imgarr=($pimg/*)
-imgnb=$(find $pimg -mindepth 1 -maxdepth 1 -type d | wc -l)
-slicemax=$(($imgnb-2))
-
-for i in `seq 2 $slicemax`
-do
-    echo "$tile $inputdem ${imgarr[$i-2]} ${imgarr[$i-1]} ${imgarr[$i]} ${imgarr[$i+1]} ${imgarr[$i+2]}" >> $lis_config_list
-    ((tiles_nb++))
-done 
-done
-
-echo "Done"
-echo "Number of images to compute: $tiles_nb"
-
-echo "Generating pbs script..."
-#Create pbs job script
-cat <<EOF > $lis_job_script_PBS
-#!/bin/bash
-#PBS -N lis
-#PBS -l select=1:ncpus=1
-#PBS -l walltime=00:45:00
-#PBS -o $lis_log
-#PBS -e $lis_log
-#PBS -J 1-${tiles_nb}:1
-
-tile=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f1)
-dempath=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f2)
-m2path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f3)
-m1path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f4)
-t0path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f5)
-p1path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f6)
-p2path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f7)
-
-#copy input data to tmp
-#tmp directories
-rm -r \$TMPCI/\$(basename \$t0path)_LIS_cr
-data_tmp=\$TMPCI/\$(basename \$t0path)_LIS_cr
-data_input_tmp=\$data_tmp/input
-data_output_tmp=\$data_tmp/output
-
-mkdir -p \$data_input_tmp/\$tile/\$(basename \$t0path)
-mkdir -p \$data_input_tmp/SRTM/\$tile
-
-cp -r \$t0path/* \$data_input_tmp/\$tile/\$(basename \$t0path)
-cp \$dempath \$data_input_tmp/SRTM/\$tile/\$(basename \$dempath)
-
-t0path=\$data_input_tmp/\$tile/\$(basename \$t0path)
-dempath=\$data_input_tmp/SRTM/\$tile/\$(basename \$dempath)
-
-#create json
-config=\$t0path.json
-cp $lis_config \$config
-
-#modify json
-m2img=\$(find \$m2path -name *SEB.TIF)
-m1img=\$(find \$m1path -name *SEB.TIF)
-t0img=\$(find \$t0path -name *SEB.TIF)
-p1img=\$(find \$p1path -name *SEB.TIF)
-p2img=\$(find \$p2path -name *SEB.TIF)
-pout=\$data_output_tmp/\$tile/\$(basename \$t0path)
-mkdir -p \$pout
-sed -i -e "s|outputdir|\$pout|g" \$config
-sed -i -e "s|m2path|\$m2img|g" \$config
-sed -i -e "s|m1path|\$m1img|g" \$config
-sed -i -e "s|t0path|\$t0img|g" \$config
-sed -i -e "s|p1path|\$p1img|g" \$config
-sed -i -e "s|p2path|\$p2img|g" \$config
-sed -i -e "s|dempath|\$dempath|g" \$config
-sed -i -e "s|hsmax|$hsmax|g" \$config
-sed -i -e "s|hsmin|$hsmin|g" \$config
-
-#run cloud removal
-python $lis_app \$config
-
-#copy output files
-mkdir -p $data_output/\$tile
-cp -r \$pout $data_output/\$tile
-cp \$config $data_output/\$tile
-
-EOF
-
-echo "Done"
-
-#lauch qsub
-echo "Launching qsub..."
-id_job_lis=$(qsub $lis_job_script_PBS)
-echo "Done"
-echo "LIS cr ID job: $id_job_lis"
diff --git a/hpc/old/run_lis_cluster.sh b/hpc/old/run_lis_cluster.sh
deleted file mode 100644
index 88a35a8e..00000000
--- a/hpc/old/run_lis_cluster.sh
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/bin/bash
-#Script launching LIS on linux3-ci 
-#  
-#Please setup USER CONFIG for your system before lauching this script
-######################USER CONFIG####################################
-#####################################################################
-#lis app
-lis_app=$HOME/lis/run_snow_detector.py
-#json template
-lis_config=$HOME/lis/config/param_full_Landsat8_template.json
-#path where pbs script will be generated 
-lis_job_script_PBS=$HOME/lis/pbs/lis_job.pbs
-#path where config will be generated
-lis_config_list=$HOME/lis/config/config_list.conf
-#pbs log
-lis_log=$HOME/lis/log
-#IO directories
-data_input=$DATACI/test_lis/input
-data_output=$DATACI/test_lis/output
-#tiles to compute
-tiles="N2A_France-MetropoleD0005H0001"
-#####################################################################
-
-echo "Generating config list..."
-rm $lis_config_list
-tiles_nb=0
-for tile in $tiles
-do
-pimg=$data_input/$tile
-inputdem=$data_input/SRTM/$tile/$tile.tif
-
-for imgdir in $pimg/*
-  do    
-  #build input config list
-  cat << end_configlist >> $lis_config_list
-$tile $imgdir $inputdem
-end_configlist
-  ((tiles_nb++))
-  done
-done 
-echo "Done"
-echo "Number of images to compute: $tiles_nb"
-
-echo "Generating pbs script..."
-#Create job pbs script
-cat << EOF > $lis_job_script_PBS
-#!/bin/bash
-#PBS -N lis
-#PBS -l select=1:ncpus=1
-#PBS -l walltime=00:15:00
-#PBS -o $lis_log
-#PBS -e $lis_log
-EOF
-if [ $tiles_nb -gt 1 ]
-then
-  cat << EOF >> $lis_job_script_PBS
-#PBS -J 1-${tiles_nb}:1
-tile=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f1)
-imgdir_path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f2)
-dem_path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f3)
-EOF
-else
-  cat << EOF >> $lis_job_script_PBS
-tile=\$(sed -n 1p $lis_config_list | cut -d ' ' -f1)
-imgdir_path=\$(sed -n 1p $lis_config_list | cut -d ' ' -f2)
-dem_path=\$(sed -n 1p $lis_config_list | cut -d ' ' -f3)
-EOF
-fi
-cat << EOF >> $lis_job_script_PBS
-#copy input data to tmp
-#tmp directories
-rm -r \$TMPCI/\$(basename \$imgdir_path)_LIS
-data_tmp=\$TMPCI/\$(basename \$imgdir_path)_LIS
-data_input_tmp=\$data_tmp/input
-data_output_tmp=\$data_tmp/output
-
-mkdir -p \$data_input_tmp/\$tile/\$(basename \$imgdir_path)
-mkdir -p \$data_input_tmp/SRTM/\$tile
-
-cp -r \$imgdir_path/* $data_input_tmp/\$tile/\$(basename \$imgdir_path)
-cp \$dem_path $data_input_tmp/SRTM/\$tile/\$(basename \$dem_path)
-
-imgdir_path=$data_input_tmp/\$tile/\$(basename \$imgdir_path)
-dem_path=$data_input_tmp/SRTM/\$tile/\$(basename \$dem_path)
-
-#create json
-config=\$imgdir_path.json   
-cp $lis_config \$config
-# modify only three parameters: image file, cloud file, dem file, output dir
-inputimage=\$(find \$imgdir_path -name *ORTHO_SURF_CORR_PENTE*.TIF)
-inputcloud=\$(find \$imgdir_path -name *NUA.TIF)
-inputdem=\$dem_path
-pout=$data_output_tmp/\$tile/\$(basename \$imgdir_path)
-mkdir -p \$pout
-sed -i -e "s|inputimage|\$inputimage|g" \$config
-sed -i -e "s|inputcloud|\$inputcloud|g" \$config
-sed -i -e "s|inputdem|\$inputdem|g" \$config
-sed -i -e "s|outputdir|\$pout|g" \$config
-
-#run the snow detection
-python $lis_app \$config
-
-#copy output files
-mkdir -p $data_output/\$tile
-cp -r \$pout $data_output/\$tile
-cp \$config $data_output/\$tile
-
-EOF
-
-echo "Done"
-
-#lauch qsub
-echo "Launching qsub..."
-id_job_lis=$(qsub $lis_job_script_PBS)
-echo "Done"
-echo "LIS ID job: $id_job_lis"
-
-
diff --git a/hpc/old/run_lis_s2_cluster.sh b/hpc/old/run_lis_s2_cluster.sh
deleted file mode 100644
index 78bebc27..00000000
--- a/hpc/old/run_lis_s2_cluster.sh
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/bin/bash
-#Script launching LIS on linux3-ci 
-#  
-#Please setup USER CONFIG for your system before lauching this script
-######################USER CONFIG####################################
-#####################################################################
-#lis app
-lis_app=$HOME/lis/run_snow_detector.py
-#json template
-lis_config=$HOME/lis/config/param_full_Landsat8_template.json
-#path where pbs script will be generated 
-lis_job_script_PBS=$HOME/lis/pbs/lis_job.pbs
-#path where config will be generated
-lis_config_list=$HOME/lis/config/config_list.conf
-#pbs log
-lis_log=$HOME/lis/log
-#IO directories
-data_input=$DATACI/test_lis/input
-data_output=$DATACI/test_lis/output
-#tiles to compute
-tiles="N2A_France-MetropoleD0005H0001"
-#####################################################################
-
-echo "Generating config list..."
-rm $lis_config_list
-tiles_nb=0
-for tile in $tiles
-do
-pimg=$data_input/$tile
-inputdem=$data_input/SRTM/$tile/$tile.tif
-
-for imgdir in $pimg/*
-  do    
-  #build input config list
-  cat << end_configlist >> $lis_config_list
-$tile $imgdir $inputdem
-end_configlist
-  ((tiles_nb++))
-  done
-done 
-echo "Done"
-echo "Number of images to compute: $tiles_nb"
-
-echo "Generating pbs script..."
-#Create job pbs script
-cat << EOF > $lis_job_script_PBS
-#!/bin/bash
-#PBS -N lis
-#PBS -l select=1:ncpus=1
-#PBS -l walltime=00:15:00
-#PBS -o $lis_log
-#PBS -e $lis_log
-EOF
-if [ $tiles_nb -gt 1 ]
-then
-  cat << EOF >> $lis_job_script_PBS
-#PBS -J 1-${tiles_nb}:1
-tile=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f1)
-imgdir_path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f2)
-dem_path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f3)
-EOF
-else
-  cat << EOF >> $lis_job_script_PBS
-tile=\$(sed -n 1p $lis_config_list | cut -d ' ' -f1)
-imgdir_path=\$(sed -n 1p $lis_config_list | cut -d ' ' -f2)
-dem_path=\$(sed -n 1p $lis_config_list | cut -d ' ' -f3)
-EOF
-fi
-cat << EOF >> $lis_job_script_PBS
-#copy input data to tmp
-#tmp directories
-rm -r \$TMPCI/\$(basename \$imgdir_path)_LIS
-data_tmp=\$TMPCI/\$(basename \$imgdir_path)_LIS
-data_input_tmp=\$data_tmp/input
-data_output_tmp=\$data_tmp/output
-
-mkdir -p \$data_input_tmp/\$tile/\$(basename \$imgdir_path)
-mkdir -p \$data_input_tmp/SRTM/\$tile
-
-cp -r \$imgdir_path/* $data_input_tmp/\$tile/\$(basename \$imgdir_path)
-cp \$dem_path $data_input_tmp/SRTM/\$tile/\$(basename \$dem_path)
-
-imgdir_path=$data_input_tmp/\$tile/\$(basename \$imgdir_path)
-dem_path=$data_input_tmp/SRTM/\$tile/\$(basename \$dem_path)
-
-#create json
-config=\$imgdir_path.json   
-cp $lis_config \$config
-# modify only three parameters: image file, cloud file, dem file, output dir
-inputimage_green=$(find $dataPath -name *FRE_B3.tif)
-inputimage_red=$(find $dataPath -name *FRE_B4.tif)
-inputimage_swir=$(find $dataPath -name *FRE_B11.tif)
-
-inputcloud=$(find $dataPath/MASKS/* -name *CLM_R2.tif)
-inputdem=$(find $dataPath/SRTM/* -name *ALT_R2.TIF)
-
-pout=$data_output_tmp/\$tile/\$(basename \$imgdir_path)
-mkdir -p \$pout
-
-#Create json file in python
-
-#!/usr/bin/python
-import json
-
-jsonFile = open("${outputPath}/param_s2_test.json", "r")
-data = json.load(jsonFile)
-jsonFile.close()
-
-general = data["general"]
-general["pout"]="$pout"
-general["nodata"]="-10000"
-
-inputs = data["inputs"]
-inputs["cloud_mask"]="$inputcloud"
-inputs["dem"]="$inputdem"
-
-input_green=inputs["green_band"]
-input_green["path"]="$inputimage_green"
-input_green["noBand"]="1"
-
-input_red=inputs["red_band"]
-input_red["path"]="$inputimage_red"
-input_red["noBand"]="1"
-
-input_swir=inputs["swir_band"]
-input_swir["path"]="$inputimage_swir"
-input_swir["noBand"]="1"
-
-jsonFile = open("${outputPath}/param_s2_test.json", "w+")
-jsonFile.write(json.dumps(data, indent=4))
-jsonFile.close()
-
-
-SCRIPT
-
-#run the snow detection
-python $lis_app \$config
-
-#copy output files
-mkdir -p $data_output/\$tile
-cp -r \$pout $data_output/\$tile
-cp \$config $data_output/\$tile
-
-EOF
-
-echo "Done"
-
-#lauch qsub
-echo "Launching qsub..."
-id_job_lis=$(qsub $lis_job_script_PBS)
-echo "Done"
-echo "LIS ID job: $id_job_lis"
-
-
diff --git a/hpc/param20190612T31TCH.json b/hpc/param20190612T31TCH.json
new file mode 100644
index 00000000..8dd626f6
--- /dev/null
+++ b/hpc/param20190612T31TCH.json
@@ -0,0 +1,67 @@
+{
+    "inputs": {
+        "swir_band": {
+            "path": "./input/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2_FRE_B11.tif", 
+            "noBand": 1
+        }, 
+        "red_band": {
+            "path": "./input/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2_FRE_B4.tif", 
+            "noBand": 1
+        }, 
+        "cloud_mask": "./input/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2/MASKS/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2_CLM_R2.tif", 
+        "green_band": {
+            "path": "./input/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2_FRE_B3.tif", 
+            "noBand": 1
+        }, 
+        "div_mask": "./input/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2/MASKS/SENTINEL2A_20190612-104918-663_L2A_T31TCH_C_V2-2_MG2_R2.tif", 
+        "dem": "/work/OT/siaa/Theia/Neige/DEM/S2__TEST_AUX_REFDE2_T31TCH_0001.DBL.DIR/S2__TEST_AUX_REFDE2_T31TCH_0001_ALT_R2.TIF", 
+        "div_slope_thres": 64
+    }, 
+    "vector": {
+        "generate_vector": false, 
+        "generate_intermediate_vectors": false, 
+        "gdal_trace_outline_dp_toler": 0, 
+        "use_gdal_trace_outline": true, 
+        "gdal_trace_outline_min_area": 0
+    }, 
+    "snow": {
+        "fsnow_total_lim": 0.001, 
+        "fsnow_lim": 0.1, 
+        "dz": 100, 
+        "ndsi_pass1": 0.4, 
+        "ndsi_pass2": 0.15, 
+        "red_pass1": 200, 
+        "red_pass2": 40, 
+        "fclear_lim": 0.1
+    }, 
+    "cloud": {
+        "rm_snow_inside_cloud": false, 
+        "rm_snow_inside_cloud_threshold": 0.85, 
+        "strict_cloud_mask": false, 
+        "rm_snow_inside_cloud_min_area": 5000, 
+        "shadow_out_mask": 64, 
+        "all_cloud_mask": 1, 
+        "shadow_in_mask": 32, 
+        "red_darkcloud": 300, 
+        "red_backtocloud": 100, 
+        "rf": 12, 
+        "high_cloud_mask": 128, 
+        "rm_snow_inside_cloud_dilation_radius": 1
+    }, 
+    "general": {
+        "nb_threads": 1, 
+        "multi": 10, 
+        "ram": 2048, 
+        "log": true, 
+        "pout": "./output/", 
+        "target_resolution": -1, 
+        "nodata": -10000, 
+        "preprocessing": false
+    },
+    "fsc": {
+        "dofsc": true,
+        "fscToc_Eq": "1.45*ndsi-0.01", 
+        "fscOg_Eq": "fscToc/(1-tcd)",
+        "tcd": "/work/OT/siaa/Theia/Neige/CoSIMS/data/TCD/TCD_2015_R2_T31TCH.TIF"
+    }    
+}
diff --git a/hpc/prepare_data_for_snow_annual_map.py b/hpc/prepare_data_for_snow_annual_map.py
index 066934e6..f2a53bc4 100755
--- a/hpc/prepare_data_for_snow_annual_map.py
+++ b/hpc/prepare_data_for_snow_annual_map.py
@@ -1,291 +1,297 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#=========================================================================
-#
-#  Program:   lis
-#  Language:  Python
-#
-#  Copyright (c) Germain Salgues
-#  Copyright (c) Manuel Grizonnet
-#
-#  See lis-copyright.txt for details.
-#
-#  This software is distributed WITHOUT ANY WARRANTY; without even
-#  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-#  PURPOSE.  See the above copyright notices for more information.
-#
-#  this file requires python/3.5.2 and amalthee/0.1
-#=========================================================================
-import os
-import sys
-import os.path as op
-import json
-import csv
-import copy
-import logging
-import subprocess
-from datetime import datetime, timedelta
-from libamalthee import Amalthee
-
-def str_to_datetime(date_string, format="%Y%m%d"):
-    """ Return the datetime corresponding to the input string
-    """
-    logging.debug(date_string)
-    return datetime.strptime(date_string, format)
-
-def datetime_to_str(date, format="%Y%m%d"):
-    """ Return the datetime corresponding to the input string
-    """
-    logging.debug(date)
-    return date.strftime(format)
-
-def call_subprocess(process_list):
-    """ Run subprocess and write to stdout and stderr
-    """
-    logging.info("Running: " + " ".join(process_list))
-    process = subprocess.Popen(
-        process_list,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE)
-    out, err = process.communicate()
-    logging.info(out)
-    sys.stderr.write(str(err))
-
-class prepare_data_for_snow_annual_map():
-    def __init__(self, params):
-        logging.info("Init snow_multitemp")
-        self.raw_params = copy.deepcopy(params)
-
-        self.tile_id = params.get("tile_id")
-        self.date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
-        self.date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
-        self.date_margin = timedelta(days=params.get("date_margin", 0))
-        self.output_dates_filename = params.get("output_dates_filename", None)
-        self.mode = params.get("mode", "RUNTIME")
-        self.mission_tags = ["SENTINEL2"]#["LANDSAT"]#
-
-        self.snow_products_dir = str(params.get("snow_products_dir"))
-        self.path_tmp = str(params.get("path_tmp", os.environ.get('TMPDIR')))
-
-        self.input_products_list=params.get("input_products_list",[]).copy()
-        logging.info(self.input_products_list)
-        self.processing_id = self.tile_id + "_" + \
-                             datetime_to_str(self.date_start) + "_" + \
-                             datetime_to_str(self.date_stop)
-
-        self.path_out = op.join(str(params.get("path_out")), self.processing_id)
-        self.use_densification = params.get("use_densification", False)
-        if self.use_densification:
-            self.mission_tags.append("LANDSAT")
-            self.densification_products_list=params.get("densification_products_list",[]).copy()
-            logging.info(self.densification_products_list)
-
-        if not os.path.exists(self.path_out):
-            os.mkdir(self.path_out)
-
-        self.ram = params.get("ram", 512)
-        self.nbThreads = params.get("nbThreads", None)
-
-        self.snow_products_availability = 0
-        self.datalake_products_availability = 0
-
-    def run(self):
-        logging.info('Process tile:' + self.tile_id +'.')
-        logging.info(' for period ' + str(self.date_start) + ' to ' + str(self.date_stop))
-
-        # compute the range of required snow products
-        search_start_date = self.date_start - self.date_margin
-        search_stop_date = self.date_stop + self.date_margin
-
-        # open a file to store the list of L2A products for which we need to generate the snow products
-        filename_i = os.path.abspath(self.processing_id +"_pending_for_snow_processing.txt")
-        FileOut = open(os.path.join(".", filename_i),"w")
-
-        resulting_df = None
-        snow_processing_requested = 0
-
-        # loop on the different type of products to require
-        for mission_tag in self.mission_tags:
-            # use amalthee to request the products from Theia catalogues
-            parameters = {"processingLevel": "LEVEL2A", "location":str(self.tile_id)}
-            amalthee_theia = Amalthee('theia')
-            amalthee_theia.search(mission_tag,
-                                  datetime_to_str(search_start_date, "%Y-%m-%d"),
-                                  datetime_to_str(search_stop_date, "%Y-%m-%d"),
-                                  parameters,
-                                  nthreads = self.nbThreads)
-
-            nb_products = amalthee_theia.products.shape[0]
-            logging.info('There are ' + str(nb_products) + ' ' + mission_tag + ' products for the current request')
-
-            snow_products_list=[]
-            if nb_products:
-                # get the dataframe containing the requested products and append extra needed fields.
-                df = amalthee_theia.products
-                df['snow_product'] = ""
-                df['snow_product_available'] = False
-                snow_product_available = 0
-                datalake_product_available = 0
-                datalake_update_requested = 0
-
-                # loop on each products from the dataframe
-                for product_id in df.index:
-                    logging.info('Processing ' + product_id)
-
-                    # check datalake availability
-                    if df.loc[product_id, 'available']:
-                        datalake_product_available += 1
-
-                    # check snow product availability
-                    expected_snow_product_path = op.join(self.snow_products_dir, self.tile_id, product_id)
-                    df.loc[product_id, 'snow_product'] = expected_snow_product_path
-                    logging.info(expected_snow_product_path)
-
-                    # the snow product is already available
-                    if op.exists(expected_snow_product_path):
-                        logging.info(product_id + " is available as snow product")
-                        snow_product_available += 1
-                        df.loc[product_id, 'snow_product_available'] = True
-                        snow_products_list.append(expected_snow_product_path)
-                    # the L2A product is available in the datalake but request a snow detection
-                    elif df.loc[product_id, 'available']:
-                        logging.info(product_id + " requires to generate the snow product")
-                        snow_processing_requested += 1
-                        FileOut.write(df.loc[product_id, 'datalake']+"\n")
-                    # the product must be requested into the datalake before snow detection
-                    else:
-                        logging.info(product_id + " requires to be requested to datalake.")
-                        datalake_update_requested += 1
-
-                if resulting_df is not None:
-                    resulting_df = resulting_df.append(df)
-                else:
-                    resulting_df = df
-
-                self.snow_products_availability = float(snow_product_available/nb_products)
-                logging.info("Percent of available snow product : " + str(100*self.snow_products_availability) + "%")
-
-                self.datalake_products_availability = float(datalake_product_available/nb_products)
-                logging.info("Percent of available datalake product : " + str(100*self.datalake_products_availability) + "%")
-
-                # datalake update if not all the products are available
-                if datalake_update_requested > 0:
-                    logging.info("Requesting an update of the datalake because of " + str(datalake_update_requested) + " unavailable products...")
-                    # this will request all products of the request
-                    # @TODO request only the products for which the snow products are not available
-                    amalthee_theia.fill_datalake()
-                    logging.info("End of requesting datalake.")
-            # we only append a single type of products to the main input list
-            if mission_tag == "SENTINEL2":#"LANDSAT":#
-                self.input_products_list.extend(snow_products_list)
-            # the other types are use for densification purpose only
-            else:
-                self.densification_products_list.extend(snow_products_list)
-
-        # request snow detection processing for listed products
-        FileOut.close()
-        if snow_processing_requested != 0:
-            self.process_snow_products(filename_i, snow_processing_requested)
-
-        # Create fill to access requested products status
-        if resulting_df is not None:
-            products_file = op.join(self.path_out, "input_datalist.csv")
-            logging.info("Products detailed status is avaible under: " + products_file)
-            resulting_df.to_csv(products_file, sep=';')
-        else:
-            logging.error("No products available to compute snow annual map!!")
-
-    def build_json(self):
-        # the json is created only is more than 99.9% of the snow products are ready
-        # @TODO this param should not be hard coded
-        if self.snow_products_availability > 0.999:
-            snow_annual_map_param_json = os.path.join(self.path_out, "param.json")
-            logging.info("Snow annual map can be computed from: " + snow_annual_map_param_json)
-            self.raw_params['data_availability_check'] = True
-            self.raw_params['log'] = True
-            self.raw_params['log_stdout'] = op.join(self.path_out,"stdout.log")
-            self.raw_params['log_stderr'] = op.join(self.path_out,"stderr.log")
-            self.raw_params['input_products_list'] = self.input_products_list
-            if self.use_densification:
-                self.raw_params['densification_products_list'] = self.densification_products_list
-            jsonFile = open(snow_annual_map_param_json, "w")
-            jsonFile.write(json.dumps(self.raw_params, indent=4))
-            jsonFile.close()
-            return snow_annual_map_param_json
-        else:
-            logging.error("Snow annual map cannot be computed because of too many missing products")
-
-    def process_snow_products(self, file_to_process, array_size=None):
-        logging.info("Ordering processing of the snow products on " + file_to_process)
-        command = ["qsub",
-                   "-v",
-                   "filename=\""+file_to_process+"\",tile=\""+self.tile_id[1:]+"\",out_path=\""+self.snow_products_dir+"\",overwrite=\"false\"",
-                   "run_lis_from_filelist.sh"]
-        # in case the array size is provided, it requires a job array of the exact size.
-        if array_size:
-            command.insert(1, "-J")
-            command.insert(2, "1-"+str(array_size+1))
-        print(" ".join(command))
-        try:
-            call_subprocess(command)
-            logging.info("Order was submitted the snow annual map will soon be available.")
-        except:
-            logging.warning("Order was submitted the snow annual map will soon be available, but missinterpreted return code")
-
-    def process_snow_annual_map(self, file_to_process):
-        logging.info("Ordering processing of the snow annual map, " + file_to_process)
-        command = ["qsub",
-                   "-v",
-                   "config=\""+file_to_process+"\",overwrite=false",
-                   "run_snow_annual_map.sh"]
-        print(" ".join(command))
-        try:
-            call_subprocess(command)
-            logging.info("Order was submitted the snow annual map will soon be available.")
-        except:
-            logging.warning("Order was submitted the snow annual map will soon be available, but missinterpreted return code")
-
-def main():
-    params = {"tile_id":"T32TPS",
-              "date_start":"01/09/2017",
-              "date_stop":"31/08/2018",
-              "date_margin":15,
-              "mode":"DEBUG",
-              "input_products_list":[],
-              # path_tmp is an actual parameter but must only be uncomment with a correct path
-              # else the processing use $TMPDIR by default
-              #"path_tmp":"",
-              #"path_out":"/home/qt/salguesg/scratch/multitemp_workdir/tmp_test",
-              "path_out":"/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/L8_only",
-              "ram":8192,
-              "nbThreads":6,
-              "use_densification":False,
-              "log":True,
-              "densification_products_list":[],
-              # the following parameters are only use in this script, and doesn't affect snow_annual_map processing
-              "snow_products_dir":"/work/OT/siaa/Theia/Neige/PRODUITS_NEIGE_LIS_develop_1.5",
-              "data_availability_check":False}
-
-    with open('selectNeigeSyntheseMultitemp.csv', 'r') as csvfile:
-        tilesreader = csv.reader(csvfile)
-        firstline = True
-        for row in tilesreader:
-            if firstline:    #skip first line
-                firstline = False
-            else:
-                tile_id = 'T' + str(row[0])
-                params['tile_id'] = tile_id
-
-                prepare_data_for_snow_annual_map_app = prepare_data_for_snow_annual_map(params)
-                prepare_data_for_snow_annual_map_app.run()
-                config_file = prepare_data_for_snow_annual_map_app.build_json()
-                if config_file is not None:
-                    prepare_data_for_snow_annual_map_app.process_snow_annual_map(config_file)
-
-if __name__== "__main__":
-    # Set logging level and format.
-    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=\
-        '%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
-    main()
-
-
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#  this file requires python/3.5.2 and amalthee/0.1
+
+import os
+import sys
+import os.path as op
+import json
+import csv
+import copy
+import logging
+import subprocess
+from datetime import datetime, timedelta
+from libamalthee import Amalthee
+
+def str_to_datetime(date_string, format="%Y%m%d"):
+    """ Return the datetime corresponding to the input string
+    """
+    logging.debug(date_string)
+    return datetime.strptime(date_string, format)
+
+def datetime_to_str(date, format="%Y%m%d"):
+    """ Return the datetime corresponding to the input string
+    """
+    logging.debug(date)
+    return date.strftime(format)
+
+def call_subprocess(process_list):
+    """ Run subprocess and write to stdout and stderr
+    """
+    logging.info("Running: " + " ".join(process_list))
+    process = subprocess.Popen(
+        process_list,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE)
+    out, err = process.communicate()
+    logging.info(out)
+    sys.stderr.write(str(err))
+
+class prepare_data_for_snow_annual_map():
+    def __init__(self, params):
+        logging.info("Init snow_multitemp")
+        self.raw_params = copy.deepcopy(params)
+
+        self.tile_id = params.get("tile_id")
+        self.date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
+        self.date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
+        self.date_margin = timedelta(days=params.get("date_margin", 0))
+        self.output_dates_filename = params.get("output_dates_filename", None)
+        self.mode = params.get("mode", "RUNTIME")
+        self.mission_tags = ["SENTINEL2"]#["LANDSAT"]#
+
+        self.snow_products_dir = str(params.get("snow_products_dir"))
+        self.path_tmp = str(params.get("path_tmp", os.environ.get('TMPDIR')))
+
+        self.input_products_list=params.get("input_products_list",[]).copy()
+        logging.info(self.input_products_list)
+        self.processing_id = self.tile_id + "_" + \
+                             datetime_to_str(self.date_start) + "_" + \
+                             datetime_to_str(self.date_stop)
+
+        self.path_out = op.join(str(params.get("path_out")), self.processing_id)
+        self.use_densification = params.get("use_densification", False)
+        if self.use_densification:
+            self.mission_tags.append("LANDSAT")
+            self.densification_products_list=params.get("densification_products_list",[]).copy()
+            logging.info(self.densification_products_list)
+
+        if not os.path.exists(self.path_out):
+            os.mkdir(self.path_out)
+
+        self.ram = params.get("ram", 512)
+        self.nbThreads = params.get("nbThreads", None)
+
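+        # fractions of the requested products already available as snow products
+        # and in the datalake; both are updated by run()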
+        self.snow_products_availability = 0
+        self.datalake_products_availability = 0
+
+    def run(self):
+        logging.info('Process tile: ' + self.tile_id + '.')
+        logging.info(' for period ' + str(self.date_start) + ' to ' + str(self.date_stop))
+
+        # compute the range of required snow products
+        search_start_date = self.date_start - self.date_margin
+        search_stop_date = self.date_stop + self.date_margin
+
+        # open a file to store the list of L2A products for which we need to generate the snow products
+        filename_i = os.path.abspath(self.processing_id +"_pending_for_snow_processing.txt")
+        FileOut = open(filename_i, "w")
+
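+        # resulting_df accumulates the products dataframe of each mission;
+        # snow_processing_requested counts the L2A products pending snow detection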
+        resulting_df = None
+        snow_processing_requested = 0
+
+        # loop over the different types of products to request
+        for mission_tag in self.mission_tags:
+            # use amalthee to request the products from Theia catalogues
+            parameters = {"processingLevel": "LEVEL2A", "location":str(self.tile_id)}
+            amalthee_theia = Amalthee('theia')
+            amalthee_theia.search(mission_tag,
+                                  datetime_to_str(search_start_date, "%Y-%m-%d"),
+                                  datetime_to_str(search_stop_date, "%Y-%m-%d"),
+                                  parameters,
+                                  nthreads = self.nbThreads)
+
+            nb_products = amalthee_theia.products.shape[0]
+            logging.info('There are ' + str(nb_products) + ' ' + mission_tag + ' products for the current request')
+
+            snow_products_list=[]
+            if nb_products:
+                # get the dataframe containing the requested products and append extra needed fields.
+                df = amalthee_theia.products
+                df['snow_product'] = ""
+                df['snow_product_available'] = False
+                snow_product_available = 0
+                datalake_product_available = 0
+                datalake_update_requested = 0
+
+                # loop over each product in the dataframe
+                for product_id in df.index:
+                    logging.info('Processing ' + product_id)
+
+                    # check datalake availability
+                    if df.loc[product_id, 'available']:
+                        datalake_product_available += 1
+
+                    # check snow product availability
+                    expected_snow_product_path = op.join(self.snow_products_dir, self.tile_id, product_id)
+                    df.loc[product_id, 'snow_product'] = expected_snow_product_path
+                    logging.info(expected_snow_product_path)
+
+                    # the snow product is already available
+                    if op.exists(expected_snow_product_path):
+                        logging.info(product_id + " is available as snow product")
+                        snow_product_available += 1
+                        df.loc[product_id, 'snow_product_available'] = True
+                        snow_products_list.append(expected_snow_product_path)
+                    # the L2A product is available in the datalake but still requires snow detection
+                    elif df.loc[product_id, 'available']:
+                        logging.info(product_id + " requires snow product generation")
+                        snow_processing_requested += 1
+                        FileOut.write(df.loc[product_id, 'datalake']+"\n")
+                    # the product must be requested from the datalake before snow detection
+                    else:
+                        logging.info(product_id + " must be requested from the datalake.")
+                        datalake_update_requested += 1
+
+                if resulting_df is not None:
+                    resulting_df = resulting_df.append(df)
+                else:
+                    resulting_df = df
+
+                self.snow_products_availability = snow_product_available / nb_products
+                logging.info("Percentage of available snow products: " + str(100 * self.snow_products_availability) + "%")
+
+                self.datalake_products_availability = datalake_product_available / nb_products
+                logging.info("Percentage of available datalake products: " + str(100 * self.datalake_products_availability) + "%")
+
+                # datalake update if not all the products are available
+                if datalake_update_requested > 0:
+                    logging.info("Requesting an update of the datalake because of " + str(datalake_update_requested) + " unavailable products...")
+                    # this will request every product returned by the search
+                    # @TODO request only the products for which the snow products are not available
+                    amalthee_theia.fill_datalake()
+                    logging.info("End of requesting datalake.")
+            # only one product type is appended to the main input list
+            if mission_tag == "SENTINEL2":#"LANDSAT":#
+                self.input_products_list.extend(snow_products_list)
+            # the other types are used for densification purposes only
+            else:
+                self.densification_products_list.extend(snow_products_list)
+
+        # request snow detection processing for listed products
+        FileOut.close()
+        if snow_processing_requested != 0:
+            self.process_snow_products(filename_i, snow_processing_requested)
+
+        # Create a file reporting the status of the requested products
+        if resulting_df is not None:
+            products_file = op.join(self.path_out, "input_datalist.csv")
+            logging.info("Detailed products status is available at: " + products_file)
+            resulting_df.to_csv(products_file, sep=';')
+        else:
+            logging.error("No products available to compute the snow annual map!")
+
+    def build_json(self):
+        # the json is created only if more than 99.9% of the snow products are ready
+        # @TODO this param should not be hard coded
+        if self.snow_products_availability > 0.999:
+            snow_annual_map_param_json = os.path.join(self.path_out, "param.json")
+            logging.info("Snow annual map can be computed from: " + snow_annual_map_param_json)
+            self.raw_params['data_availability_check'] = True
+            self.raw_params['log'] = True
+            self.raw_params['log_stdout'] = op.join(self.path_out,"stdout.log")
+            self.raw_params['log_stderr'] = op.join(self.path_out,"stderr.log")
+            self.raw_params['input_products_list'] = self.input_products_list
+            if self.use_densification:
+                self.raw_params['densification_products_list'] = self.densification_products_list
+            with open(snow_annual_map_param_json, "w") as jsonFile:
+                jsonFile.write(json.dumps(self.raw_params, indent=4))
+            return snow_annual_map_param_json
+        else:
+            logging.error("Snow annual map cannot be computed because of too many missing products")
+
+    def process_snow_products(self, file_to_process, array_size=None):
+        logging.info("Ordering processing of the snow products on " + file_to_process)
+        command = ["qsub",
+                   "-v",
+                   "filename=\""+file_to_process+"\",tile=\""+self.tile_id[1:]+"\",out_path=\""+self.snow_products_dir+"\",overwrite=\"false\"",
+                   "run_lis_from_filelist.sh"]
+        # if the array size is provided, submit the order as a job array
+        if array_size:
+            command.insert(1, "-J")
+            command.insert(2, "1-"+str(array_size+1))
+        print(" ".join(command))
+        try:
+            call_subprocess(command)
+            logging.info("Order was submitted the snow annual map will soon be available.")
+        except:
+            logging.warning("Order was submitted the snow annual map will soon be available, but missinterpreted return code")
+
+    def process_snow_annual_map(self, file_to_process):
+        logging.info("Ordering processing of the snow annual map, " + file_to_process)
+        command = ["qsub",
+                   "-v",
+                   "config=\""+file_to_process+"\",overwrite=false",
+                   "run_snow_annual_map.sh"]
+        print(" ".join(command))
+        try:
+            call_subprocess(command)
+            logging.info("Order was submitted the snow annual map will soon be available.")
+        except:
+            logging.warning("Order was submitted the snow annual map will soon be available, but missinterpreted return code")
+
+def main():
+    params = {"tile_id":"T32TPS",
+              "date_start":"01/09/2017",
+              "date_stop":"31/08/2018",
+              "date_margin":15,
+              "mode":"DEBUG",
+              "input_products_list":[],
+              # path_tmp is an actual parameter but should only be uncommented with a correct path;
+              # otherwise the processing uses $TMPDIR by default
+              #"path_tmp":"",
+              #"path_out":"/home/qt/salguesg/scratch/multitemp_workdir/tmp_test",
+              "path_out":"/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/L8_only",
+              "ram":8192,
+              "nbThreads":6,
+              "use_densification":False,
+              "log":True,
+              "densification_products_list":[],
+              # the following parameters are only used in this script and do not affect the snow_annual_map processing
+              "snow_products_dir":"/work/OT/siaa/Theia/Neige/PRODUITS_NEIGE_LIS_develop_1.5",
+              "data_availability_check":False}
+
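+    # The CSV file is expected to contain a header line, then one tile id per row
+    # (first column, without the leading 'T')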
+    with open('selectNeigeSyntheseMultitemp.csv', 'r') as csvfile:
+        tilesreader = csv.reader(csvfile)
+        firstline = True
+        for row in tilesreader:
+            if firstline:    # skip the header line
+                firstline = False
+            else:
+                tile_id = 'T' + str(row[0])
+                params['tile_id'] = tile_id
+
+                prepare_data_for_snow_annual_map_app = prepare_data_for_snow_annual_map(params)
+                prepare_data_for_snow_annual_map_app.run()
+                config_file = prepare_data_for_snow_annual_map_app.build_json()
+                if config_file is not None:
+                    prepare_data_for_snow_annual_map_app.process_snow_annual_map(config_file)
+
+if __name__ == "__main__":
+    # Set logging level and format.
+    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=\
+        '%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+    main()
+
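+# Minimal usage sketch (assumptions: the amalthee module is installed and
+# selectNeigeSyntheseMultitemp.csv sits in the working directory):
+#     python prepare_data_for_snow_annual_map.py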
+
diff --git a/hpc/prepare_data_for_snow_monthly_map.py b/hpc/prepare_data_for_snow_monthly_map.py
new file mode 100644
index 00000000..e9114057
--- /dev/null
+++ b/hpc/prepare_data_for_snow_monthly_map.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#  this file requires python/3.5.2 and amalthee/0.1
+
+import os
+import sys
+import os.path as op
+import json
+import csv
+import copy
+import logging
+import subprocess
+from datetime import datetime, timedelta
+from libamalthee import Amalthee
+from dateutil import relativedelta
+
+def str_to_datetime(date_string, format="%Y%m%d"):
+    """ Return the datetime corresponding to the input string
+    """
+    logging.debug(date_string)
+    return datetime.strptime(date_string, format)
+
+def datetime_to_str(date, format="%Y%m%d"):
+    """ Return the datetime corresponding to the input string
+    """
+    logging.debug(date)
+    return date.strftime(format)
+
+def call_subprocess(process_list):
+    """ Run subprocess and write to stdout and stderr
+    """
+    logging.info("Running: " + " ".join(process_list))
+    process = subprocess.Popen(
+        process_list,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE)
+    out, err = process.communicate()
+    logging.info(out)
+    sys.stderr.write(str(err))
+
+class prepare_data_for_snow_annual_map():
+    def __init__(self, params):
+        logging.info("Init snow_multitemp")
+        self.raw_params = copy.deepcopy(params)
+
+        self.tile_id = params.get("tile_id")
+        self.date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
+        self.date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
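+        # date_margin (in days) widens the product search window on both sides of [date_start, date_stop] in run()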
+        self.date_margin = timedelta(days=params.get("date_margin", 0))
+        self.output_dates_filename = params.get("output_dates_filename", None)
+        self.mode = params.get("mode", "RUNTIME")
+        self.mission_tags = ["SENTINEL2"]#["LANDSAT"]#
+
+        self.snow_products_dir = str(params.get("snow_products_dir"))
+        self.path_tmp = str(params.get("path_tmp", os.environ.get('TMPDIR')))
+
+        self.input_products_list=params.get("input_products_list",[]).copy()
+        logging.info(self.input_products_list)
+        self.processing_id = self.tile_id + "_" + \
+                             datetime_to_str(self.date_start) + "_" + \
+                             datetime_to_str(self.date_stop)
+
+        self.path_out = op.join(str(params.get("path_out")), self.processing_id)
+        self.use_densification = params.get("use_densification", False)
+        if self.use_densification:
+            self.mission_tags.append("LANDSAT")
+            self.densification_products_list=params.get("densification_products_list",[]).copy()
+            logging.info(self.densification_products_list)
+
+        if not os.path.exists(self.path_out):
+            os.mkdir(self.path_out)
+
+        self.ram = params.get("ram", 512)
+        self.nbThreads = params.get("nbThreads", None)
+
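+        # fractions of the requested products already available as snow products
+        # and in the datalake; both are updated by run()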
+        self.snow_products_availability = 0
+        self.datalake_products_availability = 0
+
+    def run(self):
+        logging.info('Process tile: ' + self.tile_id + '.')
+        logging.info(' for period ' + str(self.date_start) + ' to ' + str(self.date_stop))
+
+        # compute the range of required snow products
+        search_start_date = self.date_start - self.date_margin
+        search_stop_date = self.date_stop + self.date_margin
+
+        # open a file to store the list of L2A products for which we need to generate the snow products
+        filename_i = os.path.abspath(self.processing_id +"_pending_for_snow_processing.txt")
+        FileOut = open(filename_i, "w")
+
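+        # resulting_df accumulates the products dataframe of each mission;
+        # snow_processing_requested counts the L2A products pending snow detection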
+        resulting_df = None
+        snow_processing_requested = 0
+
+        # loop over the different types of products to request
+        for mission_tag in self.mission_tags:
+            # use amalthee to request the products from Theia catalogues
+            parameters = {"processingLevel": "LEVEL2A", "location":str(self.tile_id)}
+            amalthee_theia = Amalthee('theia')
+            amalthee_theia.search(mission_tag,
+                                  datetime_to_str(search_start_date, "%Y-%m-%d"),
+                                  datetime_to_str(search_stop_date, "%Y-%m-%d"),
+                                  parameters,
+                                  nthreads = self.nbThreads)
+
+            nb_products = amalthee_theia.products.shape[0]
+            logging.info('There are ' + str(nb_products) + ' ' + mission_tag + ' products for the current request')
+
+            snow_products_list=[]
+            if nb_products:
+                # get the dataframe containing the requested products and append extra needed fields.
+                df = amalthee_theia.products
+                df['snow_product'] = ""
+                df['snow_product_available'] = False
+                snow_product_available = 0
+                datalake_product_available = 0
+                datalake_update_requested = 0
+
+                # loop over each product in the dataframe
+                for product_id in df.index:
+                    logging.info('Processing ' + product_id)
+
+                    # check datalake availability
+                    if df.loc[product_id, 'available']:
+                        datalake_product_available += 1
+
+                    # check snow product availability
+                    expected_snow_product_path = op.join(self.snow_products_dir, self.tile_id, product_id)
+                    df.loc[product_id, 'snow_product'] = expected_snow_product_path
+                    logging.info(expected_snow_product_path)
+
+                    # the snow product is already available
+                    if op.exists(expected_snow_product_path):
+                        logging.info(product_id + " is available as snow product")
+                        snow_product_available += 1
+                        df.loc[product_id, 'snow_product_available'] = True
+                        snow_products_list.append(expected_snow_product_path)
+                    # the L2A product is available in the datalake but still requires snow detection
+                    elif df.loc[product_id, 'available']:
+                        logging.info(product_id + " requires snow product generation")
+                        snow_processing_requested += 1
+                        FileOut.write(df.loc[product_id, 'datalake']+"\n")
+                    # the product must be requested from the datalake before snow detection
+                    else:
+                        logging.info(product_id + " must be requested from the datalake.")
+                        datalake_update_requested += 1
+
+                if resulting_df is not None:
+                    resulting_df = resulting_df.append(df)
+                else:
+                    resulting_df = df
+
+                self.snow_products_availability = snow_product_available / nb_products
+                logging.info("Percentage of available snow products: " + str(100 * self.snow_products_availability) + "%")
+
+                self.datalake_products_availability = datalake_product_available / nb_products
+                logging.info("Percentage of available datalake products: " + str(100 * self.datalake_products_availability) + "%")
+
+                # datalake update if not all the products are available
+                if datalake_update_requested > 0:
+                    logging.info("Requesting an update of the datalake because of " + str(datalake_update_requested) + " unavailable products...")
+                    # this will request every product returned by the search
+                    # @TODO request only the products for which the snow products are not available
+                    amalthee_theia.fill_datalake()
+                    logging.info("End of requesting datalake.")
+            # only one product type is appended to the main input list
+            if mission_tag == "SENTINEL2":#"LANDSAT":#
+                self.input_products_list.extend(snow_products_list)
+            # the other types are used for densification purposes only
+            else:
+                self.densification_products_list.extend(snow_products_list)
+
+        # request snow detection processing for listed products
+        FileOut.close()
+        if snow_processing_requested != 0:
+            self.process_snow_products(filename_i, snow_processing_requested)
+
+        # Create a file reporting the status of the requested products
+        if resulting_df is not None:
+            products_file = op.join(self.path_out, "input_datalist.csv")
+            logging.info("Detailed products status is available at: " + products_file)
+            resulting_df.to_csv(products_file, sep=';')
+        else:
+            logging.error("No products available to compute the snow annual map!")
+
+    def build_json(self):
+        # the json is created only if more than 99.9% of the snow products are ready
+        # @TODO this param should not be hard coded
+        if self.snow_products_availability > 0.999:
+            snow_annual_map_param_json = os.path.join(self.path_out, "param.json")
+            logging.info("Snow annual map can be computed from: " + snow_annual_map_param_json)
+            self.raw_params['data_availability_check'] = True
+            self.raw_params['log'] = True
+            self.raw_params['log_stdout'] = op.join(self.path_out,"stdout.log")
+            self.raw_params['log_stderr'] = op.join(self.path_out,"stderr.log")
+            self.raw_params['input_products_list'] = self.input_products_list
+            if self.use_densification:
+                self.raw_params['densification_products_list'] = self.densification_products_list
+            with open(snow_annual_map_param_json, "w") as jsonFile:
+                jsonFile.write(json.dumps(self.raw_params, indent=4))
+            return snow_annual_map_param_json
+        else:
+            logging.error("Snow annual map cannot be computed because of too many missing products")
+
+    def process_snow_products(self, file_to_process, array_size=None):
+        logging.info("Ordering processing of the snow products on " + file_to_process)
+        command = ["qsub",
+                   "-v",
+                   "filename=\""+file_to_process+"\",tile=\""+self.tile_id[1:]+"\",out_path=\""+self.snow_products_dir+"\",overwrite=\"false\"",
+                   "run_lis_from_filelist.sh"]
+        # if the array size is provided, submit the order as a job array
+        if array_size:
+            command.insert(1, "-J")
+            command.insert(2, "1-"+str(array_size+1))
+        print(" ".join(command))
+        try:
+            call_subprocess(command)
+            logging.info("Order was submitted the snow annual map will soon be available.")
+        except:
+            logging.warning("Order was submitted the snow annual map will soon be available, but missinterpreted return code")
+
+    def process_snow_annual_map(self, file_to_process):
+        logging.info("Ordering processing of the snow annual map, " + file_to_process)
+        command = ["qsub",
+                   "-v",
+                   "config=\""+file_to_process+"\",overwrite=false",
+                   "run_snow_annual_map.sh"]
+        print(" ".join(command))
+        try:
+            call_subprocess(command)
+            logging.info("Order was submitted the snow annual map will soon be available.")
+        except:
+            logging.warning("Order was submitted the snow annual map will soon be available, but missinterpreted return code")
+
+
+def main():
+    for iy in range(2018, 2019):
+        y = '{:d}'.format(iy)
+        # iterate over the months from April to July of year y
+        for im in range(4, 8):
+            m1 = '{:02d}'.format(im)
+            d1="01/"+m1+"/"+y
+            d2 = datetime_to_str(str_to_datetime(d1, "%d/%m/%Y") + relativedelta.relativedelta(months=+1, days=-1), "%d/%m/%Y")
+            logging.info("Processing month " + d1 + " to " + d2)
+            params = {"tile_id":"T32TPS",
+                      "date_start":d1,
+                      "date_stop":d2,
+                      "date_margin":30,
+                      "mode":"DEBUG",
+                      "input_products_list":[],
+                      # path_tmp is an actual parameter but should only be uncommented with a correct path;
+                      # otherwise the processing uses $TMPDIR by default
+                      # "path_tmp":"",
+                      #"path_out":"/home/qt/salguesg/scratch/multitemp_workdir/tmp_test",
+                      "path_out":"/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/S2_with_L8_Densification_Months/",
+                      #"path_out":"/work/OT/siaa/Theia/Neige/SNOW_ANNUAL_MAP_LIS_1.5/S2_with_L8_Densification/YYYY0901_YYYY0401/",
+                      "ram":8192,
+                      "nbThreads":6,
+                      "use_densification":True,
+                      "log":True,
+                      "densification_products_list":[],
+                      # the following parameters are only used in this script and do not affect the snow_annual_map processing
+                      "snow_products_dir":"/work/OT/siaa/Theia/Neige/PRODUITS_NEIGE_LIS_develop_1.5",
+                      "data_availability_check":False}
+            with open('selectNeigeSyntheseMultitempAlpesFr.csv', 'r') as csvfile:
+                tilesreader = csv.reader(csvfile)
+                firstline = True
+                for row in tilesreader:
+                    if firstline:    # skip the header line
+                        firstline = False
+                    else:
+                        tile_id = 'T' + str(row[0])
+                        params['tile_id'] = tile_id
+
+                        prepare_data_for_snow_annual_map_app = prepare_data_for_snow_annual_map(params)
+                        prepare_data_for_snow_annual_map_app.run()
+                        config_file = prepare_data_for_snow_annual_map_app.build_json()
+                        if config_file is not None:
+                            prepare_data_for_snow_annual_map_app.process_snow_annual_map(config_file)
+
+
+if __name__ == "__main__":
+    # Set logging level and format.
+    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=\
+        '%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+    main()
+
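+# Minimal usage sketch (assumptions: the amalthee module is installed and
+# selectNeigeSyntheseMultitempAlpesFr.csv sits in the working directory):
+#     python prepare_data_for_snow_monthly_map.py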
+
diff --git a/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile.sh b/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile.sh
index c7866e8d..d0901725 100755
--- a/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile.sh
+++ b/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 #PBS -N TheiaNeige
 #PBS -j oe
 #PBS -l select=1:ncpus=1:mem=4000mb
diff --git a/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile_anydate.sh b/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile_anydate.sh
index 5dc52e10..1b9dad5c 100755
--- a/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile_anydate.sh
+++ b/hpc/runTile_lis_Sentinel2_cluster_muscate_anytile_anydate.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 #PBS -N TheiaNeige
 #PBS -j oe
 #PBS -l select=1:ncpus=1:mem=4000mb
diff --git a/hpc/runTile_lis_Sentinel2_datalake_anytile.sh b/hpc/runTile_lis_Sentinel2_datalake_anytile.sh
index 5fcbddba..d0333312 100644
--- a/hpc/runTile_lis_Sentinel2_datalake_anytile.sh
+++ b/hpc/runTile_lis_Sentinel2_datalake_anytile.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 #PBS -N TheiaNeige
 #PBS -j oe
 #PBS -l select=1:ncpus=4:mem=20000mb
diff --git a/hpc/runTile_lis_Sentinel2_datalake_anytile_anydate.sh b/hpc/runTile_lis_Sentinel2_datalake_anytile_anydate.sh
index d4e54783..a900b50f 100644
--- a/hpc/runTile_lis_Sentinel2_datalake_anytile_anydate.sh
+++ b/hpc/runTile_lis_Sentinel2_datalake_anytile_anydate.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 #PBS -N TheiaNeige
 #PBS -j oe
 #PBS -l select=1:ncpus=1:mem=4000mb
diff --git a/hpc/run_lis_from_filelist.sh b/hpc/run_lis_from_filelist.sh
index 517e05b6..8c1bf2c2 100644
--- a/hpc/run_lis_from_filelist.sh
+++ b/hpc/run_lis_from_filelist.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 #PBS -N TheiaNeigeArrayFromFile
 #PBS -j oe
 #PBS -l select=1:ncpus=4:mem=20000mb
diff --git a/hpc/run_snow_annual_map.sh b/hpc/run_snow_annual_map.sh
index 1ac3f2ad..a0f4ad55 100644
--- a/hpc/run_snow_annual_map.sh
+++ b/hpc/run_snow_annual_map.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 #PBS -N TheiaNeigeRunSnowAnnualMap
 #PBS -j oe
 #PBS -l select=1:ncpus=8:mem=20000mb
diff --git a/legacy/app/runBatch_Landsat8.sh b/legacy/app/runBatch_Landsat8.sh
deleted file mode 100755
index 3ec40fee..00000000
--- a/legacy/app/runBatch_Landsat8.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-#export PYTHONPATH=/home/grizonnetm/etudes/bin/lis/bin/:$PYTHONPATH
-source /mnt/data/home/otbtest/config_otb.sh
-export PYTHONPATH=/mnt/data/home/gascoins/CES-Neige/build/let-it-snow/bin/:$PYTHONPATH
-
-output_dir=/mnt/data/home/gascoins/Landsat8/Output-CES-Neige
-
-# main input path 
-pin="/mnt/data/home/gascoins/Landsat8/"
-
-# Path to DEM
-pdem=$pin/"SRTM"
-
-# Tiles to process
-#tiles="N2A_France-MetropoleD0005H0001 N2A_France-MetropoleD0005H0002"
-tiles="N2A_France-MetropoleD0006H0001"
-
-for tile in $tiles
-do
-
-# Path to tiles
-pimg=$pin/$tile
-
-# input DEM
-inputdem=$pdem/$tile/$tile.tif
-
-for i in $pimg/*
-  do
-
-  config=$i.json
-
-  # write the config based on a template file
-  cp ../config/param_full_Landsat8_template.json $config
-
-  # modify only three parameters: image file, cloud file, dem file, output dir
-  inputimage=$(find $i -name *ORTHO_SURF_CORR_PENTE*.TIF)
-  inputcloud=$(find $i -name *NUA.TIF)
-  pout=$output_dir/$tile/$(basename $i)
-  sed -i -e "s|inputimage|$inputimage|g" $config
-  sed -i -e "s|inputcloud|$inputcloud|g" $config
-  sed -i -e "s|inputdem|$inputdem|g" $config
-  sed -i -e "s|outputdir|$pout|g" $config
-
-  # creates the output directory
-  mkdir -p $pout
-
-  # run the snow detection
-  python s2snow.py $config
-
-  # backup config file
-  mv $config $pout
-
-  done
-done 
-
-exit 0
-
diff --git a/legacy/app/runBatch_Take5.sh b/legacy/app/runBatch_Take5.sh
deleted file mode 100755
index 9c6fe888..00000000
--- a/legacy/app/runBatch_Take5.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-#export PYTHONPATH=/home/grizonnetm/etudes/bin/lis/bin/:$PYTHONPATH
-source /mnt/data/home/otbtest/config_otb.sh
-export PYTHONPATH=/mnt/data/home/gascoins/CES-Neige/build/let-it-snow/bin/:$PYTHONPATH
-
-output_dir=/mnt/data/home/gascoins/Take5/Output-CES-Neige
-
-# main input path 
-pin="/mnt/data/home/gascoins/Take5/"
-
-# Path to DEM
-pdem=$pin/"SRTM"
-
-# Tiles to process
-tiles="CAlpes CMaroc CSudmipy-E CSudmipy-O"
-
-for tile in $tiles
-do
-
-# Path to tiles
-pimg=$pin/$tile"_LEVEL2A"
-
-# input DEM
-inputdem=$pdem/$tile/$tile.tif
-
-for i in $pimg/*
-  do
-
-  # write the config based on a template file
-  cp ../config/param_full_Take5_template.json tmp.json
-
-  # modify only three parameters: image file, cloud file, dem file, output dir
-  inputimage=$(find $i -name *ORTHO_SURF_CORR_PENTE*.TIF)
-  inputcloud=$(find $i -name *NUA.TIF)
-  pout=$output_dir/$tile/$(basename $i)
-  sed -i -e "s|inputimage|$inputimage|g" tmp.json
-  sed -i -e "s|inputcloud|$inputcloud|g" tmp.json
-  sed -i -e "s|inputdem|$inputdem|g" tmp.json
-  sed -i -e "s|outputdir|$pout|g" tmp.json
-
-  # creates the output directory
-  mkdir -p $pout
-
-  # run the snow detection
-  python s2snow.py tmp.json
-
-  # backup config file
-  mv tmp.json $pout
-
-  done
-done 
-
-exit 0
-
diff --git a/legacy/app/runLis.sh b/legacy/app/runLis.sh
deleted file mode 100755
index bce3c36d..00000000
--- a/legacy/app/runLis.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-#export PYTHONPATH=/home/grizonnetm/etudes/bin/lis/bin/:$PYTHONPATH
-
-#source /mnt/data/home/otbtest/config_otb.sh
-export PYTHONPATH=/home/grizonnetm/etudes/bin/lis/bin/:$PYTHONPATH
-
-param=$1
-
-output_dir=/home/grizonnetm/temporary/Output-CES-Neige
-
-rm -rf $output_dir
-mkdir -p $output_dir
-
-python s2snow.py $param 
-
-exit 0
diff --git a/legacy/app/runLis_griz.sh b/legacy/app/runLis_griz.sh
deleted file mode 100755
index 02764faa..00000000
--- a/legacy/app/runLis_griz.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-#export PYTHONPATH=/home/grizonnetm/etudes/bin/lis/bin/:$PYTHONPATH
-source /mnt/data/home/otbtest/config_otb.sh
-export PYTHONPATH=/mnt/data/home/grizonnetm/build/lis/bin/:$PYTHONPATH
-
-param=$1
-
-output_dir=/mnt/data/home/grizonnetm/temporary/s2
-
-rm -rf $output_dir
-mkdir -p $output_dir
-
-python s2snow.py $param 
-
-exit 0
diff --git a/legacy/config/param_cloudremoval_template.json b/legacy/config/param_cloudremoval_template.json
deleted file mode 100644
index 202efc7c..00000000
--- a/legacy/config/param_cloudremoval_template.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-    "general":{
-        "pout":"outputdir",
-        "ram":512,
-	"nbThreads":1,
-	"stats":false
-         },
-    "inputs":{
-	"m2Path":"m2path",
-        "m1Path":"m1path",
-        "t0Path":"t0path",
-        "p1Path":"p1path",
-	"p2Path":"p2path",
-	"demPath":"dempath",
-	"refPath":"refpath",
-    },
-    "steps":{
-	"s1":true,
-	"s2":true,
-	"s3":true,
-	"s4":true
-    }
-}
diff --git a/legacy/config/param_extract_maroc.json b/legacy/config/param_extract_maroc.json
deleted file mode 100644
index 554abec8..00000000
--- a/legacy/config/param_extract_maroc.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
-    "general":{
-        "pout":"/mnt/data/home/gascoins/Output-CES-Neige-Full",
-        "shadow_value":64,
-        "ram":1024,
-	"mode":"spot4",
-	"generate_vector":false
-        },
-    "inputs":{
-        "image":"/mnt/data/home/gascoins/Take5/AOI_test_CESNeige/LEVEL2A/Maroc/SPOT4_HRVIR_XS_20130327_N2A_ORTHO_SURF_CORR_PENTE_CMarocD0000B0000.TIF",
-        "dem":"/mnt/data/home/gascoins/Take5/AOI_test_CESNeige/SRTM/Maroc/Maroc.tif",
-        "cloud_mask":"/mnt/data/home/gascoins/Take5/AOI_test_CESNeige/LEVEL2A/Maroc/SPOT4_HRVIR_XS_20130327_N2A_CMarocD0000B0000_NUA.TIF"
-    },
-    "cloud_mask":{
-        "rf":8,
-        "rRed_darkcloud":650,
-        "rRed_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "rRed_pass1":200,
-        "ndsi_pass2":0.15,
-        "rRed_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/param_extract_maroc_cluster.json b/legacy/config/param_extract_maroc_cluster.json
deleted file mode 100644
index 08ab5978..00000000
--- a/legacy/config/param_extract_maroc_cluster.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
-    "general":{
-        "pout":"/data/ctsiap/grizonnm/Output-CES-Neige-Full",
-        "shadow_value":64,
-        "ram":1024,
-	"mode":"spot4",
-	"generate_vector":false
-        },
-    "inputs":{
-        "image":"/data/ctsiap/grizonnm/test_CES/Take5/AOI_test_CESNeige/LEVEL2A/Maroc/SPOT4_HRVIR_XS_20130327_N2A_ORTHO_SURF_CORR_PENTE_CMarocD0000B0000.TIF",
-        "dem":"/data/ctsiap/grizonnm/test_CES/Take5/AOI_test_CESNeige/SRTM/Maroc/Maroc.tif",
-        "cloud_mask":"/data/ctsiap/grizonnm/test_CES/Take5/AOI_test_CESNeige/LEVEL2A/Maroc/SPOT4_HRVIR_XS_20130327_N2A_CMarocD0000B0000_NUA.TIF"
-    },
-    "cloud_mask":{
-        "rf":8,
-        "rRed_darkcloud":650,
-        "rRed_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "rRed_pass1":200,
-        "ndsi_pass2":0.15,
-        "rRed_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/param_full_Landsat8_template.json b/legacy/config/param_full_Landsat8_template.json
deleted file mode 100644
index f82a0306..00000000
--- a/legacy/config/param_full_Landsat8_template.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
-    "general":{
-        "pout":"outputdir",
-        "nodata":-10000,
-        "ram":1024,
-	"nb_threads":1,
-	"generate_vector":false,
-	"preprocessing":false,
-	"log":true
-    },
-    "inputs":{
-	"green_band": {
-	    "path": "inputimage",
-	    "noBand": 3
-	},
-	"red_band": {
-	    "path": "inputimage",
-	    "noBand": 4
-	},
-	"swir_band": {
-	    "path": "inputimage",
-	    "noBand": 6
-	},
-        "dem":"inputdem",
-        "cloud_mask":"inputcloud"
-    },
-    "cloud":
-    {
-        "shadow_mask":64,
-        "all_cloud_mask":0,
-        "high_cloud_mask":64,
-        "rf":8,
-        "red_darkcloud":650,
-        "red_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "red_pass1":200,
-        "ndsi_pass2":0.15,
-        "red_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
diff --git a/legacy/config/param_full_Take5_template.json b/legacy/config/param_full_Take5_template.json
deleted file mode 100644
index 84fec052..00000000
--- a/legacy/config/param_full_Take5_template.json
+++ /dev/null
@@ -1,33 +0,0 @@
-{
-    "general":{
-        "pout":"outputdir",
-        "shadow_value":64,
-        "ram":1024,
-	"nbThreads":1,
-	"mode":"spot",
-	"generate_vector":false,
-	"preprocessing":false,
-	"postprocessing":false
-         },
-    "inputs":{
-	"vrt":"inputvrt",
-        "image":"inputimage",
-        "dem":"inputdem",
-        "cloud_mask":"inputcloud"
-    },
-    "cloud_mask":{
-        "rf":8,
-        "rRed_darkcloud":650,
-        "rRed_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "rRed_pass1":200,
-        "ndsi_pass2":0.15,
-        "rRed_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/param_full_maroc.json b/legacy/config/param_full_maroc.json
deleted file mode 100644
index 698bfff7..00000000
--- a/legacy/config/param_full_maroc.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
-    "general":{
-        "pout":"/mnt/data/home/gascoins/Output-CES-Neige-Full",
-        "shadow_value":64,
-        "ram":1024,
-	"mode":"spot4",
-	"generate_vector":false
-        },
-    "inputs":{ 
-        "image":"/mnt/data/home/grizonnetm/CES-Neige/Data_Full/SPOT4_HRVIR_XS_20130327_N2A_CMarocD0000B0000/SPOT4_HRVIR_XS_20130327_N2A_ORTHO_SURF_CORR_PENTE_CMarocD0000B0000.TIF",
-        "dem":"/mnt/data/home/gascoins/Take5/SRTM/CMaroc/CMaroc.tif",
-        "cloud_mask":"/mnt/data/home/grizonnetm/CES-Neige/Data_Full/SPOT4_HRVIR_XS_20130327_N2A_CMarocD0000B0000/MASK/SPOT4_HRVIR_XS_20130327_N2A_CMarocD0000B0000_NUA.TIF"
-    },
-    "cloud_mask":{
-        "rf":8,
-        "rRed_darkcloud":650,
-        "rRed_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "rRed_pass1":200,
-        "ndsi_pass2":0.15,
-        "rRed_pass2":120,
-        "fsnow_lim":0.05,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/param_s2.json b/legacy/config/param_s2.json
deleted file mode 100644
index 53219cf7..00000000
--- a/legacy/config/param_s2.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
-    "general":{
-        "pout":"/mnt/data/home/grizonnetm/temporary/s2",
-        "shadow_value":32,
-        "ram":1024,
-	"mode":"s2",
-	"generate_vector":false
-        },
-    "inputs":{ 
-        "image":"/mnt/data/SENTINEL2/N2A_TEC/30TYN/S2A_OPER_SSC_L2VALD_30TYN____20150706/S2A_OPER_SSC_L2VALD_30TYN____20150706.DBL.DIR",
-        "dem":"/mnt/data/home/grizonnetm/SRTM-S2/srtm_superimpose.tif",
-        "cloud_mask":"/mnt/data/SENTINEL2/N2A_TEC/30TYN/S2A_OPER_SSC_L2VALD_30TYN____20150706/S2A_OPER_SSC_L2VALD_30TYN____20150706.DBL.DIR/S2A_OPER_SSC_PDTANX_L2VALD_30TYN____20150706_CLD_R2.DBL.TIF"
-    },
-    "cloud_mask":{
-        "rf":8,
-        "rRed_darkcloud":500,
-        "rRed_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "rRed_pass1":200,
-        "ndsi_pass2":0.15,
-        "rRed_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/param_test.json b/legacy/config/param_test.json
deleted file mode 100644
index 65b41e95..00000000
--- a/legacy/config/param_test.json
+++ /dev/null
@@ -1,33 +0,0 @@
-{
-    "general":{
-        "pout":"/home/klempkat/let-it-snow/build-superbuild/Testing/Temporary",
-        "shadow_value":64,
-        "ram":1024,
-	"mode":"spot4",
-	"generate_vector":false,
-	"preprocessing":false,
-	"postprocessing":false
-         },
-    "inputs":{
-	"vrt":"inputvrt",
-        "image":"/home/grizonnetm/Data-Neige/Data-LIS/Input-Data-Test/Take5/AOI_test_CESNeige/LEVEL2A/Maroc/SPOT4_HRVIR_XS_20130327_N2A_ORTHO_SURF_CORR_PENTE_CMarocD0000B0000.TIF",
-        "dem":"/home/grizonnetm/Data-Neige/Data-LIS/Input-Data-Test/Take5/AOI_test_CESNeige/SRTM/Maroc/Maroc.tif",
-        "cloud_mask":"/home/grizonnetm/Data-Neige/Data-LIS/Input-Data-Test/Take5/AOI_test_CESNeige/LEVEL2A/Maroc/SPOT4_HRVIR_XS_20130327_N2A_CMarocD0000B0000_NUA.TIF",
-	"xml":"inputxml"
-    },
-    "cloud_mask":{
-        "rf":8,
-        "rRed_darkcloud":650,
-        "rRed_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "rRed_pass1":200,
-        "ndsi_pass2":0.15,
-        "rRed_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/param_test_preprocessing_landsat.json b/legacy/config/param_test_preprocessing_landsat.json
deleted file mode 100644
index 5f0f2fea..00000000
--- a/legacy/config/param_test_preprocessing_landsat.json
+++ /dev/null
@@ -1,32 +0,0 @@
-
-{
-    "general":{
-        "pout":"/home/grizonnetm/Data-Neige/LandSat_Bassies/s2snow_output",
-        "shadow_value":64,
-        "ram":1024,
-	"mode":"landsat",
-	"generate_vector":true,
-	"preprocessing":true
-        },
-    "inputs":{ 
-	"vrt":"/home/grizonnetm/Data-Neige/LandSat_Bassies/SRTM_sud_ouest/sud_ouest.vrt",
-        "image":"/home/grizonnetm/Data-Neige/LandSat_Bassies/LANDSAT8_OLITIRS_XS_20150312_N2A_France-MetropoleD0005H0001/LANDSAT8_OLITIRS_XS_20150312_N2A_ORTHO_SURF_CORR_PENTE_France-MetropoleD0005H0001.TIF",
-        "dem":"/home/grizonnetm/Data-Neige/LandSat_Bassies/LANDSAT8_OLITIRS_XS_20150312_N2A_France-MetropoleD0005H0001/SRTM/LANDSAT8_OLITIRS_XS_20150312_N2A_SRTM_France-MetropoleD0005H0001.TIF",
-        "cloud_mask":"/home/grizonnetm/Data-Neige/LandSat_Bassies/LANDSAT8_OLITIRS_XS_20150312_N2A_France-MetropoleD0005H0001/MASK/LANDSAT8_OLITIRS_XS_20150312_N2A_France-MetropoleD0005H0001_NUA.TIF"
-    },
-    "cloud_mask":{
-        "rf":8,
-        "rRed_darkcloud":650,
-        "rRed_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "rRed_pass1":200,
-        "ndsi_pass2":0.15,
-        "rRed_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/param_test_s2.json b/legacy/config/param_test_s2.json
deleted file mode 100644
index c64b346a..00000000
--- a/legacy/config/param_test_s2.json
+++ /dev/null
@@ -1,46 +0,0 @@
-{
-    "general":{
-        "pout":"/home/klempkat/let-it-snow/muscate/output",
-        "nodata":-10000,
-        "ram":1024,
-	"nb_threads":1,
-	"generate_vector":false,
-	"preprocessing":false,
-	"log":true
-    },
-    "inputs":{
-	"green_band": {
-	    "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B3.tif",
-	    "noBand": 1
-	},
-	"red_band": {
-	    "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B4.tif",
-	    "noBand": 1
-	},
-	"swir_band": {
-	    "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B11.tif",
-	    "noBand": 1
-	},
-        "dem":"/home/grizonnetm/Data-Neige/MUSCATE/MNT_S2_N2/S2__TEST_AUX_REFDE2_T29RNQ_0001_1.0/S2__TEST_AUX_REFDE2_T29RNQ_0001/S2__TEST_AUX_REFDE2_T29RNQ_0001.DBL.DIR/S2__TEST_AUX_REFDE2_T29RNQ_0001_ALT_R2.TIF",
-        "cloud_mask":"/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/MASKS/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_CLM_R2.tif"
-    },
-    "cloud":
-    {
-        "shadow_mask":64,
-        "all_cloud_mask":0,
-        "high_cloud_mask":32,
-        "rf":12,
-        "red_darkcloud":650,
-        "red_backtocloud":100
-    },
-    "snow":{
-        "dz":100,
-        "ndsi_pass1":0.4,
-        "red_pass1":200,
-        "ndsi_pass2":0.15,
-        "red_pass2":120,
-        "fsnow_lim":0.1,
-        "fsnow_total_lim":0.001
-    }
-}
-
diff --git a/legacy/config/s2_n2a_format.txt b/legacy/config/s2_n2a_format.txt
deleted file mode 100644
index 7fdf1e0d..00000000
--- a/legacy/config/s2_n2a_format.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-Salut Manuel,
-
-Les données S2A sont là : /mnt/data/SENTINEL2/N2A_TEC
-Elles sont rangées par tuiles : 31TCJ (Toulouse) et 30TYN (Tarbes, avec l'Aneto et Gavarnie)), puis par dates
-
-Il y a encore un sous répertoire, il faut choisir  : S2A_OPER_SSC_L2VALD_31TCJ____20150805.DBL.DIR
-
-Ensuite on trouve des fichiers pour la résolution 10m "R1" et la résolution 20m "R2"
-
-
-    - les réflectances de surface sont dans le plan SRE (ou FRE pour la correction des effets du relief)
-Ce sont les fichiers R2 dont les numéros de bandes sont décalés. La première
-bande ne correspond à rien, et la bande 12 qui aurait dû être en sixième
-position n'est pas fournie. 
-Bon j'espère qu'on aura bientôt la prochaine version qui corrigera le bug.
-
-    - Le masque de Nuages est le plan CLD
-        - l'ordre des bits à changé par rapport au produit que tu connais :
-
-Bit 1 : Cloud_Mask.all : summary Logical or of All cloud and shadow
-masks
-
-Bit 2 - Cloud_Mask.all clouds: Logical or of All cloud masks
-
-Bit 3 : Cloud_Mask.shadows : shadows mask from clouds within image
-
-Bit 4 : Cloud_Mask.shadvar: shadows mask from clouds outside image
-
-Bit 5 - Cloud_Mask.refl : reflectance threshold
-
-Bit 6 - Cloud_Mask.refl_var : reflectance variation threshold
-
-Bit 7 - Cloud_Mask.extension : extension of the cloud mask
-
-Bit 8 - Cirrus Mask pour S2 et L8 
diff --git a/legacy/test/histo_utils_test.cxx b/legacy/test/histo_utils_test.cxx
deleted file mode 100644
index f0cc0fcd..00000000
--- a/legacy/test/histo_utils_test.cxx
+++ /dev/null
@@ -1,34 +0,0 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
-
-#include "histo_utils.h"
-#include <iostream>
-int main(int argc, char * argv [])
-{
-const int result = compute_snowline(argv[1],argv[2],argv[3],atoi(argv[4]),atof(argv[5]));
-const int expected = atoi(argv[6]);
-std::cout << "result: " << result << std::endl;
-
-if (result == expected)
-  {
-  return EXIT_SUCCESS;
-  }
-else
-  {
-  std::cerr << "Expected value is " << expected << " but get " << result << std::endl; 
-  return EXIT_FAILURE;
-  }
-}
diff --git a/legacy/utils/projette_mnt_Landsat8_full.sh b/legacy/utils/projette_mnt_Landsat8_full.sh
deleted file mode 100644
index 2cf26e36..00000000
--- a/legacy/utils/projette_mnt_Landsat8_full.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#/bin/sh
-
-# Path to the SRTM virtual raster dataset
-fsrtm="/mnt/data/DONNEES_AUX/SRTM/srtm.vrt"
-
-# Path to images
-pimg="/mnt/data/home/gascoins/Landsat8"
-
-# Tiles to process
-tiles="N2A_France-MetropoleD0005H0001 N2A_France-MetropoleD0005H0002 N2A_France-MetropoleD0006H0001"
-
-# Path to write the output DEM
-pout="/mnt/data/home/gascoins/Landsat8/SRTM"
-
-# Full extent images from Level2
-for s0 in $tiles
-do
- 
-  # input folder to get target image SRS and extent
-  p="$pimg/$s0"
-
-  # only one file is needed to get the SRS
-  f=$(ls $p/*/*PENTE*TIF | head -n1)
-
-  # get target extent
-  # gdal_warp convention = xmin ymin xmax ymax
-  xminymin=$(gdalinfo $f | grep "Lower Left" | tr -d '(,)A-z' | awk '{print $1, $2}')
-  xmaxymax=$(gdalinfo $f | grep "Upper Right" | tr -d '(,)A-z' | awk '{print $1, $2}')
-  te="$xminymin $xmaxymax"
-
-  # get the SRS
-  proj=$(gdalsrsinfo -o proj4 $f | tr -d "'")
-
-  # output folder to write projected SRTM
-  po="$pout/$s0/"
-  mkdir -p $po
-
-  # projet SRTM dem with cubicspline resampling (target resolution in 20 m x 20 m) 
-  gdalwarp -dstnodata -32768 -tr 30 30 -r cubicspline -overwrite -te $te -t_srs "$proj" $fsrtm $po$s0.tif &
-
-done
-wait
-exit 0
-
diff --git a/legacy/utils/projette_mnt_Take5_full.sh b/legacy/utils/projette_mnt_Take5_full.sh
deleted file mode 100644
index e4f65872..00000000
--- a/legacy/utils/projette_mnt_Take5_full.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#/bin/sh
-
-# Path to the SRTM virtual raster dataset
-fsrtm="/mnt/data/DONNEES_AUX/SRTM/srtm.vrt"
-
-# Path to images
-pimg="/mnt/data/home/gascoins/Take5"
-
-# Tiles to process
-tiles="CAlpes CMaroc CSudmipy-O CSudmipy-E"
-
-# Path to write the output DEM
-pout="/mnt/data/home/gascoins/Take5/SRTM"
-
-# Full extent images from Level2
-for s0 in $tiles
-do
- 
-  # input folder to get target image SRS and extent
-  p="$pimg/$s0"_LEVEL2A
-
-  # only one file is needed to get the SRS
-  f=$(ls $p/*/*PENTE*TIF | head -n1)
-
-  # get target extent
-  # gdal_warp convention = xmin ymin xmax ymax
-  xminymin=$(gdalinfo $f | grep "Lower Left" | tr -d '(,)A-z' | awk '{print $1, $2}')
-  xmaxymax=$(gdalinfo $f | grep "Upper Right" | tr -d '(,)A-z' | awk '{print $1, $2}')
-  te="$xminymin $xmaxymax"
-
-  # get the SRS
-  proj=$(gdalsrsinfo -o proj4 $f | tr -d "'")
-
-  # output folder to write projected SRTM
-  po="$pout/$s0/"
-  mkdir -p $po
-
-  # projet SRTM dem with cubicspline resampling (target resolution in 20 m x 20 m) 
-  gdalwarp -dstnodata -32768 -tr 20 20 -r cubicspline -overwrite -te $te -t_srs "$proj" $fsrtm $po$s0.tif &
-
-done
-wait
-exit 0
-
diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt
index 44fcb474..2ebc05f4 100644
--- a/python/CMakeLists.txt
+++ b/python/CMakeLists.txt
@@ -1,3 +1,23 @@
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 find_program(PYTHON "python")
 
 if (PYTHON)
diff --git a/python/s2snow/app_wrappers.py b/python/s2snow/app_wrappers.py
index bb2976eb..3dc2b170 100644
--- a/python/s2snow/app_wrappers.py
+++ b/python/s2snow/app_wrappers.py
@@ -1,20 +1,25 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-#=========================================================================
 #
-#  Program:   lis
-#  Language:  Python
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
 #
-#  Copyright (c) Simon Gascoin
-#  Copyright (c) Manuel Grizonnet
+# This file is part of Let-it-snow (LIS)
 #
-#  See lis-copyright.txt for details.
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
 #
-#  This software is distributed WITHOUT ANY WARRANTY; without even
-#  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-#  PURPOSE.  See the above copyright notices for more information.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
-#=========================================================================
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 import logging
 
 # OTB Applications
@@ -40,7 +45,7 @@ def band_math(il, out, exp, ram=None, out_type=None):
         bandMathApp = otb.Registry.CreateApplication("BandMath")
         bandMathApp.SetParameterString("exp", exp)
         for image in il:
-            if isinstance(image, basestring):
+            if isinstance(image, str):
                 bandMathApp.AddParameterStringList("il", image)
             else:
                 bandMathApp.AddImageToParameterInputImageList("il", image)
@@ -155,7 +160,7 @@ def band_mathX(il, out, exp, ram=None, out_type=None):
         bandMathApp = otb.Registry.CreateApplication("BandMathX")
         bandMathApp.SetParameterString("exp", exp)
         for image in il:
-            if isinstance(image, basestring):
+            if isinstance(image, str):
                 bandMathApp.AddParameterStringList("il", image)
             else:
                 bandMathApp.AddImageToParameterInputImageList("il", image)
@@ -199,10 +204,17 @@ def compute_snow_line(img_dem, img_snow, img_cloud, dz, fsnowlim, fclearlim, \
         snowLineApp.SetParameterFloat("fsnowlim", fsnowlim)
         snowLineApp.SetParameterFloat("fclearlim", fclearlim)
         snowLineApp.SetParameterInt("offset", offset)
+        if not isinstance(centeroffset, int):
+            if round(centeroffset,0) != centeroffset:
+                raise IOError("centeroffset shoud be an integer, got %s instead with value %s => error"%(type(centeroffset), centeroffset))
+            else:
+                print("WARNING: centeroffset shoud be an integer, got %s instead with value %s => converting to int"%(type(centeroffset), centeroffset))
+            centeroffset = int(centeroffset)
         snowLineApp.SetParameterInt("centeroffset", centeroffset)
-
         if reverse:
-            snowLineApp.SetParameterString("reverse", "true")
+            snowLineApp.SetParameterInt("reverse", 1)
+        else:
+            snowLineApp.SetParameterInt("reverse", 0)
 
         if ram is not None:
             logging.info("ram = " + str(ram))
@@ -304,8 +316,10 @@ def compute_contour(img_in, img_out, foreground_value, fullyconnected, \
             logging.info("out = " + img_out)
             cloudMaskApp.SetParameterString("out", img_out)
         if fullyconnected:
-            cloudMaskApp.SetParameterString("fullyconnected", "true")
+            cloudMaskApp.SetParameterInt("fullyconnected", 1)
             cloudMaskApp.SetParameterString("inputmask", img_in)
+        else:
+            cloudMaskApp.SetParameterInt("fullyconnected", 0)
         if ram is not None:
             logging.info("ram = " + str(ram))
             cloudMaskApp.SetParameterString("ram", str(ram))
@@ -371,3 +385,45 @@ def get_app_output(app, out_key, mode="RUNTIME"):
     else:
         logging.error("Unexpected mode")
     return app_output
+
+def gap_filling(img_in, mask_in, img_out, input_dates_file=None,
+                output_dates_file=None, ram=None, out_type=None):
+    """ Create and configure the ImageTimeSeriesGapFilling application
+        using otb.Registry.CreateApplication("ImageTimeSeriesGapFilling")
+
+    Keyword arguments:
+    img_in -- the input timeserie image
+    mask_in -- the input masks
+    img_out -- the output image
+    ram -- the ram limitation (not mandatory)
+    out_type -- the output image pixel type  (not mandatory)
+    """
+    if img_in and mask_in and img_out:
+        logging.info("Processing ImageTimeSeriesGapFilling with args:")
+        logging.info("img_in = " + img_in)
+        logging.info("mask_in = " + mask_in)
+        logging.info("img_out = " + img_out)
+
+        gap_filling_app = otb.Registry.CreateApplication("ImageTimeSeriesGapFilling")
+        gap_filling_app.SetParameterString("in", img_in)
+        gap_filling_app.SetParameterString("mask", mask_in)
+        gap_filling_app.SetParameterString("out", img_out)
+
+        gap_filling_app.SetParameterInt("comp", 1)
+        gap_filling_app.SetParameterString("it", "linear")
+
+        if input_dates_file is not None:
+            logging.info("input_dates_file = " + input_dates_file)
+            gap_filling_app.SetParameterString("id", input_dates_file)
+        if output_dates_file is not None:
+            logging.info("output_dates_file = " + output_dates_file)
+            gap_filling_app.SetParameterString("od", output_dates_file)
+        if ram is not None:
+            logging.info("ram = " + str(ram))
+            gap_filling_app.SetParameterString("ram", str(ram))
+        if out_type is not None:
+            logging.info("out_type = " + str(out_type))
+            gap_filling_app.SetParameterOutputImagePixelType("out", out_type)
+        return gap_filling_app
+    else:
+        logging.error("Parameters img_in, img_out and mask_in are required")
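For reference, a minimal usage sketch of the gap_filling wrapper now hosted here (moved from snow_annual_map.py, resolving the old TODO there); the file names below are hypothetical, and the caller still has to trigger execution since the wrapper only configures the OTB application:

    from s2snow.app_wrappers import gap_filling
    import otbApplication as otb

    # Configure ImageTimeSeriesGapFilling on a snow time series and its cloud masks
    app = gap_filling("multitemp_snow_mask.vrt",   # img_in (hypothetical path)
                      "multitemp_cloud_mask.vrt",  # mask_in (hypothetical path)
                      "gapfilled_timeserie.tif",   # img_out
                      input_dates_file="input_dates.txt",
                      output_dates_file="output_dates.txt",
                      ram=512,
                      out_type=otb.ImagePixelType_uint8)
    if app is not None:
        app.ExecuteAndWriteOutput()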
diff --git a/python/s2snow/cloud_builder.py b/python/s2snow/cloud_builder.py
index 0ffdff87..4178831f 100644
--- a/python/s2snow/cloud_builder.py
+++ b/python/s2snow/cloud_builder.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys
 import os
@@ -13,9 +32,9 @@ from subprocess import call
 
 
 def show_help():
-    print "This script is used to create clouds on data"
-    print "Usage: cloud_builder.py mode plaincloudthreshold randomcloudthreshold inputpath outputplaincloudpath ouputrandomcloudpath"
-    print "Mode : 0 %plain cloud image, 1 %random cloud image, 2 both"
+    print("This script is used to create clouds on data")
+    print("Usage: cloud_builder.py mode plaincloudthreshold randomcloudthreshold inputpath outputplaincloudpath ouputrandomcloudpath")
+    print("Mode : 0 %plain cloud image, 1 %random cloud image, 2 both")
 
 
 def main(argv):
diff --git a/python/s2snow/cloud_removal.py b/python/s2snow/cloud_removal.py
index f9cb1452..67623b5a 100644
--- a/python/s2snow/cloud_removal.py
+++ b/python/s2snow/cloud_removal.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import os
 import sys
@@ -15,8 +34,8 @@ import gdalconst
 
 
 def show_help():
-    print "This script is used to remove clouds from snow data"
-    print "Usage: cloud_removal.py config.json"
+    print("This script is used to remove clouds from snow data")
+    print("Usage: cloud_removal.py config.json")
 
 
 def get_raster_as_array(raster_file_name):
@@ -59,7 +78,7 @@ def compute_HSmax(image_path, dem_path):
 def compute_cloudpercent(image_path):
     array_image, dataset_image = get_raster_as_array(image_path)
     cloud = np.sum(array_image == 205)
-    tot_pix = np.sum(array_image != 254)
+    tot_pix = np.sum(array_image < 254)
     return (float(cloud) / float(tot_pix)) * 100
 
 
@@ -72,7 +91,7 @@ def compute_cloud(image):
 def step1(m2_path, m1_path, t0_path, p1_path, p2_path, output_path, ram):
 
     # Cloud conditions (include pixel flagged as cloud and also as no data
-    cloud_nodata_condition = "(im2b1 == 205 || im2b1 == 254)"
+    cloud_nodata_condition = "(im2b1 == 205 || im2b1 >= 254)"
 
     # S(y,x,t) = 1 if (S(y,x,t-1) = 1 and S(y,x,t+1) = 1)
     call(
@@ -124,7 +143,7 @@ def step1(m2_path, m1_path, t0_path, p1_path, p2_path, output_path, ram):
 def step2(t0_path, dem_path, output_path, ram):
 
     percentage_cloud = compute_cloudpercent(t0_path)
-    print "cloud percent : " + str(percentage_cloud)
+    print("cloud percent : " + str(percentage_cloud))
 
     # Perform step 2 only if cloud coverage is less than a threshold value
     # (hard coded for now to 30%)
@@ -133,7 +152,7 @@ def step2(t0_path, dem_path, output_path, ram):
     if cloudpercent_condition:
         # S(y,x,t) = 1 if (H(x,y) < Hsmin(t))
         hs_min = compute_HSmin(t0_path, dem_path)
-        print "hs_min: " + str(hs_min)
+        print("hs_min: " + str(hs_min))
         call(["otbcli_BandMath",
               "-ram",
               str(ram),
@@ -145,7 +164,7 @@ def step2(t0_path, dem_path, output_path, ram):
               "-exp",
               "im1b1==205?(im2b1<" + str(hs_min) + "?0:im1b1):im1b1"])
         hs_max = compute_HSmax(t0_path, dem_path)
-        print "hs_max: " + str(hs_max)
+        print("hs_max: " + str(hs_max))
         # S(y,x,t) = 1 if (H(x,y) > Hsmax(t))
         call(["otbcli_BandMath",
               "-ram",
@@ -164,7 +183,7 @@ def step2(t0_path, dem_path, output_path, ram):
 def step3(t0_path, output_path):
 
     # four-pixels neighboring
-    print "Starting step 3"
+    print("Starting step 3")
     array, dataset = get_raster_as_array(t0_path)
 
     # compute 4 pixel snow neighboring
@@ -172,7 +191,7 @@ def step3(t0_path, output_path):
 
     set_array_as_raster(array, dataset, output_path)
     # create file
-    print "End of step 3"
+    print("End of step 3")
 
 
 def step3_internal(array):
@@ -194,7 +213,7 @@ def step3_internal(array):
 def step4(t0_path, dem_path, output_path):
     # S(y,x,t) = 1 if (S(y+k,x+k,t)(kc(-1,1)) = 1 and H(y+k,x+k)(kc(-1,1)) <
     # H(y,x))
-    print "Starting step 4"
+    print("Starting step 4")
     array, dataset = get_raster_as_array(t0_path)
     array_dem, dataset_dem = get_raster_as_array(dem_path)
 
@@ -203,7 +222,7 @@ def step4(t0_path, dem_path, output_path):
 
     # create file
     set_array_as_raster(array, dataset, output_path)
-    print "End of step 4"
+    print("End of step 4")
 
 
 def step4_internal(array, array_dem):
@@ -333,7 +352,7 @@ def format_percent(array, total_cloud):
 
 
 def plot_stats(array):
-    steps = range(0, array.shape[0])
+    steps = list(range(0, array.shape[0]))
     TCE = array[:, 0]
     TRUE = array[:, 1]
     FALSE = array[:, 2]
@@ -355,7 +374,7 @@ def run(data):
     try:
         nb_defaultThreads = multiprocessing.cpu_count()
     except NotImplementedError:
-        print "Cannot get max number of CPU on the system. nbDefaultThreads set to 1."
+        print("Cannot get max number of CPU on the system. nbDefaultThreads set to 1.")
         nb_defaultThreads = 1
 
     nb_threads = general.get("nb_threads", nb_defaultThreads)
@@ -450,10 +469,10 @@ def run(data):
         stats_array_percent = np.vstack([stats_array_percent, np.sum(
             stats_array_percent, axis=0)])  # add total to array
 
-        print stats_array
+        print(stats_array)
         np.set_printoptions(precision=3)
         np.set_printoptions(suppress=True)
-        print stats_array_percent
+        print(stats_array_percent)
 
         # plot_stats(stats_array)
 
diff --git a/python/s2snow/compute_NOBS.py b/python/s2snow/compute_NOBS.py
new file mode 100644
index 00000000..d8140c87
--- /dev/null
+++ b/python/s2snow/compute_NOBS.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import rasterio
+import numpy as np
+import os, sys
+import logging
+from s2snow.version import VERSION
+
+
+def compute_NOBS(input_file, output_file=None):
+    """
+    Computes NOBS_xxx.tif, the number of clear observations used to compute the SCD, SMOD and SOD syntheses.
+    :param input_file: the cloud mask vrt generated by the run_snow_annual_map script, multitemp_cloud_mask.vrt
+    :param output_file: NOBS file path.
+    :return:
+    """
+    logging.info("Start compute_NOBS.py using: {}".format(input_file))
+
+    if not os.path.isfile(input_file):
+        logging.error("Input file does not exist : {}", input_file)
+        return
+
+    if output_file is None:
+        output_dir = os.path.split(os.path.split(input_file)[0])[0]
+        synthesis_id = os.path.split(output_dir)[1]
+        output_file = os.path.join(output_dir,"NOBS_{}.tif".format(synthesis_id))
+
+
+    src = rasterio.open(input_file, 'r')
+
+    n = src.meta["count"]
+    # W stacks the n binary cloud/no-data bands; a pixel is a clear
+    # observation when its value is 0, so NOBS = n - sum over dates
+    W = src.read(range(1, n + 1))
+    S = n - np.sum(W, axis=0)
+
+    with rasterio.Env():
+        profile = src.profile
+        profile.update(
+            dtype=rasterio.uint16,
+            driver='GTiff',
+            count=1)
+
+        with rasterio.open(output_file, 'w', **profile) as dst:
+            dst.write(S.astype(rasterio.uint16), 1)
+
+    logging.info("End of compute_NOBS.py")
+
+
+def show_help():
+    """
+    Show help for compute_NOBS
+    :return:
+    """
+    print("This script is used to compute NOBS. " \
+          + "Input file is the cloud mask vrt generated using run_show_annual_map script, " \
+          + "named multitemp_cloud_mask.vrt")
+    print("Usage: python compute_NOBS.py nobs_input_file synthesis_id output_dir")
+    print("Example: python compute_NOBS.py multitemp_cloud_mask.vrt T31TCH_20160901_20170831 /tmp")
+    print("python compute_NOBS.py version to show version")
+    print("python compute_NOBS.py help to show help")
+
+
+def show_version():
+    """
+    Show LIS version
+    :return:
+    """
+    print("LIS Version : {}".format(VERSION))
+
+
+def main(argv):
+    compute_NOBS(*argv[1:])
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2 or len(sys.argv) > 3:
+        show_help()
+    else:
+        if sys.argv[1] == "version":
+            show_version()
+        elif sys.argv[1] == "help":
+            show_help()
+        else:
+            main(sys.argv)
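A toy numpy illustration of the counting above, with a hypothetical 4-date stack of 2x2 binary cloud/no-data masks: the number of clear observations per pixel is the date count minus the per-pixel sum.

    import numpy as np

    # 4 dates x 2 x 2 pixels; 1 = cloud/no data, 0 = clear observation
    W = np.array([[[0, 1], [1, 0]],
                  [[0, 1], [0, 0]],
                  [[1, 1], [0, 0]],
                  [[0, 0], [1, 0]]])
    n = W.shape[0]
    S = n - np.sum(W, axis=0)
    print(S)  # [[3 1]
              #  [2 4]]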
diff --git a/python/s2snow/compute_SOD_SMOD.py b/python/s2snow/compute_SOD_SMOD.py
new file mode 100644
index 00000000..42bb4e14
--- /dev/null
+++ b/python/s2snow/compute_SOD_SMOD.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import rasterio
+import numpy as np
+import itertools, operator, sys, os
+import logging
+from s2snow.version import VERSION
+
+
+def compute_SOD_SMOD(input_file, sod_file=None, smod_file=None):
+    """
+    Computes the snow onset date (SOD) and the snow melt-out date (SMOD) from a stack of daily snow maps.
+    The dates are given in number of days since the first day of the synthesis (usually September 01).
+    :param input_file: the interpolated daily raster generated by the run_snow_annual_map script.
+    :param sod_file: SOD file path
+    :param smod_file: SMOD file path.
+    :return:
+    """
+    logging.info("Start compute_SOD_SMOD.py using: {}".format(input_file))
+
+    if not os.path.isfile(input_file):
+        logging.error("Input file does not exist : {}".format(input_file))
+        return
+
+    if sod_file is None or smod_file is None:
+        synthesis_id = os.path.split(input_file)[1]
+        output_dir = os.path.split(input_file)[0]
+    if sod_file is None:
+        sod_file = os.path.join(output_dir,"SOD_{}.tif".format(synthesis_id))
+    if smod_file is None:
+        smod_file = os.path.join(output_dir,"SMOD_{}.tif".format(synthesis_id))
+
+    src = rasterio.open(input_file, 'r')
+    n = src.meta["count"]
+
+    W = src.read(range(1, n + 1))
+    # n and m are reused here as the raster height and width
+    n = np.shape(W)[1]
+    m = np.shape(W)[2]
+    sod = np.zeros((n, m), dtype='uint16')
+    smod = np.zeros((n, m), dtype='uint16')
+    for i in range(0, n):
+        for j in range(0, m):
+            w = W[:, i, j]
+            # only pixels with more than 10 snow days get a SOD/SMOD value
+            if np.sum(w) > 10:
+                # longest run of consecutive snow days (value == 1)
+                r = max((list(y) for (x, y) in itertools.groupby((enumerate(w)), operator.itemgetter(1)) if x == 1),
+                        key=len)
+                smod[i, j] = r[-1][0]
+                sod[i, j] = r[0][0]
+
+    with rasterio.Env():
+        profile = src.profile
+        profile.update(
+            dtype=rasterio.uint16,
+            count=1)
+
+        with rasterio.open(smod_file, 'w', **profile) as dst:
+            dst.write(smod.astype(rasterio.uint16), 1)
+
+        with rasterio.open(sod_file, 'w', **profile) as dst:
+            dst.write(sod.astype(rasterio.uint16), 1)
+
+    logging.info("End of compute_SOD_SMOD.py")
+
+
+def show_help():
+    """
+    Show help for compute_SOD_SMOD .
+    :return:
+    """
+    print("This script is used to compute SOD and SMOD. " \
+          + "Input file is the interpolated daily raster generated using run_show_annual_map script." \
+          + " Example : DAILY_SNOW_MASKS_T31TCH_20160901_20170831.tif")
+    print(
+        "Usage: python compute_SOD_SMOD.py DAILY_SNOW_MASKS_T31TCH_20160901_20170831.tif T31TCH_20160901_20170831 /tmp")
+    print("Example: python compute_SOD_SMOD.py input_file synthesis_id output_dir")
+    print("python compute_SOD_SMOD.py version to show version")
+    print("python compute_SOD_SMOD.py help to show help")
+
+
+def show_version():
+    """
+    Show LIS version.
+    :return:
+    """
+    print("LIS Version : {}".format(VERSION))
+
+
+def main(argv):
+    compute_SOD_SMOD(*argv[1:])
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2 or len(sys.argv) > 4:
+        show_help()
+    else:
+        if sys.argv[1] == "version":
+            show_version()
+        elif sys.argv[1] == "help":
+            show_help()
+        else:
+            main(sys.argv)
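The pixel loop above relies on itertools.groupby to isolate the longest run of consecutive snow days; here is a single-pixel sketch with a hypothetical series (omitting the np.sum(w) > 10 guard used in the function):

    import itertools, operator

    # Daily series for one pixel: 1 = snow, 0 = no snow
    w = [0, 0, 1, 1, 1, 1, 0, 1, 1, 0]

    # Group consecutive equal values, keep the snow runs (key == 1), take the longest
    r = max((list(y) for (x, y) in itertools.groupby(enumerate(w), operator.itemgetter(1)) if x == 1),
            key=len)
    sod = r[0][0]    # first day of the longest snow period -> 2
    smod = r[-1][0]  # last day of the longest snow period  -> 5
    print(sod, smod)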
diff --git a/python/s2snow/dem_builder.py b/python/s2snow/dem_builder.py
index 30fedcc0..84dde40e 100644
--- a/python/s2snow/dem_builder.py
+++ b/python/s2snow/dem_builder.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys
 import subprocess
@@ -9,8 +28,8 @@ import ast
 from osgeo import gdal, gdalconst, osr
 
 def show_help():
-    print "This script is used to compute srtm mask from a vrt file to a region extent"
-    print "Usage: preprocessing.py srtm.vrt img.tif output.tif"
+    print("This script is used to compute srtm mask from a vrt file to a region extent")
+    print("Usage: preprocessing.py srtm.vrt img.tif output.tif")
 
 
 def get_extent(geotransform, cols, rows):
@@ -31,7 +50,7 @@ def get_extent(geotransform, cols, rows):
 
 def build_dem(psrtm, pimg, pout, ram, nbThreads):
     # load datasets
-    source_dataset = gdal.Open(psrtm, gdalconst.GA_Update)
+    source_dataset = gdal.Open(psrtm, gdalconst.GA_ReadOnly)
     source_geotransform = source_dataset.GetGeoTransform()
     source_projection = source_dataset.GetProjection()
 
@@ -94,9 +113,9 @@ def build_dem(psrtm, pimg, pout, ram, nbThreads):
          )
 
     except subprocess.CalledProcessError as e:
-        print e.output
-        print 'Error running command: ' + str(e.cmd) + ' see above shell error'
-        print 'Return code: ' + str(e.returncode)
+        print(e.output)
+        print('Error running command: ' + str(e.cmd) + ' see above shell error')
+        print('Return code: ' + str(e.returncode))
         return e.returncode
     
 def main(argv):
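The GA_Update -> GA_ReadOnly change is safe because build_dem only queries the source DEM's georeferencing; a minimal sketch of what read-only access provides (the path is hypothetical):

    from osgeo import gdal, gdalconst

    # Read-only access is enough to retrieve georeferencing metadata
    ds = gdal.Open("srtm.vrt", gdalconst.GA_ReadOnly)  # hypothetical path
    print(ds.GetGeoTransform())  # (origin_x, pixel_w, 0, origin_y, 0, pixel_h)
    print(ds.GetProjection())    # projection as a WKT string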
diff --git a/python/s2snow/findRefCandidates.py b/python/s2snow/findRefCandidates.py
index c7e4eac3..6404f1eb 100755
--- a/python/s2snow/findRefCandidates.py
+++ b/python/s2snow/findRefCandidates.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import os, sys
 import os.path as op
@@ -22,15 +41,15 @@ def main(argv):
 				
 				# Find potential
 				if snow_percent > minsnowthreshold and cloud_percent > mincloudthreshold and snow_percent < maxsnowthreshold and cloud_percent < maxcloudthreshold :
-					print root
-					print "snow percent: " + str(snow_percent)
-					print "cloud percent: " + str(cloud_percent)
+					print(root)
+					print(("snow percent: " + str(snow_percent)))
+					print(("cloud percent: " + str(cloud_percent)))
 					total_images += 1
 					
-	print "total images :" + str(total_images)
+	print(("total images :" + str(total_images)))
 
 if __name__ == "__main__":
 	if len(sys.argv) != 5:
-		print "Missing arguments"
+		print("Missing arguments")
 	else:
 		main(sys.argv)
diff --git a/python/s2snow/snow_annual_map.py b/python/s2snow/snow_annual_map.py
index f4d69ece..d868dbde 100644
--- a/python/s2snow/snow_annual_map.py
+++ b/python/s2snow/snow_annual_map.py
@@ -1,99 +1,302 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-#=========================================================================
 #
-#  Program:   lis
-#  Language:  Python
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
 #
-#  Copyright (c) Germain Salgues
-#  Copyright (c) Manuel Grizonnet
+# This file is part of Let-it-snow (LIS)
 #
-#  See lis-copyright.txt for details.
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
 #
-#  This software is distributed WITHOUT ANY WARRANTY; without even
-#  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-#  PURPOSE.  See the above copyright notices for more information.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
-#=========================================================================
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import logging
 import os
 import os.path as op
 import shutil
-import logging
-import multiprocessing
-from xml.dom import minidom
 from datetime import timedelta
 
-from lxml import etree
-
 import gdal
-from gdalconst import GA_ReadOnly
-
 # OTB Applications
 import otbApplication as otb
 
 # Import python decorators for the different needed OTB applications
-from s2snow.app_wrappers import band_math, get_app_output, super_impose
-
-from s2snow.utils import str_to_datetime, datetime_to_str
-from s2snow.utils import write_list_to_file, read_list_from_file
+from s2snow.app_wrappers import band_math, get_app_output, super_impose, band_mathX, gap_filling
 from s2snow.snow_product_parser import load_snow_product
+from s2snow.utils import datetime_to_str
+from s2snow.utils import write_list_to_file, read_list_from_file
 
 # Build gdal option to generate masks of 1 byte using otb extended filename
 # syntax
 GDAL_OPT = "?&gdal:co:NBITS=1&gdal:co:COMPRESS=DEFLATE"
 
-
-def parse_xml(filepath):
-    """ Parse an xml file to return the zs value of a snow product
-    """
-    logging.debug("Parsing " + filepath)
-    xmldoc = minidom.parse(filepath)
-    group = xmldoc.getElementsByTagName('Global_Index_List')[0]
-    zs = group.getElementsByTagName("QUALITY_INDEX")[0].firstChild.data
+LABEL_NO_SNOW = "0"
+LABEL_SNOW = "100"
+LABEL_CLOUD = "205"
+LABEL_NO_DATA = "255"
+LABEL_NO_DATA_OLD = "254"
+
+def compute_SCD(binary_snow_mask_list, multitemp_cloud_vrt, input_dates_filename, output_dates_filename, output_dates,
+                snow_coverage_duration, multitemp_snow_vrt, gapfilled_timeserie, multitemp_snow100,
+                multitemp_snow100_gapfilled, ram):
+    logging.info("Start compute_SCD")
+
+    # build snow mask vrt
+    logging.info("Building multitemp snow mask vrt")
+    logging.info("snow vrt: {}".format(multitemp_snow_vrt))
+
+    gdal.BuildVRT(multitemp_snow_vrt,
+                  binary_snow_mask_list,
+                  separate=True,
+                  srcNodata='None')
+
+    # multiply by 100 for the temporal interpolation
+    logging.info("Scale by 100 multitemp snow mask vrt")
+    bandMathXApp = band_mathX([multitemp_snow_vrt],
+                              multitemp_snow100,
+                              "im1 mlt 100",
+                              ram,
+                              otb.ImagePixelType_uint8)
+    bandMathXApp.ExecuteAndWriteOutput()
+    bandMathXApp = None
+
+    # gap filling the snow timeserie
+    logging.info("Gap filling the snow timeserie")
+    app_gap_filling = gap_filling(multitemp_snow100,
+                                  multitemp_cloud_vrt,
+                                  multitemp_snow100_gapfilled + "?&gdal:co:COMPRESS=DEFLATE",
+                                  input_dates_filename,
+                                  output_dates_filename,
+                                  ram,
+                                  otb.ImagePixelType_uint8)
+    # @TODO the mode is for now forced to DEBUG in order to generate img on disk
+    # img_in = get_app_output(app_gap_filling, "out", mode)
+    # if mode == "DEBUG":
+    # shutil.copy2(gapfilled_timeserie, path_out)
+    # app_gap_filling = None
+    img_in = get_app_output(app_gap_filling, "out", "DEBUG")
+    app_gap_filling = None
+    # threshold to 0 or 1
+    logging.info("Round to binary series of snow occurrence")
+    bandMathXApp = band_mathX([img_in],
+                              gapfilled_timeserie + GDAL_OPT,
+                              "(im1 mlt 2) dv 100",
+                              ram,
+                              otb.ImagePixelType_uint8)
+    bandMathXApp.ExecuteAndWriteOutput()
+    bandMathXApp = None
+
+    # generate the annual map
+    logging.info("Generate Snow coverate duration: {}".format(snow_coverage_duration))
+    band_index = list(range(1, len(output_dates) + 1))
+    logging.debug("Bande index: {}".format(band_index))
+    expression = "+".join(["im1b" + str(i) for i in band_index])
+    logging.debug("expression: {}".format(expression))
+    bandMathApp = band_math([gapfilled_timeserie],
+                            snow_coverage_duration,
+                            expression,
+                            ram,
+                            otb.ImagePixelType_uint16)
+    bandMathApp.ExecuteAndWriteOutput()
+    bandMathApp = None
+    logging.info("End compute_SCD")
+
+
+def compute_CCD(binary_cloud_mask_list, ccd_file_path, multitemp_cloud_vrt, ram):
+    logging.info("Start compute_CCD")
+
+    # build cloud mask vrt
+    logging.info("Building multitemp cloud mask vrt")
+    logging.info("cloud vrt: {}".format(multitemp_cloud_vrt))
+    gdal.BuildVRT(multitemp_cloud_vrt,
+                  binary_cloud_mask_list,
+                  separate=True,
+                  srcNodata='None')
+
+    # generate the summary map
+    logging.info("Generate Cloud_Occurence: {}".format(ccd_file_path))
+    band_index = list(range(1, len(binary_cloud_mask_list) + 1))
+    logging.debug("bande index: {}".format(band_index))
+    expression = "+".join(["im1b" + str(i) for i in band_index])
+    logging.debug("expression: {}".format(expression))
+    bandMathApp = band_math([multitemp_cloud_vrt],
+                            ccd_file_path,
+                            expression,
+                            ram,
+                            otb.ImagePixelType_uint16)
+    bandMathApp.ExecuteAndWriteOutput()
+    bandMathApp = None
+    logging.info("End compute_CCD")
+
+def convert_snow_masks_into_binary_cloud_masks(path_out, ram, product_dict):
+    logging.info("Start convert_snow_masks_into_binary_cloud_masks")
+    # convert the snow masks into binary cloud masks
+    expression = "im1b1==" + LABEL_CLOUD + "?1:(im1b1==" + LABEL_NO_DATA + "?1:(im1b1==" + LABEL_NO_DATA_OLD + "?1:0))"
+    binary_cloud_mask_list = convert_mask_list(path_out, product_dict, expression, "cloud", ram,
+                                               mask_format=GDAL_OPT)
+    logging.debug("Binary cloud mask list:")
+    logging.debug(binary_cloud_mask_list)
+    logging.info("Start convert_snow_masks_into_binary_cloud_masks")
+    return binary_cloud_mask_list
+
+
+def convert_snow_masks_into_binary_snow_masks(path_out, ram, product_dict):
+    logging.info("Start convert_snow_masks_into_binary_snow_masks")
+    # convert the snow masks into binary snow masks
+    expression = "(im1b1==" + LABEL_SNOW + ")?1:0"
+    binary_snow_mask_list = convert_mask_list(path_out, product_dict, expression, "snow", ram,
+                                              mask_format=GDAL_OPT)
+    logging.debug("Binary snow mask list:")
+    logging.debug(binary_snow_mask_list)
+    logging.info("End convert_snow_masks_into_binary_snow_masks")
+    return binary_snow_mask_list
+
+
+def merge_product_at_same_date(path_out, product_dict, ram):
+    logging.info("Start merge_product_at_same_date")
+    merge_product_dict = {}
+    for key in list(product_dict.keys()):
+        if len(product_dict[key]) > 1:
+            merged_mask = op.join(path_out, key + "_merged_snow_product.tif")
+            merge_masks_at_same_date(product_dict[key],
+                                     merged_mask,
+                                     LABEL_SNOW,
+                                     ram)
+            merge_product_dict[key] = merged_mask
+        else:
+            merge_product_dict[key] = product_dict[key][0].get_snow_mask()
+    logging.info("End merge_product_at_same_date")
+    return merge_product_dict
 
 
-#TODO move this function in app_wrappers.py along other otb applications
-def gap_filling(img_in, mask_in, img_out, input_dates_file=None,
-                output_dates_file=None, ram=None, out_type=None):
-    """ Create and configure the ImageTimeSeriesGapFilling application
-        using otb.Registry.CreateApplication("ImageTimeSeriesGapFilling")
+def compute_output_dates(date_start, date_stop, output_dates_file_path):
+    logging.info("Start compute_output_dates")
+    output_dates = []
+    if op.exists(output_dates_file_path):
+        logging.debug("Read output_date_file : {}".format(output_dates_file_path))
+        output_dates = read_list_from_file(output_dates_file_path)
+    else:
+        logging.debug("Compute output_dates from {} to {}".format(date_start, date_stop))
+        tmp_date = date_start
+        while tmp_date <= date_stop:
+            output_dates.append(datetime_to_str(tmp_date))
+            tmp_date += timedelta(days=1)
+        write_list_to_file(output_dates_file_path, output_dates)
+    logging.info("End compute_output_dates")
+    return output_dates
+
+
+def load_densification_products(date_margin, date_start, date_stop, densification_path_list, path_tmp, product_dict,
+                                ram):
+    logging.info("Start load_densification_products")
+    # load densification snow products
+    densification_product_dict = load_products(date_start, date_stop, date_margin, densification_path_list, None,
+                                               None)
+    logging.debug("Densification product dict:")
+    logging.debug(densification_product_dict)
+
+    # Get the footprint of the first snow product
+    s2_footprint_ref = product_dict[list(product_dict.keys())[0]][0].get_snow_mask()
+
+    if densification_product_dict:
+        # Reproject the densification products on S2 tile before going further
+        for densifier_product_key in list(densification_product_dict.keys()):
+            for densifier_product in densification_product_dict[densifier_product_key]:
+                original_mask = densifier_product.get_snow_mask()
+                reprojected_mask = op.join(path_tmp,
+                                           densifier_product.product_name + "_reprojected.tif")
+                if not os.path.exists(reprojected_mask):
+                    super_impose_app = super_impose(s2_footprint_ref,
+                                                    original_mask,
+                                                    reprojected_mask,
+                                                    "nn",
+                                                    int(LABEL_NO_DATA),
+                                                    ram,
+                                                    otb.ImagePixelType_uint8)
+                    super_impose_app.ExecuteAndWriteOutput()
+                    super_impose_app = None
+                densifier_product.snow_mask = reprojected_mask
+                logging.debug(densifier_product.snow_mask)
+
+            # Add the products to extend the product_dict
+            if densifier_product_key in list(product_dict.keys()):
+                product_dict[densifier_product_key].extend(densification_product_dict[densifier_product_key])
+            else:
+                product_dict[densifier_product_key] = densification_product_dict[densifier_product_key]
 
-    Keyword arguments:
-    img_in -- the input timeserie image
-    mask_in -- the input masks
-    img_out -- the output image
-    ram -- the ram limitation (not mandatory)
-    out_type -- the output image pixel type  (not mandatory)
-    """
-    if img_in and mask_in and img_out:
-        logging.info("Processing ImageTimeSeriesGapFilling with args:")
-        logging.info("img_in = " + img_in)
-        logging.info("mask_in = " + mask_in)
-        logging.info("img_out = " + img_out)
-
-        gap_filling_app = otb.Registry.CreateApplication("ImageTimeSeriesGapFilling")
-        gap_filling_app.SetParameterString("in", img_in)
-        gap_filling_app.SetParameterString("mask", mask_in)
-        gap_filling_app.SetParameterString("out", img_out)
-
-        gap_filling_app.SetParameterInt("comp", 1)
-        gap_filling_app.SetParameterString("it", "linear")
-
-        if input_dates_file is not None:
-            logging.info("input_dates_file = " + input_dates_file)
-            gap_filling_app.SetParameterString("id", input_dates_file)
-        if output_dates_file is not None:
-            logging.info("output_dates_file = " + output_dates_file)
-            gap_filling_app.SetParameterString("od", output_dates_file)
-        if ram is not None:
-            logging.info("ram = " + str(ram))
-            gap_filling_app.SetParameterString("ram", str(ram))
-        if out_type is not None:
-            logging.info("out_type = " + str(out_type))
-            gap_filling_app.SetParameterOutputImagePixelType("out", out_type)
-        return gap_filling_app
+            logging.debug(product_dict[densifier_product_key])
     else:
-        logging.error("Parameters img_in, img_out and mask_in are required")
+        logging.warning("No Densifying candidate product found!")
+    logging.info("End load_densification_products")
+
+
+def load_products(date_start, date_stop, date_margin, snow_products_list, tile_id=None, product_type=None):
+    logging.info("Start load_products")
+    logging.info("Parsing provided snow products list")
+    product_dict = {}
+    search_start_date = date_start - date_margin
+    search_stop_date = date_stop + date_margin
+    for product_path in snow_products_list:
+        try:
+            product = load_snow_product(str(product_path))
+            logging.info(str(product))
+            current_day = datetime_to_str(product.acquisition_date)
+            test_result = True
+            if search_start_date > product.acquisition_date or \
+                    search_stop_date < product.acquisition_date:
+                test_result = False
+            if (tile_id is not None) and (tile_id not in product.tile_id):
+                test_result = False
+            if (product_type is not None) and (product_type not in product.platform):
+                test_result = False
+            if test_result:
+                if current_day not in list(product_dict.keys()):
+                    product_dict[current_day] = [product]
+                else:
+                    product_dict[current_day].append(product)
+                logging.info("Keeping: {}".format(str(product)))
+            else:
+                logging.warning("Discarding: {}".format(str(product)))
+        except Exception:
+            logging.error("Unable to load product: {}".format(product_path))
+    logging.debug("Product dictionnary:")
+    logging.debug(product_dict)
+    logging.info("End load_products")
+    return product_dict
+
+
+def convert_mask_list(path_tmp, resulting_snow_mask_dict, expression, type_name, ram, mask_format=""):
+    binary_mask_list = []
+    for mask_date in sorted(resulting_snow_mask_dict):
+        binary_mask = op.join(path_tmp,
+                              mask_date + "_" + type_name + "_binary.tif")
+        binary_mask = extract_binary_mask(resulting_snow_mask_dict[mask_date],
+                                          binary_mask,
+                                          expression,
+                                          ram,
+                                          mask_format)
+        binary_mask_list.append(binary_mask)
+    return binary_mask_list
+
+
+def extract_binary_mask(mask_in, mask_out, expression, ram, mask_format=""):
+    bandMathApp = band_math([mask_in],
+                            mask_out + mask_format,
+                            expression,
+                            ram,
+                            otb.ImagePixelType_uint8)
+    bandMathApp.ExecuteAndWriteOutput()
+    return mask_out
+
 
 def merge_masks_at_same_date(snow_product_list, merged_snow_product, threshold=100, ram=None):
     """ This function implement the fusion of multiple snow mask
@@ -104,7 +307,7 @@ def merge_masks_at_same_date(snow_product_list, merged_snow_product, threshold=1
     threshold -- the threshold between valid <= invalid data
     ram -- the ram limitation (not mandatory)
     """
-    logging.info("Merging products into " + merged_snow_product)
+    logging.info("Merging products into {}".format(merged_snow_product))
 
     # the merging is performed according the following selection:
     #   if img1 < threshold use img1 data
@@ -113,12 +316,12 @@ def merge_masks_at_same_date(snow_product_list, merged_snow_product, threshold=1
     # the order of the images in the input list is important:
     #   we expect to have first the main input products
     #   and then the densification products
-    img_index = range(1, len(snow_product_list)+1)
+    img_index = list(range(1, len(snow_product_list) + 1))
     expression_merging = "".join(["(im" + str(i) + "b1<=" + str(threshold) + "?im" + str(i) + "b1:" for i in img_index])
-    expression_merging += "im"+str(img_index[-1])+"b1"
+    expression_merging += "im" + str(img_index[-1]) + "b1"
     expression_merging += "".join([")" for i in img_index])
 
-    img_list= [i.get_snow_mask() for i in snow_product_list]
+    img_list = [i.get_snow_mask() for i in snow_product_list]
     bandMathApp = band_math(img_list,
                             merged_snow_product,
                             expression_merging,
@@ -126,278 +329,3 @@ def merge_masks_at_same_date(snow_product_list, merged_snow_product, threshold=1
                             otb.ImagePixelType_uint8)
     bandMathApp.ExecuteAndWriteOutput()
     bandMathApp = None
-
-""" This module provide the implementation of the snow annual map """
-class snow_annual_map():
-    def __init__(self, params):
-        logging.info("Init snow_multitemp")
-
-        self.tile_id = params.get("tile_id")
-        self.date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
-        self.date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
-        self.date_margin = timedelta(days=params.get("date_margin", 0))
-        self.output_dates_filename = params.get("output_dates_filename", None)
-        self.mode = params.get("mode", "RUNTIME")
-
-        # Compute an id like T31TCH_20170831_20180901 to label the map
-        self.processing_id = str(self.tile_id + "_" + \
-                             datetime_to_str(self.date_start) + "_" + \
-                             datetime_to_str(self.date_stop))
-
-        # Retrive the input_products_list
-        self.input_path_list = params.get("input_products_list", [])
-
-        # @TODO an available path_tmp must be provide or the TMPDIR variable must be avaible
-        self.path_tmp = str(params.get("path_tmp", os.environ.get('TMPDIR')))
-        if not os.path.exists(self.path_tmp):
-            logging.error(self.path_tmp + ", the target does not exist and can't be used for processing")
-
-        self.path_out = op.join(str(params.get("path_out")), self.processing_id)
-
-        if not os.path.exists(self.path_out):
-            os.mkdir(self.path_out)
-
-        self.ram = params.get("ram", 512)
-        self.nbThreads = params.get("nbThreads", None)
-
-        self.use_densification = params.get("use_densification", False)
-        if self.use_densification:
-            self.densification_path_list = params.get("densification_products_list", [])
-
-        # Define label for output snow product (cf snow product format)
-        self.label_no_snow = "0"
-        self.label_snow = "100"
-        self.label_cloud = "205"
-        self.label_no_data = "254"
-
-        # Build useful paths
-        self.input_dates_filename = op.join(self.path_tmp, "input_dates.txt")
-        if not self.output_dates_filename:
-            self.output_dates_filename = op.join(self.path_tmp, "output_dates.txt")
-        self.multitemp_snow_vrt = op.join(self.path_tmp, "multitemp_snow_mask.vrt")
-        self.multitemp_cloud_vrt = op.join(self.path_tmp, "multitemp_cloud_mask.vrt")
-        self.gapfilled_timeserie = op.join(self.path_tmp, "DAILY_SNOW_MASKS_" + self.processing_id + ".tif")
-        self.annual_snow_map = op.join(self.path_tmp, "SNOW_OCCURENCE_" + self.processing_id + ".tif")
-        self.cloud_occurence_img = op.join(self.path_tmp, "CLOUD_OCCURENCE_" + self.processing_id +".tif")
-
-    def run(self):
-        logging.info("Run snow_annual_map")
-
-        # Set maximum ITK threads
-        if self.nbThreads:
-            os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(self.nbThreads)
-
-        # search matching snow product
-        self.product_dict = self.load_products(self.input_path_list, self.tile_id, None)
-        logging.debug("Product dictionnary:")
-        logging.debug(self.product_dict)
-
-        # Exiting with error if none of the input products were loaded
-        if not self.product_dict:
-            logging.error("Empty product list!")
-            return
-
-        # Do the loading of the products to densify the timeserie
-        if self.use_densification:
-            # load densification snow products
-            densification_product_dict = self.load_products(self.densification_path_list, None, None)
-            logging.info("Densification product dict:")
-            logging.info(densification_product_dict)
-
-            # Get the footprint of the first snow product
-            s2_footprint_ref = self.product_dict[list(self.product_dict.keys())[0]][0].get_snow_mask()
-
-            if densification_product_dict:
-                # Reproject the densification products on S2 tile before going further
-                for densifier_product_key in densification_product_dict.keys():
-                    for densifier_product in densification_product_dict[densifier_product_key]:
-                        original_mask = densifier_product.get_snow_mask()
-                        reprojected_mask = op.join(self.path_tmp,
-                                                   densifier_product.product_name + "_reprojected.tif")
-                        if not os.path.exists(reprojected_mask):
-                            super_impose_app = super_impose(s2_footprint_ref,
-                                                            original_mask,
-                                                            reprojected_mask,
-                                                            "nn",
-                                                            int(self.label_no_data),
-                                                            self.ram,
-                                                            otb.ImagePixelType_uint8)
-                            super_impose_app.ExecuteAndWriteOutput()
-                            super_impose_app = None
-                        densifier_product.snow_mask = reprojected_mask
-                        logging.debug(densifier_product.snow_mask)
-
-                    # Add the products to extend the self.product_dict
-                    if densifier_product_key in self.product_dict.keys():
-                        self.product_dict[densifier_product_key].extend(densification_product_dict[densifier_product_key])
-                    else:
-                        self.product_dict[densifier_product_key] = densification_product_dict[densifier_product_key]
-            else:
-                logging.warning("No Densifying candidate product found!")
-
-        # re-order products according acquisition date
-        input_dates = sorted(self.product_dict.keys())
-        write_list_to_file(self.input_dates_filename, input_dates)
-
-        # compute or retrive the output dates
-        output_dates = []
-        if op.exists(self.output_dates_filename):
-            output_dates = read_list_from_file(self.output_dates_filename)
-        else:
-            tmp_date = self.date_start
-            while tmp_date <= self.date_stop:
-                output_dates.append(datetime_to_str(tmp_date))
-                tmp_date += timedelta(days=1)
-            write_list_to_file(self.output_dates_filename, output_dates)
-
-        shutil.copy2(self.input_dates_filename, self.path_out)
-        shutil.copy2(self.output_dates_filename, self.path_out)
-
-        # merge products at the same date
-        self.resulting_snow_mask_dict={}
-        for key in self.product_dict.keys():
-            if len(self.product_dict[key]) > 1:
-                merged_mask = op.join(self.path_tmp, key + "_merged_snow_product.tif")
-                merge_masks_at_same_date(self.product_dict[key],
-                                         merged_mask,
-                                         self.label_snow,
-                                         self.ram)
-                self.resulting_snow_mask_dict[key] = merged_mask
-            else:
-                self.resulting_snow_mask_dict[key] = self.product_dict[key][0].get_snow_mask()
-
-        # convert the snow masks into binary snow masks
-        expression = "(im1b1==" + self.label_snow + ")?1:0"
-        self.binary_snowmask_list = self.convert_mask_list(expression, "snow", GDAL_OPT)
-        logging.debug("Binary snow mask list:")
-        logging.debug(self.binary_snowmask_list)
-
-        # convert the snow masks into binary cloud masks
-        expression = "im1b1=="+self.label_cloud+"?1:(im1b1=="+self.label_no_data+"?1:0)"
-        self.binary_cloudmask_list = self.convert_mask_list(expression, "cloud", GDAL_OPT)
-        logging.debug("Binary cloud mask list:")
-        logging.debug(self.binary_cloudmask_list)
-
-        # build cloud mask vrt
-        logging.info("Building multitemp cloud mask vrt")
-        logging.info("cloud vrt: " + self.multitemp_cloud_vrt)
-        gdal.BuildVRT(self.multitemp_cloud_vrt,
-                      self.binary_cloudmask_list,
-                      separate=True)
-
-        # generate the summary map
-        band_index = range(1, len(self.binary_cloudmask_list)+1)
-        expression = "+".join(["im1b" + str(i) for i in band_index])
-
-        bandMathApp = band_math([self.multitemp_cloud_vrt],
-                                self.cloud_occurence_img,
-                                expression,
-                                self.ram,
-                                otb.ImagePixelType_uint16)
-        bandMathApp.ExecuteAndWriteOutput()
-        bandMathApp = None
-
-        logging.info("Copying outputs from tmp to output folder")
-        shutil.copy2(self.cloud_occurence_img, self.path_out)
-
-        # build snow mask vrt
-        logging.info("Building multitemp snow mask vrt")
-        logging.info("snow vrt: " + self.multitemp_snow_vrt)
-        gdal.BuildVRT(self.multitemp_snow_vrt,
-                      self.binary_snowmask_list,
-                      separate=True)
-
-        # gap filling the snow timeserie
-        app_gap_filling = gap_filling(self.multitemp_snow_vrt,
-                                      self.multitemp_cloud_vrt,
-                                      self.gapfilled_timeserie+GDAL_OPT,
-                                      self.input_dates_filename,
-                                      self.output_dates_filename,
-                                      self.ram,
-                                      otb.ImagePixelType_uint8)
-
-        # @TODO the mode is for now forced to DEBUG in order to generate img on disk
-        #img_in = get_app_output(app_gap_filling, "out", self.mode)
-        #if self.mode == "DEBUG":
-            #shutil.copy2(self.gapfilled_timeserie, self.path_out)
-            #app_gap_filling = None
-
-        img_in = get_app_output(app_gap_filling, "out", "DEBUG")
-        shutil.copy2(self.gapfilled_timeserie, self.path_out)
-        app_gap_filling = None
-
-        # generate the annual map
-        band_index = range(1, len(output_dates)+1)
-        expression = "+".join(["im1b" + str(i) for i in band_index])
-
-        bandMathApp = band_math([img_in],
-                                self.annual_snow_map,
-                                expression,
-                                self.ram,
-                                otb.ImagePixelType_uint16)
-        bandMathApp.ExecuteAndWriteOutput()
-        bandMathApp = None
-
-        logging.info("Copying outputs from tmp to output folder")
-        shutil.copy2(self.annual_snow_map, self.path_out)
-
-        logging.info("End of snow_annual_map")
-
-        if self.mode == "DEBUG":
-            dest_debug_dir = op.join(self.path_out, "tmpdir")
-            if op.exists(dest_debug_dir):
-                shutil.rmtree(dest_debug_dir)
-            shutil.copytree(self.path_tmp, dest_debug_dir)
-
-    def load_products(self, snow_products_list, tile_id=None, product_type=None):
-        logging.info("Parsing provided snow products list")
-        product_dict = {}
-        search_start_date = self.date_start - self.date_margin
-        search_stop_date = self.date_stop + self.date_margin
-        for product_path in snow_products_list:
-            try:
-                product = load_snow_product(str(product_path))
-                logging.info(str(product))
-                current_day = datetime_to_str(product.acquisition_date)
-                test_result = True
-                if search_start_date > product.acquisition_date or \
-                   search_stop_date < product.acquisition_date:
-                   test_result = False
-                if (tile_id is not None) and (tile_id not in product.tile_id):
-                   test_result = False
-                if (product_type is not None) and (product_type not in product.platform):
-                   test_result = False
-                if test_result:
-                    if current_day not in product_dict.keys():
-                        product_dict[current_day] = [product]
-                    else:
-                        product_dict[current_day].append(product)
-                    logging.info("Keeping: " + str(product))
-                else:
-                    logging.warning("Discarding: " + str(product))
-            except Exception:
-                logging.error("Unable to load product :" + product_path)
-        return product_dict
-
-        
-    def convert_mask_list(self, expression, type_name, mask_format=""):
-        binary_mask_list = []
-        for mask_date in sorted(self.resulting_snow_mask_dict):
-            binary_mask = op.join(self.path_tmp,
-                                  mask_date + "_" + type_name + "_binary.tif")
-            binary_mask = self.extract_binary_mask(self.resulting_snow_mask_dict[mask_date],
-                                                   binary_mask,
-                                                   expression,
-                                                   mask_format)
-            binary_mask_list.append(binary_mask)
-        return binary_mask_list
-
-
-    def extract_binary_mask(self, mask_in, mask_out, expression, mask_format=""):
-        bandMathApp = band_math([mask_in],
-                                mask_out + mask_format,
-                                expression,
-                                self.ram,
-                                otb.ImagePixelType_uint8)
-        bandMathApp.ExecuteAndWriteOutput()
-        return mask_out
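As a sanity check on the two expression builders used above (merge_masks_at_same_date and compute_SCD/compute_CCD), a small sketch of what they emit for a hypothetical two-product date and a three-day series:

    # merge_masks_at_same_date: nested ternary keeping the first valid (<= threshold) value
    threshold = 100
    img_index = list(range(1, 3))  # two products at the same date
    merge_exp = "".join("(im{0}b1<={1}?im{0}b1:".format(i, threshold) for i in img_index)
    merge_exp += "im{}b1".format(img_index[-1]) + ")" * len(img_index)
    print(merge_exp)  # (im1b1<=100?im1b1:(im2b1<=100?im2b1:im2b1))

    # compute_SCD / compute_CCD: per-pixel sum over the band index
    scd_exp = "+".join("im1b{}".format(i) for i in range(1, 4))  # three-day series
    print(scd_exp)  # im1b1+im1b2+im1b3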
diff --git a/python/s2snow/snow_detector.py b/python/s2snow/snow_detector.py
index 8bda74ee..5be86629 100644
--- a/python/s2snow/snow_detector.py
+++ b/python/s2snow/snow_detector.py
@@ -1,20 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-#=========================================================================
 #
-#  Program:   lis
-#  Language:  Python
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
 #
-#  Copyright (c) Simon Gascoin
-#  Copyright (c) Manuel Grizonnet
+# This file is part of Let-it-snow (LIS)
 #
-#  See lis-copyright.txt for details.
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
 #
-#  This software is distributed WITHOUT ANY WARRANTY; without even
-#  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-#  PURPOSE.  See the above copyright notices for more information.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 #
-#=========================================================================
 
 import os
 import os.path as op
@@ -36,7 +40,7 @@ from s2snow.app_wrappers import compute_snow_mask, compute_cloud_mask
 from s2snow.app_wrappers import band_math, compute_snow_line
 
 # Import utilities for snow detection
-from s2snow.utils import polygonize, extract_band, burn_polygons_edges, composition_RGB
+from s2snow.utils import polygonize, extract_band, edit_raster_from_shapefile, edit_raster_from_raster, edit_nodata_value
 from s2snow.utils import compute_percent, format_SEB_VEC_values, get_raster_as_array
 
 # this allows GDAL to throw Python Exceptions
@@ -66,7 +70,7 @@ class snow_detector:
         # (if -1 the target resolution is equal to the max resolution of the input band)
         self.target_resolution = general.get("target_resolution", -1)
 
-        # Parse vector option
+        # Parse vector options
         vector_options = data["vector"]
         self.generate_vector = vector_options.get("generate_vector", True)
         self.generate_intermediate_vectors = vector_options.get("generate_intermediate_vectors", False)
@@ -74,6 +78,17 @@ class snow_detector:
         self.gdal_trace_outline_dp_toler = vector_options.get("gdal_trace_outline_dp_toler", 0)
         self.gdal_trace_outline_min_area = vector_options.get("gdal_trace_outline_min_area", 0)
 
+        # Parse FSC options
+        if data.get("fsc", False):
+            self.dofsc = data["fsc"]['dofsc']
+            self.tcd_path = str(data["fsc"]['tcd'])
+            self.fscOg_Eq = data["fsc"]['fscOg_Eq']
+            self.fscToc_Eq = data["fsc"]['fscToc_Eq']
+            self.cosims_mode = data["fsc"]['cosims_mode']
+        else:
+            self.dofsc = False
+            # cosims_mode is read in detect_snow(); default it when no "fsc"
+            # block is provided to avoid an AttributeError
+            self.cosims_mode = False
+
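+        # Illustrative "fsc" configuration block (shape assumed from the keys
+        # parsed above; the equations match the examples referenced in passfsc):
+        #   "fsc": {
+        #       "dofsc": true,
+        #       "tcd": "path/to/tree_cover_density.tif",
+        #       "fscToc_Eq": "1.45*ndsi-0.01",
+        #       "fscOg_Eq": "fscToc/(1-tcd)",
+        #       "cosims_mode": false
+        #   }
+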
         # Parse cloud data
         cloud = data["cloud"]
         self.rf = cloud.get("rf")
@@ -124,6 +139,7 @@ class snow_detector:
         gb_path_extracted = extract_band(inputs, "green_band", self.path_tmp, self.nodata)
         rb_path_extracted = extract_band(inputs, "red_band", self.path_tmp, self.nodata)
         sb_path_extracted = extract_band(inputs, "swir_band", self.path_tmp, self.nodata)
 
         # Keep the input product directory basename as product_id
         self.product_id = op.basename(op.dirname(inputs["green_band"]["path"]))
@@ -187,6 +203,23 @@ class snow_detector:
                 yRes=self.target_resolution)
         else:
             sb_path_resampled = sb_path_extracted
+
+        # apply the water mask by setting masked pixels to nodata in the
+        # extracted bands so that they are excluded from snow detection
+        if 'water_mask' in data:
+            if data['water_mask']['apply']:
+                water_mask_path = data['water_mask']['water_mask_path']
+                water_mask_type = water_mask_path.split('.')[-1].lower()
+                if water_mask_type == 'tif':
+                    self.water_mask_raster_values = data['water_mask']['water_mask_raster_values']
+                    edit_raster_from_raster(rb_path_resampled, water_mask_path, src_values=self.water_mask_raster_values, applied_value=self.nodata)
+                    edit_raster_from_raster(gb_path_resampled, water_mask_path, src_values=self.water_mask_raster_values, applied_value=self.nodata)
+                    edit_raster_from_raster(sb_path_resampled, water_mask_path, src_values=self.water_mask_raster_values, applied_value=self.nodata)
+                elif water_mask_type == 'shp':
+                    edit_raster_from_shapefile(rb_path_resampled, water_mask_path, applied_value=self.nodata)
+                    edit_raster_from_shapefile(gb_path_resampled, water_mask_path, applied_value=self.nodata)
+                    edit_raster_from_shapefile(sb_path_resampled, water_mask_path, applied_value=self.nodata)
+                else:
+                    raise IOError('Input water_mask_path must either be a GeoTIFF raster (.tif) or a shapefile (.shp)')
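+        # Illustrative "water_mask" configuration block (shape assumed from
+        # the keys read above; the path and values are examples only):
+        #   "water_mask": {
+        #       "apply": true,
+        #       "water_mask_path": "path/to/water_mask.tif",
+        #       "water_mask_raster_values": [1]
+        #   }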
 
         # build vrt
         logging.info("building bands vrt")
@@ -223,7 +256,7 @@ class snow_detector:
         self.label_no_snow = "0"
         self.label_snow = "100"
         self.label_cloud = "205"
-        self.label_no_data = "254"
+        self.label_no_data = "255"
 
         # Build useful paths
         self.pass1_path = op.join(self.path_tmp, "pass1.tif")
@@ -245,9 +278,11 @@ class snow_detector:
         self.snow_all_path = op.join(self.product_path, "LIS_SNOW_ALL.TIF")
         self.final_mask_path = op.join(self.product_path, "LIS_SEB.TIF")
         self.final_mask_vec_path = op.join(self.product_path, "LIS_SEB_VEC.shp")
-        self.composition_path = op.join(self.product_path, "LIS_COMPO.TIF")
         self.histogram_path = op.join(self.product_path, "LIS_HISTO.TXT")
         self.metadata_path = op.join(self.product_path, "LIS_METADATA.XML")
+        self.ndsi_path = op.join(self.product_path, "LIS_NDSI.TIF")
+        self.fscToc_path = op.join(self.product_path, "LIS_FSCTOC.TIF")
+        self.fscOg_path = op.join(self.product_path, "LIS_FSCOG.TIF")
 
     def detect_snow(self, nbPass):
         # Set maximum ITK threads
@@ -279,43 +314,27 @@ class snow_detector:
             self.pass1()
         if nbPass == 2:
             self.pass2()
+        if self.dofsc:
+            self.passfsc()
 
-        # RGB composition
-        composition_RGB(
-            self.img,
-            self.composition_path,
-            self.nSWIR,
-            self.nRed,
-            self.nGreen,
-            self.multi)
-
-        # Gdal polygonize (needed to produce composition)
-        # TODO: Study possible loss and issue with vectorization product
-        if self.generate_vector:
-            polygonize(
-                self.final_mask_path,
-                self.final_mask_path,
-                self.final_mask_vec_path,
-                self.use_gdal_trace_outline,
-                self.gdal_trace_outline_min_area,
-                self.gdal_trace_outline_dp_toler)
-
-        # Burn polygons edges on the composition
-        # TODO add pass1 snow polygon in yellow
-        burn_polygons_edges(
-            self.composition_path,
-            self.final_mask_path,
-            self.label_snow,
-            self.label_cloud,
-            self.ram)
-
-        # Product formating
-        #~ format_SEB_VEC_values(self.final_mask_vec_path,
-                              #~ self.label_snow,
-                              #~ self.label_cloud,
-                              #~ self.label_no_data)
-        self.create_metadata()
+        if self.cosims_mode:
+            self.create_cosims_metadata()
+        else:
 
+            # Gdal polygonize (needed to produce composition)
+            # TODO: Study possible loss and issue with vectorization product
+            if self.generate_vector:
+                polygonize(
+                    self.final_mask_path,
+                    self.final_mask_path,
+                    self.final_mask_vec_path,
+                    self.use_gdal_trace_outline,
+                    self.gdal_trace_outline_min_area,
+                    self.gdal_trace_outline_dp_toler)
+
+            self.create_metadata()
+
     def create_metadata(self):
         # Compute and create the content for the product metadata file.
         snow_percent = compute_percent(self.final_mask_path,
@@ -342,6 +361,11 @@ class snow_detector:
             name='CloudPercent').text = str(cloud_percent)
         et = etree.ElementTree(root)
         et.write(self.metadata_path, pretty_print=True)
+
+    def create_cosims_metadata(self):
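+        # CoSIMS-specific metadata is not implemented yet in this version;
+        # fall back to the standard product metadata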
+        self.create_metadata()
 
     def extract_all_clouds(self):
         if self.mode == 'lasrc':
@@ -367,14 +391,14 @@ class snow_detector:
             else:
                 condition_all_clouds = "im1b1 > 0"
 
-                bandMathAllCloud = band_math(
-                    [self.cloud_init],
-                    self.all_cloud_path + GDAL_OPT,
-                    "("+condition_all_clouds+" > 0)?1:0",
-                    self.ram,
-                    otb.ImagePixelType_uint8)
-                bandMathAllCloud.ExecuteAndWriteOutput()
-                bandMathAllCloud = None
+            bandMathAllCloud = band_math(
+                [self.cloud_init],
+                self.all_cloud_path + GDAL_OPT,
+                "("+condition_all_clouds+" > 0)?1:0",
+                self.ram,
+                otb.ImagePixelType_uint8)
+            bandMathAllCloud.ExecuteAndWriteOutput()
+            bandMathAllCloud = None
 
     def extract_cloud_shadows(self):
         shadow_mask_path = op.join(self.path_tmp, "shadow_mask.tif") + GDAL_OPT
@@ -534,12 +558,12 @@ class snow_detector:
         logging.info("Start pass 1")
 
         # Pass1 : NDSI threshold
-        ndsi_formula = "(im1b" + str(self.nGreen) + "-im1b" + str(self.nSWIR) + \
+        self.ndsi_formula = "(im1b" + str(self.nGreen) + "-im1b" + str(self.nSWIR) + \
             ")/(im1b" + str(self.nGreen) + "+im1b" + str(self.nSWIR) + ")"
-        logging.info("ndsi formula: "+ ndsi_formula)
+        logging.info("ndsi formula: "+ self.ndsi_formula)
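+        # e.g. with nGreen=1 and nSWIR=3 (the band indices depend on the
+        # input product; the values here are illustrative) this expands to
+        # "(im1b1-im1b3)/(im1b1+im1b3)"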
 
         # NDSI condition (ndsi > x and not cloud)
-        condition_ndsi = "(im2b1!=1 and (" + ndsi_formula + ")>" + str(self.ndsi_pass1) + " "
+        condition_ndsi = "(im2b1!=1 and (" + self.ndsi_formula + ")>" + str(self.ndsi_pass1) + " "
 
         condition_pass1 = condition_ndsi + \
             " and im1b" + str(self.nRed) + "> " + str(self.rRed_pass1) + ")"
@@ -616,7 +640,7 @@ class snow_detector:
 
         # compute individual snow area size
         (labels, label_counts)= np.unique(snowlabels, return_counts=True)
-        labels_area = dict(zip(labels, label_counts))
+        labels_area = dict(list(zip(labels, label_counts)))
         logging.debug(labels_area)
 
         logging.debug("Start loop on snow areas")
@@ -636,7 +660,7 @@ class snow_detector:
 
                 # Compute percent of surronding cloudy pixels
                 cloud_contour = cloud_mask[contour]
-                # print cloud_contour
+                # print(cloud_contour)
                 logging.debug("Contour processing done.")
 
                 result = np.bincount(cloud_contour)
@@ -708,12 +732,12 @@ class snow_detector:
         if snow_fraction > self.fsnow_total_lim:
             # Test zs value (-1 means that no zs elevation was found)
             if self.zs != -1:
-                # NDSI threshold again
-                ndsi_formula = "(im1b" + str(self.nGreen) + "-im1b" + str(self.nSWIR) + \
-                               ")/(im1b" + str(self.nGreen) + "+im1b" + str(self.nSWIR) + ")"
+                # self.ndsi_formula was computed once in pass1 and is
+                # reused here
                 
                 condition_pass2 = "(im3b1 != 1) and (im2b1>" + str(self.zs) + ")" \
-                                  + " and (" + ndsi_formula + "> " + str(self.ndsi_pass2) + ")" \
+                                  + " and (" + self.ndsi_formula + "> " + str(self.ndsi_pass2) + ")" \
                                   + " and (im1b" + str(self.nRed) + ">" + str(self.rRed_pass2) + ")"
 
                 bandMathPass2 = band_math([self.img,
@@ -832,3 +856,45 @@ class snow_detector:
                                   self.ram,
                                   otb.ImagePixelType_uint8)
         bandMathPass3.ExecuteAndWriteOutput()
+        bandMathPass3 = None
+
+    def passfsc(self):
+        # write NDSI x 100 (0-100), no snow (0), cloud (205) and nodata (255)
+        expression = "(im2b1 == 100)?100*"+str(self.ndsi_formula)+":im2b1"
+        bandMathApp = band_math([self.img,self.final_mask_path],
+                                    self.ndsi_path,
+                                    expression,
+                                    self.ram,
+                                    otb.ImagePixelType_uint8)
+        bandMathApp.ExecuteAndWriteOutput()
+        bandMathApp = None
+        edit_nodata_value(self.ndsi_path, nodata_value=int(self.label_no_data))
+
+        # write top-of-canopy FSC (0-100), no snow (0), cloud (205) and nodata (255)
+        # e.g. fscToc_Eq = "1.45*ndsi-0.01"
+        eq = "min(" + str(self.fscToc_Eq) + ",1)"
+        exp = eq.replace("ndsi", "im1b1/100")  # ndsi was written in %
+        expression = "(im2b1 == 100) ? 100*"+exp+" : im2b1"
+        bandMathApp = band_math([self.ndsi_path,self.final_mask_path],
+                                    self.fscToc_path,
+                                    expression,
+                                    self.ram,
+                                    otb.ImagePixelType_uint8)
+        bandMathApp.ExecuteAndWriteOutput()
+        bandMathApp = None
+        edit_nodata_value(self.fscToc_path, nodata_value=int(self.label_no_data))
+
+        # write on-ground FSC (0-100), no snow (0), cloud (205) and nodata (255)
+        # e.g. fscOg_Eq = "fscToc/(1-tcd)"
+        eq = "min(" + str(self.fscOg_Eq) + ",1)"
+        exp = eq.replace("fscToc", "im1b1/100")  # fscToc was written in %
+        exp = exp.replace("tcd", "im3b1/100")  # tcd is given in %
+        expression = "(im2b1 == 100) ? ( im3b1 > 100 ? im1b1 : 100*"+exp+" ) : im2b1"
+        bandMathApp = band_math([self.fscToc_path,self.final_mask_path,self.tcd_path],
+                                    self.fscOg_path,
+                                    expression,
+                                    self.ram,
+                                    otb.ImagePixelType_uint8)
+        bandMathApp.ExecuteAndWriteOutput()
+        bandMathApp = None
+        edit_nodata_value(self.fscOg_path, nodata_value=int(self.label_no_data))
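+
+        # For reference, with the example equations above
+        # (fscToc_Eq = "1.45*ndsi-0.01", fscOg_Eq = "fscToc/(1-tcd)")
+        # the generated BandMath expressions are:
+        #   FSCTOC: "(im2b1 == 100) ? 100*min(1.45*im1b1/100-0.01,1) : im2b1"
+        #   FSCOG:  "(im2b1 == 100) ? ( im3b1 > 100 ? im1b1 :
+        #           100*min(im1b1/100/(1-im3b1/100),1) ) : im2b1"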
diff --git a/python/s2snow/snow_product_parser.py b/python/s2snow/snow_product_parser.py
index 927d2954..d2aa394d 100644
--- a/python/s2snow/snow_product_parser.py
+++ b/python/s2snow/snow_product_parser.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import os
 import os.path as op
@@ -145,23 +164,23 @@ def main():
         format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 
     a = snow_product("/work/OT/siaa/Theia/S2L2A/data_production_muscate_juillet2017/L2B-SNOW/SENTINEL2A_20170314-104411-573_L2B-SNOW_T31TGK_D_V1-0")
-    print a.get_snow_mask()
+    print(a.get_snow_mask())
     a.extract_snow_mask(".")
-    print a.get_snow_mask()
-    print a.get_metadata()
-    print a.acquisition_date
+    print(a.get_snow_mask())
+    print(a.get_metadata())
+    print(a.acquisition_date)
 
     b = snow_product("/work/OT/siaa/Theia/Neige/output_muscate_v2pass2red40/T31TGL/SENTINEL2A_20151230-105153-392_L2A_T31TGL_D_V1-0")
-    print b.get_snow_mask()
+    print(b.get_snow_mask())
     b.extract_snow_mask(".")
-    print b.get_snow_mask()
-    print b.get_metadata()
+    print(b.get_snow_mask())
+    print(b.get_metadata())
 
     c = snow_product("/work/OT/siaa/Theia/Neige/output_muscate_v2pass2red40/Landsat-8/D0005H0001/LANDSAT8_OLITIRS_XS_20160812_N2A_France-MetropoleD0005H0001")
-    print c.get_snow_mask()
+    print(c.get_snow_mask())
     c.extract_snow_mask(".")
-    print c.get_snow_mask()
-    print c.get_metadata()
+    print(c.get_snow_mask())
+    print(c.get_metadata())
 
 if __name__ == '__main__':
     main()
diff --git a/python/s2snow/utils.py b/python/s2snow/utils.py
index 4ff104bf..4fdea6e0 100644
--- a/python/s2snow/utils.py
+++ b/python/s2snow/utils.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import os
 import os.path as op
@@ -14,6 +33,7 @@ from distutils import spawn
 
 import numpy as np
 
+import ogr
 import gdal
 import gdalconst
 from gdalconst import GA_ReadOnly
@@ -35,7 +55,8 @@ def call_subprocess(process_list):
         stderr=subprocess.PIPE)
     out, err = process.communicate()
     logging.info(out)
-    sys.stderr.write(err)
+    logging.error(err)
+    sys.stderr.write(str(err))
 
 def str_to_datetime(date_string, format="%Y%m%d"):
     """ Return the datetime corresponding to the input string
@@ -116,7 +137,7 @@ def polygonize(input_img, input_mask, output_vec, use_gina, min_area, dp_toler):
         call_subprocess([
             "ogr2ogr",
             "-sql",
-            'SELECT value AS DN from \"' +
+            'SELECT value AS SEB from \"' +
             str(unique_filename) +
             '\" where value != 0',
             output_vec,
@@ -195,7 +216,6 @@ def extract_band(inputs, band, path_tmp, noData):
     path = data_band["path"]
     band_no = data_band["noBand"]
 
-    dataset = gdal.Open(path, GA_ReadOnly)
     path_extracted = op.join(path_tmp, band+"_extracted.tif")
 
     logging.info("extracting "+band)
@@ -208,6 +228,60 @@ def extract_band(inputs, band, path_tmp, noData):
             bandList=[band_no])
  
     return path_extracted
+
+
+def edit_nodata_value(raster_file, nodata_value=None, bands=None):
+    """Set, change or remove the nodata value of a raster, optionally on a
+    subset of bands."""
+    ds = gdal.Open(raster_file, gdal.GA_Update)
+    # iterate over each band
+    for band_no in range(1, ds.RasterCount + 1):
+        if bands is not None and band_no not in bands:
+            # this band was not selected for edition, skip it
+            continue
+        band = ds.GetRasterBand(band_no)
+        if nodata_value is None:
+            # remove the nodata value
+            band.DeleteNoDataValue()
+        else:
+            # change the nodata value
+            band.SetNoDataValue(nodata_value)
+    ds = None  # flush changes to disk
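+
+# Example usage (illustrative file name): set 255 as the nodata value of
+# band 1 only:
+#   edit_nodata_value("LIS_FSCTOC.TIF", nodata_value=255, bands=[1])
+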
+def edit_raster_from_shapefile(raster_target, src_shapefile, applied_value=0):
+    shape_mask = ogr.Open(src_shapefile)
+    ds = gdal.Open(raster_target, gdal.GA_Update)
+    # burn the applied value into band 1 for every layer of the shapefile
+    for layer_index in range(shape_mask.GetLayerCount()):
+        layer = shape_mask.GetLayer(layer_index)
+        gdal.RasterizeLayer(ds, [1], layer, burn_values=[applied_value])
+    ds = None  # flush changes to disk
+
+
+def edit_raster_from_raster(raster_target, src_raster, src_values, applied_value=0, layered_processing=False):
+    ds_mask = gdal.Open(src_raster, gdal.GA_ReadOnly)
+    band_mask = ds_mask.GetRasterBand(1)
+    ds = gdal.Open(raster_target, gdal.GA_Update)
+    band = ds.GetRasterBand(1)
+    if band.XSize != band_mask.XSize or band.YSize != band_mask.YSize:
+        raise IOError('array sizes from files do not match:\n%s'%('\n'.join([' - %s'%el for el in [raster_target, src_raster]])))
+
+    if layered_processing:
+        # process the raster line by line to avoid loading the full arrays
+        # in memory
+        for ii in range(band.YSize - 1, -1, -1):
+            data = band.ReadAsArray(xoff=0, yoff=ii, win_xsize=band.XSize, win_ysize=1, buf_xsize=band.XSize, buf_ysize=1)
+            data_mask = band_mask.ReadAsArray(xoff=0, yoff=ii, win_xsize=band.XSize, win_ysize=1, buf_xsize=band.XSize, buf_ysize=1)
+            for val in src_values:
+                data[data_mask == val] = applied_value
+            band.WriteArray(data, xoff=0, yoff=ii)
+    else:
+        data = band.ReadAsArray()
+        data_mask = band_mask.ReadAsArray()
+        for val in src_values:
+            data[data_mask == val] = applied_value
+        band.WriteArray(data)
+    ds = None  # flush changes to disk
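+
+# Example usage (illustrative paths and values): flag water pixels (value 1
+# in a mask raster of the same size) as nodata in an extracted band:
+#   edit_raster_from_raster("green_band_extracted.tif", "water_mask.tif",
+#                           src_values=[1], applied_value=-10000)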
+
 
 
 def apply_color_table(raster_file_name, color_table):
@@ -249,9 +323,9 @@ def format_SEB_VEC_values(path, snow_label, cloud_label, nodata_label):
     table = op.splitext(op.basename(path))[0]
     ds = gdal.OpenEx(path, gdal.OF_VECTOR | gdal.OF_UPDATE)
     ds.ExecuteSQL("ALTER TABLE " + table + " ADD COLUMN type varchar(15)")
-    ds.ExecuteSQL("UPDATE " + table + " SET type='snow' WHERE DN="+\
+    ds.ExecuteSQL("UPDATE " + table + " SET type='snow' WHERE SEB="+\
                   snow_label, dialect="SQLITE")
-    ds.ExecuteSQL("UPDATE " + table + " SET type='cloud' WHERE DN="+\
+    ds.ExecuteSQL("UPDATE " + table + " SET type='cloud' WHERE SEB="+\
                   cloud_label, dialect="SQLITE")
-    ds.ExecuteSQL("UPDATE " + table + " SET type='no data' WHERE DN == "+\
+    ds.ExecuteSQL("UPDATE " + table + " SET type='no data' WHERE SEB == "+\
                   nodata_label, dialect="SQLITE")
diff --git a/python/s2snow/version.py b/python/s2snow/version.py
index c3e2ac98..65fb410b 100644
--- a/python/s2snow/version.py
+++ b/python/s2snow/version.py
@@ -1 +1,23 @@
-VERSION = "1.5"
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+VERSION = "1.6"
diff --git a/python/setup.py b/python/setup.py
index 5256a0bc..07d8411c 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -1,3 +1,25 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 from distutils.core import setup
 
 setup(name='s2snow',
diff --git a/python/setup.py.in b/python/setup.py.in
index 649e4a50..313f10b7 100644
--- a/python/setup.py.in
+++ b/python/setup.py.in
@@ -1,3 +1,22 @@
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 from distutils.core import setup
 
 setup(name='s2snow',
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 746a8b57..4cc47992 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -1,3 +1,23 @@
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 # Build our library
 add_library( histo_utils SHARED histo_utils.cxx )
 target_link_libraries(histo_utils ${OTB_LIBRARIES})
diff --git a/src/ComputeCloudMask.cxx b/src/ComputeCloudMask.cxx
index c7cc2bef..7f9fe622 100644
--- a/src/ComputeCloudMask.cxx
+++ b/src/ComputeCloudMask.cxx
@@ -1,3 +1,22 @@
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #include "otbWrapperApplication.h"
 #include "otbWrapperApplicationFactory.h"
 #include "otbWrapperChoiceParameter.h"
@@ -35,7 +54,6 @@ public:
     SetDescription("Compute Cloud Mask application");
 
     // Documentation
-    SetDocName("Application for Compute Cloud Mask");
     SetDocLongDescription("This application does compute the cloud mask");
     SetDocLimitations("None");
     SetDocAuthors("Germain SALGUES");
diff --git a/src/ComputeContours.cxx b/src/ComputeContours.cxx
index c635de3f..77b671ad 100644
--- a/src/ComputeContours.cxx
+++ b/src/ComputeContours.cxx
@@ -1,3 +1,22 @@
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #include "otbWrapperApplication.h"
 #include "otbWrapperApplicationFactory.h"
 #include "otbWrapperChoiceParameter.h"
@@ -36,7 +55,6 @@ public:
         SetDescription("Compute Contours application");
 
         // Documentation
-        SetDocName("Application for Computing Contours");
         SetDocLongDescription("This application does compute the contours of the final mask");
         SetDocLimitations("None");
         SetDocAuthors("Germain SALGUES");
@@ -56,9 +74,8 @@ public:
         MandatoryOff("backgroundvalue");
         SetDefaultParameterInt("backgroundvalue", 0);
 
-        AddParameter(ParameterType_Empty, "fullyconnected", "cloud refine image");
+        AddParameter(ParameterType_Int, "fullyconnected", "cloud refine image");
         SetParameterDescription( "fullyconnected", "Input cloud refine image");
-        DisableParameter("fullyconnected");
 
         AddRAMParameter();
 
@@ -94,7 +111,7 @@ public:
         m_ContourFilter->SetBackgroundValue(0);
         m_ContourFilter->SetBackgroundValue(GetParameterInt("backgroundvalue"));
        
-        m_ContourFilter->SetFullyConnected(IsParameterEnabled("fullyconnected"));
+        m_ContourFilter->SetFullyConnected(GetParameterInt("fullyconnected")==1);
 
         // Set the output image
         SetParameterOutputImage("out", m_ContourFilter->GetOutput());
diff --git a/src/ComputeNbPixels.cxx b/src/ComputeNbPixels.cxx
index 5da56af0..5d7c0eac 100644
--- a/src/ComputeNbPixels.cxx
+++ b/src/ComputeNbPixels.cxx
@@ -1,3 +1,22 @@
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #include "otbWrapperApplication.h"
 #include "otbWrapperApplicationFactory.h"
 #include "otbWrapperChoiceParameter.h"
@@ -35,7 +54,6 @@ public:
     SetDescription("Compute Snow line application");
 
     // Documentation
-    SetDocName("Application to compute the snow line");
     SetDocLongDescription("This application does compute the ZS value and output the histogram of snow pixels per altitude slices.");
     SetDocLimitations("None");
     SetDocAuthors("Manuel Grizonnet");
diff --git a/src/ComputeSnowLine.cxx b/src/ComputeSnowLine.cxx
index 1db34645..3f837935 100644
--- a/src/ComputeSnowLine.cxx
+++ b/src/ComputeSnowLine.cxx
@@ -1,3 +1,22 @@
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #include "otbWrapperApplication.h"
 #include "otbWrapperApplicationFactory.h"
 #include "otbWrapperChoiceParameter.h"
@@ -35,7 +54,6 @@ public:
     SetDescription("Compute Snow line application");
 
     // Documentation
-    SetDocName("Application to compute the snow line");
     SetDocLongDescription("This application does compute the ZS value and output the histogram of snow pixels per altitude slices.");
     SetDocLimitations("None");
     SetDocAuthors("Manuel Grizonnet");
@@ -60,10 +78,8 @@ public:
     AddParameter(ParameterType_Float, "fclearlim", "fclearlim");
     SetParameterDescription("fclearlim", "fclearlim");
 
-    AddParameter(ParameterType_Empty, "reverse", "reverse");
+    AddParameter(ParameterType_Int, "reverse", "reverse");
     SetParameterDescription("reverse", "reverse");
-    MandatoryOff("reverse");
-    DisableParameter("reverse");
 
     AddParameter(ParameterType_Int, "offset", "offset");
     SetParameterDescription("offset", "offset");
@@ -130,7 +146,7 @@ public:
                                     GetParameterInt("dz"),
                                     GetParameterFloat("fsnowlim"),
                                     GetParameterFloat("fclearlim"),
-                                    IsParameterEnabled("reverse"),
+                                    GetParameterInt("reverse")==1,
                                     GetParameterInt("offset"),
                                     GetParameterInt("centeroffset"),
                                     GetParameterAsString("outhist").c_str()
diff --git a/src/ComputeSnowMask.cxx b/src/ComputeSnowMask.cxx
index e7b48ebe..2389b065 100644
--- a/src/ComputeSnowMask.cxx
+++ b/src/ComputeSnowMask.cxx
@@ -1,3 +1,22 @@
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #include "otbWrapperApplication.h"
 #include "otbWrapperApplicationFactory.h"
 #include "otbWrapperChoiceParameter.h"
@@ -36,7 +55,6 @@ public:
     SetDescription("Compute Snow Mask application");
 
     // Documentation
-    SetDocName("Application for Compute Snow Mask");
     SetDocLongDescription("This application does compute the snow mask");
     SetDocLimitations("None");
     SetDocAuthors("Germain SALGUES");
diff --git a/src/histo_utils.cxx b/src/histo_utils.cxx
index 897927db..1a523619 100644
--- a/src/histo_utils.cxx
+++ b/src/histo_utils.cxx
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 #include "histo_utils.h"
 
diff --git a/src/histo_utils.h b/src/histo_utils.h
index 985e7288..f6cd6bea 100644
--- a/src/histo_utils.h
+++ b/src/histo_utils.h
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 #ifndef HISTO_UTILS_H
 #define HISTO_UTILS_H
diff --git a/src/itkNarySnowMaskImageFilter.h b/src/itkNarySnowMaskImageFilter.h
index 246a9592..b183ea76 100644
--- a/src/itkNarySnowMaskImageFilter.h
+++ b/src/itkNarySnowMaskImageFilter.h
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #include "itkNaryFunctorImageFilter.h"
 #include "itkNumericTraits.h"
 #include <bitset>
diff --git a/src/itkUnaryCloudMaskImageFilter.h b/src/itkUnaryCloudMaskImageFilter.h
index 393cd434..14eeafb5 100644
--- a/src/itkUnaryCloudMaskImageFilter.h
+++ b/src/itkUnaryCloudMaskImageFilter.h
@@ -1,3 +1,22 @@
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #include "itkUnaryFunctorImageFilter.h"
 #include "itkNumericTraits.h"
 #include <bitset>
diff --git a/src/otbStreamingHistogramMaskedVectorImageFilter.h b/src/otbStreamingHistogramMaskedVectorImageFilter.h
index dfe6b1eb..8dc8aded 100644
--- a/src/otbStreamingHistogramMaskedVectorImageFilter.h
+++ b/src/otbStreamingHistogramMaskedVectorImageFilter.h
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 #ifndef __otbStreamingHistogramMaskedVectorImageFilter_h
 #define __otbStreamingHistogramMaskedVectorImageFilter_h
 
diff --git a/src/otbStreamingHistogramMaskedVectorImageFilter.txx b/src/otbStreamingHistogramMaskedVectorImageFilter.txx
index 5e5598cc..f9541eef 100644
--- a/src/otbStreamingHistogramMaskedVectorImageFilter.txx
+++ b/src/otbStreamingHistogramMaskedVectorImageFilter.txx
@@ -1,18 +1,21 @@
-/*=========================================================================
+// Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+//
+// This file is part of Let-it-snow (LIS)
+//
+//     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
 
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
 #ifndef __otbStreamingHistogramMaskedVectorImageFilter_txx
 #define __otbStreamingHistogramMaskedVectorImageFilter_txx
 #include "otbStreamingHistogramMaskedVectorImageFilter.h"
diff --git a/styles/LIS_SEB_style_v2_vector.qml b/styles/LIS.1.2_SEB_style_vector.qml
similarity index 100%
rename from styles/LIS_SEB_style_v2_vector.qml
rename to styles/LIS.1.2_SEB_style_vector.qml
diff --git a/styles/LIS_FSC_style_raster.qml b/styles/LIS_FSC_style_raster.qml
new file mode 100644
index 00000000..cacf95c0
--- /dev/null
+++ b/styles/LIS_FSC_style_raster.qml
@@ -0,0 +1,24 @@
+<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
+<qgis version="2.18.19" minimumScale="inf" maximumScale="1e+08" hasScaleBasedVisibilityFlag="0">
+  <pipe>
+    <rasterrenderer opacity="1" alphaBand="0" classificationMax="100" classificationMinMaxOrigin="User" band="1" classificationMin="0" type="singlebandpseudocolor">
+      <rasterTransparency/>
+      <rastershader>
+        <colorrampshader colorRampType="DISCRETE" clip="0">
+          <item alpha="255" value="0" label="No snow" color="#d6d2d0"/>
+          <item alpha="255" value="20" label="0 - 20" color="#f0f9e8"/>
+          <item alpha="255" value="40" label="20 - 40" color="#bae4bc"/>
+          <item alpha="255" value="60" label="40 - 60" color="#7bccc4"/>
+          <item alpha="255" value="80" label="60 - 80" color="#43a2ca"/>
+          <item alpha="255" value="100" label="80 - 100" color="#0868ac"/>
+          <item alpha="255" value="205" label="Cloud" color="#ffffff"/>
+          <item alpha="255" value="inf" label="No data" color="#000000"/>
+        </colorrampshader>
+      </rastershader>
+    </rasterrenderer>
+    <brightnesscontrast brightness="0" contrast="0"/>
+    <huesaturation colorizeGreen="128" colorizeOn="0" colorizeRed="255" colorizeBlue="128" grayscaleMode="0" saturation="0" colorizeStrength="100"/>
+    <rasterresampler maxOversampling="2" zoomedOutResampler="bilinear"/>
+  </pipe>
+  <blendMode>0</blendMode>
+</qgis>
diff --git a/styles/LIS_NDSI_style_raster.qml b/styles/LIS_NDSI_style_raster.qml
new file mode 100644
index 00000000..af080826
--- /dev/null
+++ b/styles/LIS_NDSI_style_raster.qml
@@ -0,0 +1,24 @@
+<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
+<qgis version="2.18.19" minimumScale="inf" maximumScale="1e+08" hasScaleBasedVisibilityFlag="0">
+  <pipe>
+    <rasterrenderer opacity="1" alphaBand="-1" classificationMax="100" classificationMinMaxOrigin="User" band="1" classificationMin="0" type="singlebandpseudocolor">
+      <rasterTransparency/>
+      <rastershader>
+        <colorrampshader colorRampType="DISCRETE" clip="0">
+          <item alpha="255" value="0" label="No snow" color="#d6d2d0"/>
+          <item alpha="255" value="20" label="0 - 20" color="#f0f9e8"/>
+          <item alpha="255" value="40" label="20 - 40" color="#bae4bc"/>
+          <item alpha="255" value="60" label="40 - 60" color="#7bccc4"/>
+          <item alpha="255" value="80" label="60 - 80" color="#43a2ca"/>
+          <item alpha="255" value="100" label="80 - 100" color="#0868ac"/>
+          <item alpha="255" value="205" label="Cloud" color="#ffffff"/>
+          <item alpha="255" value="inf" label="No data" color="#000000"/>
+        </colorrampshader>
+      </rastershader>
+    </rasterrenderer>
+    <brightnesscontrast brightness="0" contrast="0"/>
+    <huesaturation colorizeGreen="128" colorizeOn="0" colorizeRed="255" colorizeBlue="128" grayscaleMode="0" saturation="0" colorizeStrength="100"/>
+    <rasterresampler maxOversampling="2" zoomedOutResampler="bilinear"/>
+  </pipe>
+  <blendMode>0</blendMode>
+</qgis>
diff --git a/styles/LIS_SEB_style_v2.qml b/styles/LIS_SEB_style_raster.qml
similarity index 100%
rename from styles/LIS_SEB_style_v2.qml
rename to styles/LIS_SEB_style_raster.qml
diff --git a/styles/LIS_SEB_style_raster_qgis_3.4.qml b/styles/LIS_SEB_style_raster_qgis_3.4.qml
new file mode 100755
index 00000000..3ac5e475
--- /dev/null
+++ b/styles/LIS_SEB_style_raster_qgis_3.4.qml
@@ -0,0 +1,38 @@
+<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
+<qgis hasScaleBasedVisibilityFlag="0" minScale="1e+8" styleCategories="AllStyleCategories" version="3.4.2-Madeira" maxScale="0">
+  <flags>
+    <Identifiable>1</Identifiable>
+    <Removable>1</Removable>
+    <Searchable>1</Searchable>
+  </flags>
+  <customproperties>
+    <property value="false" key="WMSBackgroundLayer"/>
+    <property value="false" key="WMSPublishDataSourceUrl"/>
+    <property value="0" key="embeddedWidgets/count"/>
+    <property value="Value" key="identify/format"/>
+  </customproperties>
+  <pipe>
+    <rasterrenderer opacity="1" alphaBand="-1" band="1" type="paletted">
+      <rasterTransparency/>
+      <minMaxOrigin>
+        <limits>None</limits>
+        <extent>WholeRaster</extent>
+        <statAccuracy>Estimated</statAccuracy>
+        <cumulativeCutLower>0.02</cumulativeCutLower>
+        <cumulativeCutUpper>0.98</cumulativeCutUpper>
+        <stdDevFactor>2</stdDevFactor>
+      </minMaxOrigin>
+      <colorPalette>
+        <paletteEntry value="0" label="no snow" color="#777777" alpha="255"/>
+        <paletteEntry value="100" label="snow" color="#00ffff" alpha="255"/>
+        <paletteEntry value="205" label="cloud" color="#ffffff" alpha="255"/>
+        <paletteEntry value="254" label="nodata" color="#000000" alpha="255"/>
+      </colorPalette>
+      <colorramp name="[source]" type="randomcolors"/>
+    </rasterrenderer>
+    <brightnesscontrast contrast="0" brightness="0"/>
+    <huesaturation colorizeOn="0" colorizeStrength="100" grayscaleMode="0" colorizeBlue="128" saturation="0" colorizeGreen="128" colorizeRed="255"/>
+    <rasterresampler maxOversampling="2"/>
+  </pipe>
+  <blendMode>0</blendMode>
+</qgis>
diff --git a/styles/LIS_SEB_style_vector.qml b/styles/LIS_SEB_style_vector.qml
new file mode 100644
index 00000000..ef9c71e3
--- /dev/null
+++ b/styles/LIS_SEB_style_vector.qml
@@ -0,0 +1,331 @@
+<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
+<qgis version="2.18.16" simplifyAlgorithm="0" minimumScale="0" maximumScale="1e+08" simplifyDrawingHints="1" minLabelScale="1" maxLabelScale="1e+08" simplifyDrawingTol="1" readOnly="0" simplifyMaxScale="1" hasScaleBasedVisibilityFlag="0" simplifyLocal="1" scaleBasedLabelVisibilityFlag="0">
+  <edittypes>
+    <edittype widgetv2type="TextEdit" name="DN">
+      <widgetv2config IsMultiline="0" fieldEditable="1" constraint="" UseHtml="0" labelOnTop="0" constraintDescription="" notNull="0"/>
+    </edittype>
+  </edittypes>
+  <renderer-v2 attr="DN" forceraster="0" symbollevels="0" type="categorizedSymbol" enableorderby="0">
+    <categories>
+      <category render="true" symbol="0" value="205" label="cloud"/>
+      <category render="true" symbol="1" value="100" label="snow"/>
+    </categories>
+    <symbols>
+      <symbol alpha="1" clip_to_extent="1" type="fill" name="0">
+        <layer pass="0" class="SimpleFill" locked="0">
+          <prop k="border_width_map_unit_scale" v="0,0,0,0,0,0"/>
+          <prop k="color" v="255,255,255,255"/>
+          <prop k="joinstyle" v="bevel"/>
+          <prop k="offset" v="0,0"/>
+          <prop k="offset_map_unit_scale" v="0,0,0,0,0,0"/>
+          <prop k="offset_unit" v="MM"/>
+          <prop k="outline_color" v="0,0,0,255"/>
+          <prop k="outline_style" v="no"/>
+          <prop k="outline_width" v="0.26"/>
+          <prop k="outline_width_unit" v="MM"/>
+          <prop k="style" v="solid"/>
+        </layer>
+      </symbol>
+      <symbol alpha="1" clip_to_extent="1" type="fill" name="1">
+        <layer pass="0" class="SimpleFill" locked="0">
+          <prop k="border_width_map_unit_scale" v="0,0,0,0,0,0"/>
+          <prop k="color" v="0,255,255,255"/>
+          <prop k="joinstyle" v="bevel"/>
+          <prop k="offset" v="0,0"/>
+          <prop k="offset_map_unit_scale" v="0,0,0,0,0,0"/>
+          <prop k="offset_unit" v="MM"/>
+          <prop k="outline_color" v="0,0,0,255"/>
+          <prop k="outline_style" v="no"/>
+          <prop k="outline_width" v="0.26"/>
+          <prop k="outline_width_unit" v="MM"/>
+          <prop k="style" v="solid"/>
+        </layer>
+      </symbol>
+    </symbols>
+    <source-symbol>
+      <symbol alpha="1" clip_to_extent="1" type="fill" name="0">
+        <layer pass="0" class="SimpleFill" locked="0">
+          <prop k="border_width_map_unit_scale" v="0,0,0,0,0,0"/>
+          <prop k="color" v="71,102,130,255"/>
+          <prop k="joinstyle" v="bevel"/>
+          <prop k="offset" v="0,0"/>
+          <prop k="offset_map_unit_scale" v="0,0,0,0,0,0"/>
+          <prop k="offset_unit" v="MM"/>
+          <prop k="outline_color" v="0,0,0,255"/>
+          <prop k="outline_style" v="solid"/>
+          <prop k="outline_width" v="0.26"/>
+          <prop k="outline_width_unit" v="MM"/>
+          <prop k="style" v="solid"/>
+        </layer>
+      </symbol>
+    </source-symbol>
+    <rotation/>
+    <sizescale scalemethod="diameter"/>
+  </renderer-v2>
+  <labeling type="simple"/>
+  <customproperties>
+    <property key="embeddedWidgets/count" value="0"/>
+    <property key="labeling" value="pal"/>
+    <property key="labeling/addDirectionSymbol" value="false"/>
+    <property key="labeling/angleOffset" value="0"/>
+    <property key="labeling/blendMode" value="0"/>
+    <property key="labeling/bufferBlendMode" value="0"/>
+    <property key="labeling/bufferColorA" value="255"/>
+    <property key="labeling/bufferColorB" value="255"/>
+    <property key="labeling/bufferColorG" value="255"/>
+    <property key="labeling/bufferColorR" value="255"/>
+    <property key="labeling/bufferDraw" value="false"/>
+    <property key="labeling/bufferJoinStyle" value="64"/>
+    <property key="labeling/bufferNoFill" value="false"/>
+    <property key="labeling/bufferSize" value="1"/>
+    <property key="labeling/bufferSizeInMapUnits" value="false"/>
+    <property key="labeling/bufferSizeMapUnitMaxScale" value="0"/>
+    <property key="labeling/bufferSizeMapUnitMinScale" value="0"/>
+    <property key="labeling/bufferSizeMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/bufferTransp" value="0"/>
+    <property key="labeling/centroidInside" value="false"/>
+    <property key="labeling/centroidWhole" value="false"/>
+    <property key="labeling/decimals" value="3"/>
+    <property key="labeling/displayAll" value="false"/>
+    <property key="labeling/dist" value="0"/>
+    <property key="labeling/distInMapUnits" value="false"/>
+    <property key="labeling/distMapUnitMaxScale" value="0"/>
+    <property key="labeling/distMapUnitMinScale" value="0"/>
+    <property key="labeling/distMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/drawLabels" value="false"/>
+    <property key="labeling/enabled" value="false"/>
+    <property key="labeling/fieldName" value=""/>
+    <property key="labeling/fitInPolygonOnly" value="false"/>
+    <property key="labeling/fontBold" value="false"/>
+    <property key="labeling/fontCapitals" value="0"/>
+    <property key="labeling/fontFamily" value="Sans Serif"/>
+    <property key="labeling/fontItalic" value="false"/>
+    <property key="labeling/fontLetterSpacing" value="0"/>
+    <property key="labeling/fontLimitPixelSize" value="false"/>
+    <property key="labeling/fontMaxPixelSize" value="10000"/>
+    <property key="labeling/fontMinPixelSize" value="3"/>
+    <property key="labeling/fontSize" value="9"/>
+    <property key="labeling/fontSizeInMapUnits" value="false"/>
+    <property key="labeling/fontSizeMapUnitMaxScale" value="0"/>
+    <property key="labeling/fontSizeMapUnitMinScale" value="0"/>
+    <property key="labeling/fontSizeMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/fontStrikeout" value="false"/>
+    <property key="labeling/fontUnderline" value="false"/>
+    <property key="labeling/fontWeight" value="50"/>
+    <property key="labeling/fontWordSpacing" value="0"/>
+    <property key="labeling/formatNumbers" value="false"/>
+    <property key="labeling/isExpression" value="true"/>
+    <property key="labeling/labelOffsetInMapUnits" value="true"/>
+    <property key="labeling/labelOffsetMapUnitMaxScale" value="0"/>
+    <property key="labeling/labelOffsetMapUnitMinScale" value="0"/>
+    <property key="labeling/labelOffsetMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/labelPerPart" value="false"/>
+    <property key="labeling/leftDirectionSymbol" value="&lt;"/>
+    <property key="labeling/limitNumLabels" value="false"/>
+    <property key="labeling/maxCurvedCharAngleIn" value="20"/>
+    <property key="labeling/maxCurvedCharAngleOut" value="-20"/>
+    <property key="labeling/maxNumLabels" value="2000"/>
+    <property key="labeling/mergeLines" value="false"/>
+    <property key="labeling/minFeatureSize" value="0"/>
+    <property key="labeling/multilineAlign" value="0"/>
+    <property key="labeling/multilineHeight" value="1"/>
+    <property key="labeling/namedStyle" value="Normal"/>
+    <property key="labeling/obstacle" value="true"/>
+    <property key="labeling/obstacleFactor" value="1"/>
+    <property key="labeling/obstacleType" value="0"/>
+    <property key="labeling/offsetType" value="0"/>
+    <property key="labeling/placeDirectionSymbol" value="0"/>
+    <property key="labeling/placement" value="1"/>
+    <property key="labeling/placementFlags" value="0"/>
+    <property key="labeling/plussign" value="false"/>
+    <property key="labeling/predefinedPositionOrder" value="TR,TL,BR,BL,R,L,TSR,BSR"/>
+    <property key="labeling/preserveRotation" value="true"/>
+    <property key="labeling/previewBkgrdColor" value="#ffffff"/>
+    <property key="labeling/priority" value="5"/>
+    <property key="labeling/quadOffset" value="4"/>
+    <property key="labeling/repeatDistance" value="0"/>
+    <property key="labeling/repeatDistanceMapUnitMaxScale" value="0"/>
+    <property key="labeling/repeatDistanceMapUnitMinScale" value="0"/>
+    <property key="labeling/repeatDistanceMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/repeatDistanceUnit" value="1"/>
+    <property key="labeling/reverseDirectionSymbol" value="false"/>
+    <property key="labeling/rightDirectionSymbol" value=">"/>
+    <property key="labeling/scaleMax" value="10000000"/>
+    <property key="labeling/scaleMin" value="1"/>
+    <property key="labeling/scaleVisibility" value="false"/>
+    <property key="labeling/shadowBlendMode" value="6"/>
+    <property key="labeling/shadowColorB" value="0"/>
+    <property key="labeling/shadowColorG" value="0"/>
+    <property key="labeling/shadowColorR" value="0"/>
+    <property key="labeling/shadowDraw" value="false"/>
+    <property key="labeling/shadowOffsetAngle" value="135"/>
+    <property key="labeling/shadowOffsetDist" value="1"/>
+    <property key="labeling/shadowOffsetGlobal" value="true"/>
+    <property key="labeling/shadowOffsetMapUnitMaxScale" value="0"/>
+    <property key="labeling/shadowOffsetMapUnitMinScale" value="0"/>
+    <property key="labeling/shadowOffsetMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/shadowOffsetUnits" value="1"/>
+    <property key="labeling/shadowRadius" value="1.5"/>
+    <property key="labeling/shadowRadiusAlphaOnly" value="false"/>
+    <property key="labeling/shadowRadiusMapUnitMaxScale" value="0"/>
+    <property key="labeling/shadowRadiusMapUnitMinScale" value="0"/>
+    <property key="labeling/shadowRadiusMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/shadowRadiusUnits" value="1"/>
+    <property key="labeling/shadowScale" value="100"/>
+    <property key="labeling/shadowTransparency" value="30"/>
+    <property key="labeling/shadowUnder" value="0"/>
+    <property key="labeling/shapeBlendMode" value="0"/>
+    <property key="labeling/shapeBorderColorA" value="255"/>
+    <property key="labeling/shapeBorderColorB" value="128"/>
+    <property key="labeling/shapeBorderColorG" value="128"/>
+    <property key="labeling/shapeBorderColorR" value="128"/>
+    <property key="labeling/shapeBorderWidth" value="0"/>
+    <property key="labeling/shapeBorderWidthMapUnitMaxScale" value="0"/>
+    <property key="labeling/shapeBorderWidthMapUnitMinScale" value="0"/>
+    <property key="labeling/shapeBorderWidthMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/shapeBorderWidthUnits" value="1"/>
+    <property key="labeling/shapeDraw" value="false"/>
+    <property key="labeling/shapeFillColorA" value="255"/>
+    <property key="labeling/shapeFillColorB" value="255"/>
+    <property key="labeling/shapeFillColorG" value="255"/>
+    <property key="labeling/shapeFillColorR" value="255"/>
+    <property key="labeling/shapeJoinStyle" value="64"/>
+    <property key="labeling/shapeOffsetMapUnitMaxScale" value="0"/>
+    <property key="labeling/shapeOffsetMapUnitMinScale" value="0"/>
+    <property key="labeling/shapeOffsetMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/shapeOffsetUnits" value="1"/>
+    <property key="labeling/shapeOffsetX" value="0"/>
+    <property key="labeling/shapeOffsetY" value="0"/>
+    <property key="labeling/shapeRadiiMapUnitMaxScale" value="0"/>
+    <property key="labeling/shapeRadiiMapUnitMinScale" value="0"/>
+    <property key="labeling/shapeRadiiMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/shapeRadiiUnits" value="1"/>
+    <property key="labeling/shapeRadiiX" value="0"/>
+    <property key="labeling/shapeRadiiY" value="0"/>
+    <property key="labeling/shapeRotation" value="0"/>
+    <property key="labeling/shapeRotationType" value="0"/>
+    <property key="labeling/shapeSVGFile" value=""/>
+    <property key="labeling/shapeSizeMapUnitMaxScale" value="0"/>
+    <property key="labeling/shapeSizeMapUnitMinScale" value="0"/>
+    <property key="labeling/shapeSizeMapUnitScale" value="0,0,0,0,0,0"/>
+    <property key="labeling/shapeSizeType" value="0"/>
+    <property key="labeling/shapeSizeUnits" value="1"/>
+    <property key="labeling/shapeSizeX" value="0"/>
+    <property key="labeling/shapeSizeY" value="0"/>
+    <property key="labeling/shapeTransparency" value="0"/>
+    <property key="labeling/shapeType" value="0"/>
+    <property key="labeling/substitutions" value="&lt;substitutions/>"/>
+    <property key="labeling/textColorA" value="255"/>
+    <property key="labeling/textColorB" value="0"/>
+    <property key="labeling/textColorG" value="0"/>
+    <property key="labeling/textColorR" value="0"/>
+    <property key="labeling/textTransp" value="0"/>
+    <property key="labeling/upsidedownLabels" value="0"/>
+    <property key="labeling/useSubstitutions" value="false"/>
+    <property key="labeling/wrapChar" value=""/>
+    <property key="labeling/xOffset" value="0"/>
+    <property key="labeling/yOffset" value="0"/>
+    <property key="labeling/zIndex" value="0"/>
+    <property key="variableNames"/>
+    <property key="variableValues"/>
+  </customproperties>
+  <blendMode>0</blendMode>
+  <featureBlendMode>0</featureBlendMode>
+  <layerTransparency>0</layerTransparency>
+  <displayfield>DN</displayfield>
+  <label>0</label>
+  <labelattributes>
+    <label fieldname="" text="Étiquette"/>
+    <family fieldname="" name="Sans Serif"/>
+    <size fieldname="" units="pt" value="12"/>
+    <bold fieldname="" on="0"/>
+    <italic fieldname="" on="0"/>
+    <underline fieldname="" on="0"/>
+    <strikeout fieldname="" on="0"/>
+    <color fieldname="" red="0" blue="0" green="0"/>
+    <x fieldname=""/>
+    <y fieldname=""/>
+    <offset x="0" y="0" units="pt" yfieldname="" xfieldname=""/>
+    <angle fieldname="" value="0" auto="0"/>
+    <alignment fieldname="" value="center"/>
+    <buffercolor fieldname="" red="255" blue="255" green="255"/>
+    <buffersize fieldname="" units="pt" value="1"/>
+    <bufferenabled fieldname="" on=""/>
+    <multilineenabled fieldname="" on=""/>
+    <selectedonly on=""/>
+  </labelattributes>
+  <SingleCategoryDiagramRenderer diagramType="Histogram" sizeLegend="0" attributeLegend="1">
+    <DiagramCategory penColor="#000000" labelPlacementMethod="XHeight" penWidth="0" diagramOrientation="Up" sizeScale="0,0,0,0,0,0" minimumSize="0" barWidth="5" penAlpha="255" maxScaleDenominator="1e+08" backgroundColor="#ffffff" transparency="0" width="15" scaleDependency="Area" backgroundAlpha="255" angleOffset="1440" scaleBasedVisibility="0" enabled="0" height="15" lineSizeScale="0,0,0,0,0,0" sizeType="MM" lineSizeType="MM" minScaleDenominator="inf">
+      <fontProperties description="Sans Serif,9,-1,5,50,0,0,0,0,0" style=""/>
+    </DiagramCategory>
+    <symbol alpha="1" clip_to_extent="1" type="marker" name="sizeSymbol">
+      <layer pass="0" class="SimpleMarker" locked="0">
+        <prop k="angle" v="0"/>
+        <prop k="color" v="255,0,0,255"/>
+        <prop k="horizontal_anchor_point" v="1"/>
+        <prop k="joinstyle" v="bevel"/>
+        <prop k="name" v="circle"/>
+        <prop k="offset" v="0,0"/>
+        <prop k="offset_map_unit_scale" v="0,0,0,0,0,0"/>
+        <prop k="offset_unit" v="MM"/>
+        <prop k="outline_color" v="0,0,0,255"/>
+        <prop k="outline_style" v="solid"/>
+        <prop k="outline_width" v="0"/>
+        <prop k="outline_width_map_unit_scale" v="0,0,0,0,0,0"/>
+        <prop k="outline_width_unit" v="MM"/>
+        <prop k="scale_method" v="diameter"/>
+        <prop k="size" v="2"/>
+        <prop k="size_map_unit_scale" v="0,0,0,0,0,0"/>
+        <prop k="size_unit" v="MM"/>
+        <prop k="vertical_anchor_point" v="1"/>
+      </layer>
+    </symbol>
+  </SingleCategoryDiagramRenderer>
+  <DiagramLayerSettings yPosColumn="-1" showColumn="-1" linePlacementFlags="10" placement="0" dist="0" xPosColumn="-1" priority="0" obstacle="0" zIndex="0" showAll="1"/>
+  <annotationform></annotationform>
+  <aliases>
+    <alias field="DN" index="0" name=""/>
+  </aliases>
+  <excludeAttributesWMS/>
+  <excludeAttributesWFS/>
+  <attributeactions default="-1"/>
+  <attributetableconfig actionWidgetStyle="dropDown" sortExpression="" sortOrder="0">
+    <columns>
+      <column width="-1" hidden="0" type="field" name="DN"/>
+      <column width="-1" hidden="1" type="actions"/>
+    </columns>
+  </attributetableconfig>
+  <editform></editform>
+  <editforminit/>
+  <editforminitcodesource>0</editforminitcodesource>
+  <editforminitfilepath></editforminitfilepath>
+  <editforminitcode><![CDATA[# -*- coding: utf-8 -*-
+"""
+QGIS forms can have a Python function that is called when the form is
+opened.
+
+Use this function to add extra logic to your forms.
+
+Enter the name of the function in the "Python Init function"
+field.
+An example follows:
+"""
+from qgis.PyQt.QtWidgets import QWidget
+
+def my_form_open(dialog, layer, feature):
+	geom = feature.geometry()
+	control = dialog.findChild(QWidget, "MyLineEdit")
+]]></editforminitcode>
+  <featformsuppress>0</featformsuppress>
+  <editorlayout>generatedlayout</editorlayout>
+  <widgets/>
+  <conditionalstyles>
+    <rowstyles/>
+    <fieldstyles/>
+  </conditionalstyles>
+  <defaults>
+    <default field="DN" expression=""/>
+  </defaults>
+  <previewExpression></previewExpression>
+  <layerGeometryType>2</layerGeometryType>
+</qgis>
diff --git a/styles/LIS_SEB_style_vector_qgis_3.4.qml b/styles/LIS_SEB_style_vector_qgis_3.4.qml
new file mode 100644
index 00000000..86bc498a
--- /dev/null
+++ b/styles/LIS_SEB_style_vector_qgis_3.4.qml
@@ -0,0 +1,182 @@
+<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>
+<qgis simplifyDrawingHints="1" simplifyDrawingTol="1" maxScale="0" readOnly="0" labelsEnabled="0" simplifyLocal="1" simplifyAlgorithm="0" simplifyMaxScale="1" minScale="1e+08" hasScaleBasedVisibilityFlag="0" styleCategories="AllStyleCategories" version="3.4.4-Madeira">
+  <flags>
+    <Identifiable>1</Identifiable>
+    <Removable>1</Removable>
+    <Searchable>1</Searchable>
+  </flags>
+  <renderer-v2 forceraster="0" enableorderby="0" symbollevels="0" attr="DN" type="categorizedSymbol">
+    <categories>
+      <category value="100" render="true" label="snow" symbol="0"/>
+      <category value="205" render="true" label="cloud" symbol="1"/>
+    </categories>
+    <symbols>
+      <symbol force_rhr="0" clip_to_extent="1" type="fill" name="0" alpha="1">
+        <layer locked="0" pass="0" enabled="1" class="SimpleFill">
+          <prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
+          <prop k="color" v="0,255,255,255"/>
+          <prop k="joinstyle" v="bevel"/>
+          <prop k="offset" v="0,0"/>
+          <prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
+          <prop k="offset_unit" v="MM"/>
+          <prop k="outline_color" v="35,35,35,255"/>
+          <prop k="outline_style" v="no"/>
+          <prop k="outline_width" v="0.26"/>
+          <prop k="outline_width_unit" v="MM"/>
+          <prop k="style" v="solid"/>
+          <data_defined_properties>
+            <Option type="Map">
+              <Option value="" type="QString" name="name"/>
+              <Option name="properties"/>
+              <Option value="collection" type="QString" name="type"/>
+            </Option>
+          </data_defined_properties>
+        </layer>
+      </symbol>
+      <symbol force_rhr="0" clip_to_extent="1" type="fill" name="1" alpha="1">
+        <layer locked="0" pass="0" enabled="1" class="SimpleFill">
+          <prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
+          <prop k="color" v="255,255,255,255"/>
+          <prop k="joinstyle" v="bevel"/>
+          <prop k="offset" v="0,0"/>
+          <prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
+          <prop k="offset_unit" v="MM"/>
+          <prop k="outline_color" v="35,35,35,255"/>
+          <prop k="outline_style" v="no"/>
+          <prop k="outline_width" v="0.26"/>
+          <prop k="outline_width_unit" v="MM"/>
+          <prop k="style" v="solid"/>
+          <data_defined_properties>
+            <Option type="Map">
+              <Option value="" type="QString" name="name"/>
+              <Option name="properties"/>
+              <Option value="collection" type="QString" name="type"/>
+            </Option>
+          </data_defined_properties>
+        </layer>
+      </symbol>
+    </symbols>
+    <source-symbol>
+      <symbol force_rhr="0" clip_to_extent="1" type="fill" name="0" alpha="1">
+        <layer locked="0" pass="0" enabled="1" class="SimpleFill">
+          <prop k="border_width_map_unit_scale" v="3x:0,0,0,0,0,0"/>
+          <prop k="color" v="196,60,57,255"/>
+          <prop k="joinstyle" v="bevel"/>
+          <prop k="offset" v="0,0"/>
+          <prop k="offset_map_unit_scale" v="3x:0,0,0,0,0,0"/>
+          <prop k="offset_unit" v="MM"/>
+          <prop k="outline_color" v="35,35,35,255"/>
+          <prop k="outline_style" v="solid"/>
+          <prop k="outline_width" v="0.26"/>
+          <prop k="outline_width_unit" v="MM"/>
+          <prop k="style" v="solid"/>
+          <data_defined_properties>
+            <Option type="Map">
+              <Option value="" type="QString" name="name"/>
+              <Option name="properties"/>
+              <Option value="collection" type="QString" name="type"/>
+            </Option>
+          </data_defined_properties>
+        </layer>
+      </symbol>
+    </source-symbol>
+    <rotation/>
+    <sizescale/>
+  </renderer-v2>
+  <customproperties>
+    <property value="0" key="embeddedWidgets/count"/>
+    <property key="variableNames"/>
+    <property key="variableValues"/>
+  </customproperties>
+  <blendMode>0</blendMode>
+  <featureBlendMode>0</featureBlendMode>
+  <layerOpacity>1</layerOpacity>
+  <SingleCategoryDiagramRenderer diagramType="Histogram" attributeLegend="1">
+    <DiagramCategory sizeScale="3x:0,0,0,0,0,0" maxScaleDenominator="1e+08" enabled="0" minScaleDenominator="-4.65661e-10" diagramOrientation="Up" penWidth="0" penAlpha="255" lineSizeScale="3x:0,0,0,0,0,0" height="15" scaleBasedVisibility="0" rotationOffset="270" minimumSize="0" penColor="#000000" width="15" lineSizeType="MM" scaleDependency="Area" backgroundColor="#ffffff" backgroundAlpha="255" labelPlacementMethod="XHeight" sizeType="MM" barWidth="5" opacity="1">
+      <fontProperties style="" description=".SF NS Text,13,-1,5,50,0,0,0,0,0"/>
+      <attribute field="" label="" color="#000000"/>
+    </DiagramCategory>
+  </SingleCategoryDiagramRenderer>
+  <DiagramLayerSettings linePlacementFlags="18" zIndex="0" dist="0" obstacle="0" showAll="1" placement="1" priority="0">
+    <properties>
+      <Option type="Map">
+        <Option value="" type="QString" name="name"/>
+        <Option name="properties"/>
+        <Option value="collection" type="QString" name="type"/>
+      </Option>
+    </properties>
+  </DiagramLayerSettings>
+  <geometryOptions removeDuplicateNodes="0" geometryPrecision="0">
+    <activeChecks/>
+    <checkConfiguration/>
+  </geometryOptions>
+  <fieldConfiguration>
+    <field name="DN">
+      <editWidget type="Range">
+        <config>
+          <Option/>
+        </config>
+      </editWidget>
+    </field>
+  </fieldConfiguration>
+  <aliases>
+    <alias field="DN" index="0" name=""/>
+  </aliases>
+  <excludeAttributesWMS/>
+  <excludeAttributesWFS/>
+  <defaults>
+    <default field="DN" expression="" applyOnUpdate="0"/>
+  </defaults>
+  <constraints>
+    <constraint field="DN" unique_strength="0" notnull_strength="0" constraints="0" exp_strength="0"/>
+  </constraints>
+  <constraintExpressions>
+    <constraint field="DN" desc="" exp=""/>
+  </constraintExpressions>
+  <expressionfields/>
+  <attributeactions>
+    <defaultAction value="{00000000-0000-0000-0000-000000000000}" key="Canvas"/>
+  </attributeactions>
+  <attributetableconfig sortOrder="0" sortExpression="" actionWidgetStyle="dropDown">
+    <columns>
+      <column hidden="0" width="-1" type="field" name="DN"/>
+      <column hidden="1" width="-1" type="actions"/>
+    </columns>
+  </attributetableconfig>
+  <conditionalstyles>
+    <rowstyles/>
+    <fieldstyles/>
+  </conditionalstyles>
+  <editform tolerant="1"></editform>
+  <editforminit/>
+  <editforminitcodesource>0</editforminitcodesource>
+  <editforminitfilepath></editforminitfilepath>
+  <editforminitcode><![CDATA[# -*- coding: utf-8 -*-
+"""
+QGIS forms can have a Python function that is called when the form is
+opened.
+
+Use this function to add extra logic to your forms.
+
+Enter the name of the function in the "Python Init function" field.
+An example follows:
+"""
+from qgis.PyQt.QtWidgets import QWidget
+
+def my_form_open(dialog, layer, feature):
+    geom = feature.geometry()
+    control = dialog.findChild(QWidget, "MyLineEdit")
+
+]]></editforminitcode>
+  <featformsuppress>0</featformsuppress>
+  <editorlayout>generatedlayout</editorlayout>
+  <editable>
+    <field editable="1" name="DN"/>
+  </editable>
+  <labelOnTop>
+    <field labelOnTop="0" name="DN"/>
+  </labelOnTop>
+  <widgets/>
+  <previewExpression>DN</previewExpression>
+  <mapTip></mapTip>
+  <layerGeometryType>2</layerGeometryType>
+</qgis>
diff --git a/styles/readme.md b/styles/readme.md
new file mode 100644
index 00000000..767c89e3
--- /dev/null
+++ b/styles/readme.md
@@ -0,0 +1,15 @@
+
+## Style files for QGIS
+
+* LIS 1.4 and after (QGIS 3.4 and after):
+Use [LIS_SEB_style_raster_qgis_3.4.qml](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/styles/LIS_SEB_style_raster_qgis_3.4.qml) to apply the cyan (snow) / white (cloud) / grey (no-snow) colormap to a SEB raster product (GeoTIFF), and [LIS_SEB_style_vector_qgis_3.4.qml](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/styles/LIS_SEB_style_vector_qgis_3.4.qml) to apply the cyan (snow) / white (cloud) colormap to a SEB vector product (shapefile).
+
+* LIS 1.4 and after (QGIS 2.*):
+Use [LIS_SEB_style_raster.qml](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/styles/LIS_SEB_style_raster.qml) to apply the cyan (snow) / white (cloud) / grey (no-snow) colormap to a SEB raster product (GeoTIFF), and [LIS_SEB_style_vector.qml](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/styles/LIS_SEB_style_vector.qml) to apply the cyan (snow) / white (cloud) colormap to a SEB vector product (shapefile).
+
+
+* LIS 1.2 and before:
+Use [LIS_SEB_style_raster.qml](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/styles/LIS_SEB_style_raster.qml) to apply the cyan (snow) / white (cloud) / grey (no-snow) colormap to a SEB raster product (GeoTIFF), and [LIS.1.2_SEB_style_vector.qml](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/blob/develop/styles/LIS.1.2_SEB_style_vector.qml) to apply the cyan (snow) / white (cloud) colormap to a SEB vector product (shapefile).
+
+![Example: Theia Sentinel-2 level 2A image and snow product in the Los Ancares region, Spain. Image captured by Sentinel-2A on 30 Jan 2018.](http://www.cesbio.ups-tlse.fr/multitemp/wp-content/uploads/2017/12/m1.png)
+
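+These styles can also be applied programmatically. The snippet below is a minimal sketch for the QGIS 3 Python console; the product and style paths are placeholders to adapt to your own layout, not files shipped at those locations:
+
+```python
+from qgis.core import QgsProject, QgsRasterLayer
+
+# Load a SEB raster product and apply the LIS colormap (placeholder paths).
+layer = QgsRasterLayer("/path/to/LIS_PRODUCTS/LIS_SEB.TIF", "LIS_SEB")
+if layer.isValid():
+    layer.loadNamedStyle("/path/to/styles/LIS_SEB_style_raster_qgis_3.4.qml")
+    layer.triggerRepaint()
+    QgsProject.instance().addMapLayer(layer)
+```
+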
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 26221f52..3d71514c 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -1,3 +1,23 @@
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 set(LIS_DATA_ROOT CACHE PATH "The test data directory which contains 2 sub-folders ( Baseline/Input-Data-Test)" )
 mark_as_advanced(LIS_DATA_ROOT)
 if(NOT LIS_DATA_ROOT)
@@ -73,13 +93,6 @@ foreach( test_name ${TEST_CASES_LIST})
     )
   set_tests_properties(${test_name}_compare_pass3_test PROPERTIES DEPENDS ${test_name}_test)
 
-  add_test(NAME ${test_name}_compare_composition_test
-    COMMAND gdalcompare.py
-    "${CURRENT_TEST_BASELINE}/composition.tif"
-    "${CURRENT_TEST_OUTPUT}/LIS_PRODUCTS/LIS_COMPO.TIF"
-    )
-  set_tests_properties(${test_name}_compare_composition_test PROPERTIES DEPENDS ${test_name}_test)
-  
   add_test(NAME ${test_name}_compare_snow_all_test
   COMMAND gdalcompare.py
   "${CURRENT_TEST_BASELINE}/snow_all.tif"
@@ -117,19 +130,61 @@ add_test(NAME s2-small_zip_test
   )
   set_tests_properties(s2-small_zip_test PROPERTIES DEPENDS s2-small_zip_test_json_builder_test)
 
+set(OUTPUT_TEST_SNOW_ANNUAL_MAP ${OUTPUT_TEST}/SNOW_ANNUAL_MAP)
+add_test(NAME snow_annual_map_json_builder_test
+  COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/build_snow_annual_map_json.py
+  "T31TCH"
+  "[/work/OT/siaa/Theia/Neige/TEST/Data-LIS-1.6/Input-Data-Test/SNOW_PRODUCTS/SENTINEL2A_20180101-105435-457_L2A_T31TCH_D_V1-4,/work/OT/siaa/Theia/Neige/TEST/Data-LIS-1.6/Input-Data-Test/SNOW_PRODUCTS/SENTINEL2A_20180131-105416-437_L2A_T31TCH_D_V1-4]"
+  "${OUTPUT_TEST_SNOW_ANNUAL_MAP}"
+  "01/01/2018"
+  "31/01/2018"
+  "--date_margin=10"
+  "--nb_threads=8"
+  "--use_densification"
+  "--densification_products_list=[/work/OT/siaa/Theia/Neige/TEST/Data-LIS-1.6/Input-Data-Test/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20180115-103629-617_L2A_T31TCH_D_V1-9,/work/OT/siaa/Theia/Neige/TEST/Data-LIS-1.6/Input-Data-Test/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20180131-103619-890_L2A_T31TCH_D_V1-9]"
+  "--debug"
+  )
+
 add_test(NAME snow_annual_map_test
-  COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/snow_annual_map_test.py
-    "${DATA_TEST}/SNOW_PRODUCTS"
-    "${OUTPUT_TEST}/snow_annual_map_test"
-    "${OUTPUT_TEST}/snow_annual_map_test/tmp"
+  COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/run_snow_annual_map.py
+    "${OUTPUT_TEST_SNOW_ANNUAL_MAP}/snow_annual_map.json"
      )
+  set_tests_properties(snow_annual_map_test PROPERTIES DEPENDS snow_annual_map_json_builder_test)
+
+add_test(NAME snow_annual_map_compare_scd_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_annual_map_test/SCD_T31TCH_20180101_20180131.tif"
+    "${OUTPUT_TEST_SNOW_ANNUAL_MAP}/T31TCH_20180101_20180131/SCD_T31TCH_20180101_20180131.tif"
+    )
+  set_tests_properties(snow_annual_map_compare_scd_test PROPERTIES DEPENDS snow_annual_map_test)
+
+add_test(NAME snow_annual_map_compare_ccd_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_annual_map_test/CLOUD_OCCURENCE_T31TCH_20180101_20180131.tif"
+    "${OUTPUT_TEST_SNOW_ANNUAL_MAP}/T31TCH_20180101_20180131/CLOUD_OCCURENCE_T31TCH_20180101_20180131.tif"
+    )
+  set_tests_properties(snow_annual_map_compare_ccd_test PROPERTIES DEPENDS snow_annual_map_test)
+
+add_test(NAME snow_annual_map_compare_nobs_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_annual_map_test/NOBS_T31TCH_20180101_20180131.tif"
+    "${OUTPUT_TEST_SNOW_ANNUAL_MAP}/T31TCH_20180101_20180131/NOBS_T31TCH_20180101_20180131.tif"
+    )
+  set_tests_properties(snow_annual_map_compare_nobs_test PROPERTIES DEPENDS snow_annual_map_test)
+
+add_test(NAME snow_annual_map_compare_sod_test
+    COMMAND gdalcompare.py
+    "${BASELINE}/snow_annual_map_test/SOD_T31TCH_20180101_20180131.tif"
+    "${OUTPUT_TEST_SNOW_ANNUAL_MAP}/T31TCH_20180101_20180131/SOD_T31TCH_20180101_20180131.tif"
+    )
+  set_tests_properties(snow_annual_map_compare_sod_test PROPERTIES DEPENDS snow_annual_map_test)
 
-add_test(NAME snow_annual_map_compare_test
+add_test(NAME snow_annual_map_compare_smod_test
     COMMAND gdalcompare.py
-    "${BASELINE}/snow_annual_map_test/SNOW_OCCURENCE_T31TCH_20180101_20180131.tif"
-    "${OUTPUT_TEST}/snow_annual_map_test/T31TCH_20180101_20180131/SNOW_OCCURENCE_T31TCH_20180101_20180131.tif"
+    "${BASELINE}/snow_annual_map_test/SMOD_T31TCH_20180101_20180131.tif"
+    "${OUTPUT_TEST_SNOW_ANNUAL_MAP}/T31TCH_20180101_20180131/SMOD_T31TCH_20180101_20180131.tif"
     )
-  set_tests_properties(snow_annual_map_compare_test PROPERTIES DEPENDS snow_annual_map_test)
+  set_tests_properties(snow_annual_map_compare_smod_test PROPERTIES DEPENDS snow_annual_map_test)
 
 # add_test(NAME compare_preprocessing_output_test
 #   COMMAND ${CMAKE_COMMAND} -E compare_files
diff --git a/test/cloud_removal_step3_test.py b/test/cloud_removal_step3_test.py
index 23f5a8c4..41dbb7ac 100755
--- a/test/cloud_removal_step3_test.py
+++ b/test/cloud_removal_step3_test.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys 
 import numpy as np
diff --git a/test/cloud_removal_step4_test.py b/test/cloud_removal_step4_test.py
index 16c57569..75d0c9f6 100755
--- a/test/cloud_removal_step4_test.py
+++ b/test/cloud_removal_step4_test.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys 
 import numpy as np
diff --git a/test/histo_utils_snow_fraction_test.cxx b/test/histo_utils_snow_fraction_test.cxx
index 1346c5cb..979f1b68 100644
--- a/test/histo_utils_snow_fraction_test.cxx
+++ b/test/histo_utils_snow_fraction_test.cxx
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 #include "histo_utils.h"
 #include "otbImageFileWriter.h"
diff --git a/test/histo_utils_snowline_internal_test.cxx b/test/histo_utils_snowline_internal_test.cxx
index 3d0e672c..d08ae6ee 100644
--- a/test/histo_utils_snowline_internal_test.cxx
+++ b/test/histo_utils_snowline_internal_test.cxx
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 #include "histo_utils.h"
 #include "itkImageRandomNonRepeatingIteratorWithIndex.h"
diff --git a/test/histo_utils_snowline_test.cxx b/test/histo_utils_snowline_test.cxx
index f6116a73..80312f65 100644
--- a/test/histo_utils_snowline_test.cxx
+++ b/test/histo_utils_snowline_test.cxx
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 #include "histo_utils.h"
 #include <iostream>
diff --git a/test/itkUnaryCloudMaskImageFilterTest.cxx b/test/itkUnaryCloudMaskImageFilterTest.cxx
index 8426f520..76a5999f 100644
--- a/test/itkUnaryCloudMaskImageFilterTest.cxx
+++ b/test/itkUnaryCloudMaskImageFilterTest.cxx
@@ -1,18 +1,22 @@
-/*=========================================================================
-
-  Program:   lis
-  Language:  C++
-
-  Copyright (c) Simon Gascoin
-  Copyright (c) Manuel Grizonnet
-
-  See lis-copyright.txt for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notices for more information.
-
-=========================================================================*/
+/**
+* Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Let-it-snow (LIS)
+*
+*     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 #include "itkUnaryCloudMaskImageFilter.h"
 
diff --git a/test/otbapp_ComputeCloudMask_test.py b/test/otbapp_ComputeCloudMask_test.py
index fd372b51..77edfb1e 100755
--- a/test/otbapp_ComputeCloudMask_test.py
+++ b/test/otbapp_ComputeCloudMask_test.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys
 import logging
diff --git a/test/otbapp_ComputeSnowMask_test.py b/test/otbapp_ComputeSnowMask_test.py
index 392e1f5f..14b1fa29 100755
--- a/test/otbapp_ComputeSnowMask_test.py
+++ b/test/otbapp_ComputeSnowMask_test.py
@@ -1,5 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 import sys
 import logging
diff --git a/test/snow_annual_map_test.py b/test/snow_annual_map_test.py
index 476b8234..fc94bdf3 100644
--- a/test/snow_annual_map_test.py
+++ b/test/snow_annual_map_test.py
@@ -1,5 +1,25 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 
 import os
 import sys
@@ -20,7 +40,7 @@ def main(argv):
         os.mkdir(tmp_path)
 
     # Remove the target file, because the success of the test depends on this file creation
-    target_file = op.join(out_path, "T31TCH_20180101_20180131", "SNOW_OCCURENCE_T31TCH_20180101_20180131.tif")
+    target_file = op.join(out_path, "T31TCH_20180101_20180131", "SCD_T31TCH_20180101_20180131.tif")
     if op.exists(target_file):
         os.remove(target_file)
 
@@ -56,7 +76,7 @@ def main(argv):
     if params.get("run_modis_comparison", False):
         snow_annual_map_evaluation_app.compare_modis()
 
-    if not op.exists(op.join(out_path, "T31TCH_20180101_20180131", "SNOW_OCCURENCE_T31TCH_20180101_20180131.tif")):
+    if not op.exists(op.join(out_path, "T31TCH_20180101_20180131", "SCD_T31TCH_20180101_20180131.tif")):
         logging.error("The target does not exists, the test has failed")
         sys.exit(1)
     logging.info("End snow_annual_map_test")
diff --git a/utils/get_altitude_tiles.sh b/utils/get_altitude_tiles.sh
index 19cbb799..d1478a0e 100644
--- a/utils/get_altitude_tiles.sh
+++ b/utils/get_altitude_tiles.sh
@@ -1,4 +1,24 @@
 #!/bin/bash
+#
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
+#
+# This file is part of Let-it-snow (LIS)
+#
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 p="/work/OT/muscate/prod/muscate_prod/data_production/dataref/"
 echo "Tile Mean_Altitude_Over_L2_Coverage Altitude_Standard_Deviation_Over_L2_Coverage" > Altitude_Over_L2_Coverage.txt
 for i in `ls -d $p/S2__TEST_AUX_REFDE2_T*`
diff --git a/utils/profiling_pass1.5.py b/utils/profiling_pass1.5.py
index 84c87bd3..6ec39c64 100644
--- a/utils/profiling_pass1.5.py
+++ b/utils/profiling_pass1.5.py
@@ -1,20 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-#=========================================================================
 #
-#  Program:   lis
-#  Language:  Python
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
 #
-#  Copyright (c) Simon Gascoin
-#  Copyright (c) Manuel Grizonnet
+# This file is part of Let-it-snow (LIS)
 #
-#  See lis-copyright.txt for details.
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
 #
-#  This software is distributed WITHOUT ANY WARRANTY; without even
-#  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-#  PURPOSE.  See the above copyright notices for more information.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 #
-#=========================================================================
 
 from __future__ import print_function
 import os
diff --git a/utils/snow_line.py b/utils/snow_line.py
index 69b6c3d5..d926a28d 100755
--- a/utils/snow_line.py
+++ b/utils/snow_line.py
@@ -1,20 +1,24 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-#=========================================================================
 #
-#  Program:   lis
-#  Language:  Python
+# Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
 #
-#  Copyright (c) Simon Gascoin
-#  Copyright (c) Manuel Grizonnet
+# This file is part of Let-it-snow (LIS)
 #
-#  See lis-copyright.txt for details.
+#     https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
 #
-#  This software is distributed WITHOUT ANY WARRANTY; without even
-#  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-#  PURPOSE.  See the above copyright notices for more information.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 #
-#=========================================================================
 
 import numpy as np
 from matplotlib import pyplot as plt
@@ -27,14 +31,14 @@ def load_histo(histo_path):
 
         fsnow_rate=v[:,1]/(v[:,1]+v[:,2])
 
-        #print v[:,1]
-        #print v[:,2]
+        #print(v[:,1])
+        #print(v[:,2])
         #b = np.zeros(6).reshape(2, 3)
         
-        #print fsnow_rate
-        print fsnow_rate[0]
-        print fsnow_rate
-        print np.shape(fsnow_rate)[0]
+        #print(fsnow_rate)
+        print(fsnow_rate[0])
+        print(fsnow_rate)
+        print(np.shape(fsnow_rate)[0])
         plt.plot(np.arange(np.shape(fsnow_rate)[0]), fsnow_rate[:], 'ro')
         #plt.axis([0, 6, 0, 20])
         plt.show()
@@ -50,7 +54,7 @@ def print_histo(histo_path):
 	#n, bins, patches = ax1.hist(v_hist, bins=50, normed=1, facecolor='green')
 	#plt.show()
 
-	print v
+	print(v)
 
 	dem=v[:,0]
 	width = 0.8
@@ -74,7 +78,7 @@ def main():
     (opts, args) = parser.parse_args()
 
     if opts.histo_path is None: 
-        print "A mandatory option is missing\n"
+        print("A mandatory option is missing\n")
         parser.print_help()
         exit(-1)
     else:
-- 
GitLab


From 814ad69655ad2c2d901cc69477eaca4646525cf4 Mon Sep 17 00:00:00 2001
From: Aurore Dupuis <aurore.dupuis@gmail.com>
Date: Wed, 3 Jun 2020 23:01:49 +0200
Subject: [PATCH 02/34] Release 1.6.3

---
 python/s2snow/snow_product_parser.py |  4 ++++
 python/s2snow/version.py             |  2 +-
 test/CMakeLists.txt                  | 22 ++++++++++++++++++++++
 3 files changed, 27 insertions(+), 1 deletion(-)

diff --git a/python/s2snow/snow_product_parser.py b/python/s2snow/snow_product_parser.py
index d2aa394d..a14450d6 100644
--- a/python/s2snow/snow_product_parser.py
+++ b/python/s2snow/snow_product_parser.py
@@ -115,11 +115,15 @@ class snow_product:
             if sub_file.upper().endswith("_SNW_R2.TIF"):
                 self.is_extracted = True
                 self.snow_mask = op.join(absoluteFilename, sub_file)
+            if sub_file.upper().endswith("_SNW_XS.TIF"):
+                self.is_extracted = True
+                self.snow_mask = op.join(absoluteFilename, sub_file)
             if sub_file.upper() == "LIS_PRODUCTS":
                 self.is_extracted = True
                 self.extracted_product = op.join(absoluteFilename, sub_file)
                 self.snow_mask = op.join(self.extracted_product, "LIS_SEB.TIF")
 
+
         self.metadata_file = op.join(absoluteFilename,
                                      self.product_name + "_MTD_ALL.xml")
 
diff --git a/python/s2snow/version.py b/python/s2snow/version.py
index 9e1406ac..b88c6a67 100644
--- a/python/s2snow/version.py
+++ b/python/s2snow/version.py
@@ -20,4 +20,4 @@
 # limitations under the License.
 #
 
-VERSION = "1.6.2"
+VERSION = "1.6.3"
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 2c04c85f..7ee857c3 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -186,6 +186,28 @@ add_test(NAME snow_annual_map_compare_smod_test
     )
   set_tests_properties(snow_annual_map_compare_smod_test PROPERTIES DEPENDS snow_annual_map_test)
 
+set(OUTPUT_TEST_SNOW_ANNUAL_MAP_MUSCATE ${OUTPUT_TEST}/SNOW_ANNUAL_MAP_MUSCATE)
+add_test(NAME snow_annual_map_muscate_json_builder_test
+  COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/build_snow_annual_map_json.py
+  "T31TDJ"
+  "[/work/OT/siaa/Theia/Neige/TEST/Data-LIS-1.6/Input-Data-Test/SNOW_PRODUCTS/SENTINEL2A_20200410-105852-276_L2B-SNOW_T31TDJ_C_V1-7]"
+  "${OUTPUT_TEST_SNOW_ANNUAL_MAP_MUSCATE}"
+  "01/09/2019"
+  "31/08/2020"
+  "--date_margin=0"
+  "--nb_threads=8"
+  "--use_densification"
+  "--densification_products_list=[/work/OT/siaa/Theia/Neige/TEST/Data-LIS-1.6/Input-Data-Test/SNOW_PRODUCTS/LANDSAT8-OLITIRS-XS_20200410-103540-665_L2B-SNOW_T31TDJ_C_V1-7]"
+  "--debug"
+  )
+
+add_test(NAME snow_annual_map_muscate_test
+  COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/run_snow_annual_map.py
+    "${OUTPUT_TEST_SNOW_ANNUAL_MAP_MUSCATE}/snow_annual_map.json"
+     )
+  set_tests_properties(snow_annual_map_muscate_test PROPERTIES DEPENDS snow_annual_map_muscate_json_builder_test)
+  set_tests_properties(snow_annual_map_muscate_test PROPERTIES TIMEOUT 3000)
+
 set(OUTPUT_TEST_SNOW_ANNUAL_MAP_WITHOUT_DENSIFICATION ${OUTPUT_TEST}/SNOW_ANNUAL_MAP_WITHOUT_DENSIFICATION)
 add_test(NAME snow_annual_without_densification_map_json_builder_test
   COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/build_snow_annual_map_json.py
-- 
GitLab


From 7704328758df460c1bca6ad498ef60cffe8a3494 Mon Sep 17 00:00:00 2001
From: Aurore Dupuis <aurore.dupuis@gmail.com>
Date: Fri, 3 Jul 2020 15:04:24 +0200
Subject: [PATCH 03/34] Resolve "Run snow annual map logs are not written into
 the log file"

---
 app/run_snow_annual_map.py | 40 ++++++++++++++++++++++++--------------
 python/s2snow/sdfsdf       |  0
 python/s2snow/version.py   |  2 +-
 3 files changed, 26 insertions(+), 16 deletions(-)
 create mode 100644 python/s2snow/sdfsdf

diff --git a/app/run_snow_annual_map.py b/app/run_snow_annual_map.py
index 841efd90..12195b4a 100644
--- a/app/run_snow_annual_map.py
+++ b/app/run_snow_annual_map.py
@@ -56,34 +56,42 @@ def main(argv):
     """ main script of snow extraction procedure"""
     json_file = argv[1]
 
-    # Set logging level and format.
-    logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
-                        format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
-    logging.info("Start run_snow_annual_map.py")
-    logging.info("Input args = {}".format(json_file))
-
-    logging.info("Load parameters")
     # ----------------------------------------------------------------------------------------
     # Set parameters
     # ----------------------------------------------------------------------------------------
     with open(json_file) as json_data_file:
         params = json.load(json_data_file)
 
+    output_dir = params.get("path_out")
+    if not os.path.exists(output_dir):
+        logging.info("Create directory {} ...".format(output_dir))
+        os.makedirs(output_dir)
+
+    log = params.get("log", True)
+    stdout_file = op.join(output_dir, "stdout.log")
+    stderr_file = op.join(output_dir, "stderr.log")
+    if log:
+        sys.stdout = open(params.get("log_stdout", stdout_file), 'w')
+        sys.stderr = open(params.get("log_stderr", stderr_file), 'w')
+
+    # Set logging level and format.
     mode = params.get("mode", "RUNTIME")
     if mode == "DEBUG":
         logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, \
                             format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
         logging.debug("Debug is enabled")
+    else:
+        logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
+                            format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+
+    logging.info("Start run_snow_annual_map.py")
+    logging.info("Input args = {}".format(json_file))
 
     tile_id = params.get("tile_id")
     date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
     date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
     date_margin = timedelta(days=params.get("date_margin", 0))
     logging.debug("Date margin: {}".format(date_margin))
-    output_dir = params.get("path_out")
-    if not os.path.exists(output_dir):
-        logging.info("Create directory {} ...".format(output_dir))
-        os.makedirs(output_dir)
 
     processing_id = str(tile_id + "_" + datetime_to_str(date_start) + "_" + datetime_to_str(date_stop))
     path_out = op.join(output_dir, processing_id)
@@ -116,10 +124,6 @@ def main(argv):
         densification_path_list = []
 
 
-    log = params.get("log", True)
-    if log:
-        sys.stdout = open(params.get("log_stdout", op.join(path_out, "stdout.log")), 'w')
-        sys.stderr = open(params.get("log_stderr", op.join(path_out, "stderr.log")), 'w')
 
     # ----------------------------------------------------------------------------------------
     # Search snow products
@@ -214,6 +218,9 @@ def main(argv):
     shutil.copy2(nobs_output_file, path_out)
     shutil.copy2(smod_file, path_out)
     shutil.copy2(sod_file, path_out)
+    if log:
+        shutil.copy2(stdout_file, path_out)
+        shutil.copy2(stderr_file, path_out)
 
     os.remove(input_dates_file_path)
     os.remove(output_dates_file_path)
@@ -224,6 +231,9 @@ def main(argv):
     os.remove(nobs_output_file)
     os.remove(smod_file)
     os.remove(sod_file)
+    if log:
+        os.remove(stdout_file)
+        os.remove(stderr_file)
 
     logging.info("End run_snow_annual_map.py")
 
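The reordering above matters because `logging.basicConfig` binds to whatever object `sys.stdout` references at call time; redirecting the streams afterwards, as the old code did, leaves log records on the original console stream. A standalone sketch of the redirect-then-configure pattern, using a hypothetical output directory in place of the JSON `path_out` parameter:

```python
import logging
import os
import sys

# Hypothetical output directory; in LIS it comes from the JSON config ("path_out").
output_dir = "/tmp/snow_annual_map"
os.makedirs(output_dir, exist_ok=True)

# 1) Redirect the process streams first...
sys.stdout = open(os.path.join(output_dir, "stdout.log"), "w")
sys.stderr = open(os.path.join(output_dir, "stderr.log"), "w")

# 2) ...then configure logging, so it binds to the redirected stream.
logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                    format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
logging.info("This record lands in stdout.log")
```
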
diff --git a/python/s2snow/sdfsdf b/python/s2snow/sdfsdf
new file mode 100644
index 00000000..e69de29b
diff --git a/python/s2snow/version.py b/python/s2snow/version.py
index b88c6a67..0d2111f5 100644
--- a/python/s2snow/version.py
+++ b/python/s2snow/version.py
@@ -20,4 +20,4 @@
 # limitations under the License.
 #
 
-VERSION = "1.6.3"
+VERSION = "1.6.5"
-- 
GitLab


From 6d2a245423a4487da1dce6c5dcda307b1c123c08 Mon Sep 17 00:00:00 2001
From: Aurore Dupuis <aurore.dupuis@cnes.fr>
Date: Mon, 6 Jul 2020 11:54:49 +0200
Subject: [PATCH 04/34] Remove python/s2snow/sdfsdf file

---
 python/s2snow/sdfsdf | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 python/s2snow/sdfsdf

diff --git a/python/s2snow/sdfsdf b/python/s2snow/sdfsdf
deleted file mode 100644
index e69de29b..00000000
-- 
GitLab


From fd2d249d292316e08de0ca65a8964023cc319067 Mon Sep 17 00:00:00 2001
From: Aurore Dupuis <aurore.dupuis@gmail.com>
Date: Mon, 6 Jul 2020 11:56:43 +0200
Subject: [PATCH 05/34] Patch 1.6.5

---
 app/run_snow_annual_map.py | 40 ++++++++++++++++++++++++--------------
 python/s2snow/version.py   |  2 +-
 2 files changed, 26 insertions(+), 16 deletions(-)

diff --git a/app/run_snow_annual_map.py b/app/run_snow_annual_map.py
index 717e6fbd..f54355b1 100644
--- a/app/run_snow_annual_map.py
+++ b/app/run_snow_annual_map.py
@@ -56,34 +56,42 @@ def main(argv):
     """ main script of snow extraction procedure"""
     json_file = argv[1]
 
-    # Set logging level and format.
-    logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
-                        format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
-    logging.info("Start run_snow_annual_map.py")
-    logging.info("Input args = {}".format(json_file))
-
-    logging.info("Load parameters")
     # ----------------------------------------------------------------------------------------
     # Set parameters
     # ----------------------------------------------------------------------------------------
     with open(json_file) as json_data_file:
         params = json.load(json_data_file)
 
+    output_dir = params.get("path_out")
+    if not os.path.exists(output_dir):
+        logging.info("Create directory {} ...".format(output_dir))
+        os.makedirs(output_dir)
+
+    log = params.get("log", True)
+    stdout_file = op.join(output_dir, "stdout.log")
+    stderr_file = op.join(output_dir, "stderr.log")
+    if log:
+        sys.stdout = open(params.get("log_stdout", stdout_file), 'w')
+        sys.stderr = open(params.get("log_stderr", stderr_file), 'w')
+
+    # Set logging level and format.
     mode = params.get("mode", "RUNTIME")
     if mode == "DEBUG":
         logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, \
                             format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
         logging.debug("Debug is enabled")
+    else:
+        logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
+                            format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
+
+    logging.info("Start run_snow_annual_map.py")
+    logging.info("Input args = {}".format(json_file))
 
     tile_id = params.get("tile_id")
     date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
     date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
     date_margin = timedelta(days=params.get("date_margin", 0))
     logging.debug("Date margin: {}".format(date_margin))
-    output_dir = params.get("path_out")
-    if not os.path.exists(output_dir):
-        logging.info("Create directory {} ...".format(output_dir))
-        os.makedirs(output_dir)
 
     processing_id = str(tile_id + "_" + datetime_to_str(date_start) + "_" + datetime_to_str(date_stop))
     path_out = op.join(output_dir, processing_id)
@@ -116,10 +124,6 @@ def main(argv):
         densification_path_list = []
 
 
-    log = params.get("log", True)
-    if log:
-        sys.stdout = open(params.get("log_stdout", op.join(path_out, "stdout.log")), 'w')
-        sys.stderr = open(params.get("log_stderr", op.join(path_out, "stderr.log")), 'w')
 
     # ----------------------------------------------------------------------------------------
     # Search snow products
@@ -214,6 +218,9 @@ def main(argv):
     shutil.copy2(nobs_output_file, path_out)
     shutil.copy2(smod_file, path_out)
     shutil.copy2(sod_file, path_out)
+    if log:
+        shutil.copy2(stdout_file, path_out)
+        shutil.copy2(stderr_file, path_out)
 
     os.remove(input_dates_file_path)
     os.remove(output_dates_file_path)
@@ -224,6 +231,9 @@ def main(argv):
     os.remove(nobs_output_file)
     os.remove(smod_file)
     os.remove(sod_file)
+    if log:
+        os.remove(stdout_file)
+        os.remove(stderr_file)
 
 
 def create_snow_annual_map_metadata(product_list, path_out):
diff --git a/python/s2snow/version.py b/python/s2snow/version.py
index b88c6a67..0d2111f5 100644
--- a/python/s2snow/version.py
+++ b/python/s2snow/version.py
@@ -20,4 +20,4 @@
 # limitations under the License.
 #
 
-VERSION = "1.6.3"
+VERSION = "1.6.5"
-- 
GitLab


From 62e46b9b0b0166b690a02d5f4237290cbbd20616 Mon Sep 17 00:00:00 2001
From: Aurore Dupuis <aurore.dupuis@gmail.com>
Date: Tue, 21 Jul 2020 08:54:24 +0000
Subject: [PATCH 06/34] Merge branch '69-add-compression-to-syntheses' into
 'develop'

Resolve "add compression to syntheses"

Closes #69

See merge request remote_modules/let-it-snow!61

(cherry picked from commit e34015cc05eb867f9088cb7e2ecf420683db7b3d)

3c3b599a add compression to snow annual map
bd079a61 add compression to SMOD, SOD
02d3832b Update compute_NOBS.py
---
 python/s2snow/compute_NOBS.py     | 1 +
 python/s2snow/compute_SOD_SMOD.py | 1 +
 python/s2snow/snow_annual_map.py  | 2 +-
 3 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/python/s2snow/compute_NOBS.py b/python/s2snow/compute_NOBS.py
index d8140c87..0a1245aa 100644
--- a/python/s2snow/compute_NOBS.py
+++ b/python/s2snow/compute_NOBS.py
@@ -57,6 +57,7 @@ def compute_NOBS(input_file, output_file=None):
         profile.update(
             dtype=rasterio.uint16,
             driver='GTiff',
+            compress='deflate',
             count=1)
 
         with rasterio.open(output_file, 'w', **profile) as dst:
diff --git a/python/s2snow/compute_SOD_SMOD.py b/python/s2snow/compute_SOD_SMOD.py
index 42bb4e14..0671bc28 100644
--- a/python/s2snow/compute_SOD_SMOD.py
+++ b/python/s2snow/compute_SOD_SMOD.py
@@ -71,6 +71,7 @@ def compute_SOD_SMOD(input_file, sod_file=None, smod_file=None):
         profile = src.profile
         profile.update(
             dtype=rasterio.uint16,
+            compress='deflate',
             count=1)
 
         with rasterio.open(smod_file, 'w', **profile) as dst:
diff --git a/python/s2snow/snow_annual_map.py b/python/s2snow/snow_annual_map.py
index d868dbde..bd93fcb6 100644
--- a/python/s2snow/snow_annual_map.py
+++ b/python/s2snow/snow_annual_map.py
@@ -102,7 +102,7 @@ def compute_SCD(binary_snow_mask_list, multitemp_cloud_vrt, input_dates_filename
     expression = "+".join(["im1b" + str(i) for i in band_index])
     logging.debug("expression: {}".format(expression))
     bandMathApp = band_math([gapfilled_timeserie],
-                            snow_coverage_duration,
+                            snow_coverage_duration + "?&gdal:co:COMPRESS=DEFLATE",
                             expression,
                             ram,
                             otb.ImagePixelType_uint16)
-- 
GitLab
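
Note on the extended filename above: in OTB, a "?&gdal:co:KEY=VALUE"
suffix on an output path is stripped from the file name and forwarded to
GDAL as a creation option, so compression is requested without changing
the processing pipeline. A hypothetical standalone equivalent of the
BandMath call, assuming the otbApplication Python bindings are installed:

    import otbApplication as otb

    app = otb.Registry.CreateApplication("BandMath")
    app.SetParameterStringList("il", ["gapfilled_timeserie.tif"])
    # The ?&gdal:co:... suffix is stripped from the file name and applied
    # as a GDAL creation option when the image is written.
    app.SetParameterString("out",
                           "snow_coverage_duration.tif?&gdal:co:COMPRESS=DEFLATE")
    app.SetParameterOutputImagePixelType("out", otb.ImagePixelType_uint16)
    app.SetParameterString("exp", "im1b1+im1b2")
    app.ExecuteAndWriteOutput()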


From d92a73845f6f6701d88f25c06019fa823c7b253d Mon Sep 17 00:00:00 2001
From: Aurore Dupuis <aurore.dupuis@gmail.com>
Date: Wed, 22 Jul 2020 15:15:12 +0200
Subject: [PATCH 07/34] Update version.py

---
 python/s2snow/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/s2snow/version.py b/python/s2snow/version.py
index 0d2111f5..675b20c3 100644
--- a/python/s2snow/version.py
+++ b/python/s2snow/version.py
@@ -20,4 +20,4 @@
 # limitations under the License.
 #
 
-VERSION = "1.6.5"
+VERSION = "1.6.6"
-- 
GitLab


From a6475ece62c906d4f69183f84111da101cb0fb73 Mon Sep 17 00:00:00 2001
From: raillece <celine.Raille@thalesgroup.com>
Date: Tue, 5 Dec 2023 15:39:24 +0100
Subject: [PATCH 08/34] update ubuntu and gdal versions in Dockerfile

---
 Dockerfile          | 34 +++++++++++++++-------------------
 test/CMakeLists.txt |  4 ++--
 2 files changed, 17 insertions(+), 21 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 1e1316f8..8235d164 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,14 +1,13 @@
 ARG REGISTRY_URL
-FROM ${REGISTRY_URL}ubuntu:20.04 as builder
-# Note : DOCKER_BUILDKIT=1 required
+FROM ${REGISTRY_URL}ubuntu:22.04 as builder
 
 # Reference : https://github.com/opencontainers/image-spec/blob/main/annotations.md
-LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu"
+LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr celine.raille@thalesgroup.com"
 LABEL org.opencontainers.image.description="LIS Build container"
 
 # Mount the temporary volume and use the CNES site for apt
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    apt-get update -y && \
+	apt-get update -y && \
     apt-get install -y ca-certificates 
 
 # Add certificates
@@ -17,7 +16,7 @@ RUN update-ca-certificates
 
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    apt-get update -y --quiet && \
+	apt-get update -y --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         # basic system installs
         build-essential \
@@ -54,7 +53,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 
 # Build OTB    
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    mkdir -p /root/otb-build/build && \
+	mkdir -p /root/otb-build/build && \
     cd /root/otb-build && \
     wget -q --ca-certificate=/usr/local/share/ca-certificates/ca-bundle.crt https://www.orfeo-toolbox.org/packages/archives/OTB/OTB-7.4.1.zip -O /tmp/OTB.zip && \
     unzip /tmp/OTB.zip && \
@@ -81,7 +80,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 
  # Build wxWidgets
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    mkdir -p /root/wxwidgets-build/ && \
+	mkdir -p /root/wxwidgets-build/ && \
     cd /root/wxwidgets-build && \
     git clone --branch v3.2.2 https://github.com/wxWidgets/wxWidgets.git && \
     cd wxWidgets && \
@@ -93,13 +92,13 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 
  # Build saga-gis
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    mkdir -p /root/saga-gis-build/ && \
+	mkdir -p /root/saga-gis-build/ && \
     cd /root/saga-gis-build && \
     git clone --branch saga-8.4.2 https://github.com/saga-gis/saga-gis.git && \
     mkdir saga-gis/build && cd saga-gis/build && \
     cmake -DCMAKE_INSTALL_PREFIX=/install/saga -DCMAKE_PREFIX_PATH=/install/wxWidgets ../saga-gis && \
     make install -j4
-
+	
 # Build LIS
 ADD . /LIS_src/
 RUN  ln -s /usr/bin/python3 /usr/bin/python && \
@@ -113,7 +112,7 @@ RUN  ln -s /usr/bin/python3 /usr/bin/python && \
 
 # Build DANS-GDAL scripts
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    mkdir -p /root/dans-build && \
+	mkdir -p /root/dans-build && \
     cd /root/dans-build && \
     wget -q --ca-certificate=/usr/local/share/ca-certificates/ca-bundle.crt https://github.com/gina-alaska/dans-gdal-scripts/archive/refs/heads/master.zip -O /tmp/dans.zip && \
     unzip /tmp/dans.zip && \
@@ -127,33 +126,30 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 
 ##############################
 #ARG REGISTRY_URL
-FROM ${REGISTRY_URL}ubuntu:20.04
+FROM ${REGISTRY_URL}ubuntu:22.04
 
 LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu"
 LABEL org.opencontainers.image.description="LIS + OTB 7.4 Container"
 
 # system packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    apt-get update --quiet && \
+	apt-get update --quiet && \
     apt-get -y upgrade --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         python3 \
         python-is-python3 \
         python3-pip \
-        python3-gdal \
+		python3-gdal \
         python3-lxml \
-        python3-numpy \
-        gdal-bin \
         file \
+		gdal-bin \
         libpython3.8 \
         libinsighttoolkit4.13 \
         libopenthreads21 \
-        libossim1 \
         libtinyxml2.6.2v5 \
         libmuparser2v5 \
-        libmuparserx4.0.7 \
+        libmuparserx4.0.8 \
         libfftw3-3 \
-        libgsl23 \
         libgslcblas0 \
         libgtk-3-0 \
         libpcre2-32-0 \
@@ -183,7 +179,7 @@ COPY --from=builder /install/dans /usr/local
 
 # Add additional dependencies
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    pip3 install \
+	pip3 install \
     --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org \
     --no-cache-dir \
     numpy \
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 2117556b..f9cbfda7 100755
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -426,7 +426,7 @@ add_test(NAME snow_synthesis_compare_smod_test
   )
 set_tests_properties(snow_synthesis_compare_smod_test PROPERTIES DEPENDS snow_synthesis_test)
 
-if(NOT JENKINS_BUILD MATCHES "true")
+if(NOT CI_PIPELINE_SOURCE MATCHES "web")
   add_test(NAME snow_synthesis_muscate_test
     COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/let_it_snow_synthesis.py
     -t T31TDJ
@@ -455,7 +455,7 @@ add_test(NAME snow_synthesis_without_densification_test
   )
 
 
-if(NOT JENKINS_BUILD MATCHES "true")
+if(NOT CI_PIPELINE_SOURCE MATCHES "web")
   # ----------------------------------
   # Synthesis from 1.7 snow product
   # ----------------------------------
-- 
GitLab


From 35165bb4de9a875bc060a8239005ad50a5b41da4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 5 Dec 2023 14:51:58 +0000
Subject: [PATCH 09/34] Update Dockerfile

---
 Dockerfile | 1 -
 1 file changed, 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 8235d164..35ae06e0 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -41,7 +41,6 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libboost-thread-dev \
         libinsighttoolkit4-dev \
         libopenthreads-dev \
-        libossim-dev \
         libtinyxml-dev \
         libmuparser-dev \
         libmuparserx-dev \
-- 
GitLab


From b8304622be6882524d1c48ed4b3a348f060c08d7 Mon Sep 17 00:00:00 2001
From: raillece <celine.Raille@thalesgroup.com>
Date: Thu, 7 Dec 2023 09:02:34 +0100
Subject: [PATCH 10/34] docker: ubuntu 20.04 and gdal 3.3.2

---
 Dockerfile | 45 ++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 38 insertions(+), 7 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 8235d164..a48edf10 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,6 @@
 ARG REGISTRY_URL
-FROM ${REGISTRY_URL}ubuntu:22.04 as builder
+FROM ${REGISTRY_URL}ubuntu:20.04 as builder
+# Note : DOCKER_BUILDKIT=1 required
 
 # Reference : https://github.com/opencontainers/image-spec/blob/main/annotations.md
 LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr celine.raille@thalesgroup.com"
@@ -17,6 +18,10 @@ RUN update-ca-certificates
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	apt-get update -y --quiet && \
+	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
+		software-properties-common && \
+	add-apt-repository ppa:ubuntugis/ppa --yes && \
+	apt-get update --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         # basic system installs
         build-essential \
@@ -48,11 +53,23 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libfftw3-dev \
         libgsl-dev \
         libgtk-3-dev \
+		mono-runtime-common \
+		&& \
+	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
+		## test \
+		python3-vtkgdcm \
+		libvtkgdcm-cil \
+		libvtkgdcm-java \
+		libgdcm-tools \
         && \
     rm -rf /var/lib/apt/lists/*
 
 # Build OTB    
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
+	find /usr/lib/ -name "*vtkgdcm*" && \
+	ln -s /usr/lib/cli/vtkgdcm-sharp-3.0/libvtkgdcmsharpglue.so /usr/lib/x86_64-linux-gnu/libvtkgdcmsharpglue.so && \
+	mkdir -p /usr/lib/python/dist-packages && \
+	ln -s /usr/lib/python3/dist-packages/vtkgdcmPython.cpython-38-x86_64-linux-gnu.so /usr/lib/python/dist-packages/vtkgdcmPython.so && \
 	mkdir -p /root/otb-build/build && \
     cd /root/otb-build && \
     wget -q --ca-certificate=/usr/local/share/ca-certificates/ca-bundle.crt https://www.orfeo-toolbox.org/packages/archives/OTB/OTB-7.4.1.zip -O /tmp/OTB.zip && \
@@ -126,30 +143,30 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 
 ##############################
 #ARG REGISTRY_URL
-FROM ${REGISTRY_URL}ubuntu:22.04
+FROM ${REGISTRY_URL}ubuntu:20.04
 
-LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu"
+LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr celine.raille@thalesgroup.com"
 LABEL org.opencontainers.image.description="LIS + OTB 7.4 Container"
 
 # system packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	apt-get update --quiet && \
-    apt-get -y upgrade --quiet && \
+	apt-get -y upgrade --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         python3 \
         python-is-python3 \
         python3-pip \
-		python3-gdal \
         python3-lxml \
         file \
-		gdal-bin \
         libpython3.8 \
         libinsighttoolkit4.13 \
         libopenthreads21 \
+        libossim1 \
         libtinyxml2.6.2v5 \
         libmuparser2v5 \
-        libmuparserx4.0.8 \
+        libmuparserx4.0.7 \
         libfftw3-3 \
+        libgsl23 \
         libgslcblas0 \
         libgtk-3-0 \
         libpcre2-32-0 \
@@ -189,3 +206,17 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     fiona \
     pyyaml \
     rasterio
+	
+# system packages
+RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
+	apt-get update --quiet && \
+	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
+		software-properties-common && \
+	add-apt-repository ppa:ubuntugis/ppa --yes && \
+	apt-get update --quiet && \
+    DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
+		libgdal-dev \
+		python3-gdal \
+		gdal-bin \
+        && \
+    rm -rf /var/lib/apt/lists/*
-- 
GitLab


From 263df12b26a6e8e237bc20eccf2148427dd17566 Mon Sep 17 00:00:00 2001
From: raillece <celine.Raille@thalesgroup.com>
Date: Thu, 7 Dec 2023 09:04:47 +0100
Subject: [PATCH 11/34] update Dockerfile

---
 Dockerfile | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Dockerfile b/Dockerfile
index 75d6bad3..d1a93b9b 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -46,6 +46,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libboost-thread-dev \
         libinsighttoolkit4-dev \
         libopenthreads-dev \
+		libossim-dev \
         libtinyxml-dev \
         libmuparser-dev \
         libmuparserx-dev \
-- 
GitLab


From 22539c5ba443ec09c4689faed3de525e662b2534 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Thu, 7 Dec 2023 10:39:59 +0000
Subject: [PATCH 12/34] Update Dockerfile

---
 Dockerfile | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index d1a93b9b..02ef60e9 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,7 +21,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
 		software-properties-common && \
 	add-apt-repository ppa:ubuntugis/ppa --yes && \
-	apt-get update --quiet && \
+	apt-get update -y --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         # basic system installs
         build-essential \
@@ -209,11 +209,11 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 	
 # system packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-	apt-get update --quiet && \
+	apt-get update -y --quiet && \
 	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
 		software-properties-common && \
 	add-apt-repository ppa:ubuntugis/ppa --yes && \
-	apt-get update --quiet && \
+	apt-get update -y --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
 		libgdal-dev \
 		python3-gdal \
-- 
GitLab


From 538283c095aa2f7c5393ea44de901b3a1cf36ee2 Mon Sep 17 00:00:00 2001
From: raillece <celine.Raille@thalesgroup.com>
Date: Fri, 8 Dec 2023 11:12:35 +0100
Subject: [PATCH 13/34] fix gdal installation dockerfile

---
 Dockerfile | 85 +++++++++++++++++++++++++++++++++---------------------
 1 file changed, 52 insertions(+), 33 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index d1a93b9b..9571d2d7 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,5 @@
 ARG REGISTRY_URL
 FROM ${REGISTRY_URL}ubuntu:20.04 as builder
-# Note : DOCKER_BUILDKIT=1 required
 
 # Reference : https://github.com/opencontainers/image-spec/blob/main/annotations.md
 LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr celine.raille@thalesgroup.com"
@@ -18,16 +17,11 @@ RUN update-ca-certificates
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	apt-get update -y --quiet && \
-	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
-		software-properties-common && \
-	add-apt-repository ppa:ubuntugis/ppa --yes && \
-	apt-get update --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         # basic system installs
         build-essential \
         python3 \
         wget \
-        ca-certificates \
         unzip \
         # packages needed for compilation
         cmake \
@@ -35,18 +29,11 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         ninja-build \
         python3-dev \
         git\
+		pkg-config \
         python3-numpy \
-        libgdal-dev \
-        freeglut3-dev \
-        libboost-date-time-dev \
-        libboost-filesystem-dev \
-        libboost-graph-dev \
-        libboost-program-options-dev \
-        libboost-system-dev \
-        libboost-thread-dev \
         libinsighttoolkit4-dev \
         libopenthreads-dev \
-		libossim-dev \
+        libossim-dev \
         libtinyxml-dev \
         libmuparser-dev \
         libmuparserx-dev \
@@ -54,19 +41,27 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libgsl-dev \
         libgtk-3-dev \
 		mono-runtime-common \
+		software-properties-common \
 		&& \
 	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
-		## test \
+		# optional packages for OTB compilation \
 		python3-vtkgdcm \
 		libvtkgdcm-cil \
 		libvtkgdcm-java \
 		libgdcm-tools \
         && \
+	add-apt-repository ppa:ubuntugis/ppa --yes && \
+	apt-get update -y --quiet && \
+	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
+	    # GDAL \
+        libgdal-dev \
+        python3-gdal \
+		gdal-bin \
+        && \
     rm -rf /var/lib/apt/lists/*
 
 # Build OTB    
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-	find /usr/lib/ -name "*vtkgdcm*" && \
 	ln -s /usr/lib/cli/vtkgdcm-sharp-3.0/libvtkgdcmsharpglue.so /usr/lib/x86_64-linux-gnu/libvtkgdcmsharpglue.so && \
 	mkdir -p /usr/lib/python/dist-packages && \
 	ln -s /usr/lib/python3/dist-packages/vtkgdcmPython.cpython-38-x86_64-linux-gnu.so /usr/lib/python/dist-packages/vtkgdcmPython.so && \
@@ -159,6 +154,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         python3-lxml \
         file \
         libpython3.8 \
+		# for OTB \
         libinsighttoolkit4.13 \
         libopenthreads21 \
         libossim1 \
@@ -168,10 +164,47 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libfftw3-3 \
         libgsl23 \
         libgslcblas0 \
+		# for sagagis \
         libgtk-3-0 \
         libpcre2-32-0 \
+		# for gdal \
+		libarmadillo9 \
+		libheif1 \
+		libpoppler97 \
+		libjson-c4 \
+		libfreexl1 \
+		libqhull7 \
+		libgeos-c1v5 \
+		libkmlbase1 \
+		libkmldom1 \
+		libkmlengine1 \
+		libxerces-c3.2 \
+		libnetcdf15 \
+		libhdf4-0-alt \
+		libogdi4.1 \
+		libgif7 \
+		libcharls2 \
+		libcfitsio8 \
+		libpq5 \
+		libdeflate0 \
+		libfyba0 \
+		libmysqlclient21 \
+		libltdl7 \
+		librttopo1 \
         && \
     rm -rf /var/lib/apt/lists/*
+	
+# install GDAL from builder
+COPY --from=builder /usr/include/gdal /usr/include/gdal
+COPY --from=builder /usr/bin/gdal* /usr/bin
+COPY --from=builder /usr/lib/libgdal* /usr/lib
+COPY --from=builder /usr/lib/python3/dist-packages/osgeo /usr/lib/python3/dist-packages/osgeo
+COPY --from=builder /usr/lib/python3/dist-packages/osgeo_utils /usr/lib/python3/dist-packages/osgeo_utils
+COPY --from=builder /usr/lib/python3/dist-packages/gdal* /usr/lib/python3/dist-packages
+COPY --from=builder /usr/lib/x86_64-linux-gnu/libodbc* /usr/lib/x86_64-linux-gnu/
+COPY --from=builder /usr/lib/x86_64-linux-gnu/libproj* /usr/lib/x86_64-linux-gnu/
+COPY --from=builder /usr/lib/x86_64-linux-gnu/libspatialite* /usr/lib/x86_64-linux-gnu/
+COPY --from=builder /usr/lib/x86_64-linux-gnu/*geotiff* /usr/lib/x86_64-linux-gnu/
 
 # install OTB from builder
 COPY --from=builder /install/otb /usr/local
@@ -185,7 +218,7 @@ COPY --from=builder /install/wxWidgets /usr/local
 
 # install saga-gis from builder
 COPY --from=builder /install/saga /usr/local
-copy --from=builder /root/saga-gis-build/saga-gis/build/src/saga_core/saga_api/saga_api_python /usr/local/lib/python3.8/site-packages/
+COPY --from=builder /root/saga-gis-build/saga-gis/build/src/saga_core/saga_api/saga_api_python /usr/local/lib/python3.8/site-packages/
 
 # install LIS from builder
 COPY --from=builder /install/lis /usr/local
@@ -205,18 +238,4 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     pyproj \
     fiona \
     pyyaml \
-    rasterio
-	
-# system packages
-RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-	apt-get update --quiet && \
-	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
-		software-properties-common && \
-	add-apt-repository ppa:ubuntugis/ppa --yes && \
-	apt-get update --quiet && \
-    DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
-		libgdal-dev \
-		python3-gdal \
-		gdal-bin \
-        && \
-    rm -rf /var/lib/apt/lists/*
+    rasterio
\ No newline at end of file
-- 
GitLab


From a581fa9144fbf51c260230e3dca500afc718c2ec Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Fri, 8 Dec 2023 12:06:15 +0000
Subject: [PATCH 14/34] Update Dockerfile

---
 Dockerfile | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index b084fe72..95973110 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,6 +17,9 @@ RUN update-ca-certificates
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	apt-get update -y --quiet && \
+    DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends software-properties-common && \
+    add-apt-repository ppa:ubuntugis/ppa --yes && \
+	apt-get update -y --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         # basic system installs
         build-essential \
@@ -41,7 +44,10 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libgsl-dev \
         libgtk-3-dev \
 		mono-runtime-common \
-		software-properties-common \
+		# GDAL \
+        libgdal-dev \
+        python3-gdal \
+		gdal-bin \
 		&& \
 	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
 		# optional packages for OTB compilation \
@@ -49,15 +55,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 		libvtkgdcm-cil \
 		libvtkgdcm-java \
 		libgdcm-tools \
-        && \
-	add-apt-repository ppa:ubuntugis/ppa --yes && \
-	apt-get update -y --quiet && \
-	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
-	    # GDAL \
-        libgdal-dev \
-        python3-gdal \
-		gdal-bin \
-        && \
+        && \        
     rm -rf /var/lib/apt/lists/*
 
 # Build OTB    
@@ -200,7 +198,6 @@ COPY --from=builder /usr/bin/gdal* /usr/bin
 COPY --from=builder /usr/lib/libgdal* /usr/lib
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo /usr/lib/python3/dist-packages/osgeo
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo_utils /usr/lib/python3/dist-packages/osgeo_utils
-COPY --from=builder /usr/lib/python3/dist-packages/gdal* /usr/lib/python3/dist-packages
 COPY --from=builder /usr/lib/x86_64-linux-gnu/libodbc* /usr/lib/x86_64-linux-gnu/
 COPY --from=builder /usr/lib/x86_64-linux-gnu/libproj* /usr/lib/x86_64-linux-gnu/
 COPY --from=builder /usr/lib/x86_64-linux-gnu/libspatialite* /usr/lib/x86_64-linux-gnu/
-- 
GitLab


From 4811fa8c64821e0d04f061de30b5f06395b61d23 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Fri, 8 Dec 2023 13:01:50 +0000
Subject: [PATCH 15/34] Remove GCDM from Dockerfile

---
 Dockerfile | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 95973110..65fceefc 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -48,14 +48,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libgdal-dev \
         python3-gdal \
 		gdal-bin \
-		&& \
-	DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
-		# optional packages for OTB compilation \
-		python3-vtkgdcm \
-		libvtkgdcm-cil \
-		libvtkgdcm-java \
-		libgdcm-tools \
-        && \        
+		&& \       
     rm -rf /var/lib/apt/lists/*
 
 # Build OTB    
-- 
GitLab


From 30b25e20c79400c3201a567155da5442d6c52427 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Fri, 8 Dec 2023 13:42:17 +0000
Subject: [PATCH 16/34] Get gdal dependencies from builder docker

---
 Dockerfile | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 65fceefc..09ed58ca 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -126,6 +126,12 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     make install && \
     rm -rf /root/dans-build /tmp/dans.zip
 
+# Keep GDAL dependencies from ppa
+RUN mkdir -p /install/gdal/ && \
+    cp /usr/lib/x86_64-linux-gnu/libodbc* /install/gdal/ && \
+    cp /usr/lib/x86_64-linux-gnu/libproj* /install/gdal/ && \
+    cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/gdal/ && \
+    cp /usr/lib/x86_64-linux-gnu/*geotiff* /install/gdal/
 
 ##############################
 #ARG REGISTRY_URL
@@ -186,15 +192,11 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     rm -rf /var/lib/apt/lists/*
 	
 # install GDAL from builder
-COPY --from=builder /usr/include/gdal /usr/include/gdal
+COPY --from=builder /install/gdal /usr/lib/x86_64-linux-gnu
 COPY --from=builder /usr/bin/gdal* /usr/bin
 COPY --from=builder /usr/lib/libgdal* /usr/lib
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo /usr/lib/python3/dist-packages/osgeo
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo_utils /usr/lib/python3/dist-packages/osgeo_utils
-COPY --from=builder /usr/lib/x86_64-linux-gnu/libodbc* /usr/lib/x86_64-linux-gnu/
-COPY --from=builder /usr/lib/x86_64-linux-gnu/libproj* /usr/lib/x86_64-linux-gnu/
-COPY --from=builder /usr/lib/x86_64-linux-gnu/libspatialite* /usr/lib/x86_64-linux-gnu/
-COPY --from=builder /usr/lib/x86_64-linux-gnu/*geotiff* /usr/lib/x86_64-linux-gnu/
 
 # install OTB from builder
 COPY --from=builder /install/otb /usr/local
-- 
GitLab


From 68cb626cdd130e09682d73e815bc686cd521f424 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Fri, 8 Dec 2023 13:44:49 +0000
Subject: [PATCH 17/34] Update Dockerfile

---
 Dockerfile | 60 +++++++++++++++++++++++++++---------------------------
 1 file changed, 30 insertions(+), 30 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 09ed58ca..2b15ba1c 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -32,7 +32,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         ninja-build \
         python3-dev \
         git\
-		pkg-config \
+        pkg-config \
         python3-numpy \
         libinsighttoolkit4-dev \
         libopenthreads-dev \
@@ -43,12 +43,12 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libfftw3-dev \
         libgsl-dev \
         libgtk-3-dev \
-		mono-runtime-common \
-		# GDAL \
+        mono-runtime-common \
+        # GDAL \
         libgdal-dev \
         python3-gdal \
-		gdal-bin \
-		&& \       
+        gdal-bin \
+        && \       
     rm -rf /var/lib/apt/lists/*
 
 # Build OTB    
@@ -151,7 +151,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         python3-lxml \
         file \
         libpython3.8 \
-		# for OTB \
+        # for OTB \
         libinsighttoolkit4.13 \
         libopenthreads21 \
         libossim1 \
@@ -161,33 +161,33 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libfftw3-3 \
         libgsl23 \
         libgslcblas0 \
-		# for sagagis \
+        # for sagagis \
         libgtk-3-0 \
         libpcre2-32-0 \
 		# for gdal \
-		libarmadillo9 \
-		libheif1 \
-		libpoppler97 \
-		libjson-c4 \
-		libfreexl1 \
-		libqhull7 \
-		libgeos-c1v5 \
-		libkmlbase1 \
-		libkmldom1 \
-		libkmlengine1 \
-		libxerces-c3.2 \
-		libnetcdf15 \
-		libhdf4-0-alt \
-		libogdi4.1 \
-		libgif7 \
-		libcharls2 \
-		libcfitsio8 \
-		libpq5 \
-		libdeflate0 \
-		libfyba0 \
-		libmysqlclient21 \
-		libltdl7 \
-		librttopo1 \
+        libarmadillo9 \
+        libheif1 \
+        libpoppler97 \
+        libjson-c4 \
+        libfreexl1 \
+        libqhull7 \
+        libgeos-c1v5 \
+        libkmlbase1 \
+        libkmldom1 \
+        libkmlengine1 \
+        libxerces-c3.2 \
+        libnetcdf15 \
+        libhdf4-0-alt \
+        libogdi4.1 \
+        libgif7 \
+        libcharls2 \
+        libcfitsio8 \
+        libpq5 \
+        libdeflate0 \
+        libfyba0 \
+        libmysqlclient21 \
+        libltdl7 \
+        librttopo1 \
         && \
     rm -rf /var/lib/apt/lists/*
 	
-- 
GitLab


From 17fcb8f7cdd92d9834a16024295ce4f2ac693cdf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Fri, 8 Dec 2023 15:40:16 +0000
Subject: [PATCH 18/34] pip install no binary

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 2b15ba1c..04b97bd9 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -223,7 +223,7 @@ COPY --from=builder /install/dans /usr/local
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	pip3 install \
     --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org \
-    --no-cache-dir \
+    --no-cache-dir --no-binary \
     numpy \
     scipy \
     shapely \
-- 
GitLab


From 07ed2cd7c0feafc96f75376905d5afd3846c2e91 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Mon, 11 Dec 2023 07:36:30 +0000
Subject: [PATCH 19/34] Update Dockerfile

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 04b97bd9..3e67cbe6 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -223,7 +223,7 @@ COPY --from=builder /install/dans /usr/local
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	pip3 install \
     --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org \
-    --no-cache-dir --no-binary \
+    --no-cache-dir --no-binary :all: \
     numpy \
     scipy \
     shapely \
-- 
GitLab


From 565fe8c309ecc254bc42f0b60fbe11e53cac1c03 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Mon, 11 Dec 2023 07:40:49 +0000
Subject: [PATCH 20/34] Only rasterio with no-binary

---
 Dockerfile | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 3e67cbe6..9bcdbe88 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -223,11 +223,15 @@ COPY --from=builder /install/dans /usr/local
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	pip3 install \
     --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org \
-    --no-cache-dir --no-binary :all: \
+    --no-cache-dir \
     numpy \
     scipy \
     shapely \
     pyproj \
     fiona \
     pyyaml \
+    && \
+    pip3 install \
+    --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org \
+    --no-cache-dir --no-binary :all: \
     rasterio
-- 
GitLab


From 418fde6c8642b8f539beb561232e6fa446c5fdbb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Mon, 11 Dec 2023 09:08:47 +0000
Subject: [PATCH 21/34] Update Dockerfile

---
 Dockerfile | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 9bcdbe88..4fd05d4e 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -133,6 +133,8 @@ RUN mkdir -p /install/gdal/ && \
     cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/gdal/ && \
     cp /usr/lib/x86_64-linux-gnu/*geotiff* /install/gdal/
 
+RUN find /usr/ -name "*proj*"
+
 ##############################
 #ARG REGISTRY_URL
 FROM ${REGISTRY_URL}ubuntu:20.04
@@ -197,6 +199,7 @@ COPY --from=builder /usr/bin/gdal* /usr/bin
 COPY --from=builder /usr/lib/libgdal* /usr/lib
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo /usr/lib/python3/dist-packages/osgeo
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo_utils /usr/lib/python3/dist-packages/osgeo_utils
+COPY --from=builder /usr/share/proj /usr/share/proj
 
 # install OTB from builder
 COPY --from=builder /install/otb /usr/local
@@ -230,8 +233,4 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     pyproj \
     fiona \
     pyyaml \
-    && \
-    pip3 install \
-    --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org \
-    --no-cache-dir --no-binary :all: \
     rasterio
-- 
GitLab


From 39205999f1cc57fb75a64664bb20cdf4b848217d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Mon, 11 Dec 2023 11:00:40 +0000
Subject: [PATCH 22/34] Add ogr2ogr

---
 Dockerfile | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 4fd05d4e..0ac7cb90 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,7 +2,7 @@ ARG REGISTRY_URL
 FROM ${REGISTRY_URL}ubuntu:20.04 as builder
 
 # Reference : https://github.com/opencontainers/image-spec/blob/main/annotations.md
-LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr celine.raille@thalesgroup.com"
+LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu celine.raille@thalesgroup.com"
 LABEL org.opencontainers.image.description="LIS Build container"
 
 # Mount the temporary volume and use the CNES site for apt
@@ -133,13 +133,11 @@ RUN mkdir -p /install/gdal/ && \
     cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/gdal/ && \
     cp /usr/lib/x86_64-linux-gnu/*geotiff* /install/gdal/
 
-RUN find /usr/ -name "*proj*"
-
 ##############################
 #ARG REGISTRY_URL
 FROM ${REGISTRY_URL}ubuntu:20.04
 
-LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr celine.raille@thalesgroup.com"
+LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu celine.raille@thalesgroup.com"
 LABEL org.opencontainers.image.description="LIS + OTB 7.4 Container"
 
 # system packages
@@ -196,6 +194,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 # install GDAL from builder
 COPY --from=builder /install/gdal /usr/lib/x86_64-linux-gnu
 COPY --from=builder /usr/bin/gdal* /usr/bin
+COPY --from=builder /usr/bin/ogr2ogr /usr/bin
 COPY --from=builder /usr/lib/libgdal* /usr/lib
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo /usr/lib/python3/dist-packages/osgeo
 COPY --from=builder /usr/lib/python3/dist-packages/osgeo_utils /usr/lib/python3/dist-packages/osgeo_utils
-- 
GitLab


From f7110cd4328105f71d48bb57a7dcb1ceb42386dd Mon Sep 17 00:00:00 2001
From: raillece <celine.Raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 11:45:29 +0100
Subject: [PATCH 23/34] optimization Dockerfile

---
 Dockerfile          | 67 ++++++++++++---------------------------------
 docker/packages.txt | 39 ++++++++++++++++++++++++++
 2 files changed, 57 insertions(+), 49 deletions(-)
 create mode 100644 docker/packages.txt

diff --git a/Dockerfile b/Dockerfile
index 0ac7cb90..6d24debc 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -43,7 +43,6 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libfftw3-dev \
         libgsl-dev \
         libgtk-3-dev \
-        mono-runtime-common \
         # GDAL \
         libgdal-dev \
         python3-gdal \
@@ -127,11 +126,19 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
     rm -rf /root/dans-build /tmp/dans.zip
 
 # Keep GDAL dependencies from ppa
-RUN mkdir -p /install/gdal/ && \
-    cp /usr/lib/x86_64-linux-gnu/libodbc* /install/gdal/ && \
-    cp /usr/lib/x86_64-linux-gnu/libproj* /install/gdal/ && \
-    cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/gdal/ && \
-    cp /usr/lib/x86_64-linux-gnu/*geotiff* /install/gdal/
+RUN mkdir -p /install/gdal/bin/ && \
+	cp /usr/bin/gdal* /install/gdal/bin/ && \
+	cp /usr/bin/ogr2ogr /install/gdal/bin/ && \
+	mkdir -p /install/gdal/lib/x86_64-linux-gnu/ && \
+	cp /usr/lib/libgdal* /install/gdal/lib/ && \
+	cp /usr/lib/x86_64-linux-gnu/libodbc* /install/gdal/lib/x86_64-linux-gnu/ && \
+    cp /usr/lib/x86_64-linux-gnu/libproj* /install/gdal/lib/x86_64-linux-gnu/ && \
+    cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/lib/gdal/x86_64-linux-gnu/ && \
+    cp /usr/lib/x86_64-linux-gnu/*geotiff* /install/gdal/lib/x86_64-linux-gnu/ && \
+	mkdir -p /install/gdal/lib/python3/dist-packages/ && \	
+	cp -r /usr/lib/python3/dist-packages/osgeo /install/gdal/lib/python3/dist-packages/ && \
+	cp -r /usr/lib/python3/dist-packages/osgeo_utils /install/gdal/lib/python3/dist-packages/ && \
+    
 
 ##############################
 #ARG REGISTRY_URL
@@ -141,6 +148,7 @@ LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart
 LABEL org.opencontainers.image.description="LIS + OTB 7.4 Container"
 
 # system packages
+COPY ./docker/packages.txt /tmp/packages.txt
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	apt-get update -y --quiet && \
 	apt-get -y upgrade --quiet && \
@@ -151,53 +159,14 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         python3-lxml \
         file \
         libpython3.8 \
-        # for OTB \
-        libinsighttoolkit4.13 \
-        libopenthreads21 \
-        libossim1 \
-        libtinyxml2.6.2v5 \
-        libmuparser2v5 \
-        libmuparserx4.0.7 \
-        libfftw3-3 \
-        libgsl23 \
-        libgslcblas0 \
-        # for sagagis \
-        libgtk-3-0 \
-        libpcre2-32-0 \
-		# for gdal \
-        libarmadillo9 \
-        libheif1 \
-        libpoppler97 \
-        libjson-c4 \
-        libfreexl1 \
-        libqhull7 \
-        libgeos-c1v5 \
-        libkmlbase1 \
-        libkmldom1 \
-        libkmlengine1 \
-        libxerces-c3.2 \
-        libnetcdf15 \
-        libhdf4-0-alt \
-        libogdi4.1 \
-        libgif7 \
-        libcharls2 \
-        libcfitsio8 \
-        libpq5 \
-        libdeflate0 \
-        libfyba0 \
-        libmysqlclient21 \
-        libltdl7 \
-        librttopo1 \
         && \
+	# dependency packages
+	grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends && \
     rm -rf /var/lib/apt/lists/*
 	
 # install GDAL from builder
-COPY --from=builder /install/gdal /usr/lib/x86_64-linux-gnu
-COPY --from=builder /usr/bin/gdal* /usr/bin
-COPY --from=builder /usr/bin/ogr2ogr /usr/bin
-COPY --from=builder /usr/lib/libgdal* /usr/lib
-COPY --from=builder /usr/lib/python3/dist-packages/osgeo /usr/lib/python3/dist-packages/osgeo
-COPY --from=builder /usr/lib/python3/dist-packages/osgeo_utils /usr/lib/python3/dist-packages/osgeo_utils
+COPY --from=builder /install/gdal/bin /usr/bin
+COPY --from=builder /install/gdal/lib /usr/lib
 COPY --from=builder /usr/share/proj /usr/share/proj
 
 # install OTB from builder
diff --git a/docker/packages.txt b/docker/packages.txt
new file mode 100644
index 00000000..f798b2a7
--- /dev/null
+++ b/docker/packages.txt
@@ -0,0 +1,39 @@
+# for OTB
+libinsighttoolkit4.13
+libopenthreads21
+libossim1
+libtinyxml2.6.2v5
+libmuparser2v5
+libmuparserx4.0.7
+libfftw3-3
+libgsl23
+libgslcblas0
+
+# for sagagis
+libgtk-3-0
+libpcre2-32-0
+
+# for gdal
+libarmadillo9
+libheif1
+libpoppler97
+libjson-c4
+libfreexl1
+libqhull7
+libgeos-c1v5
+libkmlbase1
+libkmldom1
+libkmlengine1
+libxerces-c3.2
+libnetcdf15
+libhdf4-0-alt
+libogdi4.1
+libgif7
+libcharls2
+libcfitsio8
+libpq5
+libdeflate0
+libfyba0
+libmysqlclient21
+libltdl7
+librttopo1
\ No newline at end of file
-- 
GitLab
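
Note: the grep -e '^[^#]' filter keeps only lines whose first character
exists and is not '#', so comments and blank lines in packages.txt never
reach apt-get. A hypothetical Python equivalent of that filter:

    from pathlib import Path

    # Keep non-empty lines that do not start with '#'.
    packages = [line for line in Path("packages.txt").read_text().splitlines()
                if line and not line.startswith("#")]
    print(" ".join(packages))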


From cd0d8d08e12d5ae4e2284d442ec96f62e0e977c3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 10:54:02 +0000
Subject: [PATCH 24/34] Update Dockerfile

---
 Dockerfile | 33 ++++++++++++++++-----------------
 1 file changed, 16 insertions(+), 17 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 6d24debc..75ab4af3 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -52,10 +52,10 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 
 # Build OTB    
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-	ln -s /usr/lib/cli/vtkgdcm-sharp-3.0/libvtkgdcmsharpglue.so /usr/lib/x86_64-linux-gnu/libvtkgdcmsharpglue.so && \
-	mkdir -p /usr/lib/python/dist-packages && \
-	ln -s /usr/lib/python3/dist-packages/vtkgdcmPython.cpython-38-x86_64-linux-gnu.so /usr/lib/python/dist-packages/vtkgdcmPython.so && \
-	mkdir -p /root/otb-build/build && \
+    ln -s /usr/lib/cli/vtkgdcm-sharp-3.0/libvtkgdcmsharpglue.so /usr/lib/x86_64-linux-gnu/libvtkgdcmsharpglue.so && \
+    mkdir -p /usr/lib/python/dist-packages && \
+    ln -s /usr/lib/python3/dist-packages/vtkgdcmPython.cpython-38-x86_64-linux-gnu.so /usr/lib/python/dist-packages/vtkgdcmPython.so && \
+    mkdir -p /root/otb-build/build && \
     cd /root/otb-build && \
     wget -q --ca-certificate=/usr/local/share/ca-certificates/ca-bundle.crt https://www.orfeo-toolbox.org/packages/archives/OTB/OTB-7.4.1.zip -O /tmp/OTB.zip && \
     unzip /tmp/OTB.zip && \
@@ -127,18 +127,17 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 
 # Keep GDAL dependencies from ppa
 RUN mkdir -p /install/gdal/bin/ && \
-	cp /usr/bin/gdal* /install/gdal/bin/ && \
-	cp /usr/bin/ogr2ogr /install/gdal/bin/ && \
-	mkdir -p /install/gdal/lib/x86_64-linux-gnu/ && \
-	cp /usr/lib/libgdal* /install/gdal/lib/ && \
-	cp /usr/lib/x86_64-linux-gnu/libodbc* /install/gdal/lib/x86_64-linux-gnu/ && \
+    cp /usr/bin/gdal* /install/gdal/bin/ && \
+    cp /usr/bin/ogr2ogr /install/gdal/bin/ && \
+    mkdir -p /install/gdal/lib/x86_64-linux-gnu/ && \
+    cp /usr/lib/libgdal* /install/gdal/lib/ && \
+    cp /usr/lib/x86_64-linux-gnu/libodbc* /install/gdal/lib/x86_64-linux-gnu/ && \
     cp /usr/lib/x86_64-linux-gnu/libproj* /install/gdal/lib/x86_64-linux-gnu/ && \
     cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/lib/gdal/x86_64-linux-gnu/ && \
     cp /usr/lib/x86_64-linux-gnu/*geotiff* /install/gdal/lib/x86_64-linux-gnu/ && \
-	mkdir -p /install/gdal/lib/python3/dist-packages/ && \	
-	cp -r /usr/lib/python3/dist-packages/osgeo /install/gdal/lib/python3/dist-packages/ && \
-	cp -r /usr/lib/python3/dist-packages/osgeo_utils /install/gdal/lib/python3/dist-packages/ && \
-    
+    mkdir -p /install/gdal/lib/python3/dist-packages/ && \
+    cp -r /usr/lib/python3/dist-packages/osgeo /install/gdal/lib/python3/dist-packages/ && \
+    cp -r /usr/lib/python3/dist-packages/osgeo_utils /install/gdal/lib/python3/dist-packages/
 
 ##############################
 #ARG REGISTRY_URL
@@ -150,8 +149,8 @@ LABEL org.opencontainers.image.description="LIS + OTB 7.4 Container"
 # system packages
 COPY ./docker/packages.txt /tmp/packages.txt
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-	apt-get update -y --quiet && \
-	apt-get -y upgrade --quiet && \
+    apt-get update -y --quiet && \
+    apt-get -y upgrade --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         python3 \
         python-is-python3 \
@@ -160,8 +159,8 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         file \
         libpython3.8 \
         && \
-	# dependency packages
-	grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends && \
+    # dependency packages
+    grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends && \
     rm -rf /var/lib/apt/lists/*
 	
 # install GDAL from builder
-- 
GitLab


From 269e65e8666be86482e7d5bc7c09c0eac9422645 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 12:02:51 +0000
Subject: [PATCH 25/34] Update Dockerfile

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 75ab4af3..b81db37e 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -133,7 +133,7 @@ RUN mkdir -p /install/gdal/bin/ && \
     cp /usr/lib/libgdal* /install/gdal/lib/ && \
     cp /usr/lib/x86_64-linux-gnu/libodbc* /install/gdal/lib/x86_64-linux-gnu/ && \
     cp /usr/lib/x86_64-linux-gnu/libproj* /install/gdal/lib/x86_64-linux-gnu/ && \
-    cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/lib/gdal/x86_64-linux-gnu/ && \
+    cp /usr/lib/x86_64-linux-gnu/libspatialite* /install/gdal/lib/x86_64-linux-gnu/ && \
     cp /usr/lib/x86_64-linux-gnu/*geotiff* /install/gdal/lib/x86_64-linux-gnu/ && \
     mkdir -p /install/gdal/lib/python3/dist-packages/ && \
     cp -r /usr/lib/python3/dist-packages/osgeo /install/gdal/lib/python3/dist-packages/ && \
-- 
GitLab


From 94d7ff84b6ef2266757d9f7e0f83f648b84027ca Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 13:05:43 +0000
Subject: [PATCH 26/34] Test apt-get install packages from file

---
 Dockerfile | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/Dockerfile b/Dockerfile
index b81db37e..edf57e07 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,6 +14,13 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 COPY cert[s]/* /usr/local/share/ca-certificates/
 RUN update-ca-certificates
 
+COPY ./docker/packages.txt /tmp/packages.txt
+RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
+    apt-get update -y --quiet && \
+    apt-get upgrade -y --quiet && \
+    cat /tmp/packages.txt && \
+    DEBIAN_FRONTEND=noninteractive grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
+
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	apt-get update -y --quiet && \
-- 
GitLab


From 3cd0f91c873a2f94034978f37e1fced715a480d1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 13:18:07 +0000
Subject: [PATCH 27/34] Update Dockerfile

---
 Dockerfile | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index edf57e07..b1143f06 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,9 +17,9 @@ RUN update-ca-certificates
 COPY ./docker/packages.txt /tmp/packages.txt
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
     apt-get update -y --quiet && \
-    apt-get upgrade -y --quiet && \
     cat /tmp/packages.txt && \
-    DEBIAN_FRONTEND=noninteractive grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
+    DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends libinsighttoolkit4.13 && \
+    grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
 
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-- 
GitLab


From 701c5b4804784cfbcbaabb406c8f21f160761ec4 Mon Sep 17 00:00:00 2001
From: raillece <celine.Raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 14:44:22 +0100
Subject: [PATCH 28/34] fix packages.txt file

---
 Dockerfile          |  3 +-
 docker/packages.txt | 78 ++++++++++++++++++++++-----------------------
 2 files changed, 40 insertions(+), 41 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index b1143f06..ef8fa7a5 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -18,8 +18,7 @@ COPY ./docker/packages.txt /tmp/packages.txt
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
     apt-get update -y --quiet && \
     cat /tmp/packages.txt && \
-    DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends libinsighttoolkit4.13 && \
-    grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
+    grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends
 
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
diff --git a/docker/packages.txt b/docker/packages.txt
index f798b2a7..a4345748 100644
--- a/docker/packages.txt
+++ b/docker/packages.txt
@@ -1,39 +1,39 @@
-# for OTB
-libinsighttoolkit4.13
-libopenthreads21
-libossim1
-libtinyxml2.6.2v5
-libmuparser2v5
-libmuparserx4.0.7
-libfftw3-3
-libgsl23
-libgslcblas0
-
-# for sagagis
-libgtk-3-0
-libpcre2-32-0
-
-# for gdal
-libarmadillo9
-libheif1
-libpoppler97
-libjson-c4
-libfreexl1
-libqhull7
-libgeos-c1v5
-libkmlbase1
-libkmldom1
-libkmlengine1
-libxerces-c3.2
-libnetcdf15
-libhdf4-0-alt
-libogdi4.1
-libgif7
-libcharls2
-libcfitsio8
-libpq5
-libdeflate0
-libfyba0
-libmysqlclient21
-libltdl7
-librttopo1
\ No newline at end of file
+# for OTB
+libinsighttoolkit4.13
+libopenthreads21
+libossim1
+libtinyxml2.6.2v5
+libmuparser2v5
+libmuparserx4.0.7
+libfftw3-3
+libgsl23
+libgslcblas0
+
+# for sagagis
+libgtk-3-0
+libpcre2-32-0
+
+# for gdal
+libarmadillo9
+libheif1
+libpoppler97
+libjson-c4
+libfreexl1
+libqhull7
+libgeos-c1v5
+libkmlbase1
+libkmldom1
+libkmlengine1
+libxerces-c3.2
+libnetcdf15
+libhdf4-0-alt
+libogdi4.1
+libgif7
+libcharls2
+libcfitsio8
+libpq5
+libdeflate0
+libfyba0
+libmysqlclient21
+libltdl7
+librttopo1
-- 
GitLab


From 4e19fa4c50430389d476ccbe3cd1970a49945a04 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 13:49:38 +0000
Subject: [PATCH 29/34] Update Dockerfile

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index ef8fa7a5..ae6310bd 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -18,7 +18,7 @@ COPY ./docker/packages.txt /tmp/packages.txt
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
     apt-get update -y --quiet && \
     cat /tmp/packages.txt && \
-    grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends
+    grep -e '^[^#]' /tmp/packages.txt | DEBIAN_FRONTEND=noninteractive xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
 
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-- 
GitLab
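
Note: the position of DEBIAN_FRONTEND is the whole fix here. In the
earlier form, xargs -d '\n' DEBIAN_FRONTEND=noninteractive apt-get ...,
xargs takes the assignment as the command name and fails, because
VAR=value prefixes are shell syntax that xargs does not interpret.
Placed before xargs, the assignment is made by the shell and apt-get
inherits the variable from its parent. A hypothetical Python analogue
of the working form:

    import os
    import subprocess

    packages = ["libfftw3-3", "libgsl23"]  # stand-ins for packages.txt
    # Pass the variable through the child's environment, mirroring
    # DEBIAN_FRONTEND=noninteractive xargs -d '\n' apt-get install ...
    env = dict(os.environ, DEBIAN_FRONTEND="noninteractive")
    subprocess.run(["apt-get", "install", "--quiet", "--yes",
                    "--no-install-recommends", *packages],
                   env=env, check=True)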


From 10cf2813822cef81eb7bc05847b7302b29c631f4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 13:57:57 +0000
Subject: [PATCH 30/34] Update Dockerfile

---
 Dockerfile | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index ae6310bd..fc80dc4a 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,12 +14,6 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
 COPY cert[s]/* /usr/local/share/ca-certificates/
 RUN update-ca-certificates
 
-COPY ./docker/packages.txt /tmp/packages.txt
-RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
-    apt-get update -y --quiet && \
-    cat /tmp/packages.txt && \
-    grep -e '^[^#]' /tmp/packages.txt | DEBIAN_FRONTEND=noninteractive xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
-
 # Install required packages
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
 	apt-get update -y --quiet && \
@@ -156,7 +150,7 @@ LABEL org.opencontainers.image.description="LIS + OTB 7.4 Container"
 COPY ./docker/packages.txt /tmp/packages.txt
 RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /kaniko/run/secrets/http_proxy); export https_proxy=$(cat /kaniko/run/secrets/https_proxy); fi && \
     apt-get update -y --quiet && \
-    apt-get -y upgrade --quiet && \
+    apt-get upgrade -y --quiet && \
     DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
         python3 \
         python-is-python3 \
@@ -166,7 +160,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libpython3.8 \
         && \
     # dependencies packages
-    grep -e '^[^#]' /tmp/packages.txt | xargs -d '\n' DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends && \
+    grep -e '^[^#]' /tmp/packages.txt | DEBIAN_FRONTEND=noninteractive xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
     rm -rf /var/lib/apt/lists/*
 	
 # install GDAL from builder
-- 
GitLab
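
The patch above drops the packages.txt install from the builder stage, where it duplicated the runtime-stage install kept further down (around line 150), and normalizes the argument order to `apt-get upgrade -y --quiet`. Note that the rewritten install line loses its trailing `&& \` continuation before the `rm -rf /var/lib/apt/lists/*` cleanup; the next patch restores it. The point of chaining these commands is the usual apt layer hygiene: update, install, and cache cleanup must share one RUN instruction so the package lists never persist in a committed layer. A minimal sketch of that pattern, with placeholder package names:

    RUN apt-get update -y --quiet && \
        DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
            some-package \
            another-package \
            && \
        rm -rf /var/lib/apt/lists/*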


From 27dd9efbc5f6e7869f59a7e23bb42350ba583731 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 14:05:07 +0000
Subject: [PATCH 31/34] Update Dockerfile

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index fc80dc4a..946cb9ee 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -160,7 +160,7 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libpython3.8 \
         && \
     # dependencies packages
-    grep -e '^[^#]' /tmp/packages.txt | DEBIAN_FRONTEND=noninteractive xargs -d '\n' apt-get install --quiet --yes --no-install-recommends
+    grep -e '^[^#]' /tmp/packages.txt | DEBIAN_FRONTEND=noninteractive xargs -d '\n' apt-get install --quiet --yes --no-install-recommends && \
     rm -rf /var/lib/apt/lists/*
 	
 # install GDAL from builder
-- 
GitLab
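
The missing continuation restored above matters because a Dockerfile RUN ends at the first line that does not continue with a backslash: without `&& \`, the indented `rm -rf /var/lib/apt/lists/*` is read as a new instruction, and the build fails because `rm` is not a Dockerfile keyword. A minimal before/after sketch, with a placeholder package name:

    # Broken: RUN ends after the install; the next line is parsed as an unknown "rm" instruction
    RUN apt-get install --yes some-package
        rm -rf /var/lib/apt/lists/*

    # Fixed: the continuation keeps the cleanup inside the same RUN, and the same layer
    RUN apt-get install --yes some-package && \
        rm -rf /var/lib/apt/lists/*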


From 861e536d4514ef9378d712aeaac99c34999e6f0c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 14:34:39 +0000
Subject: [PATCH 32/34] Add GDCM

---
 Dockerfile | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 946cb9ee..d9e7a52a 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -43,11 +43,19 @@ RUN if [ -f "/kaniko/run/secrets/http_proxy" ]; then export http_proxy=$(cat /ka
         libfftw3-dev \
         libgsl-dev \
         libgtk-3-dev \
+        mono-runtime-common \
         # GDAL \
         libgdal-dev \
         python3-gdal \
         gdal-bin \
-        && \       
+        && \
+    DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
+        # optional packages for OTB compilation
+        python3-vtkgdcm \
+        libvtkgdcm-cil \
+        libvtkgdcm-java \
+        libgdcm-tools \
+        && \    
     rm -rf /var/lib/apt/lists/*
 
 # Build OTB    
-- 
GitLab


From d22065545eb631bcf3db64d257fe2228fb345e85 Mon Sep 17 00:00:00 2001
From: raillece <celine.Raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 15:48:02 +0100
Subject: [PATCH 33/34] Update CI condition variable in tests

---
 test/CMakeLists.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index f9cbfda7..d1b5b497 100755
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -426,7 +426,7 @@ add_test(NAME snow_synthesis_compare_smod_test
   )
 set_tests_properties(snow_synthesis_compare_smod_test PROPERTIES DEPENDS snow_synthesis_test)
 
-if(NOT CI_PIPELINE_SOURCE "web")
+if(NOT GITLAB_CI_BUILD "true")
   add_test(NAME snow_synthesis_muscate_test
     COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/let_it_snow_synthesis.py
     -t T31TDJ
@@ -455,7 +455,7 @@ add_test(NAME snow_synthesis_without_densification_test
   )
 
 
-if(NOT CI_PIPELINE_SOURCE "web")
+if(NOT GITLAB_CI_BUILD "true")
   # ----------------------------------
   # Synthesis from 1.7 snow product
   # ----------------------------------
-- 
GitLab
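
This patch and the next one together repair the guard around the CI-only synthesis tests: the original `if(NOT CI_PIPELINE_SOURCE "web")` was never valid CMake, since `if()` needs a comparison operator between two values, and the intermediate `if(NOT GITLAB_CI_BUILD "true")` introduced here has the same flaw until PATCH 34 adds MATCHES. A minimal sketch of the resulting guard, with a placeholder test name:

    # Register the test only when not running inside the GitLab CI build
    if(NOT GITLAB_CI_BUILD MATCHES "true")
      add_test(NAME local_only_test
        COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/let_it_snow_synthesis.py)
    endif()

STREQUAL would be the stricter choice here; MATCHES treats "true" as a regular expression, which is harmless for this value but would also match substrings such as "untrue".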


From f1153f58da78fd566d9df393d061316d59ec50d4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9line=20Raill=C3=A9?= <celine.raille@thalesgroup.com>
Date: Tue, 12 Dec 2023 15:05:37 +0000
Subject: [PATCH 34/34] Update CMakeLists.txt

---
 test/CMakeLists.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index d1b5b497..b32c6529 100755
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -426,7 +426,7 @@ add_test(NAME snow_synthesis_compare_smod_test
   )
 set_tests_properties(snow_synthesis_compare_smod_test PROPERTIES DEPENDS snow_synthesis_test)
 
-if(NOT GITLAB_CI_BUILD "true")
+if(NOT GITLAB_CI_BUILD MATCHES "true")
   add_test(NAME snow_synthesis_muscate_test
     COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_BINARY_DIR}/app/let_it_snow_synthesis.py
     -t T31TDJ
@@ -455,7 +455,7 @@ add_test(NAME snow_synthesis_without_densification_test
   )
 
 
-if(NOT GITLAB_CI_BUILD "true")
+if(NOT GITLAB_CI_BUILD MATCHES "true")
   # ----------------------------------
   # Synthesis from 1.7 snow product
   # ----------------------------------
-- 
GitLab