Commit 18e89f9c authored by Germain Salgues

Merge branch 'release/1.4'

parents 9c7ec72d 17870bc4
# Change Log
All notable changes to LIS will be documented in this file.
## [Unreleased]
## [1.4] - 2018-02-14
### Added
- Experimental pass1_5 function implementing the removal of snow areas inside the initial cloud mask (doughnuts)
- Experimental new application to run and evaluate an annual snow map computation from a time series of S2 and/or L8 snow products
- Added fclear_lim parameter, the minimum fraction of clear pixels in an elevation band
(default value 0.1), used to compute the snow line.
- Added option to disable vector generation
- Added options to use and manage gdal_trace_outline instead of gdal_polygonize
### Changed
- Changed default value for parameter red_darkcloud to 300 to reduce cloud sensitivity
- Changed all_cloud_mask.tif: it now includes thin clouds (in accordance with the ATBD)
- Fixed method compute_percent when the image is empty or filled with nodata
- Fixed the zs condition to properly trigger pass2 (in accordance with the ATBD)
- Changed the zs computation in pass2 to consider the full image footprint (including nodata pixels)
- Changed: the cloud mask refinement is no longer applied during pass1, to improve snow line accuracy during pass2
- Updated build_json.py to handle boolean parameters
- Updated build_json.py to handle new lis input parameters
## [1.3.1] - 2017-11-23
### Hotfix
- Fix the intermediate data format (used 1 bit instead of type uint8)
## [1.3] - 2017-11-02
### Added
- Use gdal_trace_outline from the gina-alaska package instead of gdal_polygonize if available
......
......@@ -40,9 +40,6 @@ find_package(PythonInterp REQUIRED)
find_package( PythonLibs 2.7 REQUIRED)
include_directories( ${PYTHON_INCLUDE_DIRS} )
find_package( Boost COMPONENTS python REQUIRED )
include_directories( ${Boost_INCLUDE_DIR} )
# Link to the Orfeo ToolBox
# LIS requires OTB 6.0, which provides a patch regarding the management of 1-byte TIFF images
SET(OTB_MIN_VERSION "6.0.0")
......
# Let-it-snow
## Synopsis
This code is a Python/OTB version of the snow cover extent detection algorithm for Sentinel-2 and Landsat-8 data.
This code implements the snow cover extent detection algorithm LIS (Let It Snow) for Sentinel-2, Landsat-8 and SPOT4-Take5 data.
The algorithm documentation with examples is available here:
......@@ -53,11 +53,6 @@ pixel_value & 00000101
* 100: Snow
* 205: Cloud including cloud shadow
* 254: No data
* Type field:
* no-snow
* snow
* cloud
* no-data
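As an illustration (this snippet is not part of the LIS distribution, and the file name is a placeholder), the snow and cloud percentages can be derived from a *SEB.TIF raster with the numpy and GDAL Python packages listed in the dependencies below:

```python
import numpy as np
from osgeo import gdal

# Hypothetical LIS snow/cloud mask (*SEB.TIF); pixel values follow the list above.
seb = gdal.Open("LIS_SEB.TIF").ReadAsArray()

valid = np.count_nonzero(seb != 254)   # 254: no data
snow = np.count_nonzero(seb == 100)    # 100: snow
cloud = np.count_nonzero(seb == 205)   # 205: cloud including cloud shadow
print("snow fraction over valid pixels: {}".format(float(snow) / valid))
print("cloud fraction over valid pixels: {}".format(float(cloud) / valid))
```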
## Data set example
......@@ -69,23 +64,35 @@ Code to generate the snow cover extent product on Theia platform.
## Installation
The LIS processing chain uses CMake (http://www.cmake.org) for building from source.
### Dependencies
lis dependencies:
The following is a summary of the required dependencies:
GDAL >=2.0
OTB >= 6.0
Boost-Python
Python interpreter >= 2.7
Python libs >= 2.7
Python packages:
numpy
lxml
matplotlib
* GDAL >=2.0
* OTB >= 6.2
* Python interpreter >= 2.7
* Python libs >= 2.7
* Python packages:
* numpy
* lxml
* matplotlib
GDAL itself depends on a number of other libraries provided by most major operating systems, and also depends on the non-standard GEOS and PROJ4 libraries. The GDAL Python bindings are also required.
Python package dependencies: sys, subprocess, glob, os, json, gdal
Python package dependencies:
* sys
* subprocess
* glob
* os
* json
* gdal
Optional dependencies:
* gdal_trace_outline can be used as an alternative to gdal_polygonize.py to generate the vector layer. It requires installing the [dans-gdal-scripts utilities](https://github.com/gina-alaska/dans-gdal-scripts).
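For reference, gdal_polygonize.py is essentially a thin wrapper around GDAL's Polygonize API; a minimal sketch with the GDAL/OGR Python bindings is shown below (file and layer names are placeholders, and this is not the exact LIS implementation):

```python
from osgeo import gdal, ogr, osr

src = gdal.Open("LIS_SEB.TIF")
band = src.GetRasterBand(1)

# Create the output shapefile with a single integer field "DN" holding the raster value.
drv = ogr.GetDriverByName("ESRI Shapefile")
dst = drv.CreateDataSource("LIS_SEB_VEC.shp")
srs = osr.SpatialReference(wkt=src.GetProjection())
layer = dst.CreateLayer("seb", srs=srs)
layer.CreateField(ogr.FieldDefn("DN", ogr.OFTInteger))

# Vectorize the raster classes (snow/cloud/no data) into polygons.
gdal.Polygonize(band, None, layer, 0)
dst = None  # flush to disk
```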
### Installing from the source distribution
......@@ -147,7 +154,7 @@ Do not modify these folders.
## Contributors
Manuel Grizonnet (CNES), Simon Gascoin (CNRS/CESBIO), Tristan Klempka (CNES)
Manuel Grizonnet (CNES), Simon Gascoin (CNRS/CESBIO), Tristan Klempka (CNES), Germain Salgues (Magellium)
## License
......
......@@ -11,10 +11,15 @@ conf_template = {"general":{"pout":"",
"nodata":-10000,
"ram":1024,
"nb_threads":1,
"generate_vector":False,
"preprocessing":False,
"log":True,
"multi":1},
"multi":1,
"target_resolution":-1},
"vector":{"generate_vector":True,
"generate_intermediate_vectors":False,
"use_gdal_trace_outline":True,
"gdal_trace_outline_dp_toler":0,
"gdal_trace_outline_min_area":0},
"inputs":{"green_band":{"path": "",
"noBand": 1},
"red_band":{"path": "",
......@@ -29,14 +34,20 @@ conf_template = {"general":{"pout":"",
"ndsi_pass2":0.15,
"red_pass2":40,
"fsnow_lim":0.1,
"fclear_lim":0.1,
"fsnow_total_lim":0.001},
"cloud":{"shadow_in_mask":64,
"shadow_out_mask":128,
"all_cloud_mask":1,
"high_cloud_mask":32,
"rf":12,
"red_darkcloud":500,
"red_backtocloud":100}}
"red_darkcloud":300,
"red_backtocloud":100,
"strict_cloud_mask":False,
"rm_snow_inside_cloud":False,
"rm_snow_inside_cloud_dilation_radius":1,
"rm_snow_inside_cloud_threshold":0.85}}
### Mission Specific Parameters ###
S2_parameters = {"multi":10,
......@@ -88,6 +99,14 @@ mission_parameters = {"S2":S2_parameters,\
"LANDSAT8":L8_parameters,\
"Take5":Take5_parameters}
def str2bool(v):
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        raise argparse.ArgumentTypeError('Boolean value expected.')
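# Usage example (illustrative): str2bool is passed as the argparse "type"
# callable so that boolean options accept the textual values listed above, e.g.
#
#   parser.add_argument("-generate_vector", type=str2bool, help="true/false")
#   # "-generate_vector yes" then yields args.generate_vector == True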
def findFiles(folder, pattern):
""" Search recursively into a folder to find a patern match
"""
......@@ -119,7 +138,7 @@ def read_product(inputPath, mission):
if result:
conf_json["inputs"]["dem"] = result[0]
else:
logging.warning("No DEM found!")
logging.warning("No DEM found within product!")
conf_json["cloud"]["shadow_in_mask"] = params["shadow_in_mask"]
conf_json["cloud"]["shadow_out_mask"] = params["shadow_out_mask"]
......@@ -147,14 +166,16 @@ def main():
group_general.add_argument("-nodata", type=int)
group_general.add_argument("-ram", type=int)
group_general.add_argument("-nb_threads", type=int)
#group_general.add_argument("-generate_vector", type=bool)
#group_general.add_argument("-preprocessing", type=bool)
#group_general.add_argument("-log", type=bool)
group_general.add_argument("-generate_vector", type=str2bool, help="true/false")
group_general.add_argument("-preprocessing", type=str2bool, help="true/false")
group_general.add_argument("-log", type=str2bool, help="true/false")
group_general.add_argument("-multi", type=float)
group_general.add_argument("-target_resolution", type=float)
group_snow = parser.add_argument_group('inputs', 'input files')
group_general.add_argument("-dem", help="dem file path, to use for processing the input product")
group_inputs = parser.add_argument_group('inputs', 'input files')
group_inputs.add_argument("-dem", help="dem file path, to use for processing the input product")
group_inputs.add_argument("-cloud_mask", help="cloud mask file path")
group_snow = parser.add_argument_group('snow', 'snow parameters')
group_snow.add_argument("-dz", type=int)
......@@ -173,6 +194,7 @@ def main():
group_cloud.add_argument("-rf", type=int)
group_cloud.add_argument("-red_darkcloud", type=int)
group_cloud.add_argument("-red_backtocloud", type=int)
group_cloud.add_argument("-strict_cloud_mask", type=str2bool, help="true/false")
args = parser.parse_args()
......@@ -194,21 +216,33 @@ def main():
jsonData["general"]["pout"] = outputPath
# Overide parameters for group general
# Override parameters for group general
if args.nodata:
jsonData["general"]["nodata"] = args.nodata
if args.preprocessing is not None:
jsonData["general"]["preprocessing"] = args.preprocessing
if args.generate_vector is not None:
jsonData["vector"]["generate_vector"] = args.generate_vector
if args.log is not None:
jsonData["general"]["log"] = args.log
if args.ram:
jsonData["general"]["ram"] = args.ram
if args.nb_threads:
jsonData["general"]["nb_threads"] = args.nb_threads
if args.multi:
jsonData["general"]["multi"] = args.multi
if args.target_resolution:
jsonData["general"]["target_resolution"] = args.target_resolution
# Overide dem location
# Override dem location
if args.dem:
jsonData["inputs"]["dem"] = os.path.abspath(args.dem)
logging.warning("Using optional external DEM!")
# Override cloud mask location
if args.cloud_mask:
jsonData["inputs"]["cloud_mask"] = os.path.abspath(args.cloud_mask)
# Overide parameters for group snow
# Override parameters for group snow
if args.dz:
jsonData["snow"]["dz"] = args.dz
if args.ndsi_pass1:
......@@ -224,7 +258,7 @@ def main():
if args.fsnow_total_lim:
jsonData["snow"]["fsnow_total_lim"] = args.fsnow_total_lim
# Overide parameters for group cloud
# Override parameters for group cloud
if args.shadow_in_mask:
jsonData["cloud"]["shadow_in_mask"] = args.shadow_in_mask
if args.shadow_out_mask:
......@@ -239,6 +273,12 @@ def main():
jsonData["cloud"]["red_darkcloud"] = args.red_darkcloud
if args.red_backtocloud:
jsonData["cloud"]["red_backtocloud"] = args.red_backtocloud
if args.strict_cloud_mask:
jsonData["cloud"]["strict_cloud_mask"] = args.strict_cloud_mask
if not jsonData["inputs"].get("dem"):
logging.error("No DEM found!")
return 1
jsonFile = open(os.path.join(outputPath, "param_test.json"), "w")
jsonFile.write(json.dumps(jsonData, indent=4))
......
#!/usr/bin/env python
import sys
import os.path as op
import json
import logging
from s2snow import snow_annual_map_evaluation
VERSION = "0.1.0"
def show_help():
"""Show help of the run_snow_annual_map script"""
print "This script is used to run the snow annual map " \
+ "module that compute snow coverage onto a given date range"
print "Usage: python run_snow_annual_map.py param.json"
print "python run_snow_annual_map.py version to show version"
print "python run_snow_annual_map.py help to show help"
def show_version():
print VERSION
# ----------------- MAIN ---------------------------------------------------
def main(argv):
""" main script of snow extraction procedure"""
json_file = argv[1]
# Load json_file from json files
with open(json_file) as json_data_file:
data = json.load(json_data_file)
pout = data.get("path_out")
log = data.get("log", True)
if log:
sys.stdout = open(op.join(pout, "stdout.log"), 'w')
sys.stderr = open(op.join(pout, "stderr.log"), 'w')
# Set logging level and format.
logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
logging.info("Start run_snow_annual_map.py")
logging.info("Input args = " + json_file)
# Run the snow detector
snow_annual_map_evaluation_app = snow_annual_map_evaluation.snow_annual_map_evaluation(data)
snow_annual_map_evaluation_app.run()
if data.get("run_l8_evaluation", False):
snow_annual_map_evaluation_app.run_evaluation()
if data.get("run_modis_comparison", False):
snow_annual_map_evaluation_app.compare_modis()
logging.info("End run_snow_annual_map.py")
if __name__ == "__main__":
if len(sys.argv) != 2:
show_help()
else:
if sys.argv[1] == "version":
show_version()
elif sys.argv[1] == "help":
show_help()
else:
main(sys.argv)
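# Illustrative minimal parameter file for run_snow_annual_map.py (hypothetical:
# only the keys read in this excerpt are shown; snow_annual_map_evaluation
# expects additional keys that are not visible here):
#
#   {
#       "path_out": "/tmp/annual_map_output",
#       "log": true,
#       "run_l8_evaluation": false,
#       "run_modis_comparison": false
#   }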
......@@ -6,12 +6,13 @@ import json
import logging
from s2snow import snow_detector
VERSION = "1.3.1"
VERSION = "1.4"
def show_help():
"""Show help of the run_snow_detector script"""
print "This script is used to run the snow detector module that compute snow mask using OTB applications on Spot/LandSat/Sentinel-2 products from theia platform"
print "This script is used to run the snow detector module that compute snow mask" \
+ " using OTB applications on Spot/LandSat/Sentinel-2 products from theia platform"
print "Usage: python run_snow_detector.py param.json"
print "python run_snow_detector.py version to show version"
print "python run_snow_detector.py help to show help"
......@@ -41,13 +42,15 @@ def main(argv):
sys.stderr = open(op.join(pout, "stderr.log"), 'w')
# Set logging level and format.
logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
logging.info("Start run_snow_detector.py")
logging.info("Input args = " + json_file)
# Run the snow detector
sd = snow_detector.snow_detector(data)
sd.detect_snow(2)
snow_detector_app = snow_detector.snow_detector(data)
snow_detector_app.detect_snow(2)
logging.info("End run_snow_detector.py")
if __name__ == "__main__":
if len(sys.argv) != 2:
......
* CES-Neige (follow-up) [2015-11-23 Mon.]
** S2 validation:
- first test
- retrieve image
** Landsat validation
- on 57 Landsat tiles
- qualitative
- rough first pass
** Tasks
*** TODO Comment the code and add tests
*** TODO with an IGN DEM, mask (Landsat data), Pléiades...
*** TODO show to users -> no gaps
*** TODO Next products
- composite product: 5 or 10 days; in fact near real time is of more interest
- For each date we can interpolate using the dates before/after
- Interest in monthly/quarterly composites
*** TODO Rework the shapefile
- how to store the field in the class -> ask Jordi
- document the shapefile (annotate it)
*** TODO preprocessing script retrieves the resolution ->
*** TODO Announce to PS that we can start processing images
*** TODO animated GIF
*** TODO KML or shapefile?
*** TODO style in the shapefile?
*** TODO Rename the output files
*** TODO Look at 2 tiles -> how to handle continuity? merge the shapefiles
*** TODO Install on the cluster
** Links / tasks
Hello,
For your information, IGN has just put the 5-metre DEM covering the whole of France online.
We (labs, education) have free access to it, like most IGN layers.
An account is needed to download them (I have one).
I have not tested this layer...
http://professionnels.ign.fr/rgealti
Regards,
JFD
- The processed Landsat-8 series are here:
/mnt/data/home/gascoins/Landsat8/Output-CES-Neige/N2A_France-MetropoleD0005H0001/
- And the todo list!: https://lite6.framapad.org/p/TODOs_let-it-snow
* Bibliography on the MODIS snow product (Gascoin) and its gap-filling
1. The most commonly used is the MOD10A1 product (Terra). There is an equivalent with Aqua
called MYD10A1. There is also a composite product with an 8-day time step
called MOD10A2 (resp. MYD10A2).
2. Here is how the MOD10A1 raster is coded: there are 3 bands, one giving
snow presence/absence (like us), one giving the snow albedo,
and one giving the snow fraction within a pixel (not relevant for Sentinel-2/Landsat-8):
<http://nsidc.org/data/docs/daac/modis_v5/mod10a1_modis_terra_snow_daily_global_500m_grid.gd.html>
If you scroll down the page there is an educational text with plenty of
references. For more technical documentation there is the ATBD here:
<http://modis.gsfc.nasa.gov/data/atbd/atbd_mod10.pdf>
3. For the gap-filling I had implemented, you can look at this paper on MODIS over the
Pyrenees: Gascoin et al. (2015) <http://www.hydrol-earth-syst-sci.net/19/2337/2015/hess-19-2337-2015.html>
4. I drew on the following papers (but there are others!)
- Parajka and Blöschl (2008) <http://www.hydro.tuwien.ac.at/fileadmin/mediapool-hydro/Publikationen/bloeschl/2008_Parajka_WRR.pdf>
- Gafurov and Bárdossy (2009) <http://www.hydrol-earth-syst-sci.net/13/1361/2009/hess-13-1361-2009.pdf>
* Validation data
1. Pléiades: we have a Pléiades tri-stereo acquisition over the Bassiès area (Marti et al. 2016 <http://www.the-cryosphere-discuss.net/tc-2016-11/>).
2. On a subset of this area we have already extracted the snow mask from the panchromatic band (but there are >80% snow pixels)
3. Otherwise we have to start again from the raw panchromatic and multispectral images acquired closest to nadir (for the method with a similar sensor see Bühler et al. 2015 <http://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=6939633>)
* Tristan's tasks
** Test
*** TODO Test n°13
- Update snow fraction value
** Doc
*** TODO Update Readme.md
- Code example
- SRTM and VRT generation explanation
- Files system explanation
- Contributors list update
** Chain
*** TODO S2Snow.py
- Preprocessing integration
- Cluster working directories ?
- Validation tests (odd elev and snow parameters ... etc.)
- Script Version
- Tag 0.1
*** TODO Data format
- Take inspiration from the technical note for naming. Product id => file name. Think about the shapefile. SEB
- File coding 2.5.4. Our chain does not handle the error case. What about water?
- Metadata if pass 1 != pass 2 pixel
- SEB: reuse the values from the document for our output, independent of the post-processing computations
- Make sure we have the right projection WKT
- Code the shapefile value in a field (attribute table), same as the SEB Type. SEB column (snow, cloud, no data).
- Metadata file ZS?
- QUM pass1 != pass2? coarse cloud => thin cloud?
- XML: take inspiration from the page. pythonXML, 7 fields (post-processing). Hard-coded variables
*** TODO Chain improvements
- Run images over all the Landsat/Sentinel/Pléiades data (orthorectification of the panchromatic image with OTB)
- Pléiades: check whether a snow mask can be generated at high resolution; look at publications (later, after the composite)
- MODIS composite ATBD => algorithm
......@@ -107,7 +107,25 @@ The objective of this algorithm is to generate a snow cover extent product from
\subsection{Development}
The algorithm prototype was developed by Simon Gascoin with insights from Olivier Hagolle in June 2015. The snow detection function and a script to run this function with an example are given in appendices \ref{par:castest} and \ref{par:s2snow} as formatted documents that includes the original Matlab code, comments, and output. The LIS chain was designed to work on any high resolution multi-spectral images from satellite sensors that include at least a channel in the visible spectrum and a channel near 1.5 µm (typically referred to as mid-infrared or ``MIR''). This initial code was ported to Python 2.7 and C++ by Manuel Grizonnet in order to make it scalable to large images using Orfeo Toolbox and GDAL. The LIS code was enhanced by Tristan Klempka during his internship at CNES. LIS currently supports SPOT-4, SPOT-5, Landsat-8 and Sentinel-2 level 2A products. The LIS code, installation documentation and configuration file examples are available in the Cesbio's gitlab: \url{http://tully.ups-tlse.fr/grizonnet/let-it-snow}.
The algorithm prototype was developed by Simon Gascoin with insights from
Olivier Hagolle in June 2015. The snow detection function and a script to run
this function with an example are given in appendices \ref{par:castest} and
\ref{par:s2snow} as formatted documents that include the original Matlab code,
comments, and output. The LIS chain was designed to work on any high resolution
multi-spectral images from satellite sensors that include at least a channel in
the visible spectrum and a channel near 1.5 µm (typically referred to as
mid-infrared or ``MIR''). This initial code was ported to Python 2.7 and C++ by
Manuel Grizonnet in order to make it scalable to large images using Orfeo
Toolbox and GDAL.
LIS currently supports SPOT-4, SPOT-5, Landsat-8 and
Sentinel-2 level 2A products.
The LIS code, installation documentation and configuration file examples are
available on CESBIO's GitLab:
\url{http://tully.ups-tlse.fr/grizonnet/let-it-snow}.
The list of all contributors is available in the LIS source in the file README.md.
\subsection{Limitations}
......@@ -166,7 +184,7 @@ The main output is a raster image (*SEB.TIF) of the snow and cloud mask. It has
\item 254: no data
\end{itemize}
The same data are made available as a polygon shapefile of the cloud and snow cover extent (*SEB\_VEC*). Two fields of information are embedded in this file:
The same data are made available as polygons (ESRI Shapefile format) of the cloud and snow cover extent (*SEB\_VEC*). Two fields of information are embedded in this file:
\begin{itemize}
\item DN:
\begin{itemize}
......@@ -200,9 +218,9 @@ The other output files are rather useful for the expert evaluation and troublesh
\subsection{Pre-processing}
In the case of Sentinel-2 the red and green bands are first resampled with the cubic method to a pixel size of 20~m by 20~m to match the resolution of the MIR band.
In the case of Sentinel-2 the red and green bands are first resampled with the cubic method to a pixel size of 20~m by 20~m to match the resolution of the SWIR band.
The DEM is also resampled to the resolution of the target product (30~m or 20~m, see Sect.~\ref{par:outputs}) using the cubic spline method that is implemented in the gdal library.
The DEM is also resampled to the resolution of the target product (30~m or 20~m, see Sect.~\ref{par:outputs}) using the cubic spline method that is implemented in the GDAL library.
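For illustration only (this is not the LIS implementation, which relies on OTB and GDAL applications; file names and resolutions below are placeholders), the same resampling operations can be expressed with the GDAL Python bindings:

\begin{minted}[tabsize=2, fontsize=\tiny]{python}
# Illustrative sketch: cubic resampling of a band to 20 m and
# cubic-spline resampling of the DEM to the target product resolution.
from osgeo import gdal

gdal.Warp("red_20m.tif", "red_10m.tif", xRes=20, yRes=20, resampleAlg="cubic")
gdal.Warp("dem_20m.tif", "dem.tif", xRes=20, yRes=20, resampleAlg="cubicspline")
\end{minted}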
\subsection{Snow detection}\label{par:snowdetec}
......@@ -275,31 +293,33 @@ After passing the pass 1 and 2 snow tests, some pixels that were originally mark
\node[below right] at (leg.north west) {Legend};
\node [blockinput, below of=MUSCATE] (DEM) {DEM};
\node [decision, below of=Level 2A product] (is cloud or shadow?) {Is cloud or shadow?};
\node [decision, right of=is cloud or shadow?] (darkcloud) {Is shadow or high or bright cloud? \textcolor{red}{$r_D$}};
\node [blockfinal, right of=darkcloud] (cloudfinal1) {Cloud (pass 1)};
\node [blockfinal, right of=is cloud or shadow?] (cloudfinal1) {Cloud (pass 1)};
\node [decision, below of=is cloud or shadow?] (snowtest1) {Is snow? \textcolor{red}{$n_1$}, \textcolor{red}{$r_1$}};
\node [blockfinal, left of=snowtest1] (pass1) {Snow\\(pass 1)};
\node [decision, below of=snowtest1] (snowlim) {Enough snow?\\ \textcolor{red}{$f_t$}};
\node [decision, below of=snowlim] (abovezs) {Is above snowline?};
\node [decision, right of=abovezs] (darkcloud) {Is shadow or high or bright cloud? \textcolor{red}{$r_D$}};
\node [block, left of=abovezs] (zs) {Snowline elevation \textcolor{red}{$d_z$}, \textcolor{red}{$f_s$}};
\node [decision, right of=abovezs] (snowtest2) {Is snow? \textcolor{red}{$n_2$}, \textcolor{red}{$r_2$}};
\node [decision, below of=darkcloud] (snowtest2) {Is snow? \textcolor{red}{$n_2$}, \textcolor{red}{$r_2$}};
\node [blockfinal, right of=snowtest2] (pass2) {Snow\\(pass 2)};
\node [decision, below of=snowtest2] (wascloud) {Was cloud?};
\node [blockfinal, right of=wascloud] (nosnow) {No snow};
\node [decision, below of=wascloud] (backtocloud) {Is dark? \textcolor{red}{$r_B$}};
\node [blockfinal, left of=backtocloud] (cloudfinal) {Cloud (pass 2)};
\node [decision, below of=wascloud] (backtocloud) {Is dark?
\textcolor{red}{$r_B$}};
\node [blockfinal, right of=darkcloud] (cloudfinal2) {Cloud (pass 2)};
\node [blockfinal, left of=backtocloud] (cloudfinal) {Cloud (final)};
% Draw edges
\path [line] (Level 2A product) -- (is cloud or shadow?);
\path [line,dashed] (MUSCATE) -- (Level 2A product);
\path [line] (is cloud or shadow?) -- node[near start]{yes} (darkcloud);
\path [line] (is cloud or shadow?) -- node[near start]{yes} (cloudfinal1);
\path [line] (is cloud or shadow?) -- node[near start]{no} (snowtest1);
\path [line] (darkcloud) -- node[near start]{yes}(cloudfinal1);
\path [line] (darkcloud) |- node[near start]{no}(snowtest1);
\path [line] (snowtest1) -- node[near start]{yes} (pass1);
\path [line] (snowtest1) -- node[near start]{yes} (snowlim);
\path [line] (snowlim) -- node[near start]{yes} (abovezs);
\path [line] (abovezs) -- node[near start]{yes} (snowtest2);
\path [line] (abovezs) -- node[near start]{yes} (darkcloud);
\path [line] (darkcloud) -- node[near start]{yes}(cloudfinal2);
\path [line] (darkcloud) -- node[near start]{no}(snowtest2);
\path [line] (snowtest2) -- node[near start]{yes} (pass2);
\path [line] (snowtest2) -- node[near start]{no} (wascloud);
\path [line,dashed] (pass1) -- (zs);
......@@ -329,13 +349,14 @@ The table below gives the description of the main parameters of the algorithm:
Parameter & Description & Name in the configuration file & Default value\\
\hline
\textcolor{red}{$r_f$} & Resize factor to produce the down-sampled red band & \texttt{rf} & 8 for L8 (12 for S2) \\
\textcolor{red}{$r_D$} & Maximum value of the down-sampled red band reflectance to define a dark cloud pixel & \texttt{rRed\_darkcloud} & 0.650 \\
\textcolor{red}{$r_D$} & Maximum value of the down-sampled red band reflectance to define a dark cloud pixel & \texttt{rRed\_darkcloud} & 0.300 \\
\textcolor{red}{$n_1$} & Minimum value of the NDSI for the pass 1 snow test & \texttt{ndsi\_pass1} & 0.400\\
\textcolor{red}{$n_2$} & Minimum value of the NDSI for the pass 2 snow test & \texttt{ndsi\_pass2} & 0.150\\
\textcolor{red}{$r_1$} & Minimum value of the red band reflectance for the pass 1 snow test & \texttt{rRed\_pass1} & 0.200 \\
\textcolor{red}{$r_2$} & Minimum value of the red band reflectance for the pass 2 snow test & \texttt{rRed\_pass2} & 0.040 \\
\textcolor{red}{$d_z$} & Minimum snow fraction in an elevation band to define $z_s$ & \texttt{fsnow\_lim} & 0.100 \\
\textcolor{red}{$d_z$} & Size of elevation band in the DEM used to define $z_s$ & \texttt{dz} & 0.100 \\
\textcolor{red}{$f_t$} & Minimum snow fraction in an elevation band to define $z_s$ & \texttt{fsnow\_lim} & 0.100 \\
\textcolor{red}{$fc_t$} & Minimum clear pixels fraction (snow and no-snow) in an elevation band to define $z_s$ & \texttt{fclear\_lim} & 0.100 \\
\textcolor{red}{$f_s$} & Minimum snow fraction in the image to activate the pass 2 snow test & \texttt{fsnow\_total\_lim} & 0.001 \\
\textcolor{red}{$r_B$} & Minimum value of the red band reflectance to return a non-snow pixel to the cloud mask & \texttt{rRed\_backtocloud} & 0.100 \\
\hline
cases, there is a parameter 'multi' in the JSON configuration file which allows
the reflectance parameters to be scaled. For instance, for products with reflectance
values between 0 and 10000 you can use \texttt{multi}=10 (the value used for Sentinel-2 products).
\newpage
\subsubsection{JSON schema of configuration file}\label{par:jsonparam}
The JSON Schema below describes the parameter file format and provides clear, human-
and machine-readable documentation of all the algorithm parameters. The JSON Schema
was generated on \href{https://jsonschema.net}{jsonschema.net} with the following options (with
metada and relative id).
metadata and relative id).
\inputminted[tabsize=2, fontsize=\tiny]{js}{schema.json}
......@@ -388,7 +411,7 @@ The implementation of the Sentinel-2 configuration was tested on the Sentinel-2A
\centering
\includegraphics[width=\textwidth]{./images/Sentinel2_testmontage.png}
% Sentinel2_testmontage.png: 2014x811 pixel, 72dpi, 71.05x28.61 cm, bb=0 0 2014 811
\caption{The LIS snow mask from the Sentinel-2A image of 06-July-2015 (Fig.~\ref{fig:S2snow}) is superposed to an aerial orthophoto taken in August 2013 and distributed by the Institut National Information Géographique Forestière.}
\caption{The LIS snow mask from the Sentinel-2A image of 06-July-2015 (Fig.~\ref{fig:S2snow}) is superimposed on an aerial image taken in August 2013 and distributed by the Institut National de l'Information Géographique et Forestière.}
\label{fig:S2snowzoom}
\end{figure}
......
......@@ -27,7 +27,7 @@
"type": "integer"
},
"red_darkcloud": {
"default": 500,
"default": 300,
"description": "Maximum value of the down-sampled red band reflectance to define a dark cloud pixel.",
"id": "red_darkcloud",
"title": "The Red_darkcloud schema.",
......@@ -53,6 +53,34 @@
"id": "shadow_out_mask",
"title": "The Shadow_out_mask schema.",
"type": "integer"
},
"strict_cloud_mask": {
"default": false,
"description": "Option that prevent any snow detection within the initial cloud mask. (experimental)",
"id": "strict_cloud_mask",
"title": "The Strict_cloud_mask schema.",
"type": "boolean"
},
"rm_snow_inside_cloud": {
"default": false,
"description": "Trigger the experimental function discarding snow area that are inside in cloud mask.",
"id": "rm_snow_inside_cloud",
"title": "The Rm_snow_inside_cloud schema.",
"type": "boolean"
},
"rm_snow_inside_cloud_dilation_radius": {
"default": 1,
"description": "Size in pixel of the dilation radius around the snow area. (experimental)",
"id": "rm_snow_inside_cloud_dilation_radius",
"title": "The Rm_snow_inside_cloud_dilation_radius schema.",
"type": "integer"
},
"rm_snow_inside_cloud_threshold": {
"default": 0.85,
"description": "Minimum fraction of cloudy pixel in the dilated area to discard the snow area. (experimental)",
"id": "rm_snow_inside_cloud_threshold",
"title": "The rm_snow_inside_cloud_threshold schema.",
"type": "float"