Commit 1846cc97 authored by Aurore Dupuis's avatar Aurore Dupuis
Browse files

Merge develop

parents d6e5fcdf 2c9a731d
test
Dockerfile
\ No newline at end of file
# Build stage: compiles OTB, LIS and the DANS-GDAL scripts; only the
# /install/* artifacts are copied into the slim runtime stage below.
# NOTE: `AS` uppercased for consistent instruction casing (BuildKit build check).
FROM ubuntu:20.04 AS builder
# Reference : https://github.com/opencontainers/image-spec/blob/main/annotations.md
LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu"
LABEL org.opencontainers.image.description="LIS Build container"
# Install required packages
# Build-time toolchain plus the libraries needed to compile OTB and LIS.
# update + install + cache cleanup happen in one layer so no stale apt
# metadata or package lists are persisted in the image.
RUN apt-get update --quiet && \
    DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
        # basic system installs
        build-essential \
        ca-certificates \
        python3 \
        unzip \
        wget \
        # packages needed for compilation (alphabetical for easy diffing)
        cmake \
        freeglut3-dev \
        libboost-date-time-dev \
        libboost-filesystem-dev \
        libboost-graph-dev \
        libboost-program-options-dev \
        libboost-system-dev \
        libboost-thread-dev \
        libfftw3-dev \
        libgdal-dev \
        libinsighttoolkit4-dev \
        libmuparser-dev \
        libmuparserx-dev \
        libopenthreads-dev \
        libossim-dev \
        libtinyxml-dev \
        ninja-build \
        python3-dev \
        python3-numpy \
        swig && \
    # drop the apt cache in the same layer to keep it out of the image
    rm -rf /var/lib/apt/lists/*
# Build OTB
# Download Orfeo ToolBox 7.1.0 sources, configure with CMake/Ninja (Python
# wrapping enabled, tests/examples/docs off) and install into /install/otb
# so the runtime stage can COPY the result.
RUN mkdir -p /root/otb-build/build && \
cd /root/otb-build && \
# -nv silences wget chatter; --show-progress keeps only the progress bar
wget -nv --show-progress --progress=bar:force:noscroll https://www.orfeo-toolbox.org/packages/archives/OTB/OTB-7.1.0.zip -O /tmp/OTB.zip && \
unzip /tmp/OTB.zip && \
cd /root/otb-build/build && \
# NOTE(review): cmake is pointed at .. (= /root/otb-build); this assumes the
# OTB zip extracts its top-level CMakeLists.txt directly there — confirm the
# archive layout if the download URL/version is ever changed
cmake \
"-DBUILD_COOKBOOK:BOOL=OFF" \
"-DBUILD_EXAMPLES:BOOL=OFF" \
"-DBUILD_TESTING:BOOL=OFF" \
"-DBUILD_SHARED_LIBS:BOOL=ON" \
"-DOTB_WRAP_PYTHON:BOOL=ON" \
"-DOTB_USE_MUPARSER:BOOL=ON" \
"-DOTB_USE_MUPARSERX:BOOL=ON" \
"-DOTB_USE_SIFTFAST:BOOL=ON" \
"-DOTB_USE_SPTW:BOOL=ON" \
"-DOTB_USE_SSE_FLAGS:BOOL=ON" \
"-DCMAKE_BUILD_TYPE=Release" \
-DCMAKE_INSTALL_PREFIX="/install/otb" -GNinja .. && \
ninja install && \
# remove sources and archive in the same layer to keep the stage small
rm -rf /root/otb-build /tmp/OTB.zip
# Build LIS
# COPY (not ADD): the context is plain files, so ADD's tar-extraction/URL
# semantics are unwanted here (hadolint DL3020).
COPY . /LIS_src/
# `python` alias is needed because the LIS build invokes plain `python`.
RUN ln -s /usr/bin/python3 /usr/bin/python && \
mkdir -p /root/lis-build && \
cd /root/lis-build && \
# build LIS against the OTB installed by the previous stage step
cmake -DCMAKE_PREFIX_PATH=/install/otb \
-DCMAKE_INSTALL_PREFIX=/install/lis /LIS_src/ && \
make -j 6 && \
make install && \
# ensure the LIS entry-point scripts are executable in the final image
chmod a+x /install/lis/app/*
# Build DANS-GDAL scripts
# Fetch and build the gina-alaska dans-gdal-scripts tools (gdal_trace_outline
# et al., used by LIS vectorization) into /install/dans.
RUN mkdir -p /root/dans-build && \
cd /root/dans-build && \
# NOTE(review): refs/heads/master is an unpinned moving target, so this build
# is not reproducible — consider pinning a commit/tag and checking a checksum
wget -nv --show-progress --progress=bar:force:noscroll https://github.com/gina-alaska/dans-gdal-scripts/archive/refs/heads/master.zip -O /tmp/dans.zip && \
unzip /tmp/dans.zip && \
cd dans-gdal-scripts-master && \
# NOTE(review): autogen.sh normally requires autoconf/automake, which are not
# in the builder's apt list — confirm the toolchain actually covers this
./autogen.sh && \
./configure --prefix=/install/dans && \
make && \
make install && \
# clean sources and archive in the same layer
rm -rf /root/dans-build /tmp/dans.zip
##############################
# Runtime stage: minimal Ubuntu image; the OTB/LIS/DANS artifacts built above
# are copied in, build toolchains stay behind in the builder stage.
FROM ubuntu:20.04
LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu"
LABEL org.opencontainers.image.description="LIS + OTB 7.1 Container"
# system packages
# Runtime-only dependencies: the shared-library counterparts of the -dev
# packages used in the builder stage, plus the Python stack the LIS scripts use.
RUN apt-get update --quiet && \
# NOTE(review): blanket `apt-get upgrade` is flagged by hadolint DL3005;
# bumping the base image tag/digest is the more reproducible alternative
apt-get -y upgrade --quiet && \
DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
python3 \
python-is-python3 \
python3-pip \
python3-gdal \
python3-lxml \
python3-numpy \
gdal-bin \
file \
libpython3.8 \
libinsighttoolkit4.13 \
libopenthreads21 \
libossim1 \
libtinyxml2.6.2v5 \
libmuparser2v5 \
libmuparserx4.0.7 \
libfftw3-3 \
&& \
# purge apt lists in the same layer to keep the image small
rm -rf /var/lib/apt/lists/*
# install OTB from builder
COPY --from=builder /install/otb /usr/local
# Set the search paths directly: ubuntu:20.04 defines neither LD_LIBRARY_PATH
# nor PYTHONPATH, so the previous `$VAR:...` expansion produced an empty
# leading entry, which the dynamic loader and Python both interpret as the
# current working directory — an unintended (and unsafe) search-path entry.
ENV LD_LIBRARY_PATH=/usr/local/lib
ENV OTB_APPLICATION_PATH=/usr/local/lib/otb/applications/
ENV PYTHONPATH=/usr/local/lib/otb/python:/usr/local/lib/python3.8/site-packages/
# /usr/local/app holds the LIS entry-point scripts copied below
ENV PATH=/usr/local/app:/usr/local/bin:$PATH
# install LIS from builder
COPY --from=builder /install/lis /usr/local
# LIS installs its OTB application plugins next to the other libs; move them
# to where OTB_APPLICATION_PATH points so OTB can discover them
RUN mv /usr/local/lib/otbapp_* /usr/local/lib/otb/applications/
# install DANS GINA from builder
COPY --from=builder /install/dans /usr/local
# Add additional Python dependencies required by the LIS scripts.
# NOTE(review): versions are unpinned, so rebuilds are not reproducible —
# consider pinning (pkg==x.y.z). The image also ships no USER directive and
# therefore runs as root; confirm whether a non-root user is acceptable here.
RUN pip3 install --no-cache-dir \
numpy \
scipy \
shapely \
pyproj \
fiona \
pyyaml \
rasterio
\ No newline at end of file
......@@ -21,14 +21,27 @@ The input files are Sentinel-2 or Landsat-8 level-2A products from the [Theia La
The syntheses are temporally aggregated (level-3A) products derived from individual snow products after gapfilling. The three products are: the snow cover duration, the snow disappearance date and the snow appearance date. These products are typically computed over a hydrological year (more details : [Snow cover duration map](doc/snow_annual_map.md).
## Usage
### Snow detector
### let_it_snow_fsc
Run the python script run_snow_detector.py with a json configuration file as unique argument:
There are different ways to launch the let_it_snow_fsc application:
• Run the python script let_it_snow_fsc.py with the json launch file as unique argument:
```bash
python let_it_snow_fsc.py -j {path_to_param_test.json}
```
• Run the python script let_it_snow_fsc.py with the json configuration file and input and output path:
```bash
python let_it_snow_fsc.py -c {path_to_lis_configuration.json} -i {path_to_input_dir} -o {path_to_output_dir}
```
• You can also overload parameters that are in the files, such as the “dem” in the following example. In this example the "dem" in param_test.json is not used; the "dem" given as an argument is used instead. More information on parameter description and overloading is available in [parameters_README.md](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/-/blob/FG_develop/doc/parameters_README.md) in the doc directory.
```bash
python run_snow_detector.py param.json
python let_it_snow_fsc.py -j {path_to_param_test.json} -d {path_to_dem_file}
```
The snow detection is performed in the Python script [run_snow_detector.py](app/run_snow_detector.py).
The snow detection is performed in the Python script [let_it_snow_fsc.py](app/let_it_snow_fsc.py).
All the parameters of the algorithm, paths to input and output data are stored in the json file. The JSON schema (snow_detector_schema.json) is available in the [Algorithm Theoretical Basis Documentation](doc/atbd/ATBD_CES-Neige.tex) and gives more information about the roles of these parameters.
......@@ -36,13 +49,20 @@ NB: To build DEM data download the SRTM files corresponding to the study area an
Warning : DEM with nodata value could alter snow detection. zs should be contained between [-431, 8850].
### Snow syntheses
### let_it_snow_synthesis
Run the python script run_snow_annual_map.py with a json configuration file as unique argument:
Run the python script let_it_snow_synthesis.py with a json launch file as unique argument:
```bash
python run_snow_annual_map.py param.json
python let_it_snow_synthesis.py -j {path_to_synthesis_launch.json}
```
The snow syntheses are performed in the Python script [run_snow_annual_map.py](app/run_snow_annual_map.py).
• You can also overload parameters that are in the files, such as the “date_margin” in the following example:
```bash
python let_it_snow_synthesis.py -j {path_to_synthesis_launch.json} -m {value_of_date_margin}
```
The snow syntheses are performed in the Python script [let_it_snow_synthesis.py](app/let_it_snow_synthesis.py).
All the parameters of the algorithm, paths to input and output data are stored in the json file. The JSON schema (snow_annual_map_schema.json) and its description are available in the [readme](doc/snow_annual_map.md).
......@@ -50,7 +70,23 @@ All the parameters of the algorithm, paths to input and output data are stored i
### Snow product
* SNOW_ALL: Binary mask of snow and clouds.
Each product is computed for a given tile [TILE\_ID] and a given period from [DATE\_START] to [DATE_STOP]. Products are identified by a tag according the following naming convention: [TILE\_ID]\_[DATE\_START]\_[DATE_STOP]
For example: **T31TCH\_20170901\_20180831**
Integration into Hysope-II requires that product names match the nomenclature:
LIS_<*mission*><*tag*><*chain_version*>_<*product_counter*>
LIS generates the following files:
- Raster: **LIS\_<*mission*>-SNOW-FSC\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**,
- Raster: **LIS\_<*mission*>-SNOW-FSC-QCFLAGS\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**,
Moreover in the tmp directory:
* LIS_SNOW_ALL: Binary mask of snow and clouds.
* 1st bit: Snow mask after pass1
* 2nd bit: Snow mask after pass2
* 3rd bit: Clouds detected at pass0
......@@ -62,7 +98,7 @@ For example if you want to get the snow from pass1 and clouds detected from pass
```python
pixel_value & 00000101
```
* SEB: Raster image of the snow mask and cloud mask.
* LIS_SEB: Raster image of the snow mask and cloud mask.
* 0: No-snow
* 100: Snow
* 205: Cloud including cloud shadow
......@@ -81,30 +117,30 @@ Each product is computed for a given tile [TILE\_ID] and a given period from [DA
For example: **T31TCH\_20170901\_20180831**
LIS generates the following files:
- Raster: **DAILY\_SNOW\_MASKS\_<*tag*>.tif**, the snow time series file interpolated on a daily basis (1 image with one band per day). Each band are coded as follows (the interpolation removing any clouds or nodata):
- 0: No-snow
- 1: Snow
- Raster: **SCD\_<*tag*>.tif**, the snow cover duration map (SCD), pixel values within [0-number of days] corresponding the number of snow days.
- JSON file: **synthesis_launch.json**, the configuration files used for the products generation
- JSON file: **synthesis_configuration.json**, the configuration files used for the program configuration
- Raster: **CLOUD\_OCCURENCE\_<*tag*>.tif**, the cloud/nodata annual map image, pixel values within [0-1] corresponding the cloud or nodata occurrences in the non-interpolated time series
- Raster: **LIS\_<*mission*>-SNOW-SCD\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the snow cover duration map (SCD), pixel values within [0-number of days] corresponding the number of snow days.
- Raster: **SMOD\_<*tag*>.tif**, the date of snow disappearance (Snow Melt-Out Date), defined as the last date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **LIS\_<*mission*>-SNOW-SMOD\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the date of snow disappearance (Snow Melt-Out Date), defined as the last date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **SOD\_<*tag*>.tif**, the date of snow appearance (Snow Onset Date), defined as the first date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **LIS\_<*mission*>-SNOW-SOD\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the date of snow appearance (Snow Onset Date), defined as the first date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **NOBS\_<*tag*>.tif**, the number of clear observations to compute the SCD, SMOD and SOD syntheses
- Raster: **LIS\_<*mission*>-SNOW-NOBS\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the number of clear observations to compute the SCD, SMOD and SOD syntheses
Output directory will also contain the following files :
Output directory will also contain the following files in the tmp directory :
- Text file: **input_dates.txt**, the list of observation dates in the non-interpolated time series
- Text file: **output_dates.txt**, the list of interpolated dates
- JSON file: **param.json**, the configuration file used for the products generation (optional)
- Raster: **CLOUD\_OCCURENCE\_<*tag*>.tif**, the cloud/nodata annual map image, pixel values within [0-1] corresponding the cloud or nodata occurrences in the non-interpolated time series
- LOG file: **stdout.log**, the log file for the standard output generated during processing (optional)
- Raster: **DAILY\_SNOW\_MASKS\_<*tag*>.tif**, the snow time series file interpolated on a daily basis (1 image with one band per day). Each band are coded as follows (the interpolation removing any clouds or nodata):
- 0: No-snow
- 1: Snow
- LOG file: **stderr.log**, the log file for the error output generated during processing (optional)
- LOG file: **lis.log**, the log file for the standard and error output generated during processing (optional)
## Data set example
......@@ -194,6 +230,35 @@ export PYTHONPATH=/your/install/directory/lib:/your/install/directory/lib/python
```
let-it-snow is now installed.
#### Local
Go to gitlab and download the master repository :
https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
You also have to download the install code which is : lis-build-script on gitlab :
https://gitlab.cnes.fr/Theia/lis-build-script
Then unzip the packets on your work repository.
The prerequisites have to be loaded. For example, on the HAL cluster:
```bash
module load gcc/7.3.0
module load python/3.7.2
module load otb/7.0
module load openmpi/3.1.1
module load otb-depends/7.0-python3.7.2
```
Then go to lis-build-script-master and launch the local install of LIS:
```bash
./build-lis-local.sh {path_to_master_repository} {OTB_version_number} {install_repository}
```
When the install is completed, the tests are launched. The X tests have to be OK
let-it-snow is now installed.
## Tests
Enable tests with BUILD_TESTING cmake option. Use ctest command to run tests. Do not forget to clean your output test directory when you run a new set of tests.
......@@ -208,6 +273,33 @@ Data-Test : Test data folder needed to run tests. It contains Landsat, Take5 and
Output-Test : Temporary output tests folder.
Do not modify these folders.
The test list is described on the [test_README.md](https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow/-/blob/FG_develop/test/test_README.md) in the test directory.
## How to use lis configuration files (version < 1.7)
First you have to change the structure of the input for S2.
The input has to be in a directory like the following example.
```bash
S2-SMALL/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B11.tif
```
Moreover, the SRTM and MASKS directories also have to be put in the SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0 directory.
Then you can use the python program in the app repertory: transform_old_json.py with the path of the old file as unique argument to transform and create your json file in a launch file and a configuration file for let_it_snow_fsc:
```bash
./transform_old_json.py {path_to_old_json}
```
You can also use the python program: transform_old_annual_json.py with the path of the old file as unique argument to transform and create your json file in a launch file and a configuration file for let_it_snow_synthesis:
```bash
./transform_old_annual_json.py {path_to_old_json}
```
## Contributors
Manuel Grizonnet (CNES), Simon Gascoin (CNRS/CESBIO), Germain Salgues (Magellium), Aurore Dupuis (CNES), Rémi Jugier (Magellium)
......
......@@ -29,16 +29,28 @@ import argparse
import zipfile
### Configuration Template ###
conf_template = {"general": {"pout": "",
"nodata": -10000,
"ram": 2048,
"nb_threads": 1,
"preprocessing": False,
"log": True,
"multi": 10,
"target_resolution": -1},
"vector": {"generate_vector": True,
"generate_intermediate_vectors": False,
param_configuration = {
"input_dir" : "",
"output_dir" : "",
"dem" :"",
"tcd":None,
"log" : "INFO",
"water_mask":None,
"config_file" : ""
}
lis_configuration = {"general": {"multi":10,
"mode":"",
"nodata":-10000,
"ram":2048,
"nb_threads":1,
"preprocessing":False,
"log":True
},
"vector": {"generate_vector":True,
"generate_intermediate_vectors":False,
"use_gdal_trace_outline": True,
"gdal_trace_outline_dp_toler": 0,
"gdal_trace_outline_min_area": 0
......@@ -50,39 +62,37 @@ conf_template = {"general": {"pout": "",
"swir_band": {"path": "",
"noBand": 1},
"dem": "",
"cloud_mask": ""
"cloud_mask": "",
"div_slope_threshold":""
},
"cloud": {"all_cloud_mask": 1,
"high_cloud_mask": 128,
"shadow_in_mask": 32,
"shadow_out_mask": 64,
"red_backtocloud": 100,
"resize_factor": 12,
"red_darkcloud": 300,
"strict_cloud_mask":False,
"rm_snow_inside_cloud":False,
"rm_snow_inside_cloud_dilation_radius": 5,
"rm_snow_inside_cloud_threshold": 0.85,
"rm_snow_inside_cloud_min_area": 250000},
"snow": {"dz": 100,
"ndsi_pass1": 0.4,
"red_pass1": 200,
"ndsi_pass2": 0.15,
"red_pass2": 40,
"fsnow_lim": 0.1,
"fclear_lim": 0.1,
"fsnow_total_lim": 0.001},
"cloud": {"shadow_in_mask": 32,
"shadow_out_mask": 64,
"all_cloud_mask": 1,
"high_cloud_mask": 128,
"rf": 12,
"red_darkcloud": 300,
"red_backtocloud": 100,
"strict_cloud_mask": False,
"rm_snow_inside_cloud": False,
"rm_snow_inside_cloud_dilation_radius": 1,
"rm_snow_inside_cloud_threshold": 0.85,
"rm_snow_inside_cloud_min_area": 5000},
"fsnow_total_lim": 0.001,
"fclear_lim": 0.1
},
"fsc": {
"dofsc": False,
"fscToc_Eq": "1.45*ndsi-0.01",
"fscOg_Eq": "fscToc/(1-tcd)",
"tcd": "",
"cosims_mode": False
},
"water_mask": {
"apply": False,
"path": None,
"raster_values": [1]
"water_mask_raster_values": [1]
}
}
......@@ -101,7 +111,7 @@ MAJA_parameters = {"multi": 10,
"shadow_out_mask": 8,
"all_cloud_mask": 1,
"high_cloud_mask": 128,
"rf": 12}
"resize_factor": 12}
SEN2COR_parameters = {"mode": "sen2cor",
"multi": 10,
......@@ -117,7 +127,7 @@ SEN2COR_parameters = {"mode": "sen2cor",
"shadow_out_mask": 3,
"all_cloud_mask": 8,
"high_cloud_mask": 10,
"rf": 12}
"resize_factor": 12}
Take5_parameters = {"multi": 1,
"green_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
......@@ -134,7 +144,7 @@ Take5_parameters = {"multi": 1,
"shadow_out_mask": 128,
"all_cloud_mask": 1,
"high_cloud_mask": 32,
"rf": 8}
"resize_factor": 8}
S2_parameters = {"multi": 10,
"green_band": ".*FRE_B3.*\.tif$",
......@@ -151,7 +161,7 @@ S2_parameters = {"multi": 10,
"shadow_out_mask": 64,
"all_cloud_mask": 1,
"high_cloud_mask": 128,
"rf": 12}
"resize_factor": 12}
L8_parameters_new_format = {"multi": 1,
"green_band": ".*FRE_B3.*\.tif$",
......@@ -168,7 +178,7 @@ L8_parameters_new_format = {"multi": 1,
"shadow_out_mask": 64,
"all_cloud_mask": 1,
"high_cloud_mask": 128,
"rf": 8}
"resize_factor": 8}
L8_parameters = {"multi": 1,
"green_band": ".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
......@@ -185,7 +195,7 @@ L8_parameters = {"multi": 1,
"shadow_out_mask": 128,
"all_cloud_mask": 1,
"high_cloud_mask": 32,
"rf": 8}
"resize_factor": 8}
LANDSAT8_LASRC_parameters = {"mode": "lasrc",
"multi": 10,
......@@ -201,7 +211,7 @@ LANDSAT8_LASRC_parameters = {"mode": "lasrc",
"shadow_out_mask": 8,
"all_cloud_mask": 224, # cloud with high confidence (32+(64+128))
"high_cloud_mask": 800, # cloud and high cloud with high confidence (32 + (512+256))
"rf": 8}
"resize_factor": 8}
mission_parameters = {"S2": S2_parameters, \
"LANDSAT8": L8_parameters, \
......@@ -245,7 +255,7 @@ def read_product(inputPath, mission):
"""
if os.path.exists(inputPath):
params = mission_parameters[mission]
conf_json = conf_template
conf_json = lis_configuration
conf_json["general"]["multi"] = params["multi"]
conf_json["inputs"]["green_band"]["path"] = findFiles(inputPath, params["green_band"])[0]
......@@ -255,7 +265,9 @@ def read_product(inputPath, mission):
conf_json["inputs"]["red_band"]["noBand"] = params["red_bandNumber"]
conf_json["inputs"]["swir_band"]["noBand"] = params["swir_bandNumber"]
conf_json["inputs"]["cloud_mask"] = findFiles(inputPath, params["cloud_mask"])[0]
result = findFiles(os.path.join(inputPath, "SRTM"), params["dem"])
# DEM by default
result = ["/datalake/static_aux/MNT/Copernicus_DSM/world.vrt"]
if result:
conf_json["inputs"]["dem"] = result[0]
else:
......@@ -266,7 +278,7 @@ def read_product(inputPath, mission):
div_mask_tmp = findFiles(inputPath, params["div_mask"])
if div_mask_tmp:
conf_json["inputs"]["div_mask"] = div_mask_tmp[0]
conf_json["inputs"]["div_slope_thres"] = params["div_slope_thres"]
conf_json["inputs"]["div_slope_threshold"] = params["div_slope_thres"]
else:
logging.warning("div_mask was not found, the slope correction flag will be ignored")
......@@ -274,12 +286,12 @@ def read_product(inputPath, mission):
conf_json["cloud"]["shadow_out_mask"] = params["shadow_out_mask"]
conf_json["cloud"]["all_cloud_mask"] = params["all_cloud_mask"]
conf_json["cloud"]["high_cloud_mask"] = params["high_cloud_mask"]
conf_json["cloud"]["rf"] = params["rf"]
conf_json["cloud"]["resize_factor"] = params["resize_factor"]
# Check if an optional mode is provided in the mission configuration
# Use in case of SEN2COR to handle differences between maja and sen2cor encoding
if 'mode' in params:
conf_json["general"]["mode"] = params["mode"]
conf_json["general"]["mode"] = mission
return conf_json
else:
......@@ -306,11 +318,12 @@ def main():
group_general.add_argument("-preprocessing", type=str2bool, help="true/false")
group_general.add_argument("-log", type=str2bool, help="true/false")
group_general.add_argument("-multi", type=float)
group_general.add_argument("-target_resolution", type=float)
group_inputs = parser.add_argument_group('inputs', 'input files')
group_inputs.add_argument("-dem", help="dem file path, to use for processing the input product")
group_inputs.add_argument("-cloud_mask", help="cloud mask file path")
group_inputs.add_argument("-div_slope_threshold", help="Input threshold of div_mask image to determine the flag for slope correction (optional for Theia product")
group_inputs.add_argument("-div_mask", help="Input other mask image for slope correction flag (optional in MASK directory for Theia product *DIV*.TIF)")
group_snow = parser.add_argument_group('snow', 'snow parameters')
group_snow.add_argument("-dz", type=int)
......@@ -320,13 +333,15 @@ def main():
group_snow.add_argument("-red_pass2", type=float)
group_snow.add_argument("-fsnow_lim", type=float)
group_snow.add_argument("-fsnow_total_lim", type=float)
group_snow.add_argument("-fclear_lim", type=float)
group_cloud = parser.add_argument_group('cloud', 'cloud parameters')
group_cloud.add_argument("-shadow_in_mask", type=int)
group_cloud.add_argument("-shadow_out_mask", type=int)
group_cloud.add_argument("-all_cloud_mask", type=int)
group_cloud.add_argument("-high_cloud_mask", type=int)
group_cloud.add_argument("-rf", type=int)
group_cloud.add_argument("-resize_factor", type=int)
group_cloud.add_argument("-red_darkcloud", type=int)
group_cloud.add_argument("-red_backtocloud", type=int)
group_cloud.add_argument("-strict_cloud_mask", type=str2bool, help="true/false")
......@@ -334,7 +349,7 @@ def main():
group_fsc = parser.add_argument_group('fsc', 'fractional snow cover parameters')
group_fsc.add_argument("-fsc", type=str,
help="path to tree cover density file, automatically activates sets fsc: dofsc to true")
group_fsc.add_argument("-cosims_mode", action='store_true', help="CoSIMS mode : Generate CoSIMS formatted outputs.")
group_water_mask = parser.add_argument_group('water_mask', 'water mask parameters')
group_water_mask.add_argument("-water_mask_path", type=str, help="Path to a raster or a shapefile")
......@@ -381,13 +396,20 @@ def main():
else:
logging.error("Unknown product type.")
sys.exit(0)
launch_conf = param_configuration
launch_conf["input_dir"] = inputPath
launch_conf["config_file"] = os.path.join(outputPath, "lis_configuration.json")
if jsonData:
if not os.path.exists(outputPath):
logging.info("Create directory " + outputPath + "...")
os.makedirs(outputPath)
jsonData["general"]["pout"] = outputPath
launch_conf["output_dir"] = outputPath
# Override parameters for group general
if args.nodata is not None:
......@@ -398,18 +420,18 @@ def main():
jsonData["vector"]["generate_vector"] = args.generate_vector
if args.log is not None:
jsonData["general"]["log"] = args.log
launch_conf["log"] = args.log
if args.ram:
jsonData["general"]["ram"] = args.ram
if args.nb_threads:
jsonData["general"]["nb_threads"] = args.nb_threads
if args.multi:
jsonData["general"]["multi"] = args.multi
if args.target_resolution:
jsonData["general"]["target_resolution"] = args.target_resolution
# Override dem location
if args.dem:
jsonData["inputs"]["dem"] = os.path.abspath(args.dem)
launch_conf["dem"] = os.path.abspath(args.dem)
logging.warning("Using optional external DEM!")
# Override cloud mask location
if args.cloud_mask:
......@@ -430,7 +452,9 @@ def main():
jsonData["snow"]["fsnow_lim"] = args.fsnow_lim
if args.fsnow_total_lim:
jsonData["snow"]["fsnow_total_lim"] = args.fsnow_total_lim
if args.fclear_lim:
jsonData["snow"]["fclear_lim"] = args.fclear_lim
# Override parameters for group cloud
if args.shadow_in_mask:
jsonData["cloud"]["shadow_in_mask"] = args.shadow_in_mask
......@@ -440,8 +464,8 @@ def main():
jsonData["cloud"]["all_cloud_mask"] = args.all_cloud_mask