Commit fc1d27da authored by Aurore Dupuis's avatar Aurore Dupuis
Browse files

Merge Release 1.7.1

parents 85c4ca83 0681aaae
test
Dockerfile
\ No newline at end of file
......@@ -50,12 +50,12 @@ set(PYTHON_INCLUDE_DIRS $ENV{PYTHONHOME}/include)
find_package(PythonInterp REQUIRED)
find_package( PythonLibs 2.7 REQUIRED)
find_package( PythonLibs 3.7 REQUIRED)
include_directories( ${PYTHON_INCLUDE_DIRS} )
# Link to the Orfeo ToolBox
# LIS required OTB 6.0
SET(OTB_MIN_VERSION "6.0.0")
# LIS required OTB 7.0
SET(OTB_MIN_VERSION "7.0.0")
find_package(OTB ${OTB_MIN_VERSION} REQUIRED)
if(OTB_FOUND)
......
# ---- Stage 1: build environment ----------------------------------------
# Compiles OTB, LIS and the DANS-GDAL scripts; results are collected under
# /install/* and copied into the smaller runtime image in the next stage.
FROM ubuntu:20.04 as builder
# Reference : https://github.com/opencontainers/image-spec/blob/main/annotations.md
LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu"
LABEL org.opencontainers.image.description="LIS Build container"
# Install required packages
RUN apt-get update --quiet && \
DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
# basic system installs
build-essential \
python3 \
wget \
ca-certificates \
unzip \
# packages needed for compilation
cmake \
swig \
ninja-build \
python3-dev \
python3-numpy \
libgdal-dev \
freeglut3-dev \
libboost-date-time-dev \
libboost-filesystem-dev \
libboost-graph-dev \
libboost-program-options-dev \
libboost-system-dev \
libboost-thread-dev \
libinsighttoolkit4-dev \
libopenthreads-dev \
libossim-dev \
libtinyxml-dev \
libmuparser-dev \
libmuparserx-dev \
libfftw3-dev && \
rm -rf /var/lib/apt/lists/*
# Build OTB 7.1.0 from source (Python bindings enabled) and install it into /install/otb
RUN mkdir -p /root/otb-build/build && \
cd /root/otb-build && \
wget -nv --show-progress --progress=bar:force:noscroll https://www.orfeo-toolbox.org/packages/archives/OTB/OTB-7.1.0.zip -O /tmp/OTB.zip && \
unzip /tmp/OTB.zip && \
cd /root/otb-build/build && \
cmake \
"-DBUILD_COOKBOOK:BOOL=OFF" \
"-DBUILD_EXAMPLES:BOOL=OFF" \
"-DBUILD_TESTING:BOOL=OFF" \
"-DBUILD_SHARED_LIBS:BOOL=ON" \
"-DOTB_WRAP_PYTHON:BOOL=ON" \
"-DOTB_USE_MUPARSER:BOOL=ON" \
"-DOTB_USE_MUPARSERX:BOOL=ON" \
"-DOTB_USE_SIFTFAST:BOOL=ON" \
"-DOTB_USE_SPTW:BOOL=ON" \
"-DOTB_USE_SSE_FLAGS:BOOL=ON" \
"-DCMAKE_BUILD_TYPE=Release" \
-DCMAKE_INSTALL_PREFIX="/install/otb" -GNinja .. && \
ninja install && \
rm -rf /root/otb-build /tmp/OTB.zip
# Build LIS from the build context against the OTB built above; install into /install/lis
ADD . /LIS_src/
RUN ln -s /usr/bin/python3 /usr/bin/python && \
mkdir -p /root/lis-build && \
cd /root/lis-build && \
cmake -DCMAKE_PREFIX_PATH=/install/otb \
-DCMAKE_INSTALL_PREFIX=/install/lis /LIS_src/ && \
make -j 6 && \
make install && \
chmod a+x /install/lis/app/*
# Build the DANS-GDAL scripts (gina-alaska, master branch) into /install/dans
RUN mkdir -p /root/dans-build && \
cd /root/dans-build && \
wget -nv --show-progress --progress=bar:force:noscroll https://github.com/gina-alaska/dans-gdal-scripts/archive/refs/heads/master.zip -O /tmp/dans.zip && \
unzip /tmp/dans.zip && \
cd dans-gdal-scripts-master && \
./autogen.sh && \
./configure --prefix=/install/dans && \
make && \
make install && \
rm -rf /root/dans-build /tmp/dans.zip
##############################
# ---- Stage 2: runtime image --------------------------------------------
# Installs only the runtime libraries (matching the -dev packages of the
# build stage) and copies the built artifacts from the builder stage.
FROM ubuntu:20.04
LABEL org.opencontainers.image.authors="aurore.dupuis@cnes.fr vincent.gaudissart@csgroup.eu"
LABEL org.opencontainers.image.description="LIS + OTB 7.1 Container"
# system packages
RUN apt-get update --quiet && \
apt-get -y upgrade --quiet && \
DEBIAN_FRONTEND=noninteractive apt-get install --quiet --yes --no-install-recommends \
python3 \
python-is-python3 \
python3-pip \
python3-gdal \
python3-lxml \
python3-numpy \
gdal-bin \
file \
libpython3.8 \
libinsighttoolkit4.13 \
libopenthreads21 \
libossim1 \
libtinyxml2.6.2v5 \
libmuparser2v5 \
libmuparserx4.0.7 \
libfftw3-3 \
&& \
rm -rf /var/lib/apt/lists/*
# install OTB from builder
COPY --from=builder /install/otb /usr/local
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
ENV OTB_APPLICATION_PATH=/usr/local/lib/otb/applications/
ENV PYTHONPATH=$PYTHONPATH:/usr/local/lib/otb/python:/usr/local/lib/python3.8/site-packages/
ENV PATH=/usr/local/app:/usr/local/bin:$PATH
# install LIS from builder
COPY --from=builder /install/lis /usr/local
# move the LIS OTB applications next to the stock ones so OTB_APPLICATION_PATH finds them
RUN mv /usr/local/lib/otbapp_* /usr/local/lib/otb/applications/
# install DANS GINA from builder
COPY --from=builder /install/dans /usr/local
# Install additional Python dependencies
RUN pip3 install --no-cache-dir \
numpy \
scipy \
shapely \
pyproj \
fiona \
pyyaml \
rasterio
\ No newline at end of file
# Let-it-snow
## Synopsis
This code implements the snow cover extent detection algorithm LIS (Let It Snow) for Sentinel-2, Landsat-8 and SPOT4-Take5 data. It also implements different temporal syntheses based on time series of snow products.
This code implements :
* snow cover extent detection algorithm LIS (Let It Snow) for Landsat-8 and SPOT4-Take5 data.
* fractional snow cover (FSC) for Sentinel-2 (which includes snow coverage)
* temporal syntheses based on time series of snow products (snow cover and/or FSC).
The algorithm documentation with examples is available here:
* [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.1414452.svg)](https://doi.org/10.5281/zenodo.1414452)
Access to Theia Snow data collection:
Access to Theia Snow data collection (L2B and L3B):
* [![DOI:10.24400/329360/f7q52mnk](https://zenodo.org/badge/DOI/10.24400/329360/f7q52mnk.svg)](http://doi.org/10.24400/329360/f7q52mnk)
......@@ -18,39 +21,126 @@ How to cite:
The input files are Sentinel-2 or Landsat-8 level-2A products from the [Theia Land Data Centre](https://theia.cnes.fr/) or [SPOT-4/5 Take 5 level-2A products](https://spot-take5.org) and a Digital Terrain Model (DTM). The output is a Level-2B snow product.
The syntheses are temporally aggregated (level-3A) products derived from individual snow products after gapfilling. The three products are: the snow cover duration, the snow disappearance date and the snow appearance date. These products are typically computed over a hydrological year (more details : [Snow cover duration map](doc/snow_annual_map.md).
The syntheses are temporally aggregated (level-3A) products derived from individual snow products after gapfilling. The three products are: the snow cover duration, the snow disappearance date and the snow appearance date. These products are typically computed over a hydrological year (more details : [Snow cover duration map](doc/snow_annual_map.md)).
## Usage
### Snow detector
### Snow cover and FSC using "let_it_snow_fsc"
Run the python script run_snow_detector.py with a json configuration file as unique argument:
The easy way to launch snow detection is :
```bash
python run_snow_detector.py param.json
python let_it_snow_fsc.py –j {launch_configuration_file.json}
```
The snow detection is performed in the Python script [run_snow_detector.py](app/run_snow_detector.py).
with launch_configuration_file.json :
All the parameters of the algorithm, paths to input and output data are stored in the json file. The JSON schema (snow_detector_schema.json) is available in the [Algorithm Theoretical Basis Documentation](doc/atbd/ATBD_CES-Neige.tex) and gives more information about the roles of these parameters.
```
{
"input_dir" : "XXX/SENTINEL2A_20210415-105910-624_L2A_T30TYN_C_V2-2",
"output_dir" : "XXX/output",
"dem" : "XXX/Copernicus_DSM/world.vrt",
"tcd" : "XXX/TCD_30TYN.tif",
"log" : "INFO",
"water_mask" : "XXX/eu_hydro_20m_30TYN.tif",
"config_file" : "XXX/lis_configuration.json",
"chain_version" : "1.7.1",
"product_counter" : 1
}
```
NB: To build DEM data download the SRTM files corresponding to the study area and build the .vrt using gdalbuildvrt. Edit config.json file to activate preprocessing : Set "preprocessing" to true and set the vrt path.
All launch parameters are described in [fsc_launch_schema.json](doc/atbd/fsc_launch_schema.json)
and can be **overwritten** by the following command line options:
```
* "-i", "--input_dir" - Path to input directory, containing L2A Theia Product or this directory as zip
* "-o", "--output_dir" - Path to output directory, which will contain the FSC Product
* "-l", "--log_level" - Log level between ['INFO', 'DEBUG', 'WARNING', 'ERROR'] (optional)
* "-c", "--config_file" - Path to lis configuration file
* "-d", "--dem" - Path to dem file
* "-t", "--tcd" - Path to tree cover density file (optional)
* "-w", "--water_mask" - Path to water mask file (optional)
* "-V", "--chain_version" - Chain version in the operational system (optional)
* "-n", "--product_counter" - Product counter number (optional)
```
In the following example, input directory and output_directory are overwritten from launch_configuration file
```bash
python let_it_snow_fsc.py –j {xx/launch_configuration_file.json} -i {xx/input_dir} -o {xx/output_dir}
```
It can also be launched like this :
```bash
python let_it_snow_fsc.py –c {xx/lis_configuration.json} -i {xx/input_dir} -o {xx/output_dir} -d {xx/dem_file}
```
Be aware that:
* Tree cover density file is only used for FSC computation (only available for Sentinel-2 products). If not defined for Sentinel-2 snow detection, only FSC-OG (on ground) will be computed.
* Snow detection without water mask could lead to confusions between snow and water.
* Lis configuration file contains algorithm's parameters. Default configuration is available here : [lis_default_configuration.json](doc/lis_default_configuration.json).
As an expert, you can look at its description file [fsc_config_schema.json](doc/atbd/fsc_config_schema.json) and an explanation of how to change specific parameters in [LIS configuration for experts](doc/LIS_configuration_for_experts.md).
You can use the command line option '-v' or '--version' to know lis version.
[Algorithm Theoretical Basis Documentation](doc/atbd/ATBD_CES-Neige.tex) gives more information about the scientific roles of these parameters.
NB: To build DEM data download the SRTM files corresponding to the study area and build the .vrt using gdalbuildvrt. Edit config.json file to activate preprocessing : Set "preprocessing" to true and set the vrt path.
Warning : DEM with nodata value could alter snow detection. zs should be contained between [-431, 8850].
### Snow syntheses
### Snow synthesis using "let_it_snow_synthesis"
Run the python script let_it_snow_synthesis.py with a json launch file as unique argument:
```bash
python let_it_snow_synthesis.py –j {xx/lis_synthesis_launch_file.json}
```
All launch parameters are described in [synthesis_launch_schema.json](doc/atbd/synthesis_launch_schema.json)
and can be **overwritten** by the following command line options:
```
* "-t", "--tile_id" - Tile identifier
* "-i", "--input_products_list" - Path to inputs products, containing S2 snow products (snow coverage and/or FSC)
* "-d", "--densification_products_list" - Path to densification products, containing L8 snow products (optional)
* "-b", "--date_start" - Start date defining the synthesis period
* "-e", "--date_stop" - Stop date defining the synthesis period
* "-m", "--date_margin" - date margin related to start and stop date
* "-o", "--output_dir" - Path to output directory, which will contain the synthesis product
* "-l", "--log_level" - Log level ('INFO', 'DEBUG', 'WARNING', 'ERROR')
* "-c", "--config_file" - Path to configuration file
* "-V", "--chain_version" - Chain version in the operational system (optional)
* "-n", "--product_counter" - Product counter number (optional)
```
Run the python script run_snow_annual_map.py with a json configuration file as unique argument:
In the following example, the date margin is overwritten from the launch configuration file.
```bash
python run_snow_annual_map.py param.json
python let_it_snow_synthesis.py –j {path_to_synthesis_launch.json} –m {value_of_date_margin}
```
The snow syntheses are performed in the Python script [run_snow_annual_map.py](app/run_snow_annual_map.py).
Synthesis configuration file contains system's parameters. Default configuration is available here : [synthesis_default_configuration.json](doc/synthesis_default_configuration.json).
As an expert, you can look at its description file [synthesis_config_schema.json](doc/atbd/synthesis_config_schema.json)
Algorithm is detailled here : [Snow Annual Map](doc/snow_annual_map.md)
All the parameters of the algorithm, paths to input and output data are stored in the json file. The JSON schema (snow_annual_map_schema.json) and its description are available in the [readme](doc/snow_annual_map.md).
You can use the command line option '-v' or '--version' to know lis version.
## Products format
### Snow product
* SNOW_ALL: Binary mask of snow and clouds.
Since lis 1.7, product name matches the following nomenclature :
LIS_<*mission*><*tag*><*chain_version*>_<*product_counter*>
LIS FSC generates the following files for S2 :
- Raster: **LIS\_S2-SNOW-FSC\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**,
- Raster: **LIS\_S2-SNOW-FSC-QCFLAGS\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**
or
- Raster: **LIS\_S2-SNOW-FSC-TOC\_<*tag*>_<*chain_version*>_<*product_counter*>.tif** (if Tree cover density is not defined)
LIS FSC generates the following files for L8 :
- Raster: **LIS\_L8-SNOW-MSK\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**,
with <**tag**> = <**tile**><**acquisition_date**>
Moreover, in the tmp directory:
* LIS_SNOW_ALL: Binary mask of snow and clouds.
* 1st bit: Snow mask after pass1
* 2nd bit: Snow mask after pass2
* 3rd bit: Clouds detected at pass0
......@@ -62,7 +152,7 @@ For example if you want to get the snow from pass1 and clouds detected from pass
```python
pixel_value & 00000101
```
* SEB: Raster image of the snow mask and cloud mask.
* LIS_SEB: Raster image of the snow mask and cloud mask.
* 0: No-snow
* 100: Snow
* 205: Cloud including cloud shadow
......@@ -73,38 +163,40 @@ pixel_value & 00000101
* 100: Snow
* 205: Cloud including cloud shadow
* 255: No data
* LOG file: **lis.log**, the log file for the standard and error output generated during processing
### Snow syntheses
Each product is computed for a given tile [TILE\_ID] and a given period from [DATE\_START] to [DATE_STOP]. Products are identified by a tag according the following naming convention: [TILE\_ID]\_[DATE\_START]\_[DATE_STOP]
Since lis 1.7, synthesis name matches the following nomenclature :
For example: **T31TCH\_20170901\_20180831**
LIS_<*mission*><*tag*><*chain_version*>_<*product_counter*>
LIS generates the following files:
- Raster: **DAILY\_SNOW\_MASKS\_<*tag*>.tif**, the snow time series file interpolated on a daily basis (1 image with one band per day). Each band are coded as follows (the interpolation removing any clouds or nodata):
- 0: No-snow
- 1: Snow
LIS synthesis generates the following files:
- Raster: **SCD\_<*tag*>.tif**, the snow cover duration map (SCD), pixel values within [0-number of days] corresponding the number of snow days.
- Raster: **LIS\_<*mission*>-SNOW-SCD\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the snow cover duration map (SCD), pixel values within [0-number of days] corresponding the number of snow days.
- Raster: **CLOUD\_OCCURENCE\_<*tag*>.tif**, the cloud/nodata annual map image, pixel values within [0-1] corresponding the cloud or nodata occurrences in the non-interpolated time series
- Raster: **LIS\_<*mission*>-SNOW-SMOD\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the date of snow disappearance (Snow Melt-Out Date), defined as the last date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **SMOD\_<*tag*>.tif**, the date of snow disappearance (Snow Melt-Out Date), defined as the last date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **LIS\_<*mission*>-SNOW-SOD\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the date of snow appearance (Snow Onset Date), defined as the first date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **SOD\_<*tag*>.tif**, the date of snow appearance (Snow Onset Date), defined as the first date of the longest snow period. The dates are given in number of days since the first day of the synthesis.
- Raster: **LIS\_<*mission*>-SNOW-NOBS\_<*tag*>_<*chain_version*>_<*product_counter*>.tif**, the number of clear observations to compute the SCD, SMOD and SOD syntheses
- Raster: **NOBS\_<*tag*>.tif**, the number of clear observations to compute the SCD, SMOD and SOD syntheses
with :
- <**tag**> : <**tile**><**synthesis_start_date**><**synthesis_stop_date**>
- <**mission**> : S2 or S2L8 (if densification is used)
Output directory will also contain the following files :
Output directory will also contain the following files in the tmp directory :
- Text file: **input_dates.txt**, the list of observation dates in the non-interpolated time series
- Text file: **output_dates.txt**, the list of interpolated dates
- JSON file: **param.json**, the configuration file used for the products generation (optional)
- Raster: **CLOUD\_OCCURENCE\_<*tag*>.tif**, the cloud/nodata annual map image, pixel values within [0-1] corresponding the cloud or nodata occurrences in the non-interpolated time series
- LOG file: **stdout.log**, the log file for the standard output generated during processing (optional)
- Raster: **DAILY\_SNOW\_MASKS\_<*tag*>.tif**, the snow time series file interpolated on a daily basis (1 image with one band per day). Each band are coded as follows (the interpolation removing any clouds or nodata):
- 0: No-snow
- 1: Snow
- LOG file: **stderr.log**, the log file for the error output generated during processing (optional)
- LOG file: **lis.log**, the log file for the standard and error output generated during processing.
## Data set example
......@@ -128,15 +220,14 @@ Following a summary of the required dependencies:
* Python interpreter >= 3.6
* Python libs >= 3.6
* Python packages:
* numpy
* lxml
* matplotlib
* rasterio
* numpy
* lxml
* matplotlib
* rasterio
GDAL itself depends on a number of other libraries provided by most major operating systems and also depends on the non-standard GEOS and Proj libraries. GDAL Python bindings are also required.
Python package dependencies:
* sys
* subprocess
* glob
......@@ -183,7 +274,7 @@ chmod -R 755 ${install_dir}
```
The files will be installed by default into /usr/local and add to the python default modules.
To overrsouride this behavior, the variable CMAKE_INSTALL_PREFIX must be configure before build step.
To override this behavior, the variable CMAKE_INSTALL_PREFIX must be configured before build step.
Update environment variables for LIS. Make sure that OTB and other dependencies directories are set in your environment variables:
```bash
......@@ -194,13 +285,29 @@ export PYTHONPATH=/your/install/directory/lib:/your/install/directory/lib/python
```
let-it-snow is now installed.
#### On HAL (CNES cluster)
Clone these repositories :
https://gitlab.orfeo-toolbox.org/remote_modules/let-it-snow
https://gitlab.cnes.fr/Theia/lis-build-script
Then go to lis-build-script and launch the local install of LIS:
```bash
sh ./build-lis-local.sh {path_to_master_repository} {OTB_version_number} {install_repository}
```
When the install is completed, the tests are launched. All of the tests have to pass.
let-it-snow is now installed.
## Tests
Tests list is available here : [LIS_tests.md](LIS_tests.md) in the test directory.
Enable tests with BUILD_TESTING cmake option. Use ctest command to run tests. Do not forget to clean your output test directory when you run a new set of tests.
Data (input and baseline) to run validation tests are available on Zenodo:
* [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.166511.svg)](https://doi.org/10.5281/zenodo.166511)
* [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.5846826.svg)](https://doi.org/10.5281/zenodo.5846826)
Download LIS-Data and extract the folder. It contains all the data needed to run tests. Set Data-LIS path var in cmake configuration files.
Baseline : Baseline data folder. It contains output files of S2Snow that have been reviewed and validated.
......@@ -208,9 +315,16 @@ Data-Test : Test data folder needed to run tests. It contains Landsat, Take5 and
Output-Test : Temporary output tests folder.
Do not modify these folders.
On HAL (CNES cluster), from lis-build-script directory:
```bash
sh ./launch-tests.sh {install_repository}
```
## Contributors
Manuel Grizonnet (CNES), Simon Gascoin (CNRS/CESBIO), Germain Salgues (Magellium), Aurore Dupuis (CNES), Rémi Jugier (Magellium)
Aurore Dupuis (CNES), Simon Gascoin (CNRS/CESBIO), Manuel Grizonnet (CNES), Germain Salgues (Magellium), Rémi Jugier (Magellium)
## License
......
......@@ -25,6 +25,7 @@ import logging
# OTB Applications
import otbApplication as otb
def band_math(il, out, exp, ram=None, out_type=None):
""" Create and configure the band math application
using otb.Registry.CreateApplication("BandMath")
......@@ -61,8 +62,8 @@ def band_math(il, out, exp, ram=None, out_type=None):
else:
logging.error("Parameters il, out and exp are required")
def compute_cloud_mask(img_in, img_out, cloudmaskvalue, \
ram=None, out_type=None):
def compute_cloud_mask(img_in, img_out, cloudmaskvalue, ram=None, out_type=None):
""" Create and configure the Compute Cloud Mask application
using otb.Registry.CreateApplication("ComputeCloudMask")
......@@ -94,6 +95,7 @@ def compute_cloud_mask(img_in, img_out, cloudmaskvalue, \
logging.error("Parameters img_in, img_out \
and cloudmaskvalue are required")
def compute_snow_mask(pass1, pass2, cloud_pass1, cloud_refine, initial_clouds, \
out, slope_flag=None, ram=None, out_type=None):
""" Create and configure the Compute Cloud Snow application
......@@ -140,6 +142,7 @@ def compute_snow_mask(pass1, pass2, cloud_pass1, cloud_refine, initial_clouds, \
logging.error("Parameters pass1, pass2, cloud_pass1, \
cloud_refine, initial_clouds and out are required")
def band_mathX(il, out, exp, ram=None, out_type=None):
""" Create and configure the band math application
using otb.Registry.CreateApplication("BandMathX")
......@@ -176,6 +179,7 @@ def band_mathX(il, out, exp, ram=None, out_type=None):
else:
logging.error("Parameters il, out and exp are required")
def compute_snow_line(img_dem, img_snow, img_cloud, dz, fsnowlim, fclearlim, \
reverse, offset, centeroffset, outhist, ram=None):
""" Create and configure the ComputeSnowLine application
......@@ -205,10 +209,12 @@ def compute_snow_line(img_dem, img_snow, img_cloud, dz, fsnowlim, fclearlim, \
snowLineApp.SetParameterFloat("fclearlim", fclearlim)
snowLineApp.SetParameterInt("offset", offset)
if not isinstance(centeroffset, int):
if round(centeroffset,0) != centeroffset:
raise IOError("centeroffset shoud be an integer, got %s instead with value %s => error"%(type(centeroffset), centeroffset))
if round(centeroffset, 0) != centeroffset:
raise IOError("centeroffset shoud be an integer, got %s instead with value %s => error" % (
type(centeroffset), centeroffset))
else:
print("WARNING: centeroffset shoud be an integer, got %s instead with value %s => converting to int"%(type(centeroffset), centeroffset))
print("WARNING: centeroffset shoud be an integer, got %s instead with value %s => converting to int" % (
type(centeroffset), centeroffset))
centeroffset = int(centeroffset)
snowLineApp.SetParameterInt("centeroffset", centeroffset)
if reverse:
......@@ -224,6 +230,7 @@ def compute_snow_line(img_dem, img_snow, img_cloud, dz, fsnowlim, fclearlim, \
else:
logging.error("Parameters img_dem, img_snow, img_cloud and outhist are required")
def compute_nb_pixels(img, lower, upper, ram=None):
""" Create and configure the ComputeNbPixels application
using otb.Registry.CreateApplication("ComputeNbPixels")
......@@ -252,6 +259,7 @@ def compute_nb_pixels(img, lower, upper, ram=None):
else:
logging.error("Parameters img is required")
def super_impose(img_in, mask_in, img_out, interpolator=None,
fill_value=None, ram=None, out_type=None):
""" Create and configure the otbSuperImpose application
......@@ -291,6 +299,7 @@ def super_impose(img_in, mask_in, img_out, interpolator=None,
else:
logging.error("Parameters img_in, img_out and mask_in are required")
def compute_contour(img_in, img_out, foreground_value, fullyconnected, \
ram=None, out_type=None):
""" Create and configure the Compute Contours application
......@@ -365,6 +374,7 @@ def confusion_matrix(img_in, ref_in, out, ref_no_data=None, ram=None):
else:
logging.error("Parameters img_in, out and ref_in are required")
def get_app_output(app, out_key, mode="RUNTIME"):
""" Custom function to return the output of an OTB application
depending on the mode, the function return either:
......@@ -386,6 +396,7 @@ def get_app_output(app, out_key, mode="RUNTIME"):
logging.error("Unexpected mode")
return app_output
def gap_filling(img_in, mask_in, img_out, input_dates_file=None,
output_dates_file=None, ram=None, out_type=None):
""" Create and configure the ImageTimeSeriesGapFilling application
......
......@@ -32,7 +32,7 @@ def main(argv):
total_images = 0
for root, dirs, files in os.walk("."):
for root, dirs, files in os.walk("../python/s2snow"):
for name in files:
if name == "metadata.xml":
tree = etree.parse(op.join(root, name))
......
......@@ -24,7 +24,7 @@ import sys
import os.path as op
import json
import logging
from s2snow import cloud_removal
from analysis import cloud_removal
from s2snow.version import VERSION
def show_help():
......
......@@ -21,23 +21,18 @@
#
import os
import os.path as op
from os.path import basename
import shutil
import logging
import multiprocessing
from xml.dom import minidom
from datetime import timedelta
from lxml import etree
import gdal
from gdalconst import GA_ReadOnly
# OTB Applications
import otbApplication as otb
# Import python decorators for the different needed OTB applications
from s2snow.app_wrappers import band_math, get_app_output, super_impose, band_mathX, gap_filling
from analysis.app_wrappers import band_math, get_app_output, super_impose, band_mathX, gap_filling
from s2snow.utils import str_to_datetime, datetime_to_str
from s2snow.utils import write_list_to_file, read_list_from_file
......@@ -123,7 +118,7 @@ class snow_annual_map():
os.mkdir(self.path_out)
self.ram = params.get("ram", 512)
self.nbThreads = params.get("nbThreads", None)
self.nb_threads = params.get("nb_threads", None)
self.use_densification = params.get("use_densification", False)
if self.use_densification:
......@@ -151,7 +146,7 @@ class snow_annual_map():
logging.info("Run snow_annual_map")
# Set maximum ITK threads
if self.nbThreads:
if self.nb_threads:
os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(self.nbThreads)
# search matching snow product
......
......@@ -34,10 +34,8 @@ import otbApplication as otb
# Import python decorators for the different needed OTB applications
from analysis.snow_annual_map import merge_masks_at_same_date
from s2snow.app_wrappers import band_math, super_impose, confusion_matrix
from s2snow.utils import get_raster_as_array, apply_color_table
from s2snow.utils import str_to_datetime, datetime_to_str
from s2snow.utils import write_list_to_file, read_list_from_file
from analysis.app_wrappers import band_math, super_impose, confusion_matrix
from s2snow.utils import get_raster_as_array, str_to_datetime, write_list_to_file, read_list_from_file
from analysis.snow_annual_map import snow_annual_map
# Build gdal option to generate maks of 1 byte using otb extended filename
......@@ -483,6 +481,14 @@ def main():
# snow_annual_map_evaluation_app.compare_modis()
def apply_color_table(raster_file_name, color_table):
""" Edit image file to apply a color table