
Resolve "Run snow annual map refactoring"

Merged Aurore Dupuis requested to merge 54-run-snow-annual-map-refactoring into develop
4 files  +170 −34
@@ -19,17 +19,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
import sys
import os.path as op
import json
import logging
from datetime import timedelta
from lxml import etree
from s2snow import snow_annual_map_evaluation
from s2snow.compute_NOBS import compute_NOBS
from s2snow.compute_SOD_SMOD import compute_SOD_SMOD
from s2snow.utils import str_to_datetime, datetime_to_str
from s2snow.snow_annual_map import run_snow_annual_map, load_products, load_densification_products, \
    compute_output_dates, merge_product_at_same_date, convert_snow_masks_into_binary_snow_masks, \
    convert_snow_masks_into_binary_cloud_masks, compute_CCD, compute_SCD
from s2snow.utils import str_to_datetime, datetime_to_str, write_list_to_file
from s2snow.version import VERSION
@@ -47,52 +52,183 @@ def show_version():
# ----------------- MAIN ---------------------------------------------------
def main(argv):
""" main script of snow extraction procedure"""
json_file = argv[1]
# Load json_file from json files
with open(json_file) as json_data_file:
data = json.load(json_data_file)
pout = data.get("path_out")
log = data.get("log", True)
if log:
sys.stdout = open(data.get('log_stdout', op.join(pout, "stdout.log")), 'w')
sys.stderr = open(data.get('log_stderr', op.join(pout, "stderr.log")), 'w')
# Set logging level and format.
logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
logging.info("Start run_snow_annual_map.py")
logging.info("Input args = " + json_file)
logging.info("Input args = {}".format(json_file))
# Run the snow detector
snow_annual_map_evaluation_app = snow_annual_map_evaluation.snow_annual_map_evaluation(data)
snow_annual_map_evaluation_app.run()
logging.info("Load parameters")
# ----------------------------------------------------------------------------------------
# Set parameters
# ----------------------------------------------------------------------------------------
with open(json_file) as json_data_file:
params = json.load(json_data_file)
mode = params.get("mode", "RUNTIME")
if mode == "DEBUG":
logging.Logger.setLevel(logging.DEBUG)
logging.debug("Debug is enable")
tile_id = params.get("tile_id")
date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
date_margin = timedelta(days=params.get("date_margin", 0))
logging.debug("Date margin: {}", date_margin)
output_dir = params.get("path_out")
if not os.path.exists(output_dir):
logging.info("Create directory {} ...", output_dir)
os.makedirs(output_dir)
processing_id = str(tile_id + "_" + datetime_to_str(date_start) + "_" + datetime_to_str(date_stop))
path_out = op.join(output_dir, processing_id)
logging.debug("Path_out is: {}", path_out)
path_tmp = str(params.get("path_tmp", os.path.join(path_out,"tmp")))
if not os.path.exists(path_tmp):
logging.info("Create directory {} ...", path_tmp)
os.makedirs(path_tmp)
logging.debug("Path_tmp is: {}", path_tmp)
input_products_list = params.get("input_products_list", [])
output_dates_file_path = params.get("output_dates_file_path")
if not output_dates_file_path:
output_dates_file_path = op.join(path_tmp, "output_dates.txt")
ram = params.get("ram", 4096)
logging.debug("Ram is: {}", ram)
# Set maximum ITK threads
os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(params.get("nbThreads", 1))
use_densification = params.get("use_densification", False)
if use_densification:
densification_path_list = params.get("densification_products_list", [])
log = params.get("log", True)
if log:
sys.stdout = open(params.get("log_stdout", op.join(path_out, "stdout.log")), 'w')
sys.stderr = open(params.get("log_stderr", op.join(path_out, "stderr.log")), 'w')
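    # For reference, a minimal configuration file could look like the sketch below.
    # The key names are the ones read via params.get(...) above; the values (tile id,
    # dates, paths) are purely illustrative.
    #
    # {
    #     "tile_id": "T31TCH",
    #     "date_start": "01/09/2017",
    #     "date_stop": "31/08/2018",
    #     "date_margin": 15,
    #     "path_out": "/work/lis/output",
    #     "input_products_list": ["/work/lis/snow_products/product_1"],
    #     "ram": 4096,
    #     "nbThreads": 4,
    #     "use_densification": false,
    #     "log": true,
    #     "mode": "RUNTIME"
    # }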
    # ----------------------------------------------------------------------------------------
    # Search snow products
    # ----------------------------------------------------------------------------------------
    logging.info("Search snow products")
    product_dict = load_products(date_start, date_stop, date_margin, input_products_list, tile_id, None)
    # Exit with an error if none of the input products could be loaded
    if not product_dict:
        logging.error("Empty snow product list!")
        return
    # Load the products used to densify the time series
    if use_densification:
        load_densification_products(date_margin, date_start, date_stop, densification_path_list, path_tmp, product_dict,
                                    ram, use_densification)
    # ----------------------------------------------------------------------------------------
    # Sort products by acquisition date, retrieve input and output dates
    # ----------------------------------------------------------------------------------------
    logging.info("Sort products by acquisition date")
    # re-order products according to acquisition date
    input_dates_file_path = op.join(path_tmp, "input_dates.txt")
    logging.info("Retrieve input dates")
    input_dates = sorted(product_dict.keys())
    write_list_to_file(input_dates_file_path, input_dates)
    # compute or retrieve the output dates
    logging.info("Retrieve output dates")
    output_dates = compute_output_dates(date_start, date_stop, output_dates_file_path)
    # ----------------------------------------------------------------------------------------
    # Merge products at the same date
    # ----------------------------------------------------------------------------------------
    merged_product_dict = merge_product_at_same_date(path_tmp, product_dict, ram)
    # ----------------------------------------------------------------------------------------
    # Convert snow masks into binary masks
    # ----------------------------------------------------------------------------------------
    binary_snow_mask_list = convert_snow_masks_into_binary_snow_masks(path_tmp, ram, merged_product_dict)
    binary_cloud_mask_list = convert_snow_masks_into_binary_cloud_masks(path_tmp, ram, merged_product_dict)
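    # The merged products are turned into per-date binary snow and cloud masks; these two
    # lists are the direct inputs of the compute_CCD and compute_SCD calls below.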
    # ----------------------------------------------------------------------------------------
    # Compute Cloud Coverage Duration "CLOUD_OCCURENCE" and multitemp_cloud_mask
    # ----------------------------------------------------------------------------------------
    logging.debug("Prepare call to compute_CCD")
    cloud_occurence = op.join(path_tmp, "CLOUD_OCCURENCE_" + processing_id + ".tif")
    multitemp_cloud_vrt = op.join(path_tmp, "multitemp_cloud_mask.vrt")
    compute_CCD(binary_cloud_mask_list, cloud_occurence, multitemp_cloud_vrt, ram)
    # ----------------------------------------------------------------------------------------
    # Compute Snow Coverage Duration "SCD" and multitemp_snow_mask
    # ----------------------------------------------------------------------------------------
    logging.debug("Prepare call to compute_SCD")
    multitemp_snow_vrt = op.join(path_tmp, "multitemp_snow_mask.vrt")
    snow_coverage_duration = op.join(path_tmp, "SCD_" + processing_id + ".tif")
    gapfilled_timeserie = op.join(path_tmp, "DAILY_SNOW_MASKS_" + processing_id + ".tif")
    multitemp_snow100 = op.join(path_tmp, "multitemp_snow100.tif")
    multitemp_snow100_gapfilled = op.join(path_tmp, "multitemp_snow100_gapfilled.tif")
    compute_SCD(binary_snow_mask_list, multitemp_cloud_vrt, input_dates_file_path, output_dates_file_path, output_dates,
                snow_coverage_duration, multitemp_snow_vrt, gapfilled_timeserie, multitemp_snow100,
                multitemp_snow100_gapfilled, ram)
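    # Note: the gap-filled daily snow series (DAILY_SNOW_MASKS_<processing_id>.tif) produced
    # here is reused below as the input of the SOD/SMOD computation.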
    # run compute SOD_SMOD
    processing_id = str(data.get("tile_id") + "_" + \
                        datetime_to_str(str_to_datetime(data.get("date_start"), "%d/%m/%Y")) + "_" + \
                        datetime_to_str(str_to_datetime(data.get("date_stop"), "%d/%m/%Y")))
    sod_smod_input_file = op.join(pout, "DAILY_SNOW_MASKS_" + processing_id + ".tif")
    compute_SOD_SMOD(sod_smod_input_file, processing_id, pout)
    logging.debug("Prepare call to compute_SOD_SMOD.py")
    sod_file = os.path.join(path_tmp, "SOD_{}.tif".format(processing_id))
    smod_file = os.path.join(path_tmp, "SMOD_{}.tif".format(processing_id))
    sod_smod_input_file = op.join(path_tmp, "DAILY_SNOW_MASKS_" + processing_id + ".tif")
    date_range = date_stop - date_start
    compute_SOD_SMOD(sod_smod_input_file, date_range, sod_file=sod_file, smod_file=smod_file)
    # run compute NOBS
    nobs_input_file = op.join(pout, "multitemp_cloud_mask.vrt")
    compute_NOBS(nobs_input_file, processing_id, pout)
    logging.debug("Prepare call to compute_NOBS.py")
    nobs_input_file = op.join(path_tmp, "multitemp_cloud_mask.vrt")
    nobs_output_file = op.join(path_tmp, "NOBS_{}.tif".format(processing_id))
    compute_NOBS(nobs_input_file, output_file=nobs_output_file)
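    # NOBS is computed from the multitemporal cloud mask stack (multitemp_cloud_mask.vrt),
    # i.e. the same VRT that feeds the cloud coverage duration above.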
    # create metadata
    logging.info("Create metadata")
    create_snow_annual_map_metadata(product_dict, path_out)
    # Move outputs into path_out
    logging.info("Move outputs into {}.".format(path_out))
    shutil.copy2(input_dates_file_path, path_out)
    shutil.copy2(output_dates_file_path, path_out)
    shutil.copy2(multitemp_cloud_vrt, path_out)
    shutil.copy2(cloud_occurence, path_out)
    shutil.copy2(snow_coverage_duration, path_out)
    shutil.copy2(gapfilled_timeserie, path_out)
    os.remove(input_dates_file_path)
    os.remove(output_dates_file_path)
    os.remove(multitemp_cloud_vrt)
    os.remove(cloud_occurence)
    os.remove(snow_coverage_duration)
    os.remove(gapfilled_timeserie)
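    # The copy2 + remove pairs above effectively move each result from path_tmp
    # into path_out while keeping file metadata.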
if data.get("run_comparison_evaluation", False):
snow_annual_map_evaluation_app.run_evaluation()
logging.info("End run_snow_annual_map.py")
if data.get("run_modis_comparison", False):
snow_annual_map_evaluation_app.compare_modis()
logging.info("End run_snow_annual_map.py")
def create_snow_annual_map_metadata(product_dict, path_out):
    # Compute and create the content for the product metadata file.
    logging.info("Start metadata computation.")
    metadata_path = op.join(path_out, "LIS_METADATA.XML")
    root = etree.Element("INPUTS_LIST")
    for product_path in product_dict:
        product_name = op.basename(str(product_path))
        etree.SubElement(root, "PRODUCT_NAME").text = product_name
    et = etree.ElementTree(root)
    et.write(metadata_path, pretty_print=True)
    logging.info("End metadata computation.")
if __name__ == "__main__":