Resolve "Run snow annual map refactoring"

Merged Aurore Dupuis requested to merge 54-run-snow-annual-map-refactoring into develop
4 files changed: +276 −183
f01c779a · #54 - Complete refactoring. · Aurore Dupuis authored
+119 −22
@@ -31,8 +31,10 @@ from lxml import etree
from s2snow.compute_NOBS import compute_NOBS
from s2snow.compute_SOD_SMOD import compute_SOD_SMOD
from s2snow.snow_annual_map import run_snow_annual_map
from s2snow.utils import str_to_datetime, datetime_to_str
from s2snow.snow_annual_map import run_snow_annual_map, load_products, load_densification_products, \
compute_output_dates, merge_product_at_same_date, convert_snow_masks_into_binary_snow_masks, \
convert_snow_masks_into_binary_cloud_masks, compute_CCD, compute_SCD
from s2snow.utils import str_to_datetime, datetime_to_str, write_list_to_file
from s2snow.version import VERSION
@@ -50,8 +52,6 @@ def show_version():
# ----------------- MAIN ---------------------------------------------------
def main(argv):
""" main script of snow extraction procedure"""
json_file = argv[1]
@@ -60,32 +60,47 @@ def main(argv):
logging.basicConfig(stream=sys.stdout, level=logging.INFO, \
format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
logging.info("Start run_snow_annual_map.py")
logging.info("Input args = " + json_file)
logging.info("Input args = {}", json_file)
# Load parameters from the json file
logging.info("Load parameters")
# ----------------------------------------------------------------------------------------
# Set parameters
# ----------------------------------------------------------------------------------------
with open(json_file) as json_data_file:
params = json.load(json_data_file)
mode = params.get("mode", "RUNTIME")
if mode == "DEBUG":
logging.getLogger().setLevel(logging.DEBUG)
logging.debug("Debug is enabled")
tile_id = params.get("tile_id")
input_products_list = params.get("input_products_list", [])
output_dates_filename = params.get("output_dates_filename")
date_start = str_to_datetime(params.get("date_start"), "%d/%m/%Y")
date_stop = str_to_datetime(params.get("date_stop"), "%d/%m/%Y")
date_margin = timedelta(days=params.get("date_margin", 0))
logging.debug("Date margin: {}", date_margin)
output_dir = params.get("path_out")
if not os.path.exists(output_dir):
logging.info("Create directory " + output_dir + "...")
logging.info("Create directory {} ...", output_dir)
os.makedirs(output_dir)
processing_id = str(tile_id + "_" + datetime_to_str(date_start) + "_" + datetime_to_str(date_stop))
path_out = op.join(output_dir, processing_id)
logging.debug("Path_out is: {}", path_out)
path_tmp = str(params.get("path_tmp", os.environ.get('TMPDIR')))
path_tmp = str(params.get("path_tmp", os.path.join(path_out,"tmp")))
if not os.path.exists(path_tmp):
logging.error(path_tmp + ", the target does not exist and can't be used for processing")
logging.info("Create directory {} ...", path_tmp)
os.makedirs(path_tmp)
logging.debug("Path_tmp is: {}", path_tmp)
input_products_list = params.get("input_products_list", [])
output_dates_file_path = params.get("output_dates_file_path")
if not output_dates_file_path:
output_dates_file_path = op.join(path_tmp, "output_dates.txt")
mode = params.get("mode", "RUNTIME")
ram = params.get("ram", 4096)
logging.debug("Ram is: {}", ram)
# Set maximum ITK threads
os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(params.get("nbThreads", 1))
@@ -99,27 +114,107 @@ def main(argv):
sys.stdout = open(params.get("log_stdout", op.join(path_out, "stdout.log")), 'w')
sys.stderr = open(params.get("log_stderr", op.join(path_out, "stderr.log")), 'w')
# Run the snow annual map
run_snow_annual_map(path_tmp, path_out, tile_id, date_start, date_stop, date_margin, input_products_list, ram,
processing_id, use_densification=use_densification,
densification_path_list=densification_path_list,
output_dates_filename=output_dates_filename)
# ----------------------------------------------------------------------------------------
# Search snow products
# ----------------------------------------------------------------------------------------
logging.info("Search snow products")
product_dict = load_products(date_start, date_stop, date_margin, input_products_list, tile_id, None)
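# product_dict presumably maps each acquisition date to the snow product(s) found for the tile; its sorted keys are used below to build the input date list.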
# Exit with an error message if no input products were loaded
if not product_dict:
logging.error("Empty snow product list!")
return
# Load the products used to densify the time series
if use_densification:
load_densification_products(date_margin, date_start, date_stop, densification_path_list, path_tmp, product_dict,
ram, use_densification)
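# Densification, when enabled, likely merges products from densification_path_list into product_dict to fill temporal gaps in the main time series.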
# ----------------------------------------------------------------------------------------
# Sort products by acquisition date, retrieve input and output dates
# ----------------------------------------------------------------------------------------
logging.info("Sort products by acquisition date")
# re-order products according to acquisition date
input_dates_file_path = op.join(path_tmp, "input_dates.txt")
logging.info("Retrieve input dates")
input_dates = sorted(product_dict.keys())
write_list_to_file(input_dates_file_path, input_dates)
# compute or retrieve the output dates
logging.info("Retrieve output dates")
output_dates = compute_output_dates(date_start, date_stop, output_dates_file_path)
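# compute_output_dates presumably reads the dates from output_dates_file_path when the file exists, and otherwise generates a regular grid between date_start and date_stop and writes it there.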
# ----------------------------------------------------------------------------------------
# Merge products at the same date
# ----------------------------------------------------------------------------------------
merged_product_dict = merge_product_at_same_date(path_tmp, product_dict, ram)
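# Products sharing an acquisition date (likely from overlapping orbits) are merged into a single product per date.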
# ----------------------------------------------------------------------------------------
# Convert snow masks into binary masks
# ----------------------------------------------------------------------------------------
binary_snow_mask_list = convert_snow_masks_into_binary_snow_masks(path_tmp, ram, merged_product_dict)
binary_cloud_mask_list = convert_snow_masks_into_binary_cloud_masks(path_tmp, ram, merged_product_dict)
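# The merged snow masks are reduced to per-date binary rasters: one series flagging snow pixels, one flagging cloud pixels.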
# ----------------------------------------------------------------------------------------
# Compute cloud coverage duration ("CLOUD_OCCURENCE") and the multitemporal cloud mask
# ----------------------------------------------------------------------------------------
logging.debug("Prepare call to compute_CCD")
cloud_occurence = op.join(path_tmp, "CLOUD_OCCURENCE_" + processing_id + ".tif")
multitemp_cloud_vrt = op.join(path_tmp, "multitemp_cloud_mask.vrt")
compute_CCD(binary_cloud_mask_list, cloud_occurence, multitemp_cloud_vrt, ram)
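# CLOUD_OCCURENCE presumably counts, per pixel, the number of cloudy observations over the period; the VRT stacks the binary cloud masks into a multitemporal cube.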
# ----------------------------------------------------------------------------------------
# Compute snow coverage duration ("SCD") and the multitemporal snow mask
# ----------------------------------------------------------------------------------------
logging.debug("Prepare call to compute_SCD")
multitemp_snow_vrt = op.join(path_tmp, "multitemp_snow_mask.vrt")
snow_coverage_duration = op.join(path_tmp, "SCD_" + processing_id + ".tif")
gapfilled_timeserie = op.join(path_tmp, "DAILY_SNOW_MASKS_" + processing_id + ".tif")
multitemp_snow100 = op.join(path_tmp, "multitemp_snow100.tif")
multitemp_snow100_gapfilled = op.join(path_tmp, "multitemp_snow100_gapfilled.tif")
compute_SCD(binary_snow_mask_list, multitemp_cloud_vrt, input_dates_file_path, output_dates_file_path, output_dates,
snow_coverage_duration, multitemp_snow_vrt, gapfilled_timeserie, multitemp_snow100,
multitemp_snow100_gapfilled, ram)
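# compute_SCD presumably gap-fills the binary snow series to a daily time step (DAILY_SNOW_MASKS) and sums it per pixel into the snow coverage duration (SCD) map.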
# run compute SOD_SMOD
sod_file = os.path.join(path_out, "SOD_{}.tif".format(processing_id))
smod_file = os.path.join(path_out, "SMOD_{}.tif".format(processing_id))
sod_smod_input_file = op.join(path_out, "DAILY_SNOW_MASKS_" + processing_id + ".tif")
logging.debug("Prepare call to compute_SOD_SMOD.py")
sod_file = os.path.join(path_tmp, "SOD_{}.tif".format(processing_id))
smod_file = os.path.join(path_tmp, "SMOD_{}.tif".format(processing_id))
sod_smod_input_file = op.join(path_tmp, "DAILY_SNOW_MASKS_" + processing_id + ".tif")
date_range = date_stop - date_start
compute_SOD_SMOD(sod_smod_input_file, date_range, sod_file=sod_file, smod_file=smod_file)
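# SOD and SMOD are presumably the per-pixel snow onset and snow melt-out dates, derived from the daily gap-filled snow masks over date_range.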
# run compute NOBS
logging.debug("Prepare call to compute_NOBS.py")
nobs_input_file = op.join(path_tmp, "multitemp_cloud_mask.vrt")
nobs_output_file = op.join(path_out, "NOBS_{}.tif".format(processing_id))
compute_NOBS(nobs_input_file, nobs_output_file=nobs_output_file)
nobs_output_file = op.join(path_tmp, "NOBS_{}.tif".format(processing_id))
compute_NOBS(nobs_input_file, output_file=nobs_output_file)
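# NOBS is presumably the per-pixel number of clear (cloud-free) observations, derived from the multitemporal cloud mask.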
# create metadata
logging.info("Create metadata")
create_snow_annual_map_metadata(input_products_list, path_out)
# Move outputs into path_out
logging.info("Move outputs into {}.", path_out)
shutil.copy2(input_dates_file_path, path_out)
shutil.copy2(output_dates_file_path, path_out)
shutil.copy2(multitemp_cloud_vrt, path_out)
shutil.copy2(cloud_occurence, path_out)
shutil.copy2(snow_coverage_duration, path_out)
shutil.copy2(gapfilled_timeserie, path_out)
os.remove(input_dates_file_path)
os.remove(output_dates_file_path)
os.remove(multitemp_cloud_vrt)
os.remove(cloud_occurence)
os.remove(snow_coverage_duration)
os.remove(gapfilled_timeserie)
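# Copying with copy2 and then removing the source amounts to a move that also works across file systems.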
if mode == "DEBUG":
dest_debug_dir = op.join(path_out, "tmpdir")
if op.exists(dest_debug_dir):
@@ -131,6 +226,7 @@ def main(argv):
def create_snow_annual_map_metadata(input_path_list, path_out):
# Compute and write the content of the product metadata file.
logging.info("Start metadata computation.")
metadata_path = op.join(path_out, "LIS_METADATA.XML")
root = etree.Element("INPUTS_LIST")
for product_path in input_path_list:
@@ -138,6 +234,7 @@ def create_snow_annual_map_metadata(input_path_list, path_out):
etree.SubElement(root, "PRODUCT_NAME").text = product_name
et = etree.ElementTree(root)
et.write(metadata_path, pretty_print=True)
logging.info("End metadata computation.")
if __name__ == "__main__":