diff --git a/doc_cookbook/generate_cookbook.txt b/doc_cookbook/generate_cookbook.txt index 86a00c46cdfb458ac3d683e6ac79be000685b4de..797d74f4088a6d4299bad731028e9550a45e6d8a 100644 --- a/doc_cookbook/generate_cookbook.txt +++ b/doc_cookbook/generate_cookbook.txt @@ -16,7 +16,7 @@ NB: Pour générer à partir pour l'API Python (générer depuis les commentaires des .py): _ Environnement vierge dans le conda qui va bien. - _ Commenter provisoirement les import sur jsonshema et otbApplication + _ Commenter provisoirement les import sur jsonshema, gdal, h5py, ogr et otbApplication _ Les fichiers n'ont pas besoins d'être touchés normalement car font appel à des "automodule" via autotoc mais si il le faut alors : sphinx-apidoc -f -o rst_API_Python/ ../python_src/ _ Générer les Markdown : sphinx-build -M markdown ./rst_API_Python/ build_API_Python/ -c . diff --git a/doc_cookbook/rst_API_Python/SAR_MultiSlc.rst b/doc_cookbook/rst_API_Python/SAR_MultiSlc.rst new file mode 100644 index 0000000000000000000000000000000000000000..1a156ee4614f1a98fb86826e4277ed53cc1f8833 --- /dev/null +++ b/doc_cookbook/rst_API_Python/SAR_MultiSlc.rst @@ -0,0 +1,7 @@ +SAR\_MultiSlc module +==================== + +.. automodule:: SAR_MultiSlc + :members: + :undoc-members: + :show-inheritance: diff --git a/doc_cookbook/rst_API_Python/SAR_MultiSlc_IW.rst b/doc_cookbook/rst_API_Python/SAR_MultiSlc_IW.rst new file mode 100644 index 0000000000000000000000000000000000000000..bea668cbdf5a7e3afc90c0384fb38f9a7569ee29 --- /dev/null +++ b/doc_cookbook/rst_API_Python/SAR_MultiSlc_IW.rst @@ -0,0 +1,7 @@ +SAR\_MultiSlc\_IW module +======================== + +.. automodule:: SAR_MultiSlc_IW + :members: + :undoc-members: + :show-inheritance: diff --git a/doc_cookbook/rst_API_Python/coRegistation_S1IW.rst b/doc_cookbook/rst_API_Python/coRegistation_S1IW.rst deleted file mode 100644 index e8a7705d3946eed9cad20c40c1ecc058b85607d9..0000000000000000000000000000000000000000 --- a/doc_cookbook/rst_API_Python/coRegistation_S1IW.rst +++ /dev/null @@ -1,7 +0,0 @@ -coRegistation\_S1IW module -========================== - -.. automodule:: coRegistation_S1IW - :members: - :undoc-members: - :show-inheritance: diff --git a/doc_cookbook/rst_API_Python/diapOTB_S1IW_TEST.rst b/doc_cookbook/rst_API_Python/diapOTB_S1IW_TEST.rst deleted file mode 100644 index 3b7b3ad731aac1cd49e6989377a2b3a086151057..0000000000000000000000000000000000000000 --- a/doc_cookbook/rst_API_Python/diapOTB_S1IW_TEST.rst +++ /dev/null @@ -1,7 +0,0 @@ -diapOTB\_S1IW\_TEST module -========================== - -.. automodule:: diapOTB_S1IW_TEST - :members: - :undoc-members: - :show-inheritance: diff --git a/doc_cookbook/rst_API_Python/modules.rst b/doc_cookbook/rst_API_Python/modules.rst index 3dd3dc1c8c4007872d560faa867398f6c1eb527a..3177c31424bf1673ce51d773117bc989f6545cfd 100644 --- a/doc_cookbook/rst_API_Python/modules.rst +++ b/doc_cookbook/rst_API_Python/modules.rst @@ -4,9 +4,9 @@ python_src .. toctree:: :maxdepth: 4 - coRegistation_S1IW + SAR_MultiSlc + SAR_MultiSlc_IW diapOTB diapOTB_S1IW - diapOTB_S1IW_TEST processings utils diff --git a/doc_cookbook/rst_API_Python/processings.rst b/doc_cookbook/rst_API_Python/processings.rst index b8e5eea3421a51da318faf9ba5d44664444a2732..8180b86502a9af5324a2c29874870d9658e7e2a6 100644 --- a/doc_cookbook/rst_API_Python/processings.rst +++ b/doc_cookbook/rst_API_Python/processings.rst @@ -16,7 +16,15 @@ processings.Ground module ------------------------- .. 
automodule:: processings.Ground - :members: + :members: + :undoc-members: + :show-inheritance: + +processings.Metadata\_Correction module +--------------------------------------- + +.. automodule:: processings.Metadata_Correction + :members: :undoc-members: :show-inheritance: diff --git a/python_src/SAR_MultiSlc.json b/python_src/SAR_MultiSlc.json new file mode 100644 index 0000000000000000000000000000000000000000..04384b14b3de2f847233906fc2d948c6adcb4f02 --- /dev/null +++ b/python_src/SAR_MultiSlc.json @@ -0,0 +1,46 @@ +{ + "Global": { + "in": + { + "SRTMShapefile": "/work/scratch/azzonim/Stage2019/geoid_srtm/srtm.shp", + "Datalake": "/work/ALT/swot/swotpub/MNT/SRTM_30_hgt/", + "Geoid": "/work/scratch/azzonim/Stage2019/geoid_srtm/egm96.grd" + } + }, + + "Pre_Processing": { + "out": + { + "doppler_file": "dop0.txt" + }, + "parameter": + { + "ML_gain": 0.1 + } + }, + "Metadata_Correction": + { + "out": + { + "fine_metadata_file": "fine_metadata.txt" + }, + "parameter": + { + "activate": false, + "GridStep_range": 150, + "GridStep_azimut": 150 + } + }, + "DIn_SAR": + { + "parameter": + { + "GridStep_range": 150, + "GridStep_azimut": 150, + "Grid_Threshold": 0.3, + "Grid_Gap_S1": 1000, + "Grid_Gap_Co": 0.7, + "Interferogram_gain": 0.1 + } + } +} diff --git a/python_src/SAR_MultiSlc.py b/python_src/SAR_MultiSlc.py new file mode 100644 index 0000000000000000000000000000000000000000..fc893d7fd24fc02ed0487919f0f205960466843d --- /dev/null +++ b/python_src/SAR_MultiSlc.py @@ -0,0 +1,582 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES) +# +# This file is part of Orfeo Toolbox +# +# https://www.orfeo-toolbox.org/ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" + SAR_MultiSlc.py + =============== + + Interferometry chain between multitemporal SAR images for S1 Strimap and Cosmo products. + One master image is fixed by users and other images are processed as slave image. 
+ +""" + +__author__ = "Maxime Azzoni" +__version__ = "0.1" +__status__ = "Developpement" +__date__ = "05/02/2020" +__last_modified__ = "05/02/2020" + +# =============== +# Import section +# =============== + +import logging +import os +import argparse +import xml.etree.ElementTree as ET +import datetime +import gdal +import shutil +from shutil import copyfile +import h5py + +from processings import Pre_Processing +from processings import Ground +from processings import DInSar + +import utils.DiapOTB_applications as diapOTBApp +from utils import func_utils + +# ==================================== # + # Main # + # ================================ # + +if __name__ == "__main__": + + # ====== Check arguments + parser = argparse.ArgumentParser() + parser.add_argument("configfile", + help="""input conguration file for the + application DiapOTB""") + parser.add_argument("-d", "--start_date", dest="start_date", + action="store", help="start date, fmt('20151222')", + default=None) + parser.add_argument("-f", "--end_date", dest="end_date", action="store", + help="end date, fmt('20151223')", default=None) + parser.add_argument("-m", "--master", dest="master", action="store", + help="master image, fmt (.tiff or .h5)", default=None) + parser.add_argument("-e", "--exclude", dest="exclude", nargs="+", + action="store", help=""""excluded date(s) from the time + serie, exemple('20151221 20160114')""", default="-9999") + parser.add_argument("-roi", "--roi", dest="roi", nargs="+", action="store", + help=""""Define the lat lng ROI coordinates, + fmt('ulx uly lrx lry'), ex: -roi 2.44115 48.96126 + 2.44176 48.95927""", default=None) + parser.add_argument("-i", "--interferogram", dest="interferogram", + action="store", help=""" Simply write ' -i no ' to + deactivate interferogram output. Activated by default. + Returns: Interferogram.tif""", default="yes") + parser.add_argument("-o", "--ortho", dest="ortho", + action="store", help=""" Simply write ' -o yes ' to + activate Orthorectified interferogram output. If activated, + returns: Ortho_Interferogram.tif""", default=None) + parser.add_argument("-l", "--light", dest="light", action="store", + help="""" -l no' to deactivate. If activated, returns a + light version: Interferogram.tif + Coregistrated.tif + + Concatenated (deramped, and multilooked) bursts.tif""", + default="yes") + parser.add_argument("-ram", "--optram", dest="optram", + action="store", help="""Available RAM (mb), + by default value is 4OOO""", + default="4000") + parser.add_argument("-spacingxy", "--spacingxy", dest="spacingxy", + action="store", help="""Set the spatial resolution + for OrthoRectification in degrees, + Default value is 0.0001""", + default="0.0001") + parser.add_argument("-ml", "--multilook", dest="multi", nargs="+", action="store", + help=""""Set the range and azimuth (in this order) you want + for the multilook. 
Default is 3 3""", default='3' '3') + args = parser.parse_args() + + print(args.configfile) + + func_utils.init_logger() + + # ====== Read and Load the configuration file + dataConfig = func_utils.load_configfile(args.configfile) + + # ====== Check extension (if .h5 => HDF5 file => Cosmo Sensor) + master_ext = args.master.split(".")[-1:] + + # ================================= + # Get elements from users arguments + # ================================= + + # ====== Set the variables names + iso_start, iso_end = func_utils.argDates_to_isoDates(args.start_date, args.end_date) + start_time = int(args.start_date) + end_time = int(args.end_date) + master_Image_base = args.master + master_Image = "" + pol = "" + spacingxy = args.spacingxy + exclude = args.exclude + roi = args.roi + light_version = args.light + relative_orbit = "" + manifest = "" + + if master_ext[0] == "h5" : # Cosmo case + master_Image = func_utils.get_imgFromDir(args.master) + master_date = master_Image_base.split("_")[8][:8] + pol = master_Image_base.split("_")[5] + + else : #S1 SM case + master_Image = func_utils.get_imgFromSAFE(args.master) + master_date = master_Image_base.split("-")[4].split("t")[0] + pol = master_Image_base.split("-")[3] + manifest = master_Image.split("measurement")[0]+"/manifest.safe" + relative_orbit = func_utils.get_relative_orbit(manifest) + + version_interferogram = args.interferogram + ortho_interferogram = args.ortho + if roi: + ortho_interferogram = "yes" + print("ortho_interferogram", ortho_interferogram) + ram = args.optram + rng,azi = args.multi + + # ====== Get dictionaries + dict_Global = dataConfig['Global'] + dict_PreProcessing = dataConfig['Pre_Processing'] + dict_Metadata_Correction = dataConfig['Metadata_Correction'] + dict_DInSAR = dataConfig['DIn_SAR'] + + # ====== Get elements from configuration file + # ====== Global + srtm_shapefile = dict_Global['in']['SRTMShapefile'] + hgts_path = dict_Global['in']['Datalake'] + geoid_path = dict_Global['in']['Geoid'] + + satellite = "default" + mode = "default" + + if 'sensor' in dict_Global: + satellite = dict_Global['sensor']['satellite'] + mode = dict_Global['sensor']['mode'] + + # ====== Pre_Processing + ml_range = int(rng) + ml_azimut = int(azi) + ml_gain = dict_PreProcessing['parameter']['ML_gain'] + dop_file = dict_PreProcessing['out']['doppler_file'] + + # ====== Metadata_Correction + activateMetadataCorrection = dict_Metadata_Correction['parameter']['activate'] + ml_simu_range = ml_range + ml_simu_azimut = ml_azimut + ml_simu_gain = 1. 
+ ml_correlSimu_range = ml_range + ml_correlSimu_azimut = ml_azimut + correlSimu_gridstep_range = dict_Metadata_Correction['parameter']['GridStep_range'] + correlSimu_gridstep_azimut = dict_Metadata_Correction['parameter']['GridStep_azimut'] + fine_metadata_file = dict_Metadata_Correction['out']['fine_metadata_file'] + + # ====== DIn_SAR + geoGrid_gridstep_range = dict_DInSAR['parameter']['GridStep_range'] + geoGrid_gridstep_azimut = dict_DInSAR['parameter']['GridStep_azimut'] + geoGrid_threshold = dict_DInSAR['parameter']['Grid_Threshold'] + if not master_ext[0] == "h5": + geoGrid_gap = dict_DInSAR['parameter']['Grid_Gap_S1'] + else: + geoGrid_gap = dict_DInSAR['parameter']['Grid_Gap_Co'] + ml_geoGrid_range = ml_range + ml_geoGrid_azimut = ml_azimut + gain_interfero = dict_DInSAR['parameter']['Interferogram_gain'] + + if (geoGrid_threshold < 0) or (geoGrid_threshold > 1): + func_utils.log(logging.CRITICAL, "Error, Wrong Threshold for fine deformation grid") + geoGrid_threshold = 0.3 + + # ====== Check if images exist + func_utils.check_ifExist(srtm_shapefile) + func_utils.check_ifExist(hgts_path) + func_utils.check_ifExist(master_Image) + + # ====== Create global folder with starting and ending dates + master date + output_glob = "./output_{}_to_{}_m_{}".format(start_time, end_time, + master_date) + if not os.path.exists(output_glob): + os.makedirs(output_glob) + + # ====== Create Digital Elevation Model + dem, target_dir = func_utils.build_virutal_raster(master_Image, start_time, end_time, + master_date, srtm_shapefile, hgts_path) + print("\n Removing master_image_envelope shp files...\n") + for i in os.listdir(target_dir): + if i.startswith("master_envelope"): + os.remove(target_dir + "/" + i) + + + # Init file handler (all normaly print on std is redirected into info.log) + # To use previous print on std, use printOnStd + func_utils.init_fileLog(output_glob) + + # Recap of input parameter into info.log + func_utils.log(logging.INFO, "########### Input Parameters for the current execution ############## ") + func_utils.log(logging.INFO, " Global : ") + func_utils.log(logging.INFO, "srtm_shapefile : {param}".format(param=srtm_shapefile)) + func_utils.log(logging.INFO, "hgts_path : {param}".format(param=hgts_path)) + func_utils.log(logging.INFO, "geoid_path : {param}".format(param=geoid_path)) + + func_utils.log(logging.INFO, " Pre_Processing : ") + func_utils.log(logging.INFO, "ml_range : {param}".format(param=ml_range)) + func_utils.log(logging.INFO, "ml_azimut : {param}".format(param=ml_azimut)) + func_utils.log(logging.INFO, "ml_gain : {param}".format(param=ml_gain)) + + # DIn_SAR + func_utils.log(logging.INFO, " DIn_SAR : ") + func_utils.log(logging.INFO, "geoGrid_gridstep_range : {param}".format(param=geoGrid_gridstep_range)) + func_utils.log(logging.INFO, "geoGrid_gridstep_azimut : {param}".format(param=geoGrid_gridstep_azimut)) + func_utils.log(logging.INFO, "geoGrid_threshold : {param}".format(param=geoGrid_threshold)) + func_utils.log(logging.INFO, "geoGrid_gap : {param}".format(param=geoGrid_gap)) + func_utils.log(logging.INFO, "ml_geoGrid_range : {param}".format(param=ml_geoGrid_range)) + func_utils.log(logging.INFO, "ml_geoGrid_azimut : {param}".format(param=ml_geoGrid_azimut)) + func_utils.log(logging.INFO, "gain_interfero : {param}".format(param=gain_interfero)) + + # ============================= + # Get the elements from os.dirs + # ============================= + + # ====== Get the list of GTiff corresponding to dates + tiff_list = func_utils.get_AllTiff(pol=pol, 
ext=master_ext[0]) + tiff_dates = func_utils.get_Tiff_WithDates(start_time, end_time, exclude, tiff_list, master_ext[0]) + counter = 0 + + # ====== For loop processing + for i in (i for i in tiff_dates if i != master_Image_base): + total_slaves = len(tiff_dates)-1 + slave_Image_base = i + slave_Image = "" + if not master_ext[0] == "h5" : + slave_Image = func_utils.get_imgFromSAFE(slave_Image_base) + else : + slave_Image = func_utils.get_imgFromDir(slave_Image_base) + slave_date = func_utils.get_Date(i, master_ext[0]) + counter += 1 + output_dir = output_glob + "/{}_m_{}_s".format(master_date, slave_date) + if os.path.exists(output_dir): + shutil.rmtree(output_dir) + if not os.path.exists(output_dir): + os.makedirs(output_dir) + master_data_dir = output_glob + "/{}_master_directory".format(master_date) + if not os.path.exists(master_data_dir): + os.makedirs(master_data_dir) + + # ====== Check extension (if .h5 => HDF5 file => Cosmo Sensor) + slave_ext = slave_Image.split(".")[-1:] + + func_utils.log(logging.INFO, "master_ext = " + master_ext[0] + "\n") + func_utils.log(logging.INFO, "slave_ext = " + slave_ext[0] + "\n") + + if counter <= 1: + if master_ext[0] == "h5": + master_H5 = h5py.File(master_Image, 'r') + lDataSet_master = list(master_H5.keys()) + + if len(lDataSet_master) != 1: + func_utils.log(logging.CRITICAL, "Error, H5 input files does not contain the expected dataset \n") + quit() + + if lDataSet_master[0] != "S01": + func_utils.log(logging.CRITICAL, "Error, H5 input files does not contain the expected dataset \n") + quit() + + master_S01 = dict(master_H5['S01']) + + if not 'SBI' in master_S01: + func_utils.log(logging.CRITICAL, "Error, H5 input files does not contain the expected dataset \n") + quit() + + # Change the name of master and slave image to read directly the //S01/SBI + master_Image = "HDF5:" + master_Image + "://S01/SBI" + # Adapt satellite + satellite = "cosmo" + + + if slave_ext[0] == "h5": + slave_H5 = h5py.File(slave_Image, 'r') + lDataSet_slave = list(slave_H5.keys()) + + if len(lDataSet_slave) != 1: + func_utils.log(logging.CRITICAL, "Error, H5 input files does not contain the expected dataset \n") + quit() + + if lDataSet_slave[0] != "S01": + func_utils.log(logging.CRITICAL, "Error, H5 input files does not contain the expected dataset \n") + quit() + + slave_S01 = dict(slave_H5['S01']) + + if not 'SBI' in slave_S01: + func_utils.log(logging.CRITICAL, "Error, H5 input files does not contain the expected dataset \n") + quit() + + slave_Image = "HDF5:" + slave_Image + "://S01/SBI" + + func_utils.log(logging.INFO, "########### Input Images for the current execution ############## ") + func_utils.log(logging.INFO, "Nb iteration : {param} with : ".format(param=counter)) + func_utils.log(logging.INFO, "master_Image : {param}".format(param=master_Image)) + func_utils.log(logging.INFO, "slave_Image : {param}".format(param=slave_Image)) + + func_utils.printOnStd("\n Nb iteration : {param} with : \n".format(param=counter)) + func_utils.printOnStd("\n master_Image : {param} : \n".format(param=master_Image)) + func_utils.printOnStd("\n slave_Image : {param} : \n".format(param=slave_Image)) + + # #################################################################### # + # ###################### Pre Processing Chain ######################## # + # #################################################################### # + # === Convert master to .tif + if counter <= 1: + func_utils.printOnStd("\n Master Pre_Processing chain \n") + func_utils.log(logging.INFO, "Master 
Pre_Processing Application") + + master_slc = "" + if not master_ext[0] == "h5" and roi is None: + master_slc = master_Image_base[:30] + master_Image_base[46:53]+str(relative_orbit) + ".tif" + # === Here we convert master single band CInt16 to dual band FLOAT32 + # === To do so, OTB_ExtractRoi is much faster than OTB_DynamicConvert + diapOTBApp.extractROI(master_Image, os.path.join(master_data_dir, master_slc+"?&gdal:co:TILED=YES")) + + if master_ext[0] == "h5": + master_slc = master_Image_base[:35]+".tif" + ds = gdal.Open(master_Image, gdal.GA_ReadOnly) + ds = gdal.Translate(os.path.join(master_data_dir, master_slc), ds, format="GTiff", + outputType=gdal.GDT_Float32, creationOptions=['TILED=YES']) + + # Master + paramPreMaster = {} + paramPreMaster['ml_range'] = ml_range + paramPreMaster['ml_azimut'] = ml_azimut + paramPreMaster['ml_gain'] = ml_gain + paramPreMaster['dop_file'] = dop_file + + Pre_Processing.extractToMultilook(master_Image, master_Image_base, paramPreMaster, "Others", master_data_dir) + + + # === Slave + func_utils.printOnStd("\n Slave Pre_Processing chain \n") + func_utils.log(logging.INFO, "Slave Pre_Processing Application") + + paramPreSlave = {} + paramPreSlave['ml_range'] = ml_range + paramPreSlave['ml_azimut'] = ml_azimut + paramPreSlave['ml_gain'] = ml_gain + paramPreSlave['dop_file'] = dop_file + + dop0Slave = Pre_Processing.extractToMultilook(slave_Image, slave_Image_base, paramPreSlave, "Others", output_dir) + + + master_Image_ML = os.path.splitext(master_Image_base)[0] + "_ml" + str(ml_azimut) + str(ml_range) + ".tif" + slave_Image_ML = os.path.splitext(slave_Image_base)[0] + "_ml" + str(ml_azimut) + str(ml_range) + ".tif" + + + # #################################################################### # + # ################### Metadata Correction Chain ###################### # + # #################################################################### # + if activateMetadataCorrection: + ####### SARDEMToAmplitude Application (Simu_SAR step) ###### + if counter <= 1: + # TO DO + print("\n Metadata Correction Chain not available for now \n") + #func_utils.printOnStd("Metadata Correction Chain not available for now") + + # paramMetadata = {} + # paramMetadata['ml_range'] = ml_simu_range + # paramMetadata['ml_azimut'] = ml_simu_azimut + # paramMetadata['ml_gain'] = ml_simu_gain + # paramMetadata['geoGrid_gridstep_range'] = correlSimu_gridstep_range + # paramMetadata['geoGrid_gridstep_azimut'] = correlSimu_gridstep_azimut + # paramMetadata['nodata'] = -32768 + # paramMetadata['fine_metadata_file'] = fine_metadata_file + + # Metadata_Correction.fineMetadata(master_Image, master_Image_base, dem, paramMetadata, master_data_dir) + + + # ##################################################################### # + # ######################## Ground Chain ############################### # + # ##################################################################### # + # Master + if counter <= 1: + func_utils.printOnStd("\n Master Ground chain \n") + func_utils.log(logging.INFO, "Master Ground Application") + + paramGroundMaster = {} + paramGroundMaster['nodata'] = -32768 + paramGroundMaster['withxyz'] = "true" + paramGroundMaster['for_slave_Image'] = False + + Ground.demProjectionAndCartesianEstimation(master_Image, master_Image_base, dem, paramGroundMaster, + "Others", master_data_dir) + # Slave + func_utils.printOnStd("\n Slave Ground chain \n") + func_utils.log(logging.INFO, "Slave Ground Application") + paramGroundSlave = {} + paramGroundSlave['nodata'] = -32768 + 
paramGroundSlave['withxyz'] = "true" + paramGroundSlave['for_slave_Image'] = True + + Ground.demProjectionAndCartesianEstimation(slave_Image, slave_Image_base, dem, paramGroundSlave, + "Others", output_dir) + + demProj_Master = "demProj_Master.tif" + master_cartesian_mean = "CartMeanMaster.tif" + demProj_Slave = "demProj_Slave.tif" + + # ##################################################################### # + # ####################### DIn_SAR Chain ############################### # + # ##################################################################### # + func_utils.printOnStd("\n DIn_SAR chain \n") + func_utils.log(logging.INFO, "DIn_SAR chain") + # Create param + param = {} + param['ml_azimut'] = ml_azimut + param['ml_range'] = ml_range + param['ml_geoGrid_azimut'] = ml_geoGrid_azimut + param['ml_geoGrid_range'] = ml_geoGrid_range + param['geoGrid_gridstep_range'] = geoGrid_gridstep_range + param['geoGrid_gridstep_azimut'] = geoGrid_gridstep_azimut + param['geoGrid_threshold'] = geoGrid_threshold + param['geoGrid_gap'] = geoGrid_gap + param['doppler0'] = dop0Slave + param['gain_interfero'] = gain_interfero + param['advantage'] = "projection" # By default projection + if satellite == "cosmo" or satellite == "CSK": + param['advantage'] = "correlation" + + slave_Image_CoRe = "" + if not master_ext[0] == "h5": + slave_Image_CoRe = slave_Image_base[:30] + slave_Image_base[46:53]+str(relative_orbit) + ".tif" + if master_ext[0] == "h5": + slave_Image_CoRe = slave_Image_base[:35]+".tif" + + param['slave_CoRe_Name'] = slave_Image_CoRe + + param['with_interferogram'] = version_interferogram + + list_of_Grids, list_of_Interferogram = DInSar.gridToInterferogram(dem, master_Image, master_Image_base, slave_Image, slave_Image_base, master_data_dir, output_dir, param, 'Others', output_dir) + + interferogram_path = list_of_Interferogram[0] + grid_path = list_of_Grids[0] + + # ==================================== # + # Post Processing Chain # + # # + # ============================== # + + + func_utils.printOnStd("\n Post Processing chain \n") + func_utils.log(logging.INFO, "Post Processing chain") + + # === Multilook on Coregistrated Slave + func_utils.silentremove(output_dir, slave_Image_ML) + slave_Image_ML = os.path.splitext(slave_Image_base)[0] + "_ml" + str(ml_azimut) + str(ml_range) + ".tif" + diapOTBApp.multilook(os.path.join(output_dir, slave_Image_CoRe), ml_range, ml_azimut, ml_gain, + os.path.join(output_dir, slave_Image_ML)) + + + ### Names definition ### + mib = master_Image_base + sib = slave_Image_base + + InterferoB123 = "" + Interfero_Ortho = "" + Interfero_Ortho = "" + + if not master_ext[0] == "h5": + InterferoB123 = mib[:14]+"M"+mib[14:30]+mib[47:53]+"S"+sib[ + 14:22]+"-"+sib[47:53]+str(relative_orbit)+"_Interferogram.tif" + Interfero_Ortho = mib[:14]+"M"+mib[14:30]+mib[47:53]+"S"+sib[ + 14:22]+"-"+sib[47:53]+str(relative_orbit)+"_Ortho-Interferogram.tif" + Interfero_roi = mib[:15]+"M"+mib[14:30]+"-"+mib[ + 47:54]+"S"+sib[14:23]+"-"+sib[47:53]+"-"+str(relative_orbit)+"_ROI_Ortho-Interferogram.tif" + + if master_ext[0] == "h5": + InterferoB123 = mib[:27]+"M_"+mib[27:35]+"_S_"+sib[ + 27:35]+"_Interferogram.tif" + Interfero_Ortho = mib[:27]+"M_"+mib[27:35]+"_S_"+sib[ + 27:35]+"_Ortho-Interferogram.tif" + Interfero_roi = mib[:27]+"M_"+mib[27:35]+"_S_"+sib[ + 27:35]+"_ROI_Ortho-Interferogram.tif" + + + + ### ROI, Ortho and band_extract ### + if ortho_interferogram == "yes": + interf_ortho = "interferogram_ortho.tif" + diapOTBApp.orthorectification(interferogram_path, spacingxy, 
hgts_path, geoid_path, os.path.join(output_dir, interf_ortho)) + if not roi: + func_utils.extract_band123(os.path.join(output_dir, interf_ortho), os.path.join(output_dir, Interfero_Ortho)) + func_utils.extract_band123(interferogram_path, os.path.join(output_dir, InterferoB123)) + if roi : + interf_roi = "interferogram_roi.tif" + func_utils.extract_roi(os.path.join(output_dir, interf_ortho), os.path.join(output_dir, interf_roi), roi) + func_utils.extract_band123(os.path.join(output_dir, interf_roi), os.path.join(output_dir, Interfero_roi)) + if ortho_interferogram is None and roi is None: + func_utils.extract_band123(interferogram_path, os.path.join(output_dir, InterferoB123)) + + # ==================================== # + # Removing # + # files # + # ============================== # + + func_utils.silentremove(os.path.dirname(interferogram_path), os.path.basename(interferogram_path)) + func_utils.silentremove(os.path.dirname(interferogram_path), os.path.basename(interferogram_path).split(".")[0]+".geom") + if ortho_interferogram == "yes": + interferogram_ortho = "interferogram_ortho.tif" + func_utils.silentremove(output_dir, interferogram_ortho) + func_utils.silentremove(output_dir, interferogram_ortho.split(".")[0]+".geom") + + if roi: + func_utils.silentremove(output_dir, interf_roi) + + + if light_version == "yes": + func_utils.log(logging.INFO, "\n Removing files for light version \n") + func_utils.silentremove(output_dir, dop_file) + func_utils.silentremove(output_dir, demProj_Slave) + func_utils.silentremove(output_dir, demProj_Slave.split(".")[0]+".geom") + func_utils.silentremove(os.path.dirname(grid_path), os.path.basename(grid_path)) + func_utils.silentremove(os.path.dirname(grid_path), os.path.basename(grid_path).split(".")[0]+".geom") + func_utils.silentremove(output_dir, slave_Image_ML.split(".")[0]+".geom") + + # Remove .tif.aux.xml files into output_dir + aux_files = func_utils.get_AllFilesWithExt(output_dir, ".tif.aux.xml") + for i_file in aux_files : + func_utils.silentremove(output_dir, i_file) + + + if light_version == "yes": + func_utils.log(logging.INFO, "\n Removing files for light version \n") + if os.path.exists(os.path.join(master_data_dir, dop_file)): + func_utils.silentremove(master_data_dir, dop_file) + func_utils.silentremove(master_data_dir, demProj_Master) + func_utils.silentremove(master_data_dir, demProj_Master.split(".")[0]+".geom") + func_utils.silentremove(master_data_dir, master_Image_ML.split(".")[0]+".geom") + func_utils.silentremove(master_data_dir, master_cartesian_mean) + func_utils.silentremove(master_data_dir, master_cartesian_mean.split(".")[0]+".geom") + if master_ext[0] == "h5": + func_utils.silentremove(master_data_dir, master_slc.split(".")[0]+".tif.aux.xml") + CoreGeom = slave_Image_CoRe.split(".")[0]+".geom" + MasterGeom = master_slc.split(".")[0]+".geom" + copyfile(os.path.join(output_dir, CoreGeom), os.path.join(master_data_dir, MasterGeom)) + diff --git a/python_src/SAR_MultiSlc_IW.json b/python_src/SAR_MultiSlc_IW.json new file mode 100644 index 0000000000000000000000000000000000000000..c3770c52b4e51cb39a504bf5c7f87e9d9ad7b08d --- /dev/null +++ b/python_src/SAR_MultiSlc_IW.json @@ -0,0 +1,33 @@ +{ + "Global": { + "in": + { + "SRTMShapefile": "/work/scratch/azzonim/Stage2019/geoid_srtm/srtm.shp", + "Datalake": "/work/ALT/swot/swotpub/MNT/SRTM_30_hgt/", + "Geoid": "/work/scratch/azzonim/Stage2019/geoid_srtm/egm96.grd" + } + }, + + "Pre_Processing": { + "out": + { + "doppler_file": "dop0.txt" + }, + "parameter": + { + "ML_gain": 0.1 + } + 
}, + "Ground": {}, + "DIn_SAR": + { + "parameter": + { + "GridStep_range": 25, + "GridStep_azimut": 5, + "Grid_Threshold": 0.3, + "Grid_Gap": 30, + "Interferogram_gain": 0.1 + } + } +} diff --git a/python_src/SAR_MultiSlc_IW.py b/python_src/SAR_MultiSlc_IW.py new file mode 100644 index 0000000000000000000000000000000000000000..9fc4e2ed807a536f937e1679ad86d8b1871164c7 --- /dev/null +++ b/python_src/SAR_MultiSlc_IW.py @@ -0,0 +1,535 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES) +# +# This file is part of Orfeo Toolbox +# +# https://www.orfeo-toolbox.org/ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" + SAR_MultiSlc_IW.py + ================== + + Interferometry chain between multitemporal SAR images for S1 IW products. + One master image is fixed by users and other images are processed as slave image. + +""" + +__author__ = "Maxime Azzoni" +__version__ = "0.1" +__status__ = "Developpement" +__date__ = "05/02/2020" +__last_modified__ = "05/02/2020" + +# ==================================== # + # Imports # + # ================================ # + +import logging +import os +import argparse +import datetime +import gdal +import tempfile +import shutil +import errno + +from processings import Pre_Processing +from processings import Ground +from processings import DInSar + +import utils.DiapOTB_applications as diapOTBApp +from utils import func_utils + +#import psutil +#mem_state = dict(psutil.virtual_memory()._asdict()) + + +# ==================================== # + # Main # + # ================================ # + +if __name__ == "__main__": + + # ====== Check arguments + parser = argparse.ArgumentParser() + parser.add_argument("configfile", + help="""input conguration file for the + application DiapOTB""") + parser.add_argument("-d", "--start_date", dest="start_date", + action="store", help="start date, fmt('20151222')", + default=None) + parser.add_argument("-f", "--end_date", dest="end_date", action="store", + help="end date, fmt('20151223')", default=None) + parser.add_argument("-m", "--master", dest="master", action="store", + help="master image, fmt (.tiff or .h5)", default=None) + parser.add_argument("-e", "--exclude", dest="exclude", nargs="+", + action="store", help=""""excluded date(s) from the time + serie, exemple('20151221 20160114')""", default="-9999") + parser.add_argument("-b", "--burst", dest="burst", action="store", + help="Select the bursts you want to work on, fmt(0-4)", + default="0-8") + parser.add_argument("-roi", "--roi", dest="roi", nargs="+", action="store", + help=""""Define the lng lat ROI coordinates, + fmt('ulx uly lrx lry'), ex: -roi 2.44115 48.96126 + 2.44176 48.95927""", default=None) + parser.add_argument("-i", "--interferogram", dest="interferogram", + action="store", help=""" Simply write ' -i no ' to + deactivate interferogram output. Activated by default. 
+ Returns: Interferogram.tif""", default="yes") + parser.add_argument("-o", "--ortho", dest="ortho", + action="store", help=""" Simply write ' -o yes ' to + activate Orthorectified interferogram output. If activated, + returns: Ortho_Interferogram.tif""", default=None) + parser.add_argument("-l", "--light", dest="light", action="store", + help="""" -l no' to deactivate. If activated, returns a + light version: Interferogram.tif + Coregistrated.tif + + multilook.tif""", default="yes") + parser.add_argument("-ram", "--optram", dest="optram", + action="store", help="""Available RAM (mb), + by default value is 4OOO""", + default="4000") + parser.add_argument("-spacingxy", "--spacingxy", dest="spacingxy", + action="store", help="""Set the spatial resolution + for OrthoRectification in degrees, + Default value is 0.0001""", + default="0.0001") + parser.add_argument("-ml", "--multilook", dest="multi", nargs="+", action="store", + help=""""Set the range and azimuth (in this order) you want + for the multilook. Default is 3 3""", default='3' '3') + args = parser.parse_args() + + print(args.configfile) + + func_utils.init_logger() + + dataConfig = func_utils.load_configfile(args.configfile, "S1_IW") + + # ================================= + # Get elements from user's arguments + # ================================= + + # ====== Set the variables names + iso_start, iso_end = func_utils.argDates_to_isoDates(args.start_date, args.end_date) + start_time = int(args.start_date) + end_time = int(args.end_date) + master_Image_base = args.master + master_Image = func_utils.get_imgFromSAFE(args.master) + master_date = master_Image_base.split("-")[4].split("t")[0] + pol = master_Image_base.split("-")[3] + iw = master_Image_base.split("-")[1] + burst_index = args.burst + print(burst_index)############################################################################### + exclude = args.exclude + version_interferogram = args.interferogram + light_version = args.light + manifest = master_Image.split("measurement")[0]+"/manifest.safe" + relative_orbit = func_utils.get_relative_orbit(manifest) + ortho_interferogram = args.ortho + roi = args.roi + if roi: + ortho_interferogram = "yes" + print("ortho_interferogram", ortho_interferogram) + ram = args.optram + spacingxy = args.spacingxy + rng,azi = args.multi + + # ==================================== + # Get elements from configuration file + # ==================================== + + # ====== Get dictionaries + dict_Global = dataConfig['Global'] + dict_PreProcessing = dataConfig['Pre_Processing'] + dict_Ground = dataConfig['Ground'] + dict_DInSAR = dataConfig['DIn_SAR'] + + + # ====== Get elements from dictionaries + # ====== Global + srtm_shapefile = dict_Global['in']['SRTMShapefile'] + hgts_path = dict_Global['in']['Datalake'] + geoid_path = dict_Global['in']['Geoid'] + + # ====== Pre_Processing + ml_range = int(rng) + ml_azimut = int(azi) + ml_gain = dict_PreProcessing['parameter']['ML_gain'] + dop_file = dict_PreProcessing['out']['doppler_file'] + + # ====== Ground + + # ====== DIn_SAR + geoGrid_gridstep_range = dict_DInSAR['parameter']['GridStep_range'] + geoGrid_gridstep_azimut = dict_DInSAR['parameter']['GridStep_azimut'] + geoGrid_threshold = dict_DInSAR['parameter']['Grid_Threshold'] + geoGrid_gap = dict_DInSAR['parameter']['Grid_Gap'] + ml_geoGrid_range = int(rng) + ml_geoGrid_azimut = int(azi) + gain_interfero = dict_DInSAR['parameter']['Interferogram_gain'] + + # ====== Check Threshold + if (geoGrid_threshold < 0) or (geoGrid_threshold > 1) : + 
func_utils.log(logging.CRITICAL, "Error, Wrong Threshold for fine deformation grid") + geoGrid_threshold = 0.3 + + # ====== Check if images exist + func_utils.check_ifExist(srtm_shapefile) + func_utils.check_ifExist(hgts_path) + func_utils.check_ifExist(master_Image) + + # ====== Check regex for Burst index + firstBurst, lastBurst, burstList = func_utils.check_burstIndex(burst_index) + print("from check regex",firstBurst, lastBurst)########################################################### + + # ====== Create global folder with starting and ending dates + master date + output_glob = "./output_{}_to_{}_m_{}".format(start_time, end_time, + master_date) + if os.path.exists(output_glob): + shutil.rmtree(output_glob) + if not os.path.exists(output_glob): + os.makedirs(output_glob) + + # ====== Create Digital Elevation Model + dem, target_dir = func_utils.build_virutal_raster(master_Image, start_time, end_time, + master_date, srtm_shapefile, hgts_path) + print("\n Removing master_image_envelope shp files...\n") + for i in os.listdir(target_dir): + if i.startswith("master_envelope"): + os.remove(target_dir + "/" + i) + + + # Init file handler (all normaly print on std is redirected into info.log) + # To use previous print on std, use printOnStd + func_utils.init_fileLog(output_glob) + + # Recap of input parameter into info.log + func_utils.log(logging.INFO, "########### Input Parameters for the current execution ############## ") + func_utils.log(logging.INFO, " Global : ") + func_utils.log(logging.INFO, "srtm_shapefile : {param}".format(param=srtm_shapefile)) + func_utils.log(logging.INFO, "hgts_path : {param}".format(param=hgts_path)) + func_utils.log(logging.INFO, "geoid_path : {param}".format(param=geoid_path)) + + func_utils.log(logging.INFO, " Pre_Processing : ") + func_utils.log(logging.INFO, "ml_range : {param}".format(param=ml_range)) + func_utils.log(logging.INFO, "ml_azimut : {param}".format(param=ml_azimut)) + func_utils.log(logging.INFO, "ml_gain : {param}".format(param=ml_gain)) + + # DIn_SAR + func_utils.log(logging.INFO, " DIn_SAR : ") + func_utils.log(logging.INFO, "geoGrid_gridstep_range : {param}".format(param=geoGrid_gridstep_range)) + func_utils.log(logging.INFO, "geoGrid_gridstep_azimut : {param}".format(param=geoGrid_gridstep_azimut)) + func_utils.log(logging.INFO, "geoGrid_threshold : {param}".format(param=geoGrid_threshold)) + func_utils.log(logging.INFO, "geoGrid_gap : {param}".format(param=geoGrid_gap)) + func_utils.log(logging.INFO, "ml_geoGrid_range : {param}".format(param=ml_geoGrid_range)) + func_utils.log(logging.INFO, "ml_geoGrid_azimut : {param}".format(param=ml_geoGrid_azimut)) + func_utils.log(logging.INFO, "gain_interfero : {param}".format(param=gain_interfero)) + + + # ============================ + # Get the elements from os.dirs + # ============================ + + tiff_list = func_utils.get_AllTiff(pol, iw) + tiff_dates = func_utils.get_Tiff_WithDates(start_time, end_time, exclude, tiff_list) + counter = 0 + + for i in (i for i in tiff_dates if i != master_Image_base): + total_slaves = len(tiff_dates)-1 + slave_Image_base = i + slave_Image = func_utils.get_imgFromSAFE(slave_Image_base) + slave_date = func_utils.get_Date(i) + counter += 1 + output_dir = output_glob + "/{}_m_{}_s".format(master_date, slave_date) + if os.path.exists(output_dir): + shutil.rmtree(output_dir) + if not os.path.exists(output_dir): + os.makedirs(output_dir) + master_data_dir = output_glob + "/{}_master_directory".format(master_date) + if not os.path.exists(master_data_dir): + 
os.makedirs(master_data_dir) + + + func_utils.log(logging.INFO, "########### Input Images for the current execution ############## ") + func_utils.log(logging.INFO, "Nb iteration : {param} with : ".format(param=counter)) + func_utils.log(logging.INFO, "master_Image : {param}".format(param=master_Image)) + func_utils.log(logging.INFO, "slave_Image : {param}".format(param=slave_Image)) + + func_utils.printOnStd("\n Nb iteration : {param} with : \n".format(param=counter)) + func_utils.printOnStd("\n master_Image : {param} : \n".format(param=master_Image)) + func_utils.printOnStd("\n slave_Image : {param} : \n".format(param=slave_Image)) + + # ============================================== + # Retrieving informations about master and slave + # ============================================== + dictKWLMaster = func_utils.getImageKWL(master_Image) + dictKWLSlave = func_utils.getImageKWL(slave_Image) + + + # ===== Check header version + if int(dictKWLMaster['header.version']) < 3 or int(dictKWLSlave['header.version']) < 3 : + func_utils.log(logging.CRITICAL, "Error, Upgrade your geom file") + quit() + + # ===== Check the index of bursts + minNbBurst = min([int(dictKWLMaster['support_data.geom.bursts.number']), + int(dictKWLSlave['support_data.geom.bursts.number'])]) + + if minNbBurst < firstBurst or minNbBurst < lastBurst: + func_utils.log(logging.CRITICAL, "Error, Wrong burst index (superior to number of bursts)") + quit() + + # ===== Selection of bursts + keyBurst = "support_data.geom.bursts.burst[" + str(0) + "].azimuth_anx_time" + + # ===== Check the index of bursts + minNbBurst = min([int(dictKWLMaster['support_data.geom.bursts.number']), int(dictKWLSlave['support_data.geom.bursts.number'])]) + print("minNbBurst", minNbBurst)############################################################################## + + firstBurst = 0 + lastBurst = minNbBurst + burstIndexOk = True + print("from check the index", firstBurst, lastBurst) + + try: + if len(burstList) == 2 : + firstBurst = min(burstList) + lastBurst = max(burstList) + + + except Exception as err: + func_utils.log(logging.CRITICAL, "Error, Wrong burst index") + quit() + + if minNbBurst < firstBurst or minNbBurst < lastBurst or lastBurst < 0 or firstBurst < 0 : + func_utils.log(logging.CRITICAL, "Error, Wrong burst index") + quit() + + validBurstMaster = [] + validBurstSlave = [] + nbBurstSlave = int(dictKWLSlave['support_data.geom.bursts.number']) + validBurstMaster, validBurstSlave = func_utils.selectBurst(dictKWLMaster, dictKWLSlave, firstBurst, lastBurst, nbBurstSlave) + + if len(validBurstMaster) == 0 or len(validBurstSlave) == 0 : + func_utils.log(logging.CRITICAL, "Error, Wrong burst index (slave index does not match with master index)") + quit() + + # ===== Update firstBurst and lastBurst with selected Burst for master image + firstBurst = validBurstMaster[0] + lastBurst = validBurstMaster[len(validBurstMaster)-1] + # ===== Create directory for each burst + temp_dir = tempfile.mkdtemp() + for burstId in range(firstBurst, lastBurst+1): + os.makedirs(os.path.join(temp_dir, "burst" + str(burstId))) + + # #################################################################### # + # ###################### Pre Processing Chain ######################## # + # #################################################################### # + # Initialisation of doppler file + dopFile = open(os.path.join(output_dir, dop_file), "w") + dopFile.close() + + # Master + if counter <= 1: + func_utils.printOnStd("\n Master Pre_Processing chain \n") + 
func_utils.log(logging.INFO, "Master Pre_Processing Application") + Master_temp_dir = tempfile.mkdtemp() + + for id_loop in range(0, len(validBurstMaster)): + burstId = validBurstMaster[id_loop] + os.makedirs(os.path.join(Master_temp_dir, "burst" + str(burstId))) + + paramPreMaster = {} + paramPreMaster['ml_range'] = ml_range + paramPreMaster['ml_azimut'] = ml_azimut + paramPreMaster['ml_gain'] = ml_gain + paramPreMaster['dop_file'] = dop_file + paramPreMaster['validBurstMaster'] = validBurstMaster + + dop0Master, BurstToConcatenateM, Deramp_BurstToConcatenateM = Pre_Processing.extractToMultilook(master_Image, master_Image_base, paramPreMaster, "S1_IW", Master_temp_dir) + + + # ###### Concatenating and georeferencing bursts ###### # + # Master SLC + masterSlc = master_Image_base[:30] + master_Image_base[46:53]+ "-" +str(relative_orbit) + ".tif" + Con_masterSlc = os.path.join(master_data_dir, masterSlc) + diapOTBApp.concatenate(BurstToConcatenateM, master_Image, firstBurst, Con_masterSlc) + + + # Master deramp + if light_version == "no": + masterDeramp = os.path.splitext(master_Image_base)[0]+"_deramp.tif" + Con_masterDeramp = os.path.join(master_data_dir, masterDeramp) + diapOTBApp.concatenate(Deramp_BurstToConcatenateM, master_Image, firstBurst, Con_masterDeramp) + + # Slave + func_utils.printOnStd("\n Slave Pre_Processing chain \n") + func_utils.log(logging.INFO, "Slave Pre_Processing Application") + + paramPreSlave = {} + paramPreSlave['ml_range'] = ml_range + paramPreSlave['ml_azimut'] = ml_azimut + paramPreSlave['ml_gain'] = ml_gain + paramPreSlave['dop_file'] = dop_file + paramPreSlave['validBurstMaster'] = validBurstMaster + paramPreSlave['validBurstSlave'] = validBurstSlave + + dop0Slave, BurstToConcatenateS, Deramp_BurstToConcatenateS = Pre_Processing.extractToMultilook(slave_Image, slave_Image_base, paramPreSlave, "S1_IW", temp_dir) + + # ###### Concatenating and georeferencing bursts ###### # + # Slave Deramp + if light_version == "no": + Con_slaveDeramp = os.path.splitext(master_Image_base)[0] + "_deramp.tif" + diapOTBApp.concatenate(Deramp_BurstToConcatenateS, slave_Image, firstBurst, Con_slaveDeramp) + + # ##################################################################### # + # ######################## Ground Chain ############################### # + # ##################################################################### # + # Master + func_utils.printOnStd("\n Master Ground chain \n") + func_utils.log(logging.INFO, "Master Ground Application") + if counter <=1: + paramGroundMaster = {} + paramGroundMaster['nodata'] = -32768 + paramGroundMaster['withxyz'] = "true" + paramGroundMaster['validBurstMaster'] = validBurstMaster + + Ground.demProjectionAndCartesianEstimation(master_Image, master_Image_base, dem, paramGroundMaster, + "S1_IW", Master_temp_dir) + + + # Slave + func_utils.printOnStd("\n Slave Ground chain \n") + func_utils.log(logging.INFO, "Slave Ground Application") + slave_demproj_dir = tempfile.mkdtemp() + + paramGroundSlave = {} + paramGroundSlave['nodata'] = -32768 + paramGroundSlave['withxyz'] = "true" + paramGroundSlave['validBurstMaster'] = validBurstMaster + paramGroundSlave['validBurstSlave'] = validBurstSlave + + Ground.demProjectionAndCartesianEstimation(slave_Image, slave_Image_base, dem, paramGroundSlave, + "S1_IW", temp_dir) + + + # ##################################################################### # + # ####################### DIn_SAR Chain ############################### # + # ##################################################################### 
# + func_utils.printOnStd("\n DIn_SAR chain \n") + func_utils.log(logging.INFO, "DIn_SAR chain") + + # Create param + param = {} + param['ml_azimut'] = ml_azimut + param['ml_range'] = ml_range + param['validBurstMaster'] = validBurstMaster + param['validBurstSlave'] = validBurstSlave + param['ml_geoGrid_azimut'] = ml_geoGrid_azimut + param['ml_geoGrid_range'] = ml_geoGrid_range + param['geoGrid_gridstep_range'] = geoGrid_gridstep_range + param['geoGrid_gridstep_azimut'] = geoGrid_gridstep_azimut + param['geoGrid_threshold'] = geoGrid_threshold + param['geoGrid_gap'] = geoGrid_gap + param['doppler0'] = dop0Slave + param['gain_interfero'] = gain_interfero + param['advantage'] = "projection" # By default projection + param['esd_NbIter'] = 0 + param['esd_AutoMode'] = "none" + param['with_interferogram'] = version_interferogram + param['with_concatenate'] = "no" + + list_of_Grids, interfToConcatenate, slaveCorRerampToConcatenate, masterRerampToConcatenate = DInSar.gridToInterferogram(dem, master_Image, master_Image_base, slave_Image, slave_Image_base, Master_temp_dir, temp_dir, param, 'S1_IW', output_dir) + + # ###### Concatenating and georeferencing bursts ###### # + # Slave Coregistrated Reramped + slaveCorRE = slave_Image_base[:30] + slave_Image_base[46:53]+"-"+str(relative_orbit) + ".tif" + Con_slaveCorRE = os.path.join(output_dir, slaveCorRE) + diapOTBApp.concatenate(slaveCorRerampToConcatenate, master_Image, firstBurst, Con_slaveCorRE) + + masterRerampMl = os.path.splitext(master_Image_base)[0] + "_ml" + str(ml_azimut) + str(ml_range) + "Master.tif" + Con_masterReramp = os.path.join(master_data_dir, masterRerampMl) + diapOTBApp.concatenate(masterRerampToConcatenate, master_Image, firstBurst, Con_masterReramp) + + + # ==================================== # + # Post Processing Chain # + # # + # ============================== # + + func_utils.printOnStd("\n Post Processing chain \n") + func_utils.log(logging.INFO, "Post Processing chain") + + # Multilook on coregistred + CoRe_ML = os.path.splitext(slave_Image_base)[0] + "_ml" + str(ml_azimut) + str(ml_range) + ".tif" + #ml(os.path.join(output_dir, slaveCorRE), os.path.join(output_dir, CoRe_ML)) + diapOTBApp.multilook(os.path.join(output_dir, slaveCorRE), ml_range, ml_azimut, ml_gain, os.path.join(output_dir, CoRe_ML)) + + + # A === Interferogram + ### Names definition ### + mib = master_Image_base + sib = slave_Image_base + + if version_interferogram == "yes": + Interferogram_Multiband = mib[:15]+"M"+mib[14:30]+"-"+mib[ + 47:54]+"S"+sib[14:23]+"-"+sib[47:53]+"-"+ str(relative_orbit)+"_interf.tif" + Interferogram_ = mib[:15]+"M"+mib[14:30]+"-"+mib[ + 47:54]+"S"+sib[14:23]+"-"+sib[47:53]+"-"+str(relative_orbit)+"_interferogram.tif" + + Con_Interf = os.path.join(output_dir, Interferogram_Multiband) + diapOTBApp.concatenate(interfToConcatenate, master_Image, firstBurst, Con_Interf) + + # B === Extraction of band 1,2,3 + ds = gdal.Open(os.path.join(output_dir, Interferogram_Multiband), gdal.GA_ReadOnly) + ds = gdal.Translate(os.path.join(output_dir, Interferogram_), ds, bandList = ["1","2","3"]) + os.rename(os.path.join(output_dir, + Interferogram_Multiband.split(".")[0]+".geom"), os.path.join(output_dir, Interferogram_.split(".")[0]+".geom")) + + # C === Ortho Interferogram + if version_interferogram == "yes" and ortho_interferogram == "yes": + Ortho_ = mib[:15]+"M"+mib[14:30]+"-"+mib[ + 47:54]+"S"+sib[14:23]+"-"+sib[47:53]+"-"+str(relative_orbit)+"_Ortho-Interferogram.tif" + Ortho_roi = mib[:15]+"M"+mib[14:30]+"-"+mib[ + 
47:54]+"S"+sib[14:23]+"-"+sib[47:53]+"-"+str(relative_orbit)+"_ROI_Ortho-Interferogram.tif" + diapOTBApp.orthorectification(os.path.join(output_dir, Interferogram_), spacingxy, hgts_path, geoid_path, os.path.join(output_dir, Ortho_)) + + + if roi: + func_utils.extract_roi(os.path.join(output_dir, Ortho_), os.path.join(output_dir, Ortho_roi), roi) + func_utils.silentremove(output_dir, Ortho_) + + + # ==================================== # + # Remove # + # ================================ # + func_utils.silentremove(output_dir, CoRe_ML.split(".")[0]+".geom") + func_utils.silentremove(output_dir, dop_file) + if version_interferogram == "yes": + func_utils.silentremove(output_dir, Interferogram_Multiband) + if version_interferogram == "yes" and ortho_interferogram == "yes": + func_utils.silentremove(output_dir, Ortho_.split(".")[0]+".geom") + shutil.rmtree(temp_dir) + + + # Multilook on master reramped + Reramp_ML = os.path.splitext(master_Image_base)[0] + "_ml" + str(ml_azimut) + str(ml_range) + ".tif" + diapOTBApp.multilook(os.path.join(master_data_dir, masterRerampMl), ml_range, ml_azimut, ml_gain, os.path.join(master_data_dir, Reramp_ML)) + + func_utils.silentremove(master_data_dir, masterRerampMl) + func_utils.silentremove(master_data_dir, masterRerampMl.split(".")[0]+".geom") + func_utils.silentremove(master_data_dir, Reramp_ML.split(".")[0]+".geom") + shutil.rmtree(Master_temp_dir) diff --git a/python_src/diapOTB.py b/python_src/diapOTB.py index 1f57666973cf130d6a68d3e4d93ddb0d54c3a2be..2a562a8a71a8d1579dde54e37a723ee54bccea2b 100644 --- a/python_src/diapOTB.py +++ b/python_src/diapOTB.py @@ -20,11 +20,19 @@ # limitations under the License. # +""" + diapOTB.py + ========== + + Interferometry chain between two SAR images (master/slave) for S1 Strimap and Cosmo products + +""" + __author__ = "POC-THALES" __version__ = "0.1" __status__ = "Developpement" __date__ = "27/10/2017" -__last_modified__ = "27/10/2017" +__last_modified__ = "05/02/2020" # Imports import sys diff --git a/python_src/diapOTB_S1IW.py b/python_src/diapOTB_S1IW.py index 65c3b3d6521b53daf39553554ccd39fbc8182981..37cc19b8c04a9aaa77d7a3283d668900c5e47d1e 100644 --- a/python_src/diapOTB_S1IW.py +++ b/python_src/diapOTB_S1IW.py @@ -20,11 +20,19 @@ # limitations under the License. 
# +""" + diapOTB_S1IW.py + =============== + + Interferometry chain between two SAR images (master/slave) for S1 IW products + +""" + __author__ = "POC-THALES" __version__ = "0.1" __status__ = "Developpement" __date__ = "11/12/2018" -__last_modified__ = "11/12/2018" +__last_modified__ = "05/02/2020" # Imports import logging @@ -226,9 +234,11 @@ if __name__ == "__main__": paramPreMaster['dop_file'] = dop_file paramPreMaster['validBurstMaster'] = validBurstMaster - dop0Master = Pre_Processing.extractToMultilook(master_Image, master_Image_base, paramPreMaster, "S1_IW", - output_dir) + results_PreProM = Pre_Processing.extractToMultilook(master_Image, master_Image_base, paramPreMaster, "S1_IW", + output_dir) + dop0Master = results_PreProM[0] + # Slave func_utils.printOnStd("\n Slave Pre_Processing chain \n") func_utils.log(logging.INFO, "Slave Pre_Processing Application") @@ -241,9 +251,10 @@ if __name__ == "__main__": paramPreSlave['validBurstMaster'] = validBurstMaster paramPreSlave['validBurstSlave'] = validBurstSlave - dop0Slave = Pre_Processing.extractToMultilook(slave_Image, slave_Image_base, paramPreSlave, "S1_IW", - output_dir) - + results_PreProS = Pre_Processing.extractToMultilook(slave_Image, slave_Image_base, paramPreSlave, "S1_IW", + output_dir) + + dop0Slave = results_PreProS[0] ######################### Ground Chain ############################# # Master @@ -298,11 +309,7 @@ if __name__ == "__main__": param['esd_NbIter'] = esd_NbIter param['esd_AutoMode'] = esd_AutoMode - list_of_Grids, list_of_Interferograms = DInSar.gridToInterferogram(dem, master_Image, master_Image_base, slave_Image, slave_Image_base, output_dir, output_dir, param, 'S1_IW', output_dir) - - print(list_of_Grids) - print(list_of_Interferograms) - - + DInSar.gridToInterferogram(dem, master_Image, master_Image_base, slave_Image, slave_Image_base, output_dir, output_dir, param, 'S1_IW', output_dir) + func_utils.printOnStd("\n End of DiapOTB processing (S1 IW mode) \n") func_utils.log(logging.INFO, "############ End of DiapOTB processing (S1 IW mode) ##############") diff --git a/python_src/processings/DInSar.py b/python_src/processings/DInSar.py index 0bb219f5f607bf547bac0f9ea1d8ddfa813f4624..54bf010bbab865c4ce86fc6e1de4f847f46b3926 100644 --- a/python_src/processings/DInSar.py +++ b/python_src/processings/DInSar.py @@ -26,11 +26,9 @@ Use the module to launch DInSar processigns. 
- main function - -------------- + main function : gridToInterferogram + ----------------------------------- - gridToInterferogram - """ import os @@ -38,6 +36,8 @@ import sys from functools import partial import utils.DiapOTB_applications as diapOTBApp +from utils import func_utils + import otbApplication as otb @@ -273,6 +273,11 @@ def gridToInterferogram_Others(dem, master_Image, master_Image_base, slave_Image gain_interfero = param['gain_interfero'] advantage = param['advantage'] + with_interferogram = True + + if "with_interferogram" in param : + with_interferogram = func_utils.str2bool(param['with_interferogram']) + print("doppler0 = " + str(doppler0)) nbramps = 257 @@ -321,6 +326,10 @@ def gridToInterferogram_Others(dem, master_Image, master_Image_base, slave_Image ## SARCoRegistration Application (changeo step) ## print("\n SARCoRegistration Application \n") slave_Image_CoRe = os.path.splitext(slave_Image_base)[0] + "_coregistrated.tif" + if "slave_CoRe_Name" in param : + slave_Image_CoRe = param['slave_CoRe_Name'] + slave_Image_CoRe += "?&gdal:co:TILED=YES" + coRe_path = os.path.join(output_dir, slave_Image_CoRe) coRegistrationApp(insarmaster=master_Image, insarslave=slave_Image, @@ -330,17 +339,18 @@ def gridToInterferogram_Others(dem, master_Image, master_Image_base, slave_Image ## SARRobustInterferogram Application (interf step) ## - print("\n SARRobustInterferogram Application \n") - interferogram_path = os.path.join(output_dir, "interferogram.tif") + if with_interferogram : + print("\n SARRobustInterferogram Application \n") + interferogram_path = os.path.join(output_dir, "interferogram.tif") - interferogramApp(insarmaster=master_Image, - incoregistratedslave=os.path.join(output_dir, slave_Image_CoRe), - insarslave=slave_Image, - ingrid=fine_gird_path, - incartmeanmaster=os.path.join(master_dir, master_cartesian_mean), - outPath=interferogram_path) + interferogramApp(insarmaster=master_Image, + incoregistratedslave=os.path.join(output_dir, slave_Image_CoRe), + insarslave=slave_Image, + ingrid=fine_gird_path, + incartmeanmaster=os.path.join(master_dir, master_cartesian_mean), + outPath=interferogram_path) - list_of_Interferograms.append(interferogram_path) + list_of_Interferograms.append(interferogram_path) # Output lists return list_of_Grids, list_of_Interferograms @@ -392,6 +402,16 @@ def gridToInterferogram_S1IW(dem, master_Image, master_Image_base, slave_Image, esd_NbIter = param['esd_NbIter'] esd_AutoMode = param['esd_AutoMode'] + with_interferogram = True + with_concatenate = True + + if "with_interferogram" in param : + with_interferogram = func_utils.str2bool(param['with_interferogram']) + + if "with_concatenate" in param : + with_concatenate = func_utils.str2bool(param['with_concatenate']) + + nbramps = 256*2*10+1 # define applications functions with the previous parameters @@ -406,12 +426,13 @@ def gridToInterferogram_S1IW(dem, master_Image, master_Image_base, slave_Image, esdApp = partial(diapOTBApp.esd, insar=master_Image) - esdCorrectInterferogram = partial(esd_correct_interferogram, validBurstMaster, validBurstSlave, master_Image_base, slave_Image_base, geoGrid_gridstep_range, geoGrid_gridstep_azimut, gain_interfero, output_dir, master_dir, doppler0) + esdCorrectInterferogram = partial(esd_correct_interferogram, validBurstMaster, validBurstSlave, master_Image_base, slave_Image_base, geoGrid_gridstep_range, geoGrid_gridstep_azimut, gain_interfero, slave_dir, master_dir, doppler0) - # Empty list + # Empty lists list_of_Grids = [] list_of_Interferograms = [] - + 
list_of_slaveCorReramp = [] + list_of_masterReramp = [] for id_loop in range(0, len(validBurstMaster)): burstId = validBurstMaster[id_loop] @@ -419,7 +440,7 @@ def gridToInterferogram_S1IW(dem, master_Image, master_Image_base, slave_Image, print("\n BurstId = " + str(burstId) + "\n") - burst_dir = os.path.join(output_dir, "burst" + str(burstId)) + burst_dir = os.path.join(slave_dir, "burst" + str(burstId)) Master_burst_dir = os.path.join(master_dir, "burst" + str(burstId)) burstM = os.path.splitext(master_Image_base)[0] + "_burst" + str(burstId) + ".tif" @@ -436,7 +457,6 @@ def gridToInterferogram_S1IW(dem, master_Image, master_Image_base, slave_Image, demProj_Slave = "demProj" + "_burst" + str(burstId_slave) + "_Slave.tif" master_cartesian_mean = "CartMeanMaster" + "_burst" + str(burstId) + ".tif" - counter = 0 ## SARFineDeformationGrid Application ## print("\n SARFineDeformationGrid Application \n") @@ -457,12 +477,20 @@ def gridToInterferogram_S1IW(dem, master_Image, master_Image_base, slave_Image, print("\n SARCoRegistration Application \n") slave_Image_CoRe = os.path.splitext(slave_Image_base)[0] + "_burst" + str(burstId) + "_coregistrated.tif" coRe_path = os.path.join(burst_dir, slave_Image_CoRe) + + print("\n Dopller0 :") + dop0Val = doppler0[burstId-validBurstMaster[0]] + print(dop0Val) + print("\n") + coRegistrationApp(insarmaster=os.path.join(Master_burst_dir, burstDerampM), insarslave=os.path.join(burst_dir, burstDerampS), ingrid=fine_gird_path, doppler0=doppler0[burstId-validBurstMaster[0]], outPath=coRe_path) + list_of_masterReramp.append(os.path.join(Master_burst_dir, burstDerampM)) + ## SARDeramp Application (reramp mode) ## print("\n Deramping Application \n") # Slave (CoRegistrated) @@ -470,84 +498,89 @@ def gridToInterferogram_S1IW(dem, master_Image, master_Image_base, slave_Image, deramp_Path = os.path.join(burst_dir, burstCoReRerampS) derampApp(inImg=os.path.join(burst_dir, slave_Image_CoRe), ingrid=fine_gird_path, inslave=os.path.join(burst_dir, burstDerampS), outPath=deramp_Path) + + list_of_slaveCorReramp.append(deramp_Path) ## SARRobustInterferogram Application (interf step) ## - print("\n SARRobustInterferogram Application \n") - interferogram_path = os.path.join(burst_dir, "interferogram" + "_burst" + str(burstId) + "_iter" + str(0) + ".tif") - if esd_NbIter > 0 : - esd_dir = os.path.join(burst_dir, "esd") - - if not os.path.exists(esd_dir): - os.makedirs(esd_dir) - - interferogram_path = os.path.join(esd_dir, "interferogram" + "_burst" + str(burstId) + "_iter" + str(0) + ".tif") - - interferogramApp(insarmaster=os.path.join(Master_burst_dir, burstM), - incoregistratedslave=deramp_Path, insarslave=os.path.join(burst_dir, burstS), - ingrid=fine_gird_path, - incartmeanmaster=os.path.join(Master_burst_dir, master_cartesian_mean), - outPath=interferogram_path) + if with_interferogram : + print("\n SARRobustInterferogram Application \n") + interferogram_path = os.path.join(burst_dir, "interferogram" + "_burst" + str(burstId) + "_iter" + str(0) + ".tif") + if esd_NbIter > 0 : + esd_dir = os.path.join(burst_dir, "esd") - counter = counter + 1 - list_of_Interferograms.append(interferogram_path) + if not os.path.exists(esd_dir): + os.makedirs(esd_dir) + + interferogram_path = os.path.join(esd_dir, "interferogram" + "_burst" + str(burstId) + "_iter" + str(0) + ".tif") + + interferogramApp(insarmaster=os.path.join(Master_burst_dir, burstM), + incoregistratedslave=deramp_Path, insarslave=os.path.join(burst_dir, burstS), + ingrid=fine_gird_path, + 
incartmeanmaster=os.path.join(Master_burst_dir, master_cartesian_mean), + outPath=interferogram_path) + + list_of_Interferograms.append(interferogram_path) ### ESD Loop (if required) #### print ("Beginning of ESD Loop") - - firstBurst = validBurstMaster[0] - lastBurst = validBurstMaster[len(validBurstMaster)-1] - if esd_NbIter > 0 : - - azimut_shift_esd = [] - azimut_shift_esd_global = [0.] * (lastBurst-firstBurst) - - for iter_esd in range(1, esd_NbIter+1) : + # Beginning of ESD Loop (only if with_interferogram) + if with_interferogram : + firstBurst = validBurstMaster[0] + lastBurst = validBurstMaster[len(validBurstMaster)-1] - print ("Iteration number = {number}".format(number=iter_esd)) - - # Clear all azimut shifts - azimut_shift_esd[:] = [] - - for id_loop in range(0, len(validBurstMaster)): - burstId = validBurstMaster[id_loop] - - print("\n BurstId = " + str(burstId) + "\n") - - # Paths - burst_dir = os.path.join(output_dir, "burst" + str(burstId)) - esd_dir = os.path.join(burst_dir, "esd") - esd_path = os.path.join(esd_dir, "esdOut" + "_burst" + str(burstId) + "_iter" + str(0) + ".tif") - - # Check number of burst index (not SARESD for lastBurst) - if burstId < lastBurst : - ## SARESD Application ## - print("\n ESD Application \n") - aziShiftEsd = esdApp(ininterfup=list_of_Interferograms[burstId-firstBurst], - ininterflow=list_of_Interferograms[burstId-firstBurst+1], - burstIndex=burstId, outPath=esd_path) - - - print("aziShift = " + str(aziShiftEsd)) - azimut_shift_esd.append(aziShiftEsd) - - # Correction of Interferogram with the esd shift - # new reference => new list of interferograms - list_of_Interferograms = esdCorrectInterferogram(iter_esd, list_of_Grids, azimut_shift_esd, - azimut_shift_esd_global) - - # Check azimuth shift to end esd loop if automatic mode enabled - if esd_AutoMode : - absMax_aziShift = abs(max(azimut_shift_esd_global, key=abs)) - if absMax_aziShift < 0.01 : - break + if esd_NbIter > 0 : + + azimut_shift_esd = [] + azimut_shift_esd_global = [0.] 
* (lastBurst-firstBurst) + + for iter_esd in range(1, esd_NbIter+1) : + + print ("Iteration number = {number}".format(number=iter_esd)) + + # Clear all azimut shifts + azimut_shift_esd[:] = [] + + for id_loop in range(0, len(validBurstMaster)): + burstId = validBurstMaster[id_loop] + + print("\n BurstId = " + str(burstId) + "\n") + + # Paths + burst_dir = os.path.join(slave_dir, "burst" + str(burstId)) + esd_dir = os.path.join(burst_dir, "esd") + esd_path = os.path.join(esd_dir, "esdOut" + "_burst" + str(burstId) + "_iter" + str(0) + ".tif") + + # Check number of burst index (not SARESD for lastBurst) + if burstId < lastBurst : + ## SARESD Application ## + print("\n ESD Application \n") + aziShiftEsd = esdApp(ininterfup=list_of_Interferograms[burstId-firstBurst], + ininterflow=list_of_Interferograms[burstId-firstBurst+1], + burstIndex=burstId, outPath=esd_path) + + + print("aziShift = " + str(aziShiftEsd)) + azimut_shift_esd.append(aziShiftEsd) + + # Correction of Interferogram with the esd shift + # new reference => new list of interferograms + list_of_Interferograms = esdCorrectInterferogram(iter_esd, list_of_Grids, azimut_shift_esd, + azimut_shift_esd_global) + + # Check azimuth shift to end esd loop if automatic mode enabled + if esd_AutoMode : + absMax_aziShift = abs(max(azimut_shift_esd_global, key=abs)) + if absMax_aziShift < 0.01 : + break # Concatenate interferograms ## SARConcatenateBursts (for interferograms) ## - print("\n SARConcatenateBursts Application (for interferograms) \n") - interferogram_swath = "interferogram_swath.tif" - diapOTBApp.concatenate(list_of_Interferograms, master_Image, firstBurst, os.path.join(output_dir, interferogram_swath)) + if with_interferogram and with_concatenate : + print("\n SARConcatenateBursts Application (for interferograms) \n") + interferogram_swath = "interferogram_swath.tif" + diapOTBApp.concatenate(list_of_Interferograms, master_Image, firstBurst, os.path.join(output_dir, interferogram_swath)) # Output lists - return list_of_Grids, list_of_Interferograms + return list_of_Grids, list_of_Interferograms, list_of_slaveCorReramp, list_of_masterReramp diff --git a/python_src/processings/Ground.py b/python_src/processings/Ground.py index 10ed0490ef25a19e39d4a9fd2bd2be413f32968e..966699d1e44f4ae5db390b47a42527eeea5123ef 100644 --- a/python_src/processings/Ground.py +++ b/python_src/processings/Ground.py @@ -26,11 +26,9 @@ Use the module to launch Ground chain. - main function - -------------- - - demProjectionAndCartesianEstimation - + main function : demProjectionAndCartesianEstimation + ---------------------------------------------------- + """ import os diff --git a/python_src/processings/Metadata_Correction.py b/python_src/processings/Metadata_Correction.py index 05927e85daa7296054c30a22ac4e64132e56b1dd..73363857e8ce5dc7344f85e469ca0299ce6cb998 100644 --- a/python_src/processings/Metadata_Correction.py +++ b/python_src/processings/Metadata_Correction.py @@ -24,12 +24,10 @@ The ``Metadata Correction`` chain ================================= - Use the module to launch Metadata Correction chain (only available for S1 SM or Cosmo. + Use the module to launch Metadata Correction chain (only available for S1 SM or Cosmo). 
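gridToInterferogram_S1IW above now honours two optional switches read from the parameter dictionary, with_interferogram and with_concatenate (both parsed with func_utils.str2bool), and returns four lists instead of two. A hedged usage sketch; the values and the unpacked names are illustrative, and it assumes the gridToInterferogram dispatcher forwards the return of the S1 IW variant:

    # Optional keys; when absent, both default to True.
    param['with_interferogram'] = "yes"    # "no"/"false"/"0" skips SARRobustInterferogram
    param['with_concatenate'] = "true"     # "false" skips SARConcatenateBursts
    # In the Others (SM/Cosmo) mode a custom coregistered-slave name is also accepted:
    # param['slave_CoRe_Name'] = "slave_coreg.tif"   # written with ?&gdal:co:TILED=YES

    grids, interferograms, slave_core_reramp, master_reramp = DInSar.gridToInterferogram(
        dem, master_Image, master_Image_base, slave_Image, slave_Image_base,
        output_dir, output_dir, param, 'S1_IW', output_dir)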
- main function - -------------- - - metadataCorrection + main function : metadataCorrection + ---------------------------------- """ diff --git a/python_src/processings/Pre_Processing.py b/python_src/processings/Pre_Processing.py index 04be2a7fd0452212dd685da9722cabd5f2e471ba..3fd1d5b6978d1006f1c3e43a8384efdcb741cc44 100644 --- a/python_src/processings/Pre_Processing.py +++ b/python_src/processings/Pre_Processing.py @@ -26,11 +26,9 @@ Use the module to launch Pre_Processing chain. - main function - -------------- - - extractToMultilook - + main function : extractToMultilook + ---------------------------------- + """ import os @@ -161,8 +159,10 @@ def extractToMultilook_S1IW(sar_Image, sar_Image_base, param, output_dir): derampApp = partial(diapOTBApp.deramp, reramp="false", shift="false", ingrid="", inslave="", gridsteprange=0, gridstepazimut=0) - # Create an empty list + # Create an empty lists dop0 = [] + bursts = [] + deramp_bursts = [] # Loop on bursts for id_loop in range(0, len(validBurstMaster)): @@ -179,26 +179,30 @@ def extractToMultilook_S1IW(sar_Image, sar_Image_base, param, output_dir): ## SARBurstExtraction Application ## print("\n Burst Extraction Application \n") - burstM = os.path.splitext(sar_Image_base)[0] + "_burst" + str(burstId) + ".tif" + burst = os.path.splitext(sar_Image_base)[0] + "_burst" + str(burstId) + ".tif" - diapOTBApp.burstExtraction(sar_Image, burstId, os.path.join(burst_dir, burstM)) + diapOTBApp.burstExtraction(sar_Image, burstId, os.path.join(burst_dir, burst)) + + bursts.append(os.path.join(burst_dir, burst)) ## SARDeramp Application ## print("\n Deramping Application \n") - burstDerampM = os.path.splitext(sar_Image_base)[0] + "_burst" + str(burstId) + "_deramp" + ".tif" - derampApp(inImg=os.path.join(burst_dir, burstM), outPath=os.path.join(burst_dir, burstDerampM)) + burstDeramp = os.path.splitext(sar_Image_base)[0] + "_burst" + str(burstId) + "_deramp" + ".tif" + derampApp(inImg=os.path.join(burst_dir, burst), outPath=os.path.join(burst_dir, burstDeramp)) + deramp_bursts.append(os.path.join(burst_dir, burstDeramp)) + ## SARDoppler Application ## print("\n Doppler Application \n") - dop0.append(diapOTBApp.doppler0(insar=os.path.join(burst_dir, burstDerampM), + dop0.append(diapOTBApp.doppler0(insar=os.path.join(burst_dir, burstDeramp), outPath=os.path.join(output_dir, dop_file))) ## SARMultiLook Application ## print("\n MultiLook Application \n") sar_Image_ML = os.path.splitext(sar_Image_base)[0] + "_burst" + str(burstId) + "_ml" + str(ml_azimut) + str(ml_range) + ".tif" - multilookApp(inImg=os.path.join(burst_dir, burstDerampM), + multilookApp(inImg=os.path.join(burst_dir, burstDeramp), outPath=os.path.join(burst_dir, sar_Image_ML)) # return the list of Doppler0 values - return dop0 + return dop0, bursts, deramp_bursts diff --git a/python_src/utils/DiapOTB_applications.py b/python_src/utils/DiapOTB_applications.py index 8d57fecbefad739ad206e4080c940d8153bbbd5f..9da1acf96f6cefb8ebcf12c43516f8e804e1af93 100644 --- a/python_src/utils/DiapOTB_applications.py +++ b/python_src/utils/DiapOTB_applications.py @@ -28,22 +28,23 @@ Applications are used with ExecuteAndWriteOutput as execute command => I/O on disk and not integrated for a mode In-Memory. 
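The chains bind the constant arguments of these wrappers once with functools.partial and then call the resulting partials inside the burst loop; the deramp binding shown in Pre_Processing above follows this pattern. A short sketch of the same idiom, with placeholder file names:

    from functools import partial
    import utils.DiapOTB_applications as diapOTBApp

    # Bind the parameters that do not change between bursts...
    derampApp = partial(diapOTBApp.deramp, reramp="false", shift="false",
                        ingrid="", inslave="", gridsteprange=0, gridstepazimut=0)

    # ...then call it per burst with only the varying paths (placeholders here):
    derampApp(inImg="burst0/slave_burst0.tif",
              outPath="burst0/slave_burst0_deramp.tif")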
- Applications list - ----------------- - - burstExtraction - doppler0 - multilook - demProjection - cartesianMeanEstimation - fineGridDeformation - coRegistration - deramp - interferogram - esd - concatenate - - + Applications list : + ------------------- + + extractROI, + burstExtraction, + doppler0, + multilook, + demProjection, + cartesianMeanEstimation, + fineGridDeformation, + coRegistration, + deramp, + interferogram, + esd, + concatenate, + orthorectification, + GridOffset. """ import os @@ -53,6 +54,30 @@ from utils import func_utils as fUtils import otbApplication as otb ##### Define function for OTB applications used into this processing ##### +def extractROI(inImg, outPath, ram="4000") : + """ + Launch ExtractROI application (from OTB). + + This application extracts a ROI from an input image. + This particular case is just conversion from a single band CInt16 to a dual band FLOAT32 (much faster than OTB_DynamicConvert) + + :param inImg: Input SLC Image. + :param outPath: Output burst. + + :type inImg: string + :type outPath: string + + :return: None + """ + app = otb.Registry.CreateApplication("ExtractROI") + app.SetParameterString("ram", ram) + app.SetParameterString("mode","fit") + app.SetParameterString("mode.fit.im", inImg) + app.SetParameterString("in", inImg) + app.SetParameterString("out", outPath) + app.ExecuteAndWriteOutput() + + def burstExtraction(inImg, burstIndex, outPath, allpixels="true", ram="4000"): """ Launch SARBurstExtraction application (from OTB). @@ -374,17 +399,17 @@ def fineGridDeformation(indem, insarmaster, insarslave, inmlmaster, inmlslave, i def coRegistration(insarmaster, insarslave, ingrid, gridsteprange, gridstepazimut, doppler0, nbramps, outPath, ram="4000"): """ Launch SARCoRegistration application (from DiapOTB). - + This application does the coregistration between two SAR images thanks to a deformation grid. - + :param insarmaster: SAR Master Image to extract SAR geometry. :param insarslave: SAR Slave Image to extract SAR geometry. :param ingrid: Input deformation grid. :param gridsteprange: Grid step for range dimension (into SLC/SAR geometry). :param gridstepazimut: Grid step for azimut dimension (into SLC/SAR geometry). :param doppler0: Doppler 0 in azimut for SAR Slave Image. - :param nbramps : Number of ramps for coregistration + :param nbramps: Number of ramps for coregistration :param outPath: Coregistrated slave image into master geometry. :type insarmaster: string @@ -582,3 +607,35 @@ def concatenate(list_of_Interferograms, insar, firstBurst, outPath, ram="4000"): appCon.SetParameterString("ram", ram) appCon.ExecuteAndWriteOutput() +def orthorectification(inImg, spacingxy, hgts_path, geoid_path, outPath, ram="4000"): + """ + Launch OrthoRectification application (from OTB). + + This application allows ortho-rectifying optical and radar images + + :param inImg: Input Image (SAR geometry). + :param spacingxy: Pixel size for ouput image (spacingxy in x and -spacingxy in y). + :param hgts_path: Path to hgt (SRTM) files. + :param geoid_path: Path to geoid. + :param outPath: Output image (ortho geometry). 
+ + :type inImg: string + :type spacingxy: float + :type hgts_path: string + :type geoid_path: string + :type outPath: string + + :return: None + """ + OrthoRectification = otb.Registry.CreateApplication("OrthoRectification") + OrthoRectification.SetParameterString("io.in", inImg) + OrthoRectification.SetParameterString("opt.ram", ram) + OrthoRectification.SetParameterString("map", "epsg") + OrthoRectification.SetParameterString("outputs.spacingx", spacingxy) + OrthoRectification.SetParameterString("outputs.spacingy", str(-abs(float(spacingxy)))) + OrthoRectification.SetParameterString("opt.gridspacing", str(abs(float(spacingxy)*10))) + OrthoRectification.SetParameterString("outputs.mode", 'autosize') + OrthoRectification.SetParameterString("elev.dem", hgts_path) + OrthoRectification.SetParameterString("elev.geoid", geoid_path) + OrthoRectification.SetParameterString("io.out", outPath) + OrthoRectification.ExecuteAndWriteOutput() diff --git a/python_src/utils/func_utils.py b/python_src/utils/func_utils.py index 45149e649784ac3edc53ecfc24276f3b88948223..21b869c133a03869ef3df767d8c75fae97e3a246 100644 --- a/python_src/utils/func_utils.py +++ b/python_src/utils/func_utils.py @@ -21,7 +21,11 @@ # """ -Hello func_utils + func_utils module + ================== + + Pool of functions for logger, checks, image operations ... + """ @@ -33,6 +37,10 @@ import os import sys import argparse import re +import xml.etree.ElementTree as ET +import datetime +import gdal +import ogr import otbApplication as otb @@ -61,7 +69,9 @@ stdout_save = s1 ### Functions for logger ### def init_logger(): - + """ + Init logger with a stream handler. + """ logger.setLevel(logging.INFO) # Create console handler with a high log level (warning level) @@ -73,6 +83,10 @@ def init_logger(): def init_fileLog(output_dir): + """ + Init logger with a file handler (info.log). + the standard ouput is redirected into this file handler. The std was saved into stdout_save. + """ # File handler for the logger # Create file handler which logs even info messages (used as stdout redirection) file_handler = logging.FileHandler(os.path.join(output_dir, 'info.log'), 'a') @@ -93,10 +107,16 @@ def init_fileLog(output_dir): def log(level, msg) : + """ + Transfer the msg to our logger with the required level. + """ logger.log(level, msg) def printOnStd(msg): + """ + Transfer the msg to our stdout => real print on standard output. 
+ """ # Print on stdout_save aka the original/true stdout print(msg, file=stdout_save) @@ -104,7 +124,9 @@ def printOnStd(msg): ### Functions for configFile (json) ### def validate_json(json, schema): - # Read and Load the configuration file + """ + Read and Load the configuration file + """ try: validate(json, schema) except Exception as valid_err: @@ -116,7 +138,9 @@ def validate_json(json, schema): return True def load_configfile(configfile, mode="Others"): - + """ + Read and Load the configuration file (check this file according to a schmema) + """ # Empty dictionary dataConfig = {} @@ -160,6 +184,9 @@ def load_configfile(configfile, mode="Others"): ### Functions for metadata ### def getImageKWL(Img): + """ + Retrieve keyword list from an image thanks to ReadImageInfo + """ # Retrieve some information about our input images with ReadImageInfo application ReadImageInfo = otb.Registry.CreateApplication("ReadImageInfo") ReadImageInfo.SetParameterString("in", Img) @@ -175,6 +202,9 @@ def getImageKWL(Img): def getDEMInformation(dem): + """ + Retrieve DEM information thanks to ReadImageInfo + """ # Get information about DEM (spacing, size ..) ReadDEMInfo = otb.Registry.CreateApplication("ReadImageInfo") ReadDEMInfo.SetParameterString("in", dem) @@ -190,10 +220,219 @@ def getDEMInformation(dem): return dictDEMInformation +### Functions for image/file selection +def get_imgFromSAFE(arg): + """ + Retrive selected image from a SAFE directory + """ + img = [] + for root, dirs, files in os.walk("."): + for i in (i for i in files if i == arg): + img.append(os.path.join(os.getcwd()+root.split(".")[1] + + ".SAFE/measurement/"+(i))) + img = str("".join(img)) + return img + +def get_imgFromDir(arg): + """ + Retrive selected image from a directory (for Cosmo sensor) + """ + img = [] + for root, dirs, files in os.walk("."): + for i in (i for i in files if i == arg): + img.append(os.path.join(os.getcwd()+root.split(".")[1] + + "/"+(i))) + img = str("".join(img)) + return img + + +def image_envelope(inTIF, outSHP): + """ + This method returns a shapefile of an image + """ + app = otb.Registry.CreateApplication("ImageEnvelope") + app.SetParameterString("in", inTIF) + app.SetParameterString("out", outSHP) + app.ExecuteAndWriteOutput() + return outSHP + +def get_master_geometry(inSHP): + """ + This method returns the geometry, of an input georeferenced + shapefile + """ + driver = ogr.GetDriverByName("ESRI Shapefile") + mstr_ds = driver.Open(inSHP, 0) + mstr_layer = mstr_ds.GetLayer() + for master in mstr_layer: + master.GetGeometryRef().Clone() + return master.GetGeometryRef().Clone() + +def check_srtm_coverage(inSHP_Geometry, SRTM): + """ + This method checks and returns the SRTM tiles intersected + """ + driver = ogr.GetDriverByName("ESRI Shapefile") + srtm_ds = driver.Open(SRTM, 0) + srtm_layer = srtm_ds.GetLayer() + needed_srtm_tiles = {} + srtm_tiles = [] + for srtm_tile in srtm_layer: + srtm_footprint = srtm_tile.GetGeometryRef() + intersection = inSHP_Geometry.Intersection(srtm_footprint) + if intersection.GetArea() > 0: + # coverage = intersection.GetArea()/area + srtm_tiles.append(srtm_tile.GetField('FILE')) # ,coverage)) + needed_srtm_tiles = srtm_tiles + return needed_srtm_tiles + +def get_AllTiff(pol, iw="", ext=""): + """ + Get all tiff from an input directory + """ + TiffList = [] + + if not iw == "" : + for root, dirs, files in os.walk("."): + for i in (i for i in files): # if i != inTIF): + if i.endswith(".tiff") and pol == i.split("-")[3]: + if iw == i.split("-")[1]: + TiffList.append(i) + 
else : + if ext == "h5": + for root, dirs, files in os.walk("."): + for i in (i for i in files): # if i != inTIF): + if i.endswith(".h5") and pol == i.split("_")[5]: + TiffList.append(i) + + if not ext == "h5": + for root, dirs, files in os.walk("."): + for i in (i for i in files): # if i != inTIF): + if i.endswith(".tiff") and pol == i.split("-")[3]: + TiffList.append(i) + + return TiffList + +def get_AllFilesWithExt(searchDir, ext) : + """ + Get all into a search directory with a given extension + """ + list_files = [] + for root, dirs, files in os.walk(searchDir): + for i in (i for i in files): + if i.endswith(ext): + list_files.append(i) + + return list_files + + +def get_Date(inTIF, ext=""): + """ + Get all date from an input tiff + """ + if ext == "h5": + inTIF_date = inTIF.split("_")[8][:8] + return inTIF_date + if not ext == "h5": + inTIF_date = inTIF.split("-")[4].split("t")[0] + return inTIF_date + + +def get_Tiff_WithDates(start, end, exclude, TiffList, ext=""): + """ + Get all tiff from an input list and between start and end date + """ + date_list = [] + exclude = list(exclude) + for i in TiffList: + Date = get_Date(i, ext) + if start <= int(Date) and end >= int(Date): + if Date not in exclude: + date_list.append(i) + return date_list + +def get_relative_orbit(manifest): + """ + Get from manifest file, the orbit number + """ + root=ET.parse(manifest) + return int(root.find("metadataSection/metadataObject/metadataWrap/xmlData/{http://www.esa.int/safe/sentinel-1.0}orbitReference/{http://www.esa.int/safe/sentinel-1.0}relativeOrbitNumber").text) + +def build_virutal_raster(master_Image, start_time, end_time, master_date, + srtm_shapefile, hgts_path): + """ + Build a vrt file corresponding to a dem from hgt (SRTM) files. + The hgt file are contained into a global path : hgts_path + """ + # create a vector envelope of the raster + target_dir = os.path.dirname(master_Image) + master_envelope = (target_dir + "/master_envelope.shp") + master_envelope = image_envelope(master_Image, master_envelope) + # Get master geometry + master_footprint = get_master_geometry(master_envelope) + # Create a virtual raster that will be used as DEM + hgts = check_srtm_coverage(master_footprint, srtm_shapefile) + hgts_tuple = [] + for hgt in hgts: + hgts_tuple.append(hgts_path + hgt) + + print("\n Creating virtual raster from intersected hgt files...\n") + dem = gdal.BuildVRT("./output_{}_to_{}_m_{}/dem_scene.vrt".format( + start_time, end_time, master_date), hgts_tuple) + dem = "./output_{}_to_{}_m_{}/dem_scene.vrt".format(start_time, end_time, + master_date) + return dem, target_dir + +def argDates_to_isoDates(start_date, end_date): + """ + Date conversion + """ + if start_date is not None: + iso_start = datetime.datetime.strptime(start_date, + "%Y%m%d").date() + iso_end = datetime.datetime.strptime(end_date, "%Y%m%d").date() + print("\n\n Selected dates: \n From {}, to {} \n".format(iso_start, + iso_end)) + if not start_date: + print("Start time is needeed.") + quit() + return iso_start, iso_end + +### Image operations ### +def extract_roi(inp, out, roi): + """ + Extracts ROI + """ + ds = gdal.Open(inp) + ds = gdal.Translate(out, ds, projWin =roi) + + +def extract_band123(inp, out): + """ + Extracts ROI from a vector image (extracts several bands, 3 in total) + """ + ds = gdal.Open(inp, gdal.GA_ReadOnly) + ds = gdal.Translate(out, ds, bandList=["1", "2", "3"], format="GTiff", + outputType=gdal.GDT_Float32, creationOptions=['TILED=YES']) + return out + + +def silentremove(directory, filename): + try: + 
os.remove(os.path.join(directory, filename)) + except OSError as e: + if e.errno != errno.ENOENT: + raise + except NameError as e: + if e.errno != errno.ENOENT: + raise ### Functions for some checks ### def selectBurst(dictMaster, dictSlave, firstBurst, lastBurst, nbBurstSlave): - + """ + Selects from master bursts, the corresponding slave ones. + This selection uses a metadata parameter (azimuth_anx_time) to do so. + """ key1Burst = "support_data.geom.bursts.burst[" # Initialize the output lists (empty lists) @@ -232,4 +471,50 @@ def selectBurst(dictMaster, dictSlave, firstBurst, lastBurst, nbBurstSlave): return validBurstMaster, validBurstSlave def str2bool(v): + """ + Conversion between a string to a boolean (several ways to say yes into json configuration file). + """ return v.lower() in ("yes", "true", "t", "1") + +def check_ifExist(fileOrPathOrImg): + """ + Check if a file, path or image exists. If not quit the program. + """ + if not os.path.exists(fileOrPathOrImg): + print(fileOrPathOrImg + " does not exists") + quit() + + +def check_burstIndex(burst_index): + """ + Check if the burst_index as string input is correctly sent + """ + burstIndexOk = bool(re.match(r'^[\-0-9]+$', burst_index)) + firstBurst = 0 + lastBurst = 0 + if burstIndexOk: + burstList = burst_index.split('-') + burstList = [int(i) for i in burstList] + if len(burstList) != 2: + burstIndexOk = False + else: + firstBurst = min(burstList) + lastBurst = max(burstList) + if not burstIndexOk: + print("Wrong Burst Index Format (for instance 0-5)") + quit() + return firstBurst, lastBurst, burstList + +### Remove files ### +def silentremove(directory, filename): + """ + Remove filename into directory + """ + try: + os.remove(os.path.join(directory, filename)) + except OSError as e: + if e.errno != errno.ENOENT: + raise + except NameError as e: + if e.errno != errno.ENOENT: + raise
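The small check helpers added at the end of func_utils validate user input before a chain starts. A brief usage sketch with illustrative values:

    from utils import func_utils

    # Burst indexes are expected as "first-last"; anything else quits the program.
    firstBurst, lastBurst, burstList = func_utils.check_burstIndex("0-5")   # 0, 5, [0, 5]

    # str2bool accepts the spellings "yes", "true", "t" and "1" from the json configuration.
    if func_utils.str2bool("True"):
        print("interferogram step enabled")

    # check_ifExist quits if the given file, path or image is missing.
    func_utils.check_ifExist("/tmp")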