Skip to content
Snippets Groups Projects
Commit 7664dee5 authored by Gaëlle USSEGLIO's avatar Gaëlle USSEGLIO
Browse files

ENH : JSON configuration file for S1 SM chain

parent 98a3c303
No related branches found
No related tags found
1 merge request!9Esd with json
{
"$schema": "http://json-schema.org/schema#",
"title": "JSON SCHEMA for DiapOTB S1 SM chain",
"description": "JSON organization for the script diapOTB.py",
"type": "object",
"allOf": [{"required": ["Global", "Pre_Processing", "Metadata_Correction", "DIn_SAR"]}],
"properties":
{
"Global":
{
"type": "object",
"properties":
{
"in":
{
"type": "object",
"required": ["Master_Image_Path", "Slave_Image_Path", "DEM_Path"],
"additionalProperties": false,
"properties": {"Master_Image_Path": {"type": "string"},
"Slave_Image_Path": {"type": "string"},
"DEM_Path": {"type": "string"}}
},
"out":
{
"type": "object",
"required": ["output_dir"],
"additionalProperties": false,
"properties": {"output_dir": {"type": "string"}}
}
},
"additionalProperties": false,
"required": ["in", "out"]
},
"Pre_Processing":
{
"type": "object",
"properties":
{
"out":
{
"type": "object",
"required": ["doppler_file"],
"additionalProperties": false,
"properties": {"doppler_file": {"type": "string"}}
},
"parameter":
{
"type": "object",
"required": ["ML_range", "ML_azimut", "ML_gain"],
"additionalProperties": false,
"properties": {"ML_range": {"type": "number"},
"ML_azimut": {"type": "number"},
"ML_gain": {"type": "number"}}
}
},
"additionalProperties": false,
"required": ["out", "parameter"]
},
"Metadata_Correction":
{
"type": "object",
"properties":
{
"out":
{
"type": "object",
"required": ["fine_metadata_file"],
"additionalProperties": false,
"properties": {"fine_metadata_file": {"type": "string"}}
},
"parameter":
{
"type": "object",
"required": ["activate", "GridStep_range", "GridStep_azimut"],
"additionalProperties": false,
"properties": {"activate": {"type": "boolean"},
"GridStep_range": {"type": "number"},
"GridStep_azimut": {"type": "number"}}
}
},
"additionalProperties": false,
"required": ["out", "parameter"]
},
"DIn_SAR":
{
"type": "object",
"properties":
{
"parameter":
{
"type": "object",
"required": ["GridStep_range", "GridStep_azimut", "Grid_Threshold", "Grid_Gap",
"Interferogram_gain", "Interferogram_ortho"],
"additionalProperties": false,
"properties": {"GridStep_range": {"type": "number"},
"GridStep_azimut": {"type": "number"},
"Grid_Threshold": {"type": "number"},
"Grid_Gap": {"type": "number"},
"Interferogram_gain": {"type": "number"},
"Interferogram_ortho": {"type": "boolean"}
}
}
},
"additionalProperties": false,
"required": ["parameter"]
}
}
}
...@@ -30,10 +30,8 @@ __last_modified__ = "27/10/2017" ...@@ -30,10 +30,8 @@ __last_modified__ = "27/10/2017"
# Imports # Imports
import logging import logging
try: import json
from ConfigParser import SafeConfigParser as ConfigParser from jsonschema import validate
except ImportError:
from configparser import ConfigParser
import os import os
import sys import sys
import argparse import argparse
...@@ -43,53 +41,16 @@ import otbApplication as otb ...@@ -43,53 +41,16 @@ import otbApplication as otb
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def validate_json(json_data, schema):
    """Validate a loaded JSON document against a JSON schema.

    Parameters
    ----------
    json_data : dict
        The JSON document (as returned by ``json.load``) to check.
        NOTE(review): this parameter was named ``json`` originally, which
        shadowed the stdlib ``json`` module imported by this script; renamed.
        All visible call sites pass arguments positionally, so the rename
        is backward-compatible.
    schema : dict
        The JSON schema to validate against.

    Returns
    -------
    bool
        True when the document conforms to the schema, False otherwise.
        A "Valid JSON" / "Invalid JSON: ..." message is printed either way.
    """
    try:
        # jsonschema.validate raises on the first violation found.
        validate(json_data, schema)
    except Exception as valid_err:
        # Broad catch kept on purpose: jsonschema raises ValidationError for
        # non-conforming documents and SchemaError for a bad schema; both
        # must result in False here rather than aborting the script.
        print("Invalid JSON: {}".format(valid_err))
        return False
    else:
        print("Valid JSON")
        return True
# Check the configuration file
def ConfigCheck(config):
    """Return True when every expected section/option exists in *config*.

    Walks the four mandatory sections and their required options, printing
    one message per missing item instead of stopping at the first problem,
    so the user sees everything that needs fixing in a single run.
    """
    expected = {
        'Global': ["Master_Image_Path", "Slave_Image_Path", "DEM_Path",
                   "output_dir"],
        'Pre_Processing': ["ML_range", "ML_azimut", "ML_gain",
                           "doppler_file"],
        'Metadata_Correction': ["activate", "GridStep_range",
                                "GridStep_azimut", "fine_metadata_file"],
        'DIn_SAR': ["GridStep_range", "GridStep_azimut", "Grid_Threshold",
                    "Grid_Gap", "Interferogram_gain"],
    }

    all_present = True
    # Explicit ordering keeps the diagnostic output deterministic.
    for section in ["Global", "Pre_Processing", "Metadata_Correction",
                    "DIn_SAR"]:
        if not config.has_section(section):
            print(section + " is missing into the configuration file")
            all_present = False
            continue
        for option in expected[section]:
            if not config.has_option(section, option):
                print(option + " is missing for the section " + section
                      + " of the configuration file")
                all_present = False
    return all_present
# string to bool # string to bool
def str2bool(v): def str2bool(v):
...@@ -105,45 +66,74 @@ if __name__ == "__main__": ...@@ -105,45 +66,74 @@ if __name__ == "__main__":
args = parser.parse_args() args = parser.parse_args()
print(args.configfile) print(args.configfile)
# Read the configuration file # Read and Load the configuration file
Config = ConfigParser() try:
Config.read(args.configfile) with open(args.configfile, 'r') as f:
configOk = ConfigCheck(Config) dataConfig = json.load(f)
# If some values are missing, quit the application except Exception as err:
if not configOk : print("Impossible to read or load JSON configuration file")
quit() quit()
# Load schema (into DiapOTB install)
diapOTB_install = os.getenv('DIAPOTB_INSTALL')
if diapOTB_install is not None and os.path.exists(diapOTB_install):
schemas_path = os.path.join(diapOTB_install, "json_schemas")
if os.path.exists(schemas_path):
schema_S1SM = os.path.join(schemas_path, "schema_S1SM.json")
try:
with open(schema_S1SM, "r") as sch:
dataSchema = json.load(sch)
except Exception as err:
print("Impossible to read or load JSON schema file")
quit()
# Check Json file
jsonIsValid = validate_json(dataConfig, dataSchema)
if not jsonIsValid :
quit()
# Get dictionaries
dict_Global = dataConfig['Global']
dict_PreProcessing = dataConfig['Pre_Processing']
dict_Metadata_Correction = dataConfig['Metadata_Correction']
dict_DInSAR = dataConfig['DIn_SAR']
# Get elements from configuration file # Get elements from configuration file
# Global # Global
master_Image = ConfigSectionMap(Config, "Global")['master_image_path'] master_Image = dict_Global['in']['Master_Image_Path']
slave_Image = ConfigSectionMap(Config, "Global")['slave_image_path'] slave_Image = dict_Global['in']['Slave_Image_Path']
dem = ConfigSectionMap(Config, "Global")['dem_path'] dem = dict_Global['in']['DEM_Path']
output_dir = ConfigSectionMap(Config, "Global")['output_dir'] output_dir = dict_Global['out']['output_dir']
# Pre_Processing # Pre_Processing
ml_range = int(ConfigSectionMap(Config, "Pre_Processing")['ml_range']) ml_range = dict_PreProcessing['parameter']['ML_range']
ml_azimut = int(ConfigSectionMap(Config, "Pre_Processing")['ml_azimut']) ml_azimut = dict_PreProcessing['parameter']['ML_azimut']
ml_gain = float(ConfigSectionMap(Config, "Pre_Processing")['ml_gain']) ml_gain = dict_PreProcessing['parameter']['ML_gain']
dop_file = ConfigSectionMap(Config, "Pre_Processing")['doppler_file'] dop_file = dict_PreProcessing['out']['doppler_file']
# Metadata_Correction # Metadata_Correction
activateMetadataCorrection = str2bool(ConfigSectionMap(Config, "Metadata_Correction")['activate']) activateMetadataCorrection = dict_Metadata_Correction['parameter']['activate']
ml_simu_range = ml_range ml_simu_range = ml_range
ml_simu_azimut = ml_azimut ml_simu_azimut = ml_azimut
ml_simu_gain = 1. ml_simu_gain = 1.
ml_correlSimu_range = ml_range ml_correlSimu_range = ml_range
ml_correlSimu_azimut = ml_azimut ml_correlSimu_azimut = ml_azimut
correlSimu_gridstep_range = int(ConfigSectionMap(Config, "Metadata_Correction")['gridstep_range']) correlSimu_gridstep_range = dict_Metadata_Correction['parameter']['GridStep_range']
correlSimu_gridstep_azimut = int(ConfigSectionMap(Config, "Metadata_Correction")['gridstep_azimut']) correlSimu_gridstep_azimut = dict_Metadata_Correction['parameter']['GridStep_azimut']
fine_metadata_file = ConfigSectionMap(Config, "Metadata_Correction")['fine_metadata_file'] fine_metadata_file = dict_Metadata_Correction['out']['fine_metadata_file']
# DIn_SAR # DIn_SAR
geoGrid_gridstep_range = int(ConfigSectionMap(Config, "DIn_SAR")['gridstep_range']) geoGrid_gridstep_range = dict_DInSAR['parameter']['GridStep_range']
geoGrid_gridstep_azimut = int(ConfigSectionMap(Config, "DIn_SAR")['gridstep_azimut']) geoGrid_gridstep_azimut = dict_DInSAR['parameter']['GridStep_azimut']
geoGrid_threshold = float(ConfigSectionMap(Config, "DIn_SAR")['grid_threshold']) geoGrid_threshold = dict_DInSAR['parameter']['Grid_Threshold']
geoGrid_gap = float(ConfigSectionMap(Config, "DIn_SAR")['grid_gap']) geoGrid_gap = dict_DInSAR['parameter']['Grid_Gap']
ml_geoGrid_range = ml_range ml_geoGrid_range = ml_range
ml_geoGrid_azimut = ml_azimut ml_geoGrid_azimut = ml_azimut
gain_interfero = float(ConfigSectionMap(Config, "DIn_SAR")['interferogram_gain']) gain_interfero = dict_DInSAR['parameter']['Interferogram_gain']
activateOrthoInterferogram = str2bool(ConfigSectionMap(Config, "DIn_SAR")['interferogram_ortho']) activateOrthoInterferogram = dict_DInSAR['parameter']['Interferogram_ortho']
if (geoGrid_threshold < 0) or (geoGrid_threshold > 1) : if (geoGrid_threshold < 0) or (geoGrid_threshold > 1) :
print("Wrong Threshold for fine deformation grid") print("Wrong Threshold for fine deformation grid")
......
...@@ -79,7 +79,7 @@ if __name__ == "__main__": ...@@ -79,7 +79,7 @@ if __name__ == "__main__":
diapOTB_install = os.getenv('DIAPOTB_INSTALL') diapOTB_install = os.getenv('DIAPOTB_INSTALL')
if diapOTB_install is not None and os.path.exists(diapOTB_install): if diapOTB_install is not None and os.path.exists(diapOTB_install):
schemas_path = os.path.join(diapOTB_install, "json_schemas") schemas_path = os.path.join(diapOTB_install, "json_schemas")
if (schemas_path): if os.path.exists(schemas_path):
schema_S1IW = os.path.join(schemas_path, "schema_S1IW.json") schema_S1IW = os.path.join(schemas_path, "schema_S1IW.json")
try: try:
......
{
"Global": {
"in":
{
"Master_Image_Path": "image_1.tif",
"Slave_Image_Path": "image_2.tif",
"DEM_Path": "./DEM.hgt"
},
"out":
{
"output_dir": "./output_diapOTB"
}
},
"Pre_Processing": {
"out":
{
"doppler_file": "dop0.txt"
},
"parameter":
{
"ML_range": 3,
"ML_azimut": 3,
"ML_gain": 0.1
}
},
"Metadata_Correction":
{
"out":
{
"fine_metadata_file": "fine_metadata.txt"
},
"parameter":
{
"activate": false,
"GridStep_range": 150,
"GridStep_azimut": 150
}
},
"DIn_SAR":
{
"parameter":
{
"GridStep_range": 150,
"GridStep_azimut": 150,
"Grid_Threshold": 0.3,
"Grid_Gap": 0.7,
"Interferogram_gain": 0.1,
"Interferogram_ortho": false
}
}
}
...@@ -37,7 +37,7 @@ ...@@ -37,7 +37,7 @@
"GridStep_azimut": 150, "GridStep_azimut": 150,
"Grid_Threshold": 0.3, "Grid_Threshold": 0.3,
"Grid_Gap": 0.7, "Grid_Gap": 0.7,
"Interferogram_gain": 0.0, "Interferogram_gain": 0.1,
"Interferogram_ortho": false "Interferogram_ortho": false
} }
} }
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment