Commit da66ad1f authored by Gaëlle USSEGLIO

Merge branch 'logger_python' into 'master'

Logger python

See merge request !22
Parents: 1b46a3ca 4c587f1d
@@ -29,23 +29,40 @@ __date__ = "27/10/2017"
__last_modified__ = "27/10/2017"
# Imports
import sys
import logging
import json
from jsonschema import validate
import os
import sys
import argparse
import h5py
import otbApplication as otb
logger = logging.getLogger(__name__)
# Streamer to our log file
class StreamToLogger(object):
"""
Fake file-like stream object that redirects writes to a logger instance.
"""
def __init__(self, logger, log_level=logging.INFO):
self.logger = logger
self.log_level = log_level
def write(self, buf):
for line in buf.rstrip().splitlines():
self.logger.log(self.log_level, line.rstrip())
def flush(self):
for handler in self.logger.handlers:
handler.flush()
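# A minimal standalone sketch of how StreamToLogger is meant to be used, assuming only the
# class above; "demo" and "demo.log" are placeholder names, not part of the DiapOTB script.
import logging
import sys

logging.basicConfig(filename="demo.log", level=logging.INFO)
demo_logger = logging.getLogger("demo")
sys.stdout = StreamToLogger(demo_logger, logging.INFO)
print("this line is written to demo.log instead of the console")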
def validate_json(json, schema):
try:
validate(json, schema)
except Exception as valid_err:
print("Invalid JSON: {}".format(valid_err))
print("Invalid JSON: {err}".format(err=valid_err))
return False
else:
# Carry out your work
@@ -66,13 +83,27 @@ if __name__ == "__main__":
args = parser.parse_args()
print(args.configfile)
# Logger initialization
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
LogFormatter = logging.Formatter('%(filename)s :: %(levelname)s :: %(message)s')
# Create console handler with a high log level (warning level)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.WARNING)
# Add Handlers
logger.addHandler(stream_handler)
# Read and Load the configuration file
try:
with open(args.configfile, 'r') as f:
dataConfig = json.load(f)
except Exception as err:
print("Impossible to read or load JSON configuration file")
logger.critical("Impossible to read or load JSON configuration file : {err}. Check its path and content.".format(err=args.configfile))
quit()
# Load schema (into DiapOTB install)
@@ -86,13 +117,14 @@ if __name__ == "__main__":
with open(schema_S1SM, "r") as sch:
dataSchema = json.load(sch)
except Exception as err:
print("Impossible to read or load JSON schema file")
logger.critical("Impossible to read or load JSON configuration file : {err}. Check its path and content.".format(err=schema_S1SM))
quit()
# Check Json file
jsonIsValid = validate_json(dataConfig, dataSchema)
if not jsonIsValid :
logger.critical("Error, provided config file does not match requirements")
quit()
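# For context, jsonschema.validate() raises a ValidationError when the document does not
# match the schema; validate_json() above simply turns that exception into a boolean.
# A small sketch with a hypothetical toy schema (not the DiapOTB schema):
from jsonschema import validate
from jsonschema.exceptions import ValidationError

toy_schema = {"type": "object",
              "properties": {"ml_range": {"type": "integer"}},
              "required": ["ml_range"]}
try:
    validate({"ml_range": 3}, toy_schema)        # conforms, returns silently
    validate({"ml_range": "three"}, toy_schema)  # raises ValidationError
except ValidationError as valid_err:
    print("Invalid JSON: {}".format(valid_err.message))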
# Get dictionaries
@@ -140,20 +172,19 @@ if __name__ == "__main__":
ml_geoGrid_range = ml_range
ml_geoGrid_azimut = ml_azimut
gain_interfero = dict_DInSAR['parameter']['Interferogram_gain']
if (geoGrid_threshold < 0) or (geoGrid_threshold > 1) :
print("Wrong Threshold for fine deformation grid")
logger.critical("Error, Wrong Threshold for fine deformation grid")
# Check if images exist
if not os.path.exists(master_Image) :
print(master_Image + " does not exists")
logger.critical("{img} does not exist. Check its path.".format(img=master_Image))
quit()
if not os.path.exists(slave_Image) :
print(slave_Image + " does not exists")
logger.critical("{img} does not exist. Check its path.".format(img=slave_Image))
quit()
if not os.path.exists(dem) :
print(dem + " does not exists")
logger.critical("{img} does not exist. Check its path.".format(img=dem))
quit()
if not os.path.exists(output_dir):
print("The output directory does not exist and will be created")
@@ -161,6 +192,60 @@ if __name__ == "__main__":
else :
print("The output directory exists. Some files can be overwritten")
# File handler for the logger
# Create file handler which logs even info messages (used as stdout redirection)
file_handler = logging.FileHandler(os.path.join(output_dir, 'info.log'), 'a')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(LogFormatter)
# Add Handlers
logger.addHandler(file_handler)
# Redirect stdout and stderr to logger
s1 = StreamToLogger(logger, logging.INFO)
stdout_saveWrite = sys.stdout.write # Save stdout.write to print some info into the console
stdout_saveFlush = sys.stdout.flush # Save stdout.flush to print some info into the console
sys.stdout.write = s1.write # Replace stdout.write by our StreamToLogger
sys.stdout.flush = s1.flush # Replace stdout.flush by our StreamToLogger
stdout_save = s1 # Same object as s1; the assignments below shadow its class methods so prints to stdout_save still reach the console
stdout_save.write = stdout_saveWrite # Restore stdout.write into stdout_save
stdout_save.flush = stdout_saveFlush # Restore stdout.flush into stdout_save
# Recap of input parameters written to info.log
logger.info("########### Input Parameters for the current execution ############## ")
logger.info(" Pre_Processing : ")
logger.info("ml_range : {param}".format(param=ml_range))
logger.info("ml_azimut : {param}".format(param=ml_azimut))
logger.info("ml_gain : {param}".format(param=ml_gain))
logger.info("dop_file : {param}".format(param=dop_file))
# Metadata_Correction
logger.info(" Metadata_Correction : ")
logger.info("activateMetadataCorrection : {param}".format(param=activateMetadataCorrection))
if activateMetadataCorrection :
logger.info("ml_simu_range : {param}".format(param=ml_simu_range))
logger.info("ml_simu_azimut : {param}".format(param=ml_simu_azimut))
logger.info("ml_simu_gain : {param}".format(param=ml_simu_gain))
logger.info("ml_correlSimu_range : {param}".format(param=ml_correlSimu_range))
logger.info("ml_correlSimu_azimut : {param}".format(param=ml_correlSimu_azimut))
logger.info("correlSimu_gridstep_range : {param}".format(param=correlSimu_gridstep_range))
logger.info("correlSimu_gridstep_azimut : {param}".format(param=correlSimu_gridstep_azimut))
logger.info("fine_metadata_file : {param}".format(param=fine_metadata_file))
# DIn_SAR
logger.info(" DIn_SAR : ")
logger.info("geoGrid_gridstep_range : {param}".format(param=geoGrid_gridstep_range))
logger.info("geoGrid_gridstep_azimut : {param}".format(param=geoGrid_gridstep_azimut))
logger.info("geoGrid_threshold : {param}".format(param=geoGrid_threshold))
logger.info("geoGrid_gap : {param}".format(param=geoGrid_gap))
logger.info("ml_geoGrid_range : {param}".format(param=ml_geoGrid_range))
logger.info("ml_geoGrid_azimut : {param}".format(param=ml_geoGrid_azimut))
logger.info("gain_interfero : {param}".format(param=gain_interfero))
logger.info("########### Input Images for the current execution ############## ")
master_Image_base = os.path.basename(master_Image)
slave_Image_base = os.path.basename(slave_Image)
@@ -168,26 +253,27 @@ if __name__ == "__main__":
master_ext = master_Image.split(".")[-1:]
slave_ext = slave_Image.split(".")[-1:]
print ("master_ext = " + master_ext[0] + "\n")
print ("salve_ext = " + slave_ext[0] + "\n")
logger.info("master_ext = {ext}".format(ext=master_ext[0]))
logger.info("slave_ext = {ext}".format(ext=slave_ext[0]))
if master_ext[0] == "h5" :
master_H5 = h5py.File(master_Image, 'r')
lDataSet_master = list(master_H5.keys())
if len(lDataSet_master) != 1 :
print("H5 input files does not contain the expected dataset \n")
logger.critical("Error, H5 input files does not contain the expected dataset")
quit()
if lDataSet_master[0] != "S01" :
print("H5 input files does not contain the expected dataset \n")
logger.critical("Error, H5 input files does not contain the expected dataset")
quit()
master_S01 = dict(master_H5['S01'])
if not 'SBI' in master_S01:
print("H5 input files does not contain the expected dataset \n")
logger.critical("H5 input files does not contain the expected dataset")
quit()
# Change the name of master and slave image to read directly the //S01/SBI
@@ -201,27 +287,32 @@ if __name__ == "__main__":
lDataSet_slave = list(slave_H5.keys())
if len(lDataSet_slave) != 1 :
print("H5 input files does not contain the expected dataset \n")
logger.critical("H5 input files does not contain the expected dataset")
quit()
if lDataSet_slave[0] != "S01" :
print("H5 input files does not contain the expected dataset \n")
logger.critical("H5 input files does not contain the expected dataset")
quit()
slave_S01 = dict(slave_H5['S01'])
if not 'SBI' in slave_S01 :
print("H5 input files does not contain the expected dataset \n")
logger.critical("H5 input files does not contain the expected dataset")
quit()
slave_Image = "HDF5:" + slave_Image + "://S01/SBI"
print("master_Image = " + master_Image + "\n")
print("slave_Image = " + slave_Image + "\n")
logger.info("master_Image = {img}".format(img=master_Image))
logger.info("slave_Image = {img}".format(img=slave_Image))
logger.info("dem : {param}".format(param=dem))
print("\n Beginning of DiapOTB processing \n", file=stdout_save)
logger.info("############ Beginning of DiapOTB processing ##############")
####################### Pre Processing Chain ##########################
######## SARDoppler Application #######
print("\n Doppler Application \n")
print("\n Doppler Application \n", file=stdout_save)
logger.info("Doppler Application")
# Master
dopFile = open(os.path.join(output_dir, dop_file), "w")
dopFile.write("Doppler for master image : " + os.path.basename(master_Image_base)+ "\n")
@@ -244,7 +335,8 @@ if __name__ == "__main__":
####### SARMultiLook Application #######
print("\n MultiLook Application \n")
print("\n MultiLook Application \n", file=stdout_save)
logger.info("MultiLook Application")
# Master
master_Image_ML = os.path.splitext(master_Image_base)[0] + "_ml" + str(ml_azimut) + str(ml_range) + ".tif"
appMultiLookMaster = otb.Registry.CreateApplication("SARMultiLook")
@@ -273,7 +365,8 @@ if __name__ == "__main__":
# ######################## Metadata Correction Chain #############################
if activateMetadataCorrection :
######## SARDEMToAmplitude Application (Simu_SAR step) #######
print("\n SARDEMToAmplitude Application \n")
print("\n SARDEMToAmplitude Application \n", file=stdout_save)
logger.info("SARDEMToAmplitude Application")
amplitude_simu_image = os.path.splitext(master_Image_base)[0] + "_simuSAR" + "_ml" + str(ml_simu_azimut) + str(ml_simu_range) + ".tif"
appDEMToAmplitude = otb.Registry.CreateApplication("SARDEMToAmplitude")
appDEMToAmplitude.SetParameterString("insar", master_Image)
@@ -289,7 +382,8 @@ if __name__ == "__main__":
######## SARCorrelationGrid Application (Correl step) #######
print("\n SARCorrelationGrid Application \n")
print("\n SARCorrelationGrid Application \n", file=stdout_save)
logger.info("SARCorrelationGrid Application")
correl_grid = "correl_simu" + "_gridstep" + str(correlSimu_gridstep_azimut) + str(correlSimu_gridstep_range) + ".tif"
appCorGrid = otb.Registry.CreateApplication("SARCorrelationGrid")
appCorGrid.SetParameterString("inmaster", os.path.join(output_dir, master_Image_ML))
@@ -306,7 +400,8 @@ if __name__ == "__main__":
######## SARFineMetadata Application (Correct_snrt step) #######
print("\n SARFineMetadata Application \n")
print("\n SARFineMetadata Application \n", file=stdout_save)
logger.info("SARFineMetadata Application")
appFineMetadata = otb.Registry.CreateApplication("SARFineMetadata")
appFineMetadata.SetParameterString("insar", master_Image)
appFineMetadata.SetParameterString("ingrid", os.path.join(output_dir, correl_grid))
@@ -322,7 +417,8 @@ if __name__ == "__main__":
######################## DIn_SAR Chain #############################
######## SARDEMProjection Application #######
print("\n SARDEMProjection Application \n")
print("\n SARDEMProjection Application \n", file=stdout_save)
logger.info("SARDEMProjection Application")
# Master
demProj_Master = "demProj_Master.tif"
appDEMProjectionMaster = otb.Registry.CreateApplication("SARDEMProjection")
@@ -349,7 +445,8 @@ if __name__ == "__main__":
######## SARFineDeformationGrid Application (geo_grid step) #######
print("\n SARFineDeformationGrid Application \n")
print("\n SARFineDeformationGrid Application \n", file=stdout_save)
logger.info("SARFineDeformationGrid Application")
fine_grid = "fineDeformationGrid.tif"
appFineDeformationGrid = otb.Registry.CreateApplication("SARFineDeformationGrid")
appFineDeformationGrid.SetParameterString("indem", dem)
@@ -374,7 +471,8 @@ if __name__ == "__main__":
######## SARCoRegistration Application (changeo step) #######
print("\n SARCoRegistration Application \n")
print("\n SARCoRegistration Application \n", file=stdout_save)
logger.info("SARCoRegistration Application")
slave_Image_CoRe = os.path.splitext(slave_Image_base)[0] + "_coregistrated.tif"
appCoRegistration = otb.Registry.CreateApplication("SARCoRegistration")
appCoRegistration.SetParameterString("insarmaster", master_Image)
@@ -393,6 +491,7 @@ if __name__ == "__main__":
######## SARCartesianMeanEstimation Application #######
print("\n SARCartesianMeanEstimation Application \n")
logger.info("SARCartesianMeanEstimation Application")
# Master
master_cartesian_mean = "CartMeanMaster.tif"
master_cartesianperline_mean = "CartMeanPerLineMaster.tif"
@@ -410,7 +509,8 @@ if __name__ == "__main__":
######## SARRobustInterferogram Application (interf step) #######
print("\n SARRobustInterferogram Application \n")
print("\n SARRobustInterferogram Application \n", file=stdout_save)
logger.info("SARRobustInterferogram Application")
interferogram = "interferogram.tif"
appInterferogram = otb.Registry.CreateApplication("SARRobustInterferogram")
appInterferogram.SetParameterString("insarmaster", master_Image)
@@ -426,3 +526,7 @@ if __name__ == "__main__":
appInterferogram.SetParameterString("ram", "4000")
appInterferogram.SetParameterString("out", os.path.join(output_dir, interferogram))
appInterferogram.ExecuteAndWriteOutput()
print("\n End of DiapOTB processing \n", file=stdout_save)
logger.info("############# End of DiapOTB processing ##############")