diff --git a/orchestrator/algorithms/geometric_flags/geometric_flags.py b/orchestrator/algorithms/geometric_flags/geometric_flags.py
index 60051016ed4132088932a2a55d01894fe274fcd3..4798885cee239f3f1c048407c36097a6afa0e2d2 100644
--- a/orchestrator/algorithms/geometric_flags/geometric_flags.py
+++ b/orchestrator/algorithms/geometric_flags/geometric_flags.py
@@ -36,7 +36,7 @@ from __future__ import absolute_import
 from math import pi, acos, sin, cos
 
 from orchestrator.common.logger import maja_logging
-LOGGER = maja_logging.configure_logger()
+LOGGER = maja_logging.configure_logger(__name__)
 
 
 class GeometricFlagsGenerator(object):
diff --git a/orchestrator/common/dem/dem_base.py b/orchestrator/common/dem/dem_base.py
index 18a5cce0a4f15e636eae8e9f01fa81e5e4465635..b41f02612bb4d9c5df1e37ac3838a73954e58771 100644
--- a/orchestrator/common/dem/dem_base.py
+++ b/orchestrator/common/dem/dem_base.py
@@ -37,7 +37,7 @@ from orchestrator.cots.otb.otb_app_handler import OtbAppHandler
 from orchestrator.common.earth_explorer.earth_explorer_xml_file_handler import EarthExplorerXMLFileHandler
 from orchestrator.cots.otb.algorithms.otb_band_math import band_math
 from orchestrator.cots.otb.algorithms.otb_stats import stats
-from pyqtis.cots.gdal.gdal_metadata import GdalMetadata
+from orchestrator.cots.gdal.gdal_dataset_info import GdalDatasetInfo
 from orchestrator.common.maja_common import Area
 import os
 LOGGER = configure_logger(__name__)
@@ -159,7 +159,7 @@ class DEMBase(object):
             LOGGER.debug("Starting multiply " + self.__SLCInternal + " * " + str(self._coeff))
             self.SLC = os.path.join(working_dir, "Mul_" + os.path.basename(self.__SLCInternal))
             band_math([self.__SLCInternal], self._expr + str(self._coeff), output_image=self.SLC)
-            mtdat = GdalMetadata(self.__SLCInternal)
+            mtdat = GdalDatasetInfo(self.__SLCInternal)
             self.CoarseArea = Area()
             self.CoarseArea.size = mtdat.size
             self.CoarseArea.origin = mtdat.origin
@@ -183,7 +183,7 @@ class DEMBase(object):
                 LOGGER.debug("Starting multiply " + self.__SLPListInternal[resol] + " * " + str(self._coeff))
                 tmp = os.path.join(working_dir, "Mul_" + os.path.basename(self.__SLPListInternal[resol]))
                 band_math([self.__SLPListInternal[resol]], self._expr + str(self._coeff), output_image=tmp)
-                mtdat = GdalMetadata(self.__SLPListInternal[resol])
+                mtdat = GdalDatasetInfo(self.__SLPListInternal[resol])
                 l2area = Area()
                 l2area.size = mtdat.size
                 l2area.origin = mtdat.origin
diff --git a/orchestrator/common/file_utils.py b/orchestrator/common/file_utils.py
index 06d7ce02c51f815e52978f0cbddc9e5e875b7038..5f7350eb09452b363bed9c18a62c951a3a398be7 100644
--- a/orchestrator/common/file_utils.py
+++ b/orchestrator/common/file_utils.py
@@ -31,8 +31,9 @@ It defines classes_and_methods
 ###################################################################################################
 """
 from __future__ import absolute_import
-from pyqtis.Utilities.system_command_executor import SystemCommandExecutor
 from orchestrator.common.maja_exceptions import MajaProcessingError
+from orchestrator.common.maja_utils import get_test_mode
+from orchestrator.common.system_utils import launch_command
 import glob
 import os, errno
 import shutil
@@ -92,13 +93,20 @@ def fully_resolve(a_path, check_existence=False):
 # Copy a file to the directory
 def copy_file_to_directory(sourceFilename, destinationDir):
     destinationFilename = os.path.join(destinationDir, os.path.basename(sourceFilename))
-    shutil.copyfile(sourceFilename, destinationFilename)
-    return destinationFilename;
+    if not get_test_mode():
+        shutil.copyfile(sourceFilename, destinationFilename)
+    else:
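+        # Test mode: only "touch" the destination file (create/update timestamp) instead of copying its content.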
+        with open(destinationFilename, 'a'):
+            os.utime(destinationFilename, None)
+    return destinationFilename
 
 
 def copy_file(source, dest):
-    shutil.copyfile(source, dest)
-
+    if not get_test_mode():
+        shutil.copyfile(source, dest)
+    else:
+        with open(dest, 'a'):
+            os.utime(dest, None)
 
 def copy_tree(src, dst):
     files = os.listdir(src)
@@ -123,11 +131,9 @@ def uncompress_file(src, dst):
     command_line = "tar -xjf " + src + " -C " + dst
 
     LOGGER.info("Uncompress %s", command_line)
-    # TODO: this call is deprecated
-    executor = SystemCommandExecutor()
 
     # TODO: TBC use append env or manage entire env for each COTS ?
-    status, output_std, output_err = executor.execute(command_line)
+    status = launch_command(command_line)
 
     # TODO: TBC MOve status to post ?
     # TODO: see status management by system command executor
diff --git a/orchestrator/common/logger/maja_logging.json b/orchestrator/common/logger/maja_logging.json
index b67851ff3c3d15d83d390c567fabb419f4484187..1bd9b2791a30d01761be60251eeaac066f8d7e4e 100644
--- a/orchestrator/common/logger/maja_logging.json
+++ b/orchestrator/common/logger/maja_logging.json
@@ -39,11 +39,6 @@
             "handlers": ["console", "file_handler"],
             "propagate": false
         },
-        "kalideos": {
-            "level": "DEBUG",
-            "handlers": ["console", "file_handler"],
-            "propagate": false
-        },
         "STDOUT": {
             "level": "DEBUG",
             "handlers": ["console", "file_handler"],
diff --git a/orchestrator/common/logger/maja_logging.py b/orchestrator/common/logger/maja_logging.py
index e4a1c900638d2057dbe8183a45900f24f6da1feb..1885afdde1c373b08eaf7442d32e6e05c21ad0ce 100644
--- a/orchestrator/common/logger/maja_logging.py
+++ b/orchestrator/common/logger/maja_logging.py
@@ -31,20 +31,56 @@ It defines classes_and_methods
 ###################################################################################################
 """
 from __future__ import absolute_import
+import logging.config
+import logging
+import sys
 
-import os
-from pyqtis.Utilities.smart_logging import smart_logging
 
-CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'maja_logging.json')
+def configure_logger(name, std=sys.stdout, level=logging.DEBUG):
+    """Return the shared 'MAJA' logger, attaching a stream handler on first call.
+
+    :param name: Caller module name (kept for API compatibility; all modules share the 'MAJA' logger)
+    :param std: Stream the handler writes to (stdout by default)
+    :param level: Logging level applied to the logger and its handler
+    """
+    log = logging.getLogger('MAJA')
+    if len(log.handlers) == 0:
+        log.setLevel(level)
+        handler = logging.StreamHandler(std)
+        handler.setLevel(level)
+        handler.setFormatter(
+            logging.Formatter('%(asctime)s %(process)d %(levelname)s %(name)s %(funcName)s:%(lineno)d %(message)s',
+                              "%Y-%m-%d %H:%M:%S"))
+        log.addHandler(handler)
+    # Optional stdout/stderr redirection through StreamToLogger, kept disabled for now:
+    # stdout_logger = logging.getLogger('STDOUT')
+    # sys.stdout = StreamToLogger(stdout_logger, log_level=logging.INFO)
+    # stderr_logger = logging.getLogger('STDERR')
+    # sys.stderr = StreamToLogger(stderr_logger, log_level=logging.ERROR)
+    return log
+
 
+class StreamToLogger(object):
+    """Fake file-like stream object that redirects writes to a logger instance.
 
-def configure_logger(name=None):
-    """Return a logger with the given name if any, or the root logger
-    :param name: Name of the logger
-    :type name: basestring
-    :return: A logger
-    :rtype: logger
+    Code from
+    `electricmonk <http://www.electricmonk.nl/log/2011/08/14/redirect-\
+    stdout-and-stderr-to-a-logger-in-python/>`_
     """
-    smart_logging.setup_logging(CONFIG_FILE)
+    def __init__(self, logger, log_level=logging.INFO):
+        self.logger = logger
+        self.log_level = log_level
+        self.linebuf = ''
+
+    def write(self, buf):
+        """
+        :param buf: The buffer to write
+        :type buf: str
+        """
+        for line in buf.splitlines():
+            line_stripped = line.rstrip()
+            if isinstance(line_stripped, bytes):
+                line_stripped = line_stripped.decode('UTF-8')
+            if len(line_stripped) > 0:  # Log only non empty lines
+                self.logger.log(self.log_level, line_stripped)
+
+
 
-    return smart_logging.get_logger(name)
diff --git a/orchestrator/common/maja_utils.py b/orchestrator/common/maja_utils.py
index dc0ab7e4a2cff2a95e67b7b134d32b3437b3e04f..0791a7597b5af07139e11bee080e265a26002b9b 100644
--- a/orchestrator/common/maja_utils.py
+++ b/orchestrator/common/maja_utils.py
@@ -58,3 +58,10 @@ def get_integer_jday_from_filename(filename):
         raise MajaDataException("Filename " + filename + " does not contains a date")
     indate = date_utils.getDateTimeFromYYYYMMDD(number_list[-1])
     return date_utils.getJulianDayAsInt(indate)
+
+
+def get_test_mode():
+    """Return True when the MAJA_ORCH_TEST_MODE environment variable is set (orchestrator test mode)."""
+    return "MAJA_ORCH_TEST_MODE" in os.environ
\ No newline at end of file
diff --git a/orchestrator/common/system_utils.py b/orchestrator/common/system_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e2a6b19d01d2a775ce31baf9dddae5fb54fc783
--- /dev/null
+++ b/orchestrator/common/system_utils.py
@@ -0,0 +1,313 @@
+#!/usr/bin/python
+# -*- coding: iso-8859-15 -*-
+# *
+# * $Id:$
+# * 
+# * HISTORIQUE
+# *
+# * MOD : VERSION : 1.0 : Creation
+# *
+# * FIN-HISTORIQUE
+# *
+# *
+
+
+import shlex
+import subprocess
+import sys
+from contextlib import contextmanager
+from threading import Thread
+
+from orchestrator.common.logger.maja_logging import configure_logger
+LOGGER = configure_logger(__name__)
+
+try:
+    from queue import Queue, Empty
+except ImportError:
+    from Queue import Queue, Empty  # python 2.x
+
+# Field indices (PID, PPID) in the split "ps -ef" output used to walk the process tree
+PROCESS_ID_FIELD = 1
+FATHER_PROCESS_ID_FIELD = 2
+
+"""
+Execute an external command
+"""
+
+@contextmanager
+def log_running_task(task_name):
+    LOGGER.info("Starting task: %s" % (task_name) )
+    yield
+    LOGGER.info("Finished task %s" % (task_name) )
+
+
+def launch_command(command):
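+    """Run an external command, logging its stdout and stderr line by line.
+
+    Returns the process exit code, or the errno when the process could not be started.
+    """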
+    with log_running_task(command):
+        # Normalise the command to a single string (Popen is called with shell=True below)
+        if isinstance(command, str):
+            if sys.platform == "win32":
+                lex = shlex.shlex(command, posix=False)
+            else:
+                lex = shlex.shlex(command)
+            lex.quotes = '"'
+            lex.whitespace_split = True
+            cmd = " ".join([str(x) for x in lex])
+        else:
+            cmd = " ".join([str(x) for x in command])
+
+
+        # Indeed launch command
+        def enqueue_output(out, queue):
+            for line in iter(out.readline, b''):
+                queue.put(line)
+            out.close()
+
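+        # stdout and stderr are read by daemon threads into queues so the polling
+        # loop below can log both streams without blocking on either pipe.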
+        err_queue = Queue()
+        out_queue = Queue()
+        try:
+            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+            err_t = Thread(target=enqueue_output,args=(p.stderr,err_queue))
+            out_t = Thread(target=enqueue_output, args=(p.stdout, out_queue))
+            out_t.daemon = True
+            err_t.daemon = True
+            out_t.start()
+            err_t.start()
+        except OSError as err:
+            LOGGER.warning('An error occurred with command: %s with message %s', cmd, err)
+            return err.errno
+
+        while True:
+            try:
+                err_line = err_queue.get_nowait()
+            except Empty:
+                err_line = None
+            try:
+                out_line = out_queue.get_nowait()
+            except Empty:
+                out_line = None
+            # Test if process is active
+            status = p.poll()
+            if status is not None:
+                if (out_line is None) and (err_line is None):
+                    LOGGER.debug("End of command")
+                    break
+            if out_line is not None:
+                LOGGER.info(out_line)
+            if err_line is not None:
+                LOGGER.error(err_line)
+
+        return p.returncode
+
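+# Minimal usage sketch (illustrative):
+#   status = launch_command("tar -xjf archive.tar.bz2 -C /tmp")
+#   if status != 0:
+#       LOGGER.error("command failed with exit code %s", status)
+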
+# Exception definitions
+class DiskSpaceUsedException(Exception):
+
+    def __init__(self, strMsg):
+        self._strMsg = strMsg
+
+    def __str__(self):
+        return 'DiskSpaceUsedException : %s' % (str(self._strMsg))
+
+
+class MemoryUsedByProcessException(Exception):
+
+    def __init__(self, strMsg):
+        self._strMsg = strMsg
+
+    def __str__(self):
+        return 'MemoryUsedByProcessException: %s' % (str(self._strMsg))
+
+
+class UserElapsedTimeException(Exception):
+    """Raised by userElapsedTime when the elapsed time of a process cannot be read."""
+
+    def __init__(self, strMsg):
+        self._strMsg = strMsg
+
+    def __str__(self):
+        return 'UserElapsedTimeException: %s' % (str(self._strMsg))
+
+# Returns the size of a directory, in megabytes (via "du -sk")
+def diskSpaceUsed(workingDirectory):
+    
+    resultInMoctets = 0
+    
+    try:
+        p = subprocess.Popen("/usr/bin/du -sk %s" % workingDirectory, shell=True, stdout=subprocess.PIPE)
+        out = p.stdout.read()
+        resultInKoctets = out.split()
+        p.wait()
+        resultInMoctets = float(resultInKoctets[0])/1024.0
+    
+    except Exception as e:
+        strError = str(e).split("\n")
+        raise DiskSpaceUsedException(strError[0])
+    
+    return resultInMoctets
+
+# Returns the memory used by a process and its child processes, in megabytes
+def memoryUsedByProcessAndChildrenProcesses(idProcess):
+    
+    tot_mem_Mo_sum = 0
+    
+    try:
+        # Retrieve the list of sub-processes of the process under study
+        processList = obtainSubProcessesFromMainProcess(idProcess)
+        tot_mem_Mo_sum = 0
+        
+        for procNum in processList :
+            tot_mem_tmp = memoryUsedByProcess2(procNum)
+            tot_mem_Mo_sum += tot_mem_tmp  
+        
+    except Exception as e:
+        strError = str(e).split("\n")
+        raise MemoryUsedByProcessException(strError[0])
+    
+    return tot_mem_Mo_sum
+
+# Returns the memory used by the child processes of a process (the process itself excluded), in megabytes
+def memoryUsedByChildrenProcesses(idProcess):
+    
+    tot_mem_Mo_sum = 0
+    
+    try:
+        # Retrieve the list of sub-processes of the process under study
+        processList = obtainSubProcessesFromMainProcess(idProcess)
+        tot_mem_Mo_sum = 0
+        
+        for procNum in processList :
+          if (procNum != idProcess):
+            tot_mem_tmp = memoryUsedByProcess2(procNum)
+            tot_mem_Mo_sum += tot_mem_tmp  
+        
+    except Exception as e:
+        strError = str(e).split("\n")
+        raise MemoryUsedByProcessException(strError[0])
+    
+    return tot_mem_Mo_sum
+
+
+# Returns the memory used by a single process, in megabytes (read from /proc/<pid>/maps)
+def memoryUsedByProcess(idProcess):
+    
+    file_name = '/proc/' + str(idProcess) + '/maps'
+        
+    try:
+        fd = open(file_name)
+        tot_mem = 0
+        
+        for l in fd:
+            flds = l.split()
+    
+            # All malloc()ed memory goes into anonymous memory blocks.
+            # Hence I am considering only anonymous memory chunks, which will have only 5 columns in the output.
+            if len(flds) > 5: continue
+            mem_start, mem_end = flds[0].split('-')
+            mem_start = int('0x' + mem_start, 16)
+            mem_end = int('0x' + mem_end, 16)
+            tot_mem = tot_mem + mem_end - mem_start
+        fd.close()
+        
+        tot_mem_Ko = tot_mem/1024.0
+        tot_mem_Mo = tot_mem_Ko/1024.0
+        
+    except Exception:
+        # If the memory information could not be read, return a zero result
+        tot_mem_Mo = 0        
+    
+    return tot_mem_Mo
+
+def memoryUsedByProcess2(idProcess):
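+  """Peak resident memory (VmHWM) of a process, in megabytes, read from /proc/<pid>/status."""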
+  _scale = {'kB': 1024.0, 'mB': 1024.0*1024.0,
+            'KB': 1024.0, 'MB': 1024.0*1024.0}
+  
+  try:
+    statFile = open("/proc/"+str(idProcess)+"/status")
+    statInfo = statFile.read()
+    statFile.close()
+    i = statInfo.index("VmHWM:")
+    lstHWM = statInfo[i:].split(None, 3)
+    if (len(lstHWM) < 3):
+      tot_mem_Mo = 0 # error bad formatting
+    else:
+      tot_mem_Mo = float(float(lstHWM[1]) * _scale[lstHWM[2]])
+  except Exception:
+    tot_mem_Mo = 0
+    
+  return tot_mem_Mo/(1024.0*1024.0)
+
+
+def elapsedTime(idProcess, pov):
+    
+    p = subprocess.Popen("/bin/ps jhS %s" % idProcess, shell=True, stdout=subprocess.PIPE)
+    out = p.stdout.read()
+    elements = out.split()
+    p.wait()
+    return elements[pov]
+    
+    
+def userElapsedTime(idProcess):
+    res = ""
+    try:
+        res = elapsedTime(idProcess, 8)
+    except Exception as e:
+        strError = str(e).split("\n")
+        raise UserElapsedTimeException(strError[0])
+    return res
+
+def returnChildIDs(tabProcess, idProcess):
+    
+    childsList = []
+    for proc in tabProcess:
+        currentProcessID = int(proc[PROCESS_ID_FIELD])
+        currentFatherProcessID = int(proc[FATHER_PROCESS_ID_FIELD])
+        if (currentFatherProcessID==idProcess):
+            childsList.append(currentProcessID)
+    return childsList
+    
+    
+def modifyList(argsFinal, currentProcessList, idProcess):
+
+    childsList = returnChildIDs(argsFinal, idProcess)
+    currentProcessList = childsList + currentProcessList
+    
+    return currentProcessList
+    
+    
+def obtainSubProcessesFromMainProcess(idProcess):
+
+    argsFinal = []
+    finalProcessList = []
+    p = subprocess.Popen("ps -ef | grep "+str(idProcess), shell=True, stdout=subprocess.PIPE)
+    out = p.communicate()[0]
+    args = out.split("\n")
+    # Collect the processes into a table
+    for i in range(len(args)-1):
+        argsFinal.append(args[i].split())
+        
+    # Drop the first line
+    argsFinal.pop(0)    
+
+    # Keep going as long as child processes are found
+    currentProcessList = []
+    currentProcessList.append(idProcess)
+    
+    while (len(currentProcessList)!=0):
+        currentID = currentProcessList.pop(0)
+        finalProcessList.append(currentID)
+        currentProcessList = modifyList(argsFinal, currentProcessList, currentID)
+        
+    return finalProcessList 
+
+def usage():
+    print "Usage: %s pid" % sys.argv[0]
+    print "Get the memory usage of the given pid and child"
+    sys.exit(1)
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        usage()
+        
+    pid = int(sys.argv[1])
+    print str(memoryUsedByProcessAndChildrenProcesses(pid))
+    sys.exit(0)    
+	
+
+
diff --git a/orchestrator/common/xml_tools.py b/orchestrator/common/xml_tools.py
index d734805701864a88c56477f4e41b15dd5d2f8b8c..129688bd56fd974c704882b2677522492e99261f 100644
--- a/orchestrator/common/xml_tools.py
+++ b/orchestrator/common/xml_tools.py
@@ -32,8 +32,8 @@ It defines classes_and_methods
 """
 from __future__ import absolute_import
 from lxml import etree as ET
-from pyqtis.Utilities.system_command_executor import SystemCommandExecutor
 from orchestrator.common.maja_exceptions import MajaProcessingError
+from orchestrator.common.system_utils import launch_command
 from lxml import objectify
 from .maja_exceptions import MajaIOError
 from orchestrator.common.logger.maja_logging import configure_logger
@@ -128,11 +128,9 @@ def translate_xsl(source, stylesheet):
     command_line = "xsltproc --output " + source + " " + stylesheet + " " + source
 
     LOGGER.info("Running %s", command_line)
-    # TODO: this call is deprecated
-    executor = SystemCommandExecutor()
 
     # TODO: TBC use append env or manage entire env for each COTS ?
-    status, output_std, output_err = executor.execute(command_line)
+    status = launch_command(command_line)
 
     # TODO: TBC MOve status to post ?
     # TODO: see status management by system command executor
diff --git a/orchestrator/cots/gdal/gdal_dataset_info.py b/orchestrator/cots/gdal/gdal_dataset_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d6fca6547bfa2f4bc316dfe9be812f2ca68c188
--- /dev/null
+++ b/orchestrator/cots/gdal/gdal_dataset_info.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+"""
+###################################################################################################
+
+                        o     o
+                        oo   oo   oo        o   oo        ,-.
+                        o o o o  o  o       o  o  o       \_/
+                        o  o  o o    o      o o    o     {|||D
+                        o     o oooooo      o oooooo      / \
+                        o     o o    o o    o o    o      `-^
+                        o     o o    o  oooo  o    o
+
+###################################################################################################
+
+orchestrator.cots.gdal.gdal_dataset_info -- shortdesc
+
+orchestrator.cots.gdal.gdal_dataset_info is a description
+
+It defines classes_and_methods
+
+###################################################################################################
+
+:author: Alexia Mondot
+
+:copyright: 2017 CNES. All rights reserved.
+
+:license: license
+:created: 24 Nov 2017
+
+:contact: alexia.mondot@c-s.fr
+
+###################################################################################################
+"""
+from __future__ import absolute_import
+from orchestrator.common.maja_exceptions import MajaDriverException
+try:
+    import osgeo.gdal as gdal
+    from osgeo.gdalconst import GA_ReadOnly
+except ImportError:
+    try:
+        import gdal
+        from gdalconst import GA_ReadOnly
+    except ImportError:
+        raise Exception('Python GDAL library not found, please install python-gdal')
+
+
+class GdalDatasetInfo:
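+    """Read-only summary of a GDAL dataset: size, band count, projection, origin and pixel size."""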
+
+    def __init__(self, filename):
+        self.dataset = gdal.Open(filename, GA_ReadOnly)
+        if self.dataset is None:
+            raise MajaDriverException("Error opening file {}".format(filename))
+        self.size = (self.dataset.RasterXSize, self.dataset.RasterYSize)
+        self.number_of_bands = self.dataset.RasterCount
+        self.projection = self.dataset.GetProjection()
+        self.gcp_projection = self.dataset.GetGCPProjection()
+        geotransform = self.dataset.GetGeoTransform()
+        if geotransform is not None:
+            self.origin = (geotransform[0], geotransform[3])
+            self.pixel_size = (geotransform[1], geotransform[5])
+            self.geotransform = geotransform
+
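+# Usage sketch (illustrative):
+#   info = GdalDatasetInfo("/path/to/image.tif")
+#   x_size, y_size = info.size
+#   origin_x, origin_y = info.origin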
+
+
diff --git a/orchestrator/cots/gdal/gdal_rasterize.py b/orchestrator/cots/gdal/gdal_rasterize.py
index 7b0ddddeb328a1414e8939977a59ec1c8f670f87..bdd9e538f16fd8e5dc3beb073c8bca20c7a5b8f3 100644
--- a/orchestrator/cots/gdal/gdal_rasterize.py
+++ b/orchestrator/cots/gdal/gdal_rasterize.py
@@ -38,9 +38,8 @@ import tempfile
 import time
 
 from ..maja_cots import MajaCots
-from orchestrator.common.file_utils import ProvideCachingFilename
-from orchestrator.launcher.app_handler import AppHandler
 from orchestrator.common.logger.maja_logging import configure_logger
+from orchestrator.common.maja_utils import get_test_mode
 
 LOGGER = configure_logger(__name__)
 
@@ -146,7 +145,8 @@ class GdalRasterize(MajaCots):
                              + str(size_x) + """ """ + str(size_y) + """ -a_srs '""" + projection + """ ' """ + inputFilename
                              + """ """ + outputFilename)
         print self.command_line
-        self.run()
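+        # In test mode the gdal_rasterize command line is built but not executed.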
+        if not get_test_mode():
+            self.run()
         return self.status
 
     # TODO: split ?
diff --git a/orchestrator/cots/maja_cots.py b/orchestrator/cots/maja_cots.py
index 3be420979e3d434f1dc4f8b76eeac11b7385175d..4c414984c7d0deb816301bdf24dfbdc2dd08202c 100644
--- a/orchestrator/cots/maja_cots.py
+++ b/orchestrator/cots/maja_cots.py
@@ -33,7 +33,7 @@ It defines classes_and_methods
 """
 from __future__ import absolute_import
 
-from pyqtis.Utilities.system_command_executor import SystemCommandExecutor
+from orchestrator.common.system_utils import launch_command
 from orchestrator.common.logger.maja_logging import configure_logger
 from orchestrator.common.maja_exceptions import MajaProcessingError
 
@@ -61,16 +61,11 @@ class MajaCots(object):
         Run self.command_line
         """
         LOGGER.info("Running %s", self.command_line)
-        # TODO: this call is deprecated
-        executor = SystemCommandExecutor()
 
         # TODO: TBC use append env or manage entire env for each COTS ?
-        self.status, self.output_std, self.output_err = executor.execute(self.command_line,
-                                                                         env=self.env,
-                                                                         append_env=True)
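+        # Note: unlike the previous executor call, launch_command does not forward self.env;
+        # the command inherits the orchestrator's current environment.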
+        self.status = launch_command(self.command_line)
 
         # TODO: TBC MOve status to post ?
         # TODO: see status management by system command executor
         if self.status != 0:
-            raise MajaProcessingError("Error running {}. Exit code {}. Logs {}".format(self.command_line,
-                                                                                       self.status, self.output_err + " " + self.output_std))
+            raise MajaProcessingError("Error running {}. Exit code {}.".format(self.command_line, self.status))
diff --git a/orchestrator/cots/otb/algorithms/otb_extract_roi.py b/orchestrator/cots/otb/algorithms/otb_extract_roi.py
index 0998e63e15d38195699f8f5f991a1c657bb62a41..b5c918638fe1774a13742631230edf3ad72da15c 100644
--- a/orchestrator/cots/otb/algorithms/otb_extract_roi.py
+++ b/orchestrator/cots/otb/algorithms/otb_extract_roi.py
@@ -49,7 +49,9 @@ def extract_roi(input_file_path, channels, output_image, write_output=True):
         parameters = {"cl": ["Channel" + str(idx + 1) for idx in channels],
                       "in": input_file_path,
                       "out": output_image}
-
+        # Drop the channel selection only in test mode (get_test_mode is assumed to be
+        # imported from orchestrator.common.maja_utils, as elsewhere in this change)
+        if get_test_mode():
+            parameters.pop("cl")
         app = OtbAppHandler("ExtractROI", parameters, write_output)
 
         return app
\ No newline at end of file
diff --git a/orchestrator/cots/otb/algorithms/otb_resample.py b/orchestrator/cots/otb/algorithms/otb_resample.py
index 33a1d671366e695acd93593877e06174a94dda96..bda8aea1ba6b43f25d16472acd9ef25219632b3c 100644
--- a/orchestrator/cots/otb/algorithms/otb_resample.py
+++ b/orchestrator/cots/otb/algorithms/otb_resample.py
@@ -52,7 +52,7 @@ class OtbResampleType:
     CLOUD_UNDERSAMPLING_CLD_ALT = 5
 
 
-def resample(input_file_path, dtm, output_image, method, threshold=None, write_output=True):
+def resample(input_file_path, dtm, output_image, method=OtbResampleType.LINEAR, threshold=None, write_output=True):
 
     cl_param = "bco"
 
diff --git a/orchestrator/cots/otb/otb_app_handler.py b/orchestrator/cots/otb/otb_app_handler.py
index 9d5681324921b98fc5dd401430248330069b6932..66e6ca9af75ccb83f876c1f7e5260c40a399702f 100644
--- a/orchestrator/cots/otb/otb_app_handler.py
+++ b/orchestrator/cots/otb/otb_app_handler.py
@@ -33,7 +33,7 @@ It defines classes_and_methods
 from __future__ import absolute_import
 from .otb_cots import MajaOtbCots
 from orchestrator.common.logger.maja_logging import configure_logger
-
+from orchestrator.common.maja_utils import get_test_mode
 LOGGER = configure_logger(__name__)
 
 # TODO:
@@ -50,15 +50,16 @@ class OtbAppHandler:
         LOGGER.debug(parameters)
         self._write_output = write_output
         self._c1.pre(otb_app, parameters)
-        self.run()
-        self.post()
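+        # In test mode the OTB application is not executed; _post() still fills the
+        # outputs with the placeholder values produced by MajaOtbCots.post().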
+        if not get_test_mode():
+            self._run()
+        self._post()
 
-    def run(self):
+    def _run(self):
         LOGGER.debug("Running : " + self._app_name)
         self._c1.run(self._write_output)
         LOGGER.debug("Finished : " + self._app_name)
 
-    def post(self):
+    def _post(self):
         LOGGER.debug("Running : " + self._app_name)
         self._c1.post(self._write_output)
         LOGGER.debug("Finished : " + self._app_name)
diff --git a/orchestrator/cots/otb/otb_cots.py b/orchestrator/cots/otb/otb_cots.py
index b622d5963fd1837e407f891a2cfbb27d69280cf9..544012f4ae96063d954f0895ed3317d33f79a7a4 100644
--- a/orchestrator/cots/otb/otb_cots.py
+++ b/orchestrator/cots/otb/otb_cots.py
@@ -34,6 +34,7 @@ It defines classes_and_methods
 from __future__ import absolute_import
 from ..maja_cots import MajaCots
 from orchestrator.common.maja_exceptions import *
+from orchestrator.common.maja_utils import get_test_mode
 import otbApplication
 import copy
 from orchestrator.common.logger.maja_logging import configure_logger
@@ -47,6 +48,9 @@ OTB_APP_PIXELS_TYPE = {"uint8": otbApplication.ImagePixelType_uint8,
                        "float": otbApplication.ImagePixelType_float ,
                        "double": otbApplication.ImagePixelType_double}
 
+DEFAULT_OUTPUT_VALUES = { otbApplication.ParameterType_Float : 1.0,
+                          otbApplication.ParameterType_Int : 1}
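+# (placeholder values per OTB parameter type; presumably intended for the test-mode
+#  outputs handled in post() below, though not yet referenced in this module)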
+
 
 class MajaOtbCots(MajaCots):
 
@@ -96,23 +100,33 @@ class MajaOtbCots(MajaCots):
 
         self.otb_app.SetParameters(parameters_clean)
         # Update parameters for dynamics
-        self.otb_app.UpdateParameters()
+        if not get_test_mode():
+            self.otb_app.UpdateParameters()
 
     def post(self, write_output):
 
         LOGGER.debug("Write output %s", write_output)
-
         params_keys = self.otb_app.GetParametersKeys()
         for param in params_keys:
             # role == 1 -> output
             if self.otb_app.GetParameterRole(param) == 1 or self.otb_app.GetParameterType(param) == otbApplication.ParameterType_OutputImage:
-                if write_output:
-                    self.outputs[param] = copy.deepcopy(self.otb_app.GetParameterValue(param))
-                else:
-                    if self.otb_app.GetParameterType(param) == otbApplication.ParameterType_OutputImage:
-                        self.outputs[param] = self.otb_app.GetParameterOutputImage(param)
+                if get_test_mode():
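+                    # Test mode: the application never ran, so return dummy values per parameter type.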
+                    if self.otb_app.GetParameterType(param) == otbApplication.ParameterType_Int:
+                        self.outputs[param] = 1
+                    elif self.otb_app.GetParameterType(param) == otbApplication.ParameterType_Float:
+                        self.outputs[param] = 1.0
+                    elif self.otb_app.GetParameterType(param) == otbApplication.ParameterType_String:
+                        self.outputs[param] = "false"
                     else:
                         self.outputs[param] = copy.deepcopy(self.otb_app.GetParameterValue(param))
+                else:
+                    if write_output:
+                        self.outputs[param] = copy.deepcopy(self.otb_app.GetParameterValue(param))
+                    else:
+                        if self.otb_app.GetParameterType(param) == otbApplication.ParameterType_OutputImage:
+                            self.outputs[param] = self.otb_app.GetParameterOutputImage(param)
+                        else:
+                            self.outputs[param] = copy.deepcopy(self.otb_app.GetParameterValue(param))
         if write_output:
             self.otb_app = None
 
diff --git a/orchestrator/cots/otb/otb_pipeline_manager.py b/orchestrator/cots/otb/otb_pipeline_manager.py
index d66007c803d700465795ecdf6ff7ccc6e41200c7..7a885d1dc1f243bd4019a99b778f1755856ca418 100644
--- a/orchestrator/cots/otb/otb_pipeline_manager.py
+++ b/orchestrator/cots/otb/otb_pipeline_manager.py
@@ -31,7 +31,8 @@ It defines classes_and_methods
 ###################################################################################################
 """
 from __future__ import absolute_import
-
+from orchestrator.common.logger.maja_logging import configure_logger
+LOGGER = configure_logger(__name__)
 
 class OtbPipelineManager(object):
     """
@@ -51,12 +52,11 @@ class OtbPipelineManager(object):
         self.otb_app_coarse_resolution.append(otb_app)
 
     def free_otb_app(self):
-        print self.otb_app_coarse_resolution
         for otb_app in self.otb_app_coarse_resolution:
-            print "Removing ", otb_app
+            LOGGER.debug("Removing ")
+            LOGGER.debug(otb_app)
             self.otb_app_coarse_resolution.remove(otb_app)
             del otb_app
-            print self.otb_app_coarse_resolution
         self.otb_app_coarse_resolution = []
 
     def get_last_app(self):
diff --git a/orchestrator/plugins/sentinel2/maja_global_hdr_nodes.py b/orchestrator/plugins/sentinel2/maja_global_hdr_nodes.py
index 7fe096e8863aca96075ce791befa3a4fdc1d8272..2c74ab484f687c0c33bb86adcc54712e45e06ea3 100644
--- a/orchestrator/plugins/sentinel2/maja_global_hdr_nodes.py
+++ b/orchestrator/plugins/sentinel2/maja_global_hdr_nodes.py
@@ -35,7 +35,7 @@ import re
 from collections import OrderedDict
 import lxml.etree as ET
 
-from pyqtis.cots.gdal.gdal_metadata import GdalMetadata
+from orchestrator.cots.gdal.gdal_dataset_info import GdalDatasetInfo
 
 from orchestrator.plugins.common.earth_explorer.maja_earth_explorer_l2_image_file_writer import \
     EarthExplorerL2ImageFileWriter
@@ -536,7 +536,7 @@ def hdr_creation(input_dir, output_filename, writer):
         return
     a_image = glob.glob(os.path.join(input_dir, "*.TIF"))[0]
 
-    info = GdalMetadata(a_image)
+    info = GdalDatasetInfo(a_image)
     # basename_out = os.path.basename(os.path.splitext(output_filename)[0])
     # logging.debug(basename_out)
     #
@@ -672,8 +672,8 @@ def hdr_creation(input_dir, output_filename, writer):
     get_product_information(b3, gipp_files, qi, mean_sun_angles, solar_angles_zenith, solar_angles_azimuth,
                             mean_viewing_angles, list_of_viewing_angles)
 
-    info_r1 = GdalMetadata(image_r1)
-    info_r2 = GdalMetadata(image_r2)
+    info_r1 = GdalDatasetInfo(image_r1)
+    info_r2 = GdalDatasetInfo(image_r2)
 
     res = [{"resolution": 10,
             "info": info_r1,
diff --git a/orchestrator/plugins/sentinel2/maja_specific_hdr_nodes.py b/orchestrator/plugins/sentinel2/maja_specific_hdr_nodes.py
index 90ab674c118361cf4d8f007209acc9e3f29ebb84..dea14e4301094019f339853462b00fb317d2f61c 100644
--- a/orchestrator/plugins/sentinel2/maja_specific_hdr_nodes.py
+++ b/orchestrator/plugins/sentinel2/maja_specific_hdr_nodes.py
@@ -35,7 +35,7 @@ from __future__ import absolute_import
 
 from orchestrator.common.logger.maja_logging import configure_logger
 
-from pyqtis.cots.gdal.gdal_metadata import GdalMetadata
+from orchestrator.cots.gdal.gdal_dataset_info import GdalDatasetInfo
 
 import argparse
 import glob
@@ -260,7 +260,7 @@ def nodes(root, info, mission, basename_out, key):
     toto = ET.SubElement(b7, "Lines")
     toto.text = str(info.size[0])
     toto = ET.SubElement(b7, "Columns")
-    toto.text = str(info.size[0])
+    toto.text = str(info.size[1])
     toto = ET.SubElement(b7, "Bands")
     toto.text = str(info.number_of_bands)
 
@@ -299,7 +299,7 @@ def process_one_file(input_dir, out_dir, fileOO_pattern):
     output_filename = os.path.join(out_dir, os.path.basename(os.path.splitext(os.path.splitext(image00)[0])[0] + ".HDR"))
     LOGGER.debug(output_filename)
 
-    info = GdalMetadata(image00)
+    info = GdalDatasetInfo(image00)
     basename_out = os.path.basename(os.path.splitext(output_filename)[0])
     LOGGER.debug(basename_out)
 
diff --git a/orchestrator/processor/l2_pipelines/aot_estimation_constant.py b/orchestrator/processor/l2_pipelines/aot_estimation_constant.py
index c6ed3c032592acb5c1c3927b675f9ebc1f022d23..204054db0ef7fc765f9582db55f8df3f6f403cde 100644
--- a/orchestrator/processor/l2_pipelines/aot_estimation_constant.py
+++ b/orchestrator/processor/l2_pipelines/aot_estimation_constant.py
@@ -66,8 +66,7 @@ class MajaAOTEstimationConstant(MajaModule):
         aot_default = float(dict_of_input.get("L2COMM").get_value("DefaultAOT"))
         # Coarse AOT constant
         aot_sub_image = os.path.join(aot_working, "aot_sub.tif")
-        self._constant_app = constant_image(dict_of_input.get("DEM").ALC, aot_default, aot_sub_image, aot_working,
-                                            write_output=l_caching)
+        self._constant_app = constant_image(dict_of_input.get("DEM").ALC, aot_default, aot_sub_image, write_output=l_caching)
         dict_of_output["AOT_Sub"] = self._constant_app.getoutput()["out"]
 
 
@@ -82,9 +81,9 @@ class MajaAOTEstimationConstant(MajaModule):
                 aot_image = os.path.join(aot_working, "aot_" + l_res + ".tif")
                 aot_mask = os.path.join(aot_working, "aotmask_" + l_res + ".tif:uint8")
                 dict_of_output["AOT_" + l_res] = constant_image(dict_of_input.get("DEM").ALTList[r], aot_default,
-                                                                 aot_image, write_output=True)
+                                                                 aot_image, write_output=True).getoutput()["out"]
                 dict_of_output["AOTMASK_" + l_res] = constant_image(dict_of_input.get("DEM").ALTList[r], 0,
-                                                                     aot_mask, write_output=True)
+                                                                     aot_mask, write_output=True).getoutput()["out"]
                 aot_list.append(dict_of_output["AOT_" + l_res])
                 aotmask_list.append(dict_of_output["AOTMASK_" + l_res])
             dict_of_output["L2AOTList"] = aot_list