diapotb (Luc Hermitte) / Commits / cecbab82

Commit cecbab82, authored 5 years ago by Valentin Genin
Add new schema json for SAR_MultiSlc.py
Parent: f91c3720
Showing 3 changed files with 215 additions and 97 deletions:

  json_schemas/schema_MultiSlc.json   132 additions, 0 deletions
  python_src/SAR_MultiSlc.py           65 additions, 84 deletions
  python_src/utils/func_utils.py       18 additions, 13 deletions
json_schemas/schema_MultiSlc.json  (new file, mode 100644)  +132 −0
{
    "$schema": "http://json-schema.org/schema#",
    "title": "JSON SCHEMA for MultiSlc",
    "description": "JSON organization for the script SAR_MultiSlc.py",
    "type": "object",
    "allOf": [{"required": ["Global", "Pre_Processing", "Metadata_Correction", "DIn_SAR"]}],
    "properties": {
        "Global": {
            "type": "object",
            "properties": {
                "in": {
                    "type": "object",
                    "required": ["SRTM_Shapefile", "SRTM_Path", "Geoid", "Master_Image",
                                 "Start_Date", "End_Date", "Input_Path"],
                    "additionalProperties": false,
                    "properties": {
                        "SRTM_Shapefile": {"type": "string"},
                        "SRTM_Path": {"type": "string"},
                        "Geoid": {"type": "string"},
                        "Master_Image": {"type": "string"},
                        "Start_Date": {"type": "string"},
                        "End_Date": {"type": "string"},
                        "Exclude": {"type": "string", "default": "-9999"},
                        "Input_Path": {"type": "string"}
                    }
                },
                "out": {
                    "type": "object",
                    "required": ["Output_Path"],
                    "additionalProperties": false,
                    "properties": {
                        "Output_Path": {"type": "string"}
                    }
                },
                "parameter": {
                    "type": "object",
                    "additionalProperties": false,
                    "properties": {
                        "clean": {"type": "boolean", "default": true},
                        "optram": {"type": "number", "default": 4000}
                    }
                }
            },
            "additionalProperties": false,
            "required": ["in", "out"]
        },
        "Pre_Processing": {
            "type": "object",
            "properties": {
                "out": {
                    "type": "object",
                    "required": ["doppler_file"],
                    "additionalProperties": false,
                    "properties": {
                        "doppler_file": {"type": "string"}
                    }
                },
                "parameter": {
                    "type": "object",
                    "required": ["ML_gain"],
                    "additionalProperties": false,
                    "properties": {
                        "ML_ran": {"type": "number", "default": 3},
                        "ML_azi": {"type": "number", "default": 3},
                        "ML_gain": {"type": "number"}
                    }
                }
            },
            "additionalProperties": false,
            "required": ["out", "parameter"]
        },
        "Metadata_Correction": {
            "type": "object",
            "properties": {
                "out": {
                    "type": "object",
                    "required": ["fine_metadata_file"],
                    "additionalProperties": false,
                    "properties": {
                        "fine_metadata_file": {"type": "string"}
                    }
                },
                "parameter": {
                    "type": "object",
                    "required": ["activate", "GridStep_range", "GridStep_azimut"],
                    "additionalProperties": false,
                    "properties": {
                        "activate": {"type": "boolean"},
                        "GridStep_range": {"type": "number"},
                        "GridStep_azimut": {"type": "number"}
                    }
                }
            },
            "additionalProperties": false,
            "required": ["out", "parameter"]
        },
        "DIn_SAR": {
            "type": "object",
            "properties": {
                "parameter": {
                    "type": "object",
                    "required": ["GridStep_range", "GridStep_azimut", "Grid_Threshold",
                                 "Grid_Gap", "Interferogram_gain"],
                    "additionalProperties": false,
                    "properties": {
                        "GridStep_range": {"type": "number"},
                        "GridStep_azimut": {"type": "number"},
                        "Grid_Threshold": {"type": "number"},
                        "Grid_Gap": {"type": "number"},
                        "Interferogram_gain": {"type": "number"},
                        "Activate_Interferogram": {"type": "string", "default": "yes"},
                        "Activate_Ortho": {"type": "string"},
                        "Spacingxy": {"type": "number", "default": 0.0001},
                        "roi": {"type": "string"}
                    }
                }
            },
            "additionalProperties": false,
            "required": ["parameter"]
        }
    }
}
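For reference, the sketch below builds a hypothetical configuration that satisfies this schema and checks it with the jsonschema package. Every path and numeric value is a placeholder, and jsonschema is only assumed here as a stand-in for the validation that func_utils.load_configfile performs.

    import json

    import jsonschema  # assumed validator, not necessarily what the project uses

    # Hypothetical multi-SLC configuration: all values below are placeholders.
    config = {
        "Global": {
            "in": {
                "SRTM_Shapefile": "/data/srtm/srtm.shp",
                "SRTM_Path": "/data/srtm/hgt/",
                "Geoid": "/data/egm96.grd",
                "Master_Image": "s1a-s3-slc-vv-20151222.tiff",
                "Start_Date": "20151222",
                "End_Date": "20160309",
                "Input_Path": "/data/SLC/"
            },
            "out": {"Output_Path": "/data/output/"},
            "parameter": {"clean": True, "optram": 4000}
        },
        "Pre_Processing": {
            "out": {"doppler_file": "dop0.txt"},
            "parameter": {"ML_ran": 3, "ML_azi": 3, "ML_gain": 0.1}
        },
        "Metadata_Correction": {
            "out": {"fine_metadata_file": "fine_metadata.txt"},
            "parameter": {"activate": False, "GridStep_range": 150, "GridStep_azimut": 150}
        },
        "DIn_SAR": {
            "parameter": {
                "GridStep_range": 150,
                "GridStep_azimut": 150,
                "Grid_Threshold": 0.3,
                "Grid_Gap": 0.7,
                "Interferogram_gain": 0.1
            }
        }
    }

    with open("schema_MultiSlc.json") as f:
        schema = json.load(f)

    # Raises jsonschema.ValidationError if a required key is missing or a type is wrong.
    jsonschema.validate(instance=config, schema=schema)
    print("configuration matches schema_MultiSlc.json")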
python_src/SAR_MultiSlc.py  +65 −84
@@ -66,45 +66,6 @@ if __name__ == "__main__":
    parser.add_argument("configfile", help="""input conguration file for the
                        application DiapOTB""")
    parser.add_argument("-d", "--start_date", dest="start_date", action="store",
                        help="start date, fmt('20151222')", default=None)
    parser.add_argument("-f", "--end_date", dest="end_date", action="store",
                        help="end date, fmt('20151223')", default=None)
    parser.add_argument("-m", "--master", dest="master", action="store",
                        help="master image, fmt (.tiff or .h5)", default=None)
    parser.add_argument("-e", "--exclude", dest="exclude", nargs="+", action="store",
                        help=""""excluded date(s) from the time
                        serie, exemple('20151221 20160114')""", default="-9999")
    parser.add_argument("-roi", "--roi", dest="roi", nargs="+", action="store",
                        help=""""Define the lat lng ROI coordinates,
                        fmt('ulx uly lrx lry'), ex: -roi 2.44115 48.96126
                        2.44176 48.95927""", default=None)
    parser.add_argument("-i", "--interferogram", dest="interferogram", action="store",
                        help="""Simply write '-i no' to
                        deactivate interferogram output. Activated by default.
                        Returns: Interferogram.tif""", default="yes")
    parser.add_argument("-o", "--ortho", dest="ortho", action="store",
                        help="""Simply write '-o yes' to
                        activate Orthorectified interferogram output. If activated,
                        returns: Ortho_Interferogram.tif""", default=None)
    parser.add_argument("-l", "--light", dest="light", action="store",
                        help=""""-l no' to deactivate. If activated, returns a
                        light version: Interferogram.tif + Coregistrated.tif +
                        Concatenated (deramped, and multilooked) bursts.tif""", default="yes")
    parser.add_argument("-ram", "--optram", dest="optram", action="store",
                        help="""Available RAM (mb),
                        by default value is 4OOO""", default="4000")
    parser.add_argument("-spacingxy", "--spacingxy", dest="spacingxy", action="store",
                        help="""Set the spatial resolution
                        for OrthoRectification in degrees,
                        Default value is 0.0001""", default="0.0001")
    parser.add_argument("-ml", "--multilook", dest="multi", nargs="+", action="store",
                        help=""""Set the range and azimuth (in this order) you want
                        for the multilook. Default is 3 3""", default='3' '3')

    args = parser.parse_args()

    print(args.configfile)
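After this hunk only the positional configfile argument remains on the command line. A minimal sketch of the resulting parser, assuming nothing else was added elsewhere:

    # Minimal sketch of the CLI that remains after this commit: every removed flag now
    # has a counterpart in the JSON configuration validated by schema_MultiSlc.json.
    import argparse

    parser = argparse.ArgumentParser(description="SAR_MultiSlc (options now live in the JSON config)")
    parser.add_argument("configfile", help="input configuration file for the application DiapOTB")
    args = parser.parse_args()
    print(args.configfile)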
@@ -112,61 +73,63 @@ if __name__ == "__main__":
    func_utils.init_logger()

    # ====== Read and Load the configuration file
    dataConfig = func_utils.load_configfile(args.configfile)
    dataConfig = func_utils.load_configfile(args.configfile, mode="multi_S1")

    # ====== Get dictionaries
    dict_Global = dataConfig['Global']
    dict_PreProcessing = dataConfig['Pre_Processing']
    dict_Metadata_Correction = dataConfig['Metadata_Correction']
    dict_DInSAR = dataConfig['DIn_SAR']

    # ====== Check extension (if .h5 => HDF5 file => Cosmo Sensor)
    master_ext = args.master.split(".")[-1:]
    master_ext = dict_Global['in']['Master_Image'].split(".")[-1:]

    # ====== Get elements from configuration file
    # ====== Global
    srtm_shapefile = dict_Global['in']['SRTM_Shapefile']
    hgts_path = dict_Global['in']['SRTM_Path']
    geoid_path = dict_Global['in']['Geoid']
    output_dir = dict_Global['out']['Output_Path']

    # =================================
    # Get elements from users arguments
    # =================================
    if not os.path.exists(output_dir):
        print("The output directory does not exist and will be created")
        os.makedirs(output_dir)
    else:
        print("The output directory exists. Some files can be overwritten")

    # ====== Set the variables names
    iso_start, iso_end = func_utils.argDates_to_isoDates(args.start_date, args.end_date)
    start_time = int(args.start_date)
    end_time = int(args.end_date)
    master_Image_base = args.master
    iso_start, iso_end = func_utils.argDates_to_isoDates(dict_Global['in']['Start_Date'],
                                                         dict_Global['in']['End_Date'])
    start_time = int(dict_Global['in']['Start_Date'])
    end_time = int(dict_Global['in']['End_Date'])
    master_Image_base = dict_Global['in']['Master_Image']
    master_Image = ""
    pol = ""
    spacingxy = args.spacingxy
    exclude = args.exclude
    roi = args.roi
    light_version = args.light
    exclude = "-9999"
    if 'Exclude' in dict_Global['in']:
        exclude = dict_Global['in']['Exclude']
    light_version = dict_Global['parameter']['clean']
    relative_orbit = ""
    manifest = ""
    light_version = True
    ram = 4000
    if 'parameter' in dict_Global:
        if 'optram' in dict_Global['parameter']:
            ram = dict_Global['parameter']['ram']
        if 'clean' in dict_Global['parameter']:
            light_version = dict_Global['parameter']['clean']

    if master_ext[0] == "h5":
        # Cosmo case
        master_Image = func_utils.get_imgFromDir(args.master)
        master_Image = func_utils.get_imgFromDir(dict_Global['in']['Master_Image'],
                                                 dict_Global['in']['Input_Path'])
        master_date = master_Image_base.split("_")[8][:8]
        pol = master_Image_base.split("_")[5]
    else:
        #S1 SM case
        master_Image = func_utils.get_imgFromSAFE(args.master)
        master_Image = func_utils.get_imgFromSAFE(dict_Global['in']['Master_Image'],
                                                  dict_Global['in']['Input_Path'])
        master_date = master_Image_base.split("-")[4].split("t")[0]
        pol = master_Image_base.split("-")[3]
        manifest = master_Image.split("measurement")[0] + "/manifest.safe"
        relative_orbit = func_utils.get_relative_orbit(manifest)

    version_interferogram = args.interferogram
    ortho_interferogram = args.ortho
    if roi:
        ortho_interferogram = "yes"
        print("ortho_interferogram", ortho_interferogram)
    ram = args.optram
    rng, azi = args.multi

    # ====== Get dictionaries
    dict_Global = dataConfig['Global']
    dict_PreProcessing = dataConfig['Pre_Processing']
    dict_Metadata_Correction = dataConfig['Metadata_Correction']
    dict_DInSAR = dataConfig['DIn_SAR']

    # ====== Get elements from configuration file
    # ====== Global
    srtm_shapefile = dict_Global['in']['SRTMShapefile']
    hgts_path = dict_Global['in']['Datalake']
    geoid_path = dict_Global['in']['Geoid']
    satellite = "default"
    mode = "default"
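The optional keys above (Exclude, parameter.optram, parameter.clean) are read with explicit 'in' membership tests. The toy sketch below shows the same defaulting behaviour with dict.get on a made-up dictionary; it is only an illustration, not the project's code (the committed hunk tests for 'optram' but then indexes 'ram', while the sketch reads 'optram' throughout).

    # Toy illustration of the defaulting logic used in the hunk above; the dictionary
    # contents are invented for the example.
    dict_Global = {
        "in": {"Start_Date": "20151222", "End_Date": "20160309"},  # no "Exclude" key
        "parameter": {"clean": False},                              # no "optram" key
    }

    exclude = dict_Global["in"].get("Exclude", "-9999")
    parameter = dict_Global.get("parameter", {})
    ram = parameter.get("optram", 4000)        # the hunk itself reads ['ram'] after testing 'optram'
    light_version = parameter.get("clean", True)

    print(exclude, ram, light_version)         # -9999 4000 False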
@@ -175,6 +138,12 @@ if __name__ == "__main__":
    mode = dict_Global['sensor']['mode']

    # ====== Pre_Processing
    rng = 3
    azi = 3
    if "ML_ran" in dict_PreProcessing['parameter']:
        rng = dict_PreProcessing['parameter'].get('ML_ran')
    if "ML_azi" in dict_PreProcessing['parameter']:
        azi = dict_PreProcessing['parameter'].get('ML_azi')
    ml_range = int(rng)
    ml_azimut = int(azi)
    ml_gain = dict_PreProcessing['parameter']['ML_gain']
@@ -192,13 +161,23 @@ if __name__ == "__main__":
    fine_metadata_file = dict_Metadata_Correction['out']['fine_metadata_file']

    # ====== DIn_SAR
    spacingxy = 0.0001
    if "Spacingxy" in dict_DInSAR['parameter']:
        spacingxy = dict_DInSAR['parameter']['Spacingxy']
    roi = None
    if 'roi' in dict_DInSAR['parameter']:
        roi = dict_DInSAR['parameter']['roi']
    version_interferogram = dict_DInSAR['parameter']['Activate_Interferogram']
    ortho_interferogram = None
    if 'Activate_Ortho' in dict_DInSAR['parameter']:
        ortho_interferogram = dict_DInSAR['parameter']['Activate_Ortho']
    if roi:
        ortho_interferogram = "yes"
        print("ortho_interferogram", ortho_interferogram)
    geoGrid_gridstep_range = dict_DInSAR['parameter']['GridStep_range']
    geoGrid_gridstep_azimut = dict_DInSAR['parameter']['GridStep_azimut']
    geoGrid_threshold = dict_DInSAR['parameter']['Grid_Threshold']
    if not master_ext[0] == "h5":
        geoGrid_gap = dict_DInSAR['parameter']['Grid_Gap_S1']
    else:
        geoGrid_gap = dict_DInSAR['parameter']['Grid_Gap_Co']
    geoGrid_gap = dict_DInSAR['parameter']['Grid_Gap']
    ml_geoGrid_range = ml_range
    ml_geoGrid_azimut = ml_azimut
    gain_interfero = dict_DInSAR['parameter']['Interferogram_gain']
@@ -213,8 +192,10 @@ if __name__ == "__main__":
    func_utils.check_ifExist(master_Image)

    # ====== Create global folder with starting and ending dates + master date
    output_glob = "./output_{}_to_{}_m_{}".format(start_time, end_time, master_date)
    # output_glob = "./output_{}_to_{}_m_{}".format(start_time, end_time,
    # master_date)
    output_glob = "{}/output_{}_to_{}_m_{}".format(output_dir, start_time, end_time, master_date)
    if not os.path.exists(output_glob):
        os.makedirs(output_glob)
@@ -258,7 +239,7 @@ if __name__ == "__main__":
    # =============================
    # ====== Get the list of GTiff corresponding to dates
    tiff_list = func_utils.get_AllTiff(pol=pol, ext=master_ext[0])
    tiff_list = func_utils.get_AllTiff(pol=pol, ext=master_ext[0],
                                       searchDir=dict_Global["in"]["Input_Path"])
    tiff_dates = func_utils.get_Tiff_WithDates(start_time, end_time, exclude, tiff_list, master_ext[0])
    counter = 0
@@ -268,9 +249,9 @@ if __name__ == "__main__":
        slave_Image_base = i
        slave_Image = ""
        if not master_ext[0] == "h5":
            slave_Image = func_utils.get_imgFromSAFE(slave_Image_base)
            slave_Image = func_utils.get_imgFromSAFE(slave_Image_base,
                                                     searchDir=dict_Global["in"]["Input_Path"])
        else:
            slave_Image = func_utils.get_imgFromDir(slave_Image_base)
            slave_Image = func_utils.get_imgFromDir(slave_Image_base,
                                                    searchDir=dict_Global["in"]["Input_Path"])
        slave_date = func_utils.get_Date(i, master_ext[0])
        counter += 1
        output_dir = output_glob + "/{}_m_{}_s".format(master_date, slave_date)
python_src/utils/func_utils.py  +18 −13
@@ -156,9 +156,12 @@ def load_configfile(configfile, mode="Others"):
    schema_json = "schema_S1SM.json"
    if mode == "S1_IW":
        schema_json = "schema_S1IW.json"
    elif mode == "multi_S1":
        schema_json = "schema_MultiSlc.json"

    # Load schema (into DiapOTB install)
    diapOTB_install = os.getenv('DIAPOTB_INSTALL')
    print(diapOTB_install)
    if diapOTB_install is not None and os.path.exists(diapOTB_install):
        schemas_path = os.path.join(diapOTB_install, "json_schemas")
        if os.path.exists(schemas_path):
@@ -221,27 +224,29 @@ def getDEMInformation(dem):
### Functions for image/file selection
def get_imgFromSAFE(arg):
def get_imgFromSAFE(arg, searchDir="."):
    """
    Retrive selected image from a SAFE directory
    """
    img = []
    for root, dirs, files in os.walk("."):
    for root, dirs, files in os.walk(searchDir):
        for i in (i for i in files if i == arg):
            img.append(os.path.join(os.getcwd()+root.split(".")[1] + ".SAFE/measurement/"+(i)))
            # img.append(os.path.join(os.getcwd()+root.split(".")[1] +
            # ".SAFE/measurement/"+(i)))
            img.append(os.path.join(root + "/measurement/", i))
    img = str("".join(img))
    return img


def get_imgFromDir(arg):
def get_imgFromDir(arg, searchDir="."):
    """
    Retrive selected image from a directory (for Cosmo sensor)
    """
    img = []
    for root, dirs, files in os.walk("."):
    for root, dirs, files in os.walk(searchDir):
        for i in (i for i in files if i == arg):
            img.append(os.path.join(os.getcwd()+root.split(".")[1] + "/"+(i)))
            # img.append(os.path.join(os.getcwd()+root.split(".")[1] +
            # "/"+(i)))
            img.append(os.path.join(root, i))
    img = str("".join(img))
    return img
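A hedged usage sketch of the two updated helpers; the import path, directory, and file names are placeholders, and both functions concatenate every match found under searchDir into a single string.

    from utils import func_utils  # import path is an assumption; adjust to the project layout

    # Hypothetical products searched for below /data/SLC instead of the current directory.
    s1_img = func_utils.get_imgFromSAFE("s1a-s3-slc-vv-20151222.tiff", searchDir="/data/SLC")
    cosmo_img = func_utils.get_imgFromDir("CSKS1_SCS_B_20151222.h5", searchDir="/data/SLC")

    print(s1_img)     # path assembled from the matching root and the measurement/ sub-folder
    print(cosmo_img)  # path assembled directly from the matching root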
@@ -286,31 +291,31 @@ def check_srtm_coverage(inSHP_Geometry, SRTM):
        needed_srtm_tiles = srtm_tiles
    return needed_srtm_tiles


def get_AllTiff(pol, iw="", ext=""):
def get_AllTiff(pol, iw="", ext="", searchDir="."):
    """
    Get all tiff from an input directory
    """
    TiffList = []
    if not iw == "":
        for root, dirs, files in os.walk("."):
        for root, dirs, files in os.walk(searchDir):
            for i in (i for i in files):
                # if i != inTIF):
                if i.endswith(".tiff") and pol == i.split("-")[3]:
                    if iw == i.split("-")[1]:
                        TiffList.append(i)
    else:
        if ext == "h5":
            for root, dirs, files in os.walk("."):
            for root, dirs, files in os.walk(searchDir):
                for i in (i for i in files):
                    # if i != inTIF):
                    if i.endswith(".h5") and pol == i.split("_")[5]:
                        TiffList.append(i)
        if not ext == "h5":
            for root, dirs, files in os.walk("."):
            for root, dirs, files in os.walk(searchDir):
                for i in (i for i in files):
                    # if i != inTIF):
                    if i.endswith(".tiff") and pol == i.split("-")[3]:
                        TiffList.append(i)
    print(TiffList)
    return TiffList


def get_AllFilesWithExt(searchDir, ext):
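In the same spirit, a hedged sketch of get_AllTiff with the new searchDir parameter; the values are placeholders, and the function returns bare file names (printing the list before returning it).

    from utils import func_utils  # import path is an assumption; adjust to the project layout

    # Hypothetical call: collect every 'vv' polarisation .tiff name found below /data/SLC.
    tiff_list = func_utils.get_AllTiff(pol="vv", ext="tiff", searchDir="/data/SLC")
    print(tiff_list)  # e.g. ['s1a-s3-slc-vv-20151222.tiff', 's1a-s3-slc-vv-20160115.tiff']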