geomultisens / gms_preprocessing · Commits

Commit 6de517bc, authored Oct 23, 2017 by Daniel Scheffler

    Added tests for exception handler. Unified L1A_object inputs.

Parent: 1c4efa90
Pipeline #1405 failed in 8 minutes and 9 seconds
Changes: 7 · Pipelines: 1
gms_preprocessing/algorithms/L1A_P.py

@@ -34,7 +34,7 @@ class L1A_object(GMS_object):
     """Features input reader and raster-/metadata homogenization."""
 
     def __init__(self, image_type='', satellite='', sensor='', subsystem='', sensormode='', acq_datetime=None,
-                 entity_ID='', scene_ID=-9999, filename='', dataset_ID=-9999):
+                 entity_ID='', scene_ID=-9999, filename='', dataset_ID=-9999, **kwargs):
         """:param : instance of gms_object.GMS_object or None
         """  # TODO docstring
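The practical effect of the added **kwargs is that L1A_object can now be built straight from a dataset record without filtering its keys first: keys that are not listed in the signature (e.g. 'proc_level', which Usecase attaches to every record, see config.py below) no longer raise a TypeError. A minimal sketch, using placeholder values that are not taken from the commit:

    # Hypothetical dataset record; the keys mirror the constructor parameters, the values are placeholders.
    from gms_preprocessing.algorithms.L1A_P import L1A_object

    dataset_dict = dict(proc_level=None,  # surplus key, absorbed by **kwargs
                        image_type='RSD', satellite='Landsat-8', sensor='OLI_TIRS', subsystem='',
                        sensormode='', acq_datetime=None, entity_ID='<entity id>',
                        scene_ID=-9999, filename='<archive file>', dataset_ID=-9999)

    L1A_obj = L1A_object(**dataset_dict)  # no manual key filtering needed anymore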
gms_preprocessing/config.py

@@ -499,7 +499,7 @@ class Usecase:
                      scenes.acquisitiondate,
                      scenes.entityid,
                      scenes.filename,
-                     COALESCE(scenes_proc.proc_level::text, 'L0A') AS proc_level,
+                     COALESCE(scenes_proc.proc_level::text, 'L1A') AS proc_level,
                      datasets.image_type,
                      satellites.name AS satellite,
                      sensors.name AS sensor,

@@ -519,7 +519,7 @@ class Usecase:
             ds = OrderedDict()
             ds["proc_level"] = row["proc_level"]
             ds["scene_ID"] = row["sceneid"]
-            ds["datasetid"] = row["datasetid"]
+            ds["dataset_ID"] = row["datasetid"]
             ds["image_type"] = row["image_type"]
             ds["satellite"] = row["satellite"]
             ds["sensor"] = row["sensor"]
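The renamed key matters because these OrderedDicts are exactly what pipeline.py (below) now unpacks via L1A_object(**dataset_dict): the dict key has to match the constructor parameter name dataset_ID, otherwise the value would only end up in **kwargs. A short illustration of the alignment (values are placeholders, not from the commit):

    from collections import OrderedDict

    ds = OrderedDict()
    ds["proc_level"] = None
    ds["scene_ID"] = -9999
    ds["dataset_ID"] = -9999   # the old key name "datasetid" would not reach the dataset_ID parameter
    ds["image_type"] = "RSD"
    ds["satellite"] = "Landsat-8"
    ds["sensor"] = "OLI_TIRS"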
gms_preprocessing/misc/exception_handler.py

@@ -189,11 +189,10 @@ class ExceptionHandler(object):
     @staticmethod
     def update_progress_failed(failed_Obj):
         """Update statistics column in jobs table of postgreSQL database."""
-        if not failed_Obj.subsystem or failed_Obj.subsystem in ['VNIR1', 'S2A10']:
-            DB_T.increment_decrement_arrayCol_in_postgreSQLdb(
-                CFG.job.conn_database, 'jobs', 'statistics', cond_dict={'id': CFG.job.ID},
-                idx_val2decrement=db_jobs_statistics_def[failed_Obj.proc_level],
-                idx_val2increment=db_jobs_statistics_def['FAILED'])
+        DB_T.increment_decrement_arrayCol_in_postgreSQLdb(
+            CFG.job.conn_database, 'jobs', 'statistics', cond_dict={'id': CFG.job.ID},
+            idx_val2decrement=db_jobs_statistics_def[failed_Obj.proc_level],
+            idx_val2increment=db_jobs_statistics_def['FAILED'])
 
     def handle_failed(self):
         _, exc_val, exc_tb = self.exc_details

@@ -225,8 +224,8 @@ class ExceptionHandler(object):
         return failed_Obj
 
 
-def log_uncaught_exceptions(GMS_mapper):
-    exc_handler = ExceptionHandler()
+def log_uncaught_exceptions(GMS_mapper, logger=None):
+    exc_handler = ExceptionHandler(logger=logger)
     return exc_handler.log_uncaught_exceptions(GMS_mapper)
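log_uncaught_exceptions still works as a plain decorator (as used in the new tests below), but the added logger parameter also allows wrapping a mapper with a caller-supplied logger. A usage sketch, assuming only the signature shown in the diff (the mapper itself is hypothetical):

    import logging

    from gms_preprocessing.misc.exception_handler import log_uncaught_exceptions

    def my_mapper(GMS_obj):  # hypothetical GMS mapper function
        return GMS_obj

    # decorator form, equivalent to log_uncaught_exceptions(my_mapper, logger=None):
    safe_mapper = log_uncaught_exceptions(my_mapper)

    # explicit logger, enabled by the new parameter:
    logged_mapper = log_uncaught_exceptions(my_mapper, logger=logging.getLogger('my_mapper'))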
gms_preprocessing/processing/pipeline.py

@@ -19,16 +19,7 @@ __author__ = 'Daniel Scheffler'
 def L1A_map(dataset_dict):  # map (scene-wise parallelization)
     # type: (dict) -> L1A_P.L1A_object
 
-    L1A_obj = L1A_P.L1A_object(image_type='RSD',
-                               satellite=dataset_dict['satellite'],
-                               sensor=dataset_dict['sensor'],
-                               subsystem=dataset_dict['subsystem'],
-                               sensormode=dataset_dict['sensormode'],
-                               acq_datetime=dataset_dict['acq_datetime'],
-                               entity_ID=dataset_dict['entity_ID'],
-                               scene_ID=dataset_dict['scene_ID'],
-                               filename=dataset_dict['filename'],
-                               dataset_ID=dataset_dict['datasetid'])
+    L1A_obj = L1A_P.L1A_object(**dataset_dict)
     L1A_obj.import_rasterdata()
     L1A_obj.import_metadata(v=False)
     L1A_obj.validate_GeoTransProj_GeoAlign()  # sets self.GeoTransProj_ok and self.GeoAlign_ok

@@ -53,16 +44,7 @@ def L1A_map(dataset_dict):  # map (scene-wise parallelization)
 def L1A_map_1(dataset_dict, block_size=None):  # map (scene-wise parallelization)
     # type: (dict) -> List[L1A_P.L1A_object]
 
-    L1A_obj = L1A_P.L1A_object(image_type='RSD',
-                               satellite=dataset_dict['satellite'],
-                               sensor=dataset_dict['sensor'],
-                               subsystem=dataset_dict['subsystem'],
-                               sensormode=dataset_dict['sensormode'],
-                               acq_datetime=dataset_dict['acq_datetime'],
-                               entity_ID=dataset_dict['entity_ID'],
-                               scene_ID=dataset_dict['scene_ID'],
-                               filename=dataset_dict['filename'],
-                               dataset_ID=dataset_dict['datasetid'])
+    L1A_obj = L1A_P.L1A_object(**dataset_dict)
     L1A_obj.import_rasterdata()
     L1A_obj.import_metadata(v=False)
     L1A_obj.validate_GeoTransProj_GeoAlign()  # sets self.GeoTransProj_ok and self.GeoAlign_ok
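With the unified constructor, both mappers simply forward the complete dataset record. A sketch of the assumed call pattern (the job ID and process_controller keyword arguments are copied from the new tests, not from this file):

    from gms_preprocessing import process_controller
    from gms_preprocessing.processing.pipeline import L1A_map

    PC = process_controller(26186261, parallelization_level='scenes', db_host='localhost',
                            job_config_kwargs=dict(is_test=True, log_level='DEBUG'))
    L1A_objs = [L1A_map(dataset_dict) for dataset_dict in PC.usecase.data_list]  # scene-wise mapping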
gms_preprocessing/processing/process_controller.py

@@ -210,7 +210,7 @@ class process_controller(object):
                         Sensor=dataset['sensor'],
                         Subsystem=dataset['subsystem'],
                         proc_level=ProcL,  # must be respected because LBA changes after atm. Corr.
-                        dataset_ID=dataset['datasetid'],
+                        dataset_ID=dataset['dataset_ID'],
                         logger=None),
                     nBands=(1 if dataset['sensormode'] == 'P' else None))
 
             # check if the LayerBandsAssignment of the written dataset on disk equals the
tests/test_exception_handler.py

@@ -5,28 +5,133 @@
 test_exception_handler
 ----------------------
 
-Tests for gms_preprocessing.misc.exception_handler
+Tests for gms_preprocessing.misc.exception_handler.ExceptionHandler
 """
 
 import unittest
 
 from gms_preprocessing import process_controller
-from gms_preprocessing.misc.exception_handler import ExceptionHandler, log_uncaught_exceptions
+from gms_preprocessing.misc.exception_handler import log_uncaught_exceptions
+from gms_preprocessing.algorithms.L1A_P import L1A_object
+from gms_preprocessing.misc.database_tools import get_info_from_postgreSQLdb
 
 
-class Test_ExceptionHandler(unittest.TestCase):
-    """Tests class for gms_preprocessing.misc.exception_handler.ExceptionHandler."""
+class BaseTest_ExceptionHandler:
+    """Test class for gms_preprocessing.misc.exception_handler.ExceptionHandler."""
 
-    @classmethod
-    def setUpClass(cls):
-        cls.PC = process_controller(jobID, parallelization_level='scenes', db_host='geoms',
-                                    delete_old_output=True, job_config_kwargs=config)
+    class Test_ExceptionHandler(unittest.TestCase):
+        PC = None  # default
 
-    @log_uncaught_exceptions
-    def dummy_gms_mapper_success(self, dummy_GMSobj):
-        return dummy_GMSobj
+        def get_process_controller(self, jobID):
+            self.PC = process_controller(jobID, parallelization_level='scenes', db_host='localhost',
+                                         job_config_kwargs=dict(is_test=True, log_level='DEBUG'))
 
-    @log_uncaught_exceptions
-    def dummy_gms_mapper_fail(self):
-        raise RuntimeError()
+            # update attributes of DB_job_record and related DB entry
+            self.PC.DB_job_record.reset_job_progress()
+            [ds.update({'proc_level': None}) for ds in self.PC.usecase.data_list]
+            self.PC.job.status = 'running'
+            self.PC.update_DB_job_record()  # TODO implement that into job.status.setter
+
+        def tearDown(self):
+            self.PC.DB_job_record.reset_job_progress()
+
+        @staticmethod
+        @log_uncaught_exceptions
+        def dummy_L1A_mapper_success(dummy_GMSobj):
+            return L1A_object(**dummy_GMSobj)
+
+        @staticmethod
+        @log_uncaught_exceptions
+        def dummy_gms_mapper_fail(dummy_GMSobj):
+            raise RuntimeError('TestException raised by dummy_gms_mapper_fail()')
+
+        def get_current_progress_stats(self):
+            return get_info_from_postgreSQLdb(self.PC.job.conn_database, 'jobs', 'statistics',
+                                              cond_dict=dict(id=self.PC.job.ID))[0][0]
+
+        def is_sceneid_in_failedIDs(self, sceneid):
+            return sceneid in get_info_from_postgreSQLdb(self.PC.job.conn_database, 'jobs', ['failed_sceneids'],
+                                                         {'id': self.PC.job.ID})[0][0]
+
+
+class Test_ExceptionHandler_NoSubsystems(BaseTest_ExceptionHandler.Test_ExceptionHandler):
+    def setUp(self):
+        super().get_process_controller(26186261)  # Landsat-8 Coll. Data
+
+    def test_L1A_mapper_success(self):
+        """Check correctness of progress stats if scene succeeds."""
+        self.dummy_L1A_mapper_success(self.PC.usecase.data_list[0])
+        self.assertEqual(self.get_current_progress_stats(), [0, 0, 1, 0, 0, 0, 0, 0, 0])
+
+    def test_gms_mapper_fail(self):
+        """Check correctness of progress stats if scene fails."""
+        outObj = self.dummy_gms_mapper_fail(self.PC.usecase.data_list[0])
+        self.assertTrue(self.is_sceneid_in_failedIDs(outObj.scene_ID))
+        self.assertEqual(self.get_current_progress_stats(), [0, 0, 0, 0, 0, 0, 0, 0, 1])
+
+    def test_gms_mapper_fail_excH_off(self):
+        """Check if exception handler raises exceptions if CFG.Job.disable_exception_handler = True."""
+        self.PC.job.disable_exception_handler = True
+        self.assertRaises(RuntimeError, self.dummy_gms_mapper_fail, self.PC.usecase.data_list[0])
+
+
+class Test_ExceptionHandler_Subsystems(BaseTest_ExceptionHandler.Test_ExceptionHandler):
+    """"""
+
+    def setUp(self):
+        super().get_process_controller(26186268)  # Sentinel-2
+
+    def test_L1A_mapper_success(self):
+        """Check correctness of progress stats if all subsystems succeed."""
+        for subObj in self.PC.usecase.data_list:
+            self.dummy_L1A_mapper_success(subObj)
+            # validate that stats are only updated by the first subsystem and keep the same value
+            self.assertEqual(self.get_current_progress_stats(), [0, 0, 1, 0, 0, 0, 0, 0, 0])
+
+        # validate that stats only show ONE scene instead of the number of subsystems
+        self.assertEqual(self.get_current_progress_stats(), [0, 0, 1, 0, 0, 0, 0, 0, 0])
+
+    def test_gms_mapper_fail_firstSS(self):
+        """Check correctness of progress stats if the first subsystem fails and another one succeeds."""
+        for i, subObj in enumerate(self.PC.usecase.data_list):
+            if subObj['subsystem'] == 'S2A10':
+                outObj = self.dummy_gms_mapper_fail(subObj)
+
+                # check that the scene ID of the failed subsystem has been added to the failed_sceneids db column
+                self.assertTrue(self.is_sceneid_in_failedIDs(outObj.scene_ID))
+
+                # check that the scene has been marked as failed in progress stats
+                self.assertEqual(self.get_current_progress_stats(), [0, 0, 0, 0, 0, 0, 0, 0, 1])
+            else:
+                self.dummy_L1A_mapper_success(subObj)
+
+                # check that the scene keeps marked as failed
+                self.assertEqual(self.get_current_progress_stats(), [0, 0, 0, 0, 0, 0, 0, 0, 1])
+
+    @unittest.SkipTest
+    def test_gms_mapper_fail_secondSS(self):
+        """Check correctness of progress stats if a first subsystem succeeds and then another one fails.
+
+        NOTE: This happens quite rarely because if a mapper fails for a subsystem, it usually fails for the first
+        subsystem it receives."""
+        # TODO: This test currently fails because the earlier subsystem updates stats from
+        # TODO: [0, 1, 0, 0, 0, 0, 0, 0, 0] to [0, 0, 1, 0, 0, 0, 0, 0, 0] and the later one from
+        # TODO: [0, 0, 1, 0, 0, 0, 0, 0, 0] to [0, -1, 1, 0, 0, 0, 0, 0, 1].
+        # TODO: This is a known bug.
+        for i, subObj in enumerate(self.PC.usecase.data_list):
+            if subObj['subsystem'] == 'S2A10':
+                self.dummy_L1A_mapper_success(subObj)
+
+                # progress stats must be incremented
+                self.assertEqual(self.get_current_progress_stats(), [0, 0, 1, 0, 0, 0, 0, 0, 0])
+            else:
+                outObj = self.dummy_gms_mapper_fail(subObj)
+
+                # scene must be added to failed_scenes column
+                self.assertTrue(self.is_sceneid_in_failedIDs(outObj.scene_ID))
+
+                # progress of the whole scene must be marked as failed
+                self.assertEqual(self.get_current_progress_stats(), [0, 0, 0, 0, 0, 0, 0, 0, 1])
+
+        self.assertEqual(self.get_current_progress_stats(), [0, 0, 0, 0, 0, 0, 0, 0, 1])
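The nesting of Test_ExceptionHandler inside the plain container class BaseTest_ExceptionHandler is what keeps the shared test logic out of unittest's test discovery, while the two concrete subclasses parametrize it with a job ID. A generic, self-contained sketch of that pattern (illustration only, not project code):

    import unittest

    class BaseTests:
        class AbstractTest(unittest.TestCase):
            job_id = None  # set by the concrete subclasses below

            def test_job_id_is_set(self):
                self.assertIsNotNone(self.job_id)

    # only these module-level TestCase subclasses are collected by unittest
    class Test_JobA(BaseTests.AbstractTest):
        job_id = 26186261

    class Test_JobB(BaseTests.AbstractTest):
        job_id = 26186268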
tests/test_gms_preprocessing.py

@@ -154,7 +154,7 @@ class BaseTestCases:
 class Test_Landsat5_PreCollectionData(BaseTestCases.TestAll):
     """
     Parametrized testclass. Tests the level-processes on a Landsat-5 TM scene (pre-collection data).
-    More information on the dataset will be outputted after the tests-classes are executed.
+    More information on the dataset will be output after the tests-classes are executed.
     """
     @classmethod
     def setUpClass(cls):

@@ -173,7 +173,7 @@ class Test_Landsat5_PreCollectionData(BaseTestCases.TestAll):
 class Test_Landsat7_SLC_on_PreCollectionData(BaseTestCases.TestAll):
     """
     Parametrized testclass. Tests the level-processes on a Landsat-7 ETM+_SLC_ON scene (pre-collection data).
-    More information on the dataset will be outputted after after the tests-classes are executed.
+    More information on the dataset will be output after after the tests-classes are executed.
     """
     @classmethod
     def setUpClass(cls):

@@ -183,7 +183,7 @@ class Test_Landsat7_SLC_on_PreCollectionData(BaseTestCases.TestAll):
 class Test_Landsat7_SLC_off_PreCollectionData(BaseTestCases.TestAll):
     """
     Parametrized testclass. Tests the level-processes on a Landsat-7 ETM+_SLC_OFF scene (pre-collection data).
-    More information on the dataset will be outputted after the tests-classes are executed.
+    More information on the dataset will be output after the tests-classes are executed.
     """
     @classmethod
     def setUpClass(cls):

@@ -203,7 +203,7 @@ class Test_Landsat7_SLC_off_PreCollectionData(BaseTestCases.TestAll):
 class Test_Landsat8_PreCollectionData(BaseTestCases.TestAll):
     """
     Parametrized testclass. Tests the level-processes on a Landsat-8 OLI_TIRS scene (pre-collection data).
-    More information on the dataset will be outputted after the tests-classes are executed.
+    More information on the dataset will be output after the tests-classes are executed.
     """
     @classmethod
     def setUpClass(cls):

@@ -213,27 +213,27 @@ class Test_Landsat8_PreCollectionData(BaseTestCases.TestAll):
 class Test_Landsat8_CollectionData(BaseTestCases.TestAll):
     """
     Parametrized testclass. Tests the level-processes on a Landsat-8 OLI_TIRS scene (collection data).
-    More information on the dataset will be outputted after the tests-classes are executed.
+    More information on the dataset will be output after the tests-classes are executed.
     """
     @classmethod
     def setUpClass(cls):
         cls.create_job(26186261, job_config_kwargs)
 
 
-class Test_Sentinel2A_CollectionData(BaseTestCases.TestAll):
+class Test_Sentinel2A_SingleGranule(BaseTestCases.TestAll):
     """
-    Parametrized testclass. Tests the level-processes on a Sentinel-2A MSI scene (pre-collection data).
-    More information on the dataset will be outputted after the tests-classes are executed.
+    Parametrized testclass. Tests the level-processes on a Sentinel-2A MSI scene (1 granule in archive: > 2017).
+    More information on the dataset will be output after the tests-classes are executed.
     """
     @classmethod
     def setUpClass(cls):
         cls.create_job(26186268, job_config_kwargs)
 
 
-class Test_Sentinel2A_PreCollectionData(BaseTestCases.TestAll):
+class Test_Sentinel2A_MultiGranule(BaseTestCases.TestAll):
     """
-    Parametrized testclass. Tests the level-processes on a Sentinel-2A MSI scene (collection data).
-    More information on the dataset will be outputted after the tests-classes are executed.
+    Parametrized testclass. Tests the level-processes on a Sentinel-2A MSI scene (multiple granules in archive: < 2017).
+    More information on the dataset will be output after the tests-classes are executed.
     """
     @classmethod
     def setUpClass(cls):
Daniel Scheffler (@danschef) mentioned in commit f1f24e11dc055ff97272c576704ab4e4283bf914 · Feb 06, 2018