Commit aca9a730 authored by Daniel Scheffler

Merge branch 'master' into feature/spectral_homogenization


Former-commit-id: 6f07aa50
Former-commit-id: b2348cdf
parents a33d52f2 2bee3d1b
@@ -61,7 +61,7 @@ test-all: ## run tests on every Python version with tox
 	tox

 coverage: clean-test ## check code coverage quickly with the default Python
-	coverage run --source gms_preprocessing setup.py test
+	coverage run --source gms_preprocessing --source bin setup.py test
 	coverage combine  # must be called in order to make coverage work in multiprocessing
 	coverage report -m
 	coverage html
@@ -71,8 +71,8 @@ nosetests: clean-test ## Runs nosetests with coverage, xUnit and nose-html-output
 	## - puts the coverage results in the folder 'htmlcov'
 	## - generates 'nosetests.html' (--with-html)
 	## - generates 'nosetests.xml' (--with-xunit) which is currently not visualizable by GitLab
-	nosetests -vv --with-coverage --cover-package=gms_preprocessing --cover-erase --cover-html --cover-html-dir=htmlcov \
-		--with-html --with-xunit --rednose --force-color
+	nosetests -vv --with-coverage --cover-package=gms_preprocessing --cover-package=bin --cover-erase --cover-html \
+		--cover-html-dir=htmlcov --with-html --with-xunit --rednose --force-color

 docs: ## generate Sphinx HTML documentation, including API docs
 	rm -f docs/gms_preprocessing.rst
......
@@ -3,6 +3,7 @@ __author__ = 'Daniel Scheffler'
 import argparse
 import warnings
+import os

 import matplotlib
@@ -10,6 +11,17 @@ matplotlib.use('Agg', warn=False)  # switch matplotlib backend to 'Agg' and disa...
 from gms_preprocessing import process_controller, __version__  # noqa: E402
 from gms_preprocessing.misc.database_tools import GMS_JOB  # noqa: E402
+from gms_preprocessing.options.config import get_conn_database  # noqa: E402
+from gms_preprocessing.options.config import path_options_default  # noqa: E402
+from gms_preprocessing.options.config import get_options  # noqa: E402
+from gms_preprocessing.options.config import get_config_kwargs_default  # noqa: E402
+
+options_default = get_options(path_options_default, validation=True)  # type: dict
+config_kwargs_default = get_config_kwargs_default()  # type: dict
+
+
+def get_config_kwargs_from_cli_args(cli_args):
+    return {k: v for k, v in cli_args.__dict__.items() if k in config_kwargs_default.keys()}


 def run_from_jobid(args):
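The helper added above simply intersects the parsed CLI namespace with the known configuration keywords. A minimal standalone sketch of that behaviour, with illustrative option names and defaults (the real keys come from get_config_kwargs_default(), not from this example):

import argparse

# illustrative stand-in for get_config_kwargs_default(); real keys come from the options module
config_kwargs_default = {'db_host': 'localhost', 'delete_old_output': False}

def get_config_kwargs_from_cli_args(cli_args):
    # keep only those CLI attributes that are also valid process_controller keyword arguments
    return {k: v for k, v in cli_args.__dict__.items() if k in config_kwargs_default.keys()}

ns = argparse.Namespace(jobid=123, db_host='geoms', delete_old_output=True)  # made-up values
print(get_config_kwargs_from_cli_args(ns))   # {'db_host': 'geoms', 'delete_old_output': True}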
@@ -19,12 +31,15 @@ def run_from_jobid(args):
     # TODO download: run only the downloader

     # set up process controller instance
-    PC = process_controller(args.jobid, parallelization_level='scenes', db_host='geoms')  # FIXME hardcoded host
-    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
-    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
+    PC = process_controller(args.jobid, **get_config_kwargs_from_cli_args(args))

     # run the job
-    PC.run_all_processors()
+    if 'GMS_IS_TEST' in os.environ and os.environ['GMS_IS_TEST'] == 'True':
+        # in case of software test, it is enough to get an instance of process controller because all inputs are
+        # validated within options.config.Job_Config (indirectly called by ProcessController.__init__() )
+        pass
+    else:
+        PC.run_all_processors()


 def run_from_sceneids(args):
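The new GMS_IS_TEST short-circuit is driven purely by the environment. A sketch (not part of the commit) of how a test run could enable it before calling any of the run_from_* functions:

import os

# Sketch only: enabling the test short-circuit before invoking the CLI functions.
os.environ['GMS_IS_TEST'] = 'True'   # run_from_jobid()/_run_job() then stop after creating the
                                     # process controller, so only the config validation in
                                     # options.config is exercised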
@@ -32,19 +47,12 @@ def run_from_sceneids(args):
     warnings.warn('Currently the console argument parser expects the given scenes as already downloaded.')  # TODO

     # create a new processing job from scene IDs
-    db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3"  # TODO
-    warnings.warn('Currently the console argument parser expects the database at localhost.')  # TODO
-    virtual_sensor_id = 1  # TODO
-    warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.')  # TODO
-    datasetid_spatial_ref = 249  # TODO
-    warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.')  # TODO
-
-    dbJob = GMS_JOB(db_connection)
+    dbJob = GMS_JOB(get_conn_database(args.db_host))
     dbJob.from_sceneIDlist(list_sceneIDs=args.sceneids,
-                           virtual_sensor_id=virtual_sensor_id,
-                           datasetid_spatial_ref=datasetid_spatial_ref,
-                           comment='')
-    _run_job(dbJob)
+                           virtual_sensor_id=args.virtual_sensor_id,
+                           datasetid_spatial_ref=args.datasetid_spatial_ref,
+                           comment=args.comment)
+    _run_job(dbJob, **get_config_kwargs_from_cli_args(args))


 def run_from_entityids(args):
...@@ -53,20 +61,12 @@ def run_from_entityids(args): ...@@ -53,20 +61,12 @@ def run_from_entityids(args):
:param args: :param args:
:return: :return:
""" """
dbJob = GMS_JOB(get_conn_database(args.db_host))
db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3" # TODO
warnings.warn('Currently the console argument parser expects the database at localhost.') # TODO
virtual_sensor_id = 1 # TODO
warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.') # TODO
datasetid_spatial_ref = 249 # TODO
warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.') # TODO
dbJob = GMS_JOB(db_connection)
dbJob.from_entityIDlist(list_entityids=args.entityids, dbJob.from_entityIDlist(list_entityids=args.entityids,
virtual_sensor_id=virtual_sensor_id, virtual_sensor_id=args.virtual_sensor_id,
datasetid_spatial_ref=datasetid_spatial_ref, datasetid_spatial_ref=args.datasetid_spatial_ref,
comment='') comment=args.comment)
_run_job(dbJob) _run_job(dbJob, **get_config_kwargs_from_cli_args(args))
def run_from_filenames(args): def run_from_filenames(args):
@@ -75,20 +75,12 @@ def run_from_filenames(args):
     :param args:
     :return:
     """
-    db_connection = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3"  # TODO
-    warnings.warn('Currently the console argument parser expects the database at localhost.')  # TODO
-    virtual_sensor_id = 1  # TODO
-    warnings.warn('Currently the console argument parser sets the virtual sensor ID to 1.')  # TODO
-    datasetid_spatial_ref = 249  # TODO
-    warnings.warn('Currently the console argument parser sets the dataset ID of the spatial reference to 249.')  # TODO
-
-    dbJob = GMS_JOB(db_connection)
+    dbJob = GMS_JOB(get_conn_database(args.db_host))
     dbJob.from_filenames(list_filenames=args.filenames,
-                         virtual_sensor_id=virtual_sensor_id,
-                         datasetid_spatial_ref=datasetid_spatial_ref,
-                         comment='')
-    _run_job(dbJob)
+                         virtual_sensor_id=args.virtual_sensor_id,
+                         datasetid_spatial_ref=args.datasetid_spatial_ref,
+                         comment=args.comment)
+    _run_job(dbJob, **get_config_kwargs_from_cli_args(args))


 def run_from_constraints(args):
@@ -97,32 +89,35 @@ def run_from_constraints(args):
     raise NotImplementedError


-def _run_job(dbJob, parallelization_level='scenes'):
+def _run_job(dbJob, **config_kwargs):
     # type: (GMS_JOB) -> None
     """
     :param dbJob:
     :return:
     """
-    # create a database record for the given job
     dbJob.create()
-    jobid = dbJob.id

     # set up process controller instance
-    warnings.warn("Currently the console argument parser sets the parallelization level to 'scenes'.")  # TODO
-    PC = process_controller(jobid, parallelization_level=parallelization_level)
-    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
-    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
+    PC = process_controller(dbJob.id, **config_kwargs)

     # run the job
-    PC.run_all_processors()
+    if 'GMS_IS_TEST' in os.environ and os.environ['GMS_IS_TEST'] == 'True':
+        # in case of software test, it is enough to get an instance of process controller because all inputs are
+        # validated within options.config.Job_Config (indirectly called by ProcessController.__init__() )
+        pass
+    else:
+        PC.run_all_processors()


 def get_gms_argparser():
     """Return argument parser for run_gms.py program."""

-    # CONFIGURE MAIN PARSER FOR THE GEOMULTISENS PREPROCESSING CHAIN
+    ##################################################################
+    # CONFIGURE MAIN PARSER FOR THE GEOMULTISENS PREPROCESSING CHAIN #
+    ##################################################################
+
     parser = argparse.ArgumentParser(
         prog='run_gms.py',
         description='=' * 70 + '\n' + 'GeoMultiSens preprocessing console argument parser. '
@@ -133,40 +128,95 @@ def get_gms_argparser():
     parser.add_argument('--version', action='version', version=__version__)

-    subparsers = parser.add_subparsers()
-
-    # CONFIGURE SUBPARSERS FOR THE GEOMULTISENS PREPROCESSING CHAIN
-    parser_jobid = subparsers\
-        .add_parser('jobid', description='Run a GeoMultiSens preprocessing job using an already existing job ID.',
-                    help="Run a GeoMultiSens preprocessing job using an already existing job ID (Sub-Parser).")
-
-    parser_sceneids = subparsers\
-        .add_parser('sceneids', description='Run a GeoMultiSens preprocessing job for a given list of scene IDs.',
-                    help="Run a GeoMultiSens preprocessing job for a given list of scene IDs (Sub-Parser).")
-
-    parser_entityids = subparsers\
-        .add_parser('entityids', description='Run a GeoMultiSens preprocessing job for a given list of entity IDs.',
-                    help="Run a GeoMultiSens preprocessing job for a given list of entity IDs (Sub-Parser).")
-
-    parser_filenames = subparsers\
-        .add_parser('filenames', description='Run a GeoMultiSens preprocessing job for a given list of filenames of '
-                                             'downloaded satellite image archives!',
-                    help="Run a GeoMultiSens preprocessing job for a given list of filenames of downloaded satellite "
-                         "image archives! (Sub-Parser).")
-
-    parser_constraints = subparsers\
-        .add_parser('constraints', description='Run a GeoMultiSens preprocessing job matching the given constraints.',
-                    help="Run a GeoMultiSens preprocessing job matching the given constraints (Sub-Parser).")
-
-    # parse_from_sceneids = subparsers.add_parser('from_sceneids',
-    #     description='Run a GeoMultiSens preprocessing job for a given list of scene IDs.',
-    #     help="use '>>> python /path/to/GeMultiSens/run_gms.py from_sceneids -h' for documentation and usage hints")
-
-    # ADD ARGUMENTS
+    #################################################################
+    # CONFIGURE SUBPARSERS FOR THE GEOMULTISENS PREPROCESSING CHAIN #
+    #################################################################
+
+    ##############################################
+    # define parsers containing common arguments #
+    ##############################################
+
+    general_opts_parser = argparse.ArgumentParser(add_help=False)
+    gop_p = general_opts_parser.add_argument
+
+    gop_p('-jc', '--json_config', nargs='?', type=str,
+          help='file path of a JSON file containing options. See here for an example: '
+               'https://gitext.gfz-potsdam.de/geomultisens/gms_preprocessing/'
+               'blob/master/gms_preprocessing/options/options_default.json')
+
+    # '-exec_L1AP': dict(nargs=3, type=bool, help="L1A Processor configuration",
+    #                    metavar=tuple("[run processor, write output, delete output]".split(' ')), default=[1, 1, 1]),
+
+    gop_p('-DH', '--db_host', nargs='?', type=str,
+          default=options_default["global_opts"]["db_host"],
+          help='host name of the server that runs the postgreSQL database')
+
+    gop_p('-DOO', '--delete_old_output', nargs='?', type=bool,
+          default=options_default["global_opts"]["delete_old_output"],
+          help='delete previously created output of the given job ID before running the job')
+
+    gop_p('-vid', '--virtual_sensor_id', type=int,
+          default=options_default["usecase"]["virtual_sensor_id"],
+          help='ID of the target (virtual) sensor')
+
+    gop_p('-dsid_spat', '--datasetid_spatial_ref', type=int,
+          default=options_default["usecase"]["datasetid_spatial_ref"],
+          help='dataset ID of the spatial reference')
+
+    gop_p('-c', '--comment', nargs='?', type=str,
+          default='',
+          help='comment concerning the job')
+
+    ##################
+    # add subparsers #
+    ##################
+
+    subparsers = parser.add_subparsers()
+
+    parser_jobid = subparsers.add_parser(
+        'jobid', parents=[general_opts_parser],
+        description='Run a GeoMultiSens preprocessing job using an already existing job ID.',
+        help="Run a GeoMultiSens preprocessing job using an already existing job ID (Sub-Parser).",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    parser_sceneids = subparsers.add_parser(
+        'sceneids', parents=[general_opts_parser],
+        description='Run a GeoMultiSens preprocessing job for a given list of scene IDs.',
+        help="Run a GeoMultiSens preprocessing job for a given list of scene IDs (Sub-Parser).",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    parser_entityids = subparsers.add_parser(
+        'entityids', parents=[general_opts_parser],
+        description='Run a GeoMultiSens preprocessing job for a given list of entity IDs.',
+        help="Run a GeoMultiSens preprocessing job for a given list of entity IDs (Sub-Parser).",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    parser_filenames = subparsers.add_parser(
+        'filenames', parents=[general_opts_parser],
+        description='Run a GeoMultiSens preprocessing job for a given list of filenames of '
+                    'downloaded satellite image archives!',
+        help="Run a GeoMultiSens preprocessing job for a given list of filenames of downloaded satellite "
+             "image archives! (Sub-Parser).",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    parser_constraints = subparsers.add_parser(
+        'constraints', parents=[general_opts_parser],
+        description='Run a GeoMultiSens preprocessing job matching the given constraints.',
+        help="Run a GeoMultiSens preprocessing job matching the given constraints (Sub-Parser).",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    #################
+    # ADD ARGUMENTS #
+    #################
+
+    ############################
+    # add individual arguments #
+    ############################
+
     # add arguments to parser_jobid
     jid_p = parser_jobid.add_argument
-    jid_p('jobid', type=int, help='job ID of an already created GeoMultiSens preprocessing job (must be present in the '
-                                  'jobs table of the database)')
+    jid_p('jobid', type=int, help='job ID of an already created GeoMultiSens preprocessing job '
+                                  '(must be present in the jobs table of the database)')

     # add arguments to parser_sceneids
     sid_p = parser_sceneids.add_argument
@@ -191,27 +241,10 @@ def get_gms_argparser():
     # con_p('constraints', nargs='+', type=str, help="list of entity IDs corresponding to valid records within the "
     #                                                "'scenes' table of the database")

-    # add general arguments  # TODO add these configs to each subparser
-    general_opts = {
-        '-db_host': dict(),
-        '-exec_mode': dict(nargs=3, type=bool, help="L1A Processor configuration",
-                           metavar=tuple("[run processor, write output, delete output]".split(' ')), default=[1, 1, 1]),
-        '-exec_L1AP': dict(),
-        '-exec_L1BP': dict(),
-        '-exec_L1CP': dict(),
-        '-exec_L2AP': dict(),
-        '-exec_L2BP': dict(),
-        '-exec_L2CP': dict(),
-        '-sub_multiProc': dict(),
-        '-exc_handler': dict(),
-        '-blocksize': dict(),
-        '-profiling': dict(),
-        '-bench_all': dict(),
-        '-bench_cloudMask': dict(),
-    }
-
-    # LINK PARSERS TO RUN FUNCTIONS
+    #################################
+    # LINK PARSERS TO RUN FUNCTIONS #
+    #################################
+
     parser_jobid.set_defaults(func=run_from_jobid)
     parser_sceneids.set_defaults(func=run_from_sceneids)
     parser_entityids.set_defaults(func=run_from_entityids)
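Because every sub-parser now inherits the common options via parents=[general_opts_parser], they can be combined with the sub-command specific positionals in a single call. A hedged usage sketch (the import path, scene IDs and option values below are assumptions, not taken from the commit):

from bin.run_gms import get_gms_argparser   # assumed import path; run_gms.py lives in bin/

args = get_gms_argparser().parse_args(
    ['sceneids', '26186263', '26186264',         # made-up scene IDs
     '--db_host', 'localhost',                   # common option from general_opts_parser
     '--virtual_sensor_id', '1',
     '--datasetid_spatial_ref', '249',
     '--comment', 'demo job'])
args.func(args)   # dispatches to run_from_sceneids(); requires a reachable GMS database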
......
...@@ -8,17 +8,21 @@ from . import algorithms # noqa: E402 ...@@ -8,17 +8,21 @@ from . import algorithms # noqa: E402
from . import io # noqa: E402 from . import io # noqa: E402
from . import misc # noqa: E402 from . import misc # noqa: E402
from . import processing # noqa: E402 from . import processing # noqa: E402
from . import config # noqa: E402 from . import options # noqa: F401 (imported but unused)
from .options import config # noqa: F401 (imported but unused)
from .options.config import set_config # noqa: F401 (imported but unused)
from .processing.process_controller import process_controller # noqa: E402 from .processing.process_controller import process_controller # noqa: E402
__author__ = """Daniel Scheffler""" __author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de' __email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.9.10' __version__ = '0.11.0'
__versionalias__ = '20171120.02' __versionalias__ = '20171205.01'
__all__ = ['algorithms', __all__ = ['algorithms',
'io', 'io',
'misc', 'misc',
'processing', 'processing',
'config', 'config' # only to keep compatibility with HU-INF codes
'options',
'set_config',
'process_controller', 'process_controller',
] ]
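With the config module moved into the options subpackage, downstream imports change accordingly. A small sketch of the old and new import locations (the package-level re-exports are taken from __all__ above, the absolute module path from the imports in this commit):

# old location, kept only for backwards compatibility with HU-INF code:
# from gms_preprocessing import config
# new locations introduced by this commit:
from gms_preprocessing.options.config import GMS_config as CFG
from gms_preprocessing import set_config, process_controller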
@@ -19,7 +19,7 @@ from py_tools_ds.geo.coord_trafo import pixelToLatLon
 from py_tools_ds.geo.map_info import mapinfo2geotransform
 from py_tools_ds.geo.projection import EPSG2WKT

-from ..config import GMS_config as CFG
+from ..options.config import GMS_config as CFG
 from . import geoprocessing as GEOP
 from ..io import output_writer as OUT_W
 from ..misc import helper_functions as HLP_F
@@ -143,7 +143,7 @@ class L1A_object(GMS_object):
             subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
             rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger, subset=subset)

-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = rasObj.Layerstacking(paths_files2stack)
             self.path_InFilePreprocessor = paths_files2stack[0]
         else:  # 'MEMORY' or physical output
@@ -162,7 +162,7 @@ class L1A_object(GMS_object):
             subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
             rasObj = GEOP.GEOPROCESSING(path_file2load, self.logger, subset=subset)

-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = gdalnumeric.LoadFile(path_file2load) if subset is None else \
                 gdalnumeric.LoadFile(path_file2load, rasObj.colStart, rasObj.rowStart, rasObj.cols, rasObj.rows)
             self.path_InFilePreprocessor = path_file2load
@@ -190,7 +190,7 @@ class L1A_object(GMS_object):
             data_arr = np.empty(data.shape + (len(self.LayerBandsAssignment),), data.dtype)
             data_arr[:, :, bidx] = data

-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = data_arr
         else:
             GEOP.ndarray2gdal(data_arr, path_output, geotransform=ds.GetGeoTransform(),
@@ -221,7 +221,7 @@ class L1A_object(GMS_object):
             data_arr = np.empty(data.shape + (len(self.LayerBandsAssignment),), data.dtype)
             data_arr[:, :, i] = data

-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = data_arr
         else:
             GEOP.ndarray2gdal(data_arr, path_output, direction=3)
@@ -271,7 +271,7 @@ class L1A_object(GMS_object):
     def calc_TOARadRefTemp(self, subset=None):
         """Convert DN, Rad or TOA_Ref data to TOA Reflectance, to Radiance or to Surface Temperature
-        (depending on CFG.usecase.conversion_type_optical and conversion_type_thermal).
+        (depending on CFG.target_radunit_optical and target_radunit_thermal).
         The function can be executed by a L1A_object representing a full scene or a tile. To process a file from disk
         in tiles, provide an item of self.tile_pos as the 'subset' argument."""
@@ -305,7 +305,7 @@ class L1A_object(GMS_object):
         for optical_thermal in ['optical', 'thermal']:
             if optical_thermal not in self.dict_LayerOptTherm.values():
                 continue
-            conv = getattr(CFG.usecase, 'conversion_type_%s' % optical_thermal)
+            conv = getattr(CFG, 'target_radunit_%s' % optical_thermal)
             conv = conv if conv != 'BOA_Ref' else 'TOA_Ref'
             assert conv in ['Rad', 'TOA_Ref', 'Temp'], 'Unsupported conversion type: %s' % conv
             arr_desc = self.arr_desc.split('/')[0] if optical_thermal == 'optical' else self.arr_desc.split('/')[-1]
@@ -330,7 +330,7 @@ class L1A_object(GMS_object):
                         inSaturated) if conv == 'TOA_Ref' else \
                     GEOP.DN2DegreesCelsius_fastforward(inArray, OFF, GAI, K1, K2, 0.95, inFill, inZero, inSaturated)
                 if conv == 'TOA_Ref':
-                    self.MetaObj.ScaleFactor = CFG.usecase.scale_factor_TOARef
+                    self.MetaObj.ScaleFactor = CFG.scale_factor_TOARef

             elif arr_desc == 'Rad':
                 raise NotImplementedError("Conversion Rad to %s is currently not supported." % conv)
@@ -349,16 +349,16 @@ class L1A_object(GMS_object):
                         '13 bands and it not clear for which bands the gains are provided.')
                     raise NotImplementedError("Conversion TOA_Ref to %s is currently not supported." % conv)
                 else:  # conv=='TOA_Ref'
-                    if self.MetaObj.ScaleFactor != CFG.usecase.scale_factor_TOARef:
-                        res = self.rescale_array(inArray, CFG.usecase.scale_factor_TOARef, self.MetaObj.ScaleFactor)
-                        self.MetaObj.ScaleFactor = CFG.usecase.scale_factor_TOARef
+                    if self.MetaObj.ScaleFactor != CFG.scale_factor_TOARef:
+                        res = self.rescale_array(inArray, CFG.scale_factor_TOARef, self.MetaObj.ScaleFactor)
+                        self.MetaObj.ScaleFactor = CFG.scale_factor_TOARef
                         self.log_for_fullArr_or_firstTile(
-                            'Rescaling Ref data to scaling factor %d.' % CFG.usecase.scale_factor_TOARef)
+                            'Rescaling Ref data to scaling factor %d.' % CFG.scale_factor_TOARef)
                     else:
                         res = inArray
                         self.log_for_fullArr_or_firstTile('The input data already represents TOA '
                                                           'reflectance with the desired scale factor of %d.'
-                                                          % CFG.usecase.scale_factor_TOARef)
+                                                          % CFG.scale_factor_TOARef)

             else:  # arr_desc == 'Temp'
                 raise NotImplementedError("Conversion Temp to %s is currently not supported." % conv)
@@ -390,8 +390,8 @@ class L1A_object(GMS_object):
         self.update_spec_vals_according_to_dtype('int16')
         tiles_desc = '_'.join([desc for op_th, desc in zip(['optical', 'thermal'],
-                                                           [CFG.usecase.conversion_type_optical,
-                                                            CFG.usecase.conversion_type_thermal])
+                                                           [CFG.target_radunit_optical,
+                                                            CFG.target_radunit_thermal])
                                if desc in self.dict_LayerOptTherm.values()])

         self.arr = dataOut
@@ -452,12 +452,12 @@ class L1A_object(GMS_object):
                                                dst_CS_datum='WGS84', mode='GDAL', use_workspace=True,
                                                inFill=self.MetaObj.spec_vals['fill'])

-        if CFG.job.exec_mode == 'Python':
+        if CFG.exec_mode == 'Python':
             path_warped = os.path.join(self.ExtractedFolder, self.baseN + '__' + self.arr_desc)
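The pattern throughout L1A_P.py is that the nested CFG.job.* and CFG.usecase.* namespaces are flattened into direct attributes of the config object, with conversion_type_* renamed to target_radunit_*. A stand-in sketch of the renaming (the class and its attribute values are illustrative only, not the real GMS_config):

class _FlatCFG:                          # stand-in for gms_preprocessing.options.config.GMS_config
    exec_mode = 'Python'                 # was CFG.job.exec_mode ('Python' or 'Flink')
    target_radunit_optical = 'TOA_Ref'   # was CFG.usecase.conversion_type_optical
    target_radunit_thermal = 'Temp'      # was CFG.usecase.conversion_type_thermal
    scale_factor_TOARef = 10000          # was CFG.usecase.scale_factor_TOARef (example value)

CFG = _FlatCFG()
assert CFG.exec_mode in ('Python', 'Flink')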