diff --git a/tests/data/config_for_testing_handler.yml b/tests/data/config_for_testing_handler.yml
new file mode 100644
index 0000000000000000000000000000000000000000..e2fb4156041c655be00eed1c8a65fca9ae3a41aa
--- /dev/null
+++ b/tests/data/config_for_testing_handler.yml
@@ -0,0 +1,25 @@
+model_name: esrm20
+output_path: path
+occupancies_to_run: residential
+exposure_entities_to_run: ABC, DEF, GHI
+exposure_entities_code:
+  ABC: ABC
+  DEF: DEF
+geographic_selection:
+  selection_mode: quadkeys
+  quadkeys_file: /path/to/quadkeys.txt
+cost_cases:
+  structural: total
+people_cases:
+  day: day
+  night: night
+  transit: transit
+output_format: OpenQuake_CSV
+buildings_to_export: OBM, remainder
+export_OBM_footprints: True
+database_gde_tiles:
+  host: host.somewhere.xx
+  dbname: some_database_name
+  username: some_username
+  password: some_password
+number_cores: 1
diff --git a/tests/data/test_database_set_up.sql b/tests/data/test_database_set_up.sql
index fdf96370a90bb1b773bd06f04c0cbd93d35a1549..f020eff11145e3aded5376c09c736308b49ed64f 100644
--- a/tests/data/test_database_set_up.sql
+++ b/tests/data/test_database_set_up.sql
@@ -112,8 +112,8 @@ VALUES ('122010321033023130', 2, 'residential', 'ABC', 'ABC_10269', 0.0, 0.0, 0.
 ('122010321033032123', 2, 'residential', 'ABC', 'ABC_10278', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
 ('122010321033032123', 2, 'commercial', 'ABC', 'ABC_10278', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
 ('122010321033032301', 2, 'commercial', 'ABC', 'ABC_10278', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
-('122010321033211220', 2, 'residential', 'DEF', 'DEF_00000', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
-('122010321033211220', 2, 'commercial', 'DEF', 'DEF_00000', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
+('122010321033023130', 2, 'residential', 'DEF', 'DEF_00000', 0.0, 0.0, 0.0, 0.0, 8.6, 0, 8.6),
+('122010321033023130', 2, 'commercial', 'DEF', 'DEF_00000', 0.0, 0.0, 0.0, 0.0, 8.6, 0, 8.6),
 ('122010321033023120', 2, 'residential', 'GHI', 'GHI_22222', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0);
 
 CREATE TABLE data_units_buildings
@@ -153,17 +153,20 @@ INSERT INTO data_units_buildings(building_class_name,
                                  total_cost_per_building,
                                  storeys_min,
                                  storeys_max)
-VALUES ('A1/HBET:1-3', 'urban', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.20, 0.0, 0.0, 1, 3),
+VALUES ('A1/HBET:1-3', 'urban', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.20, 5.0, 1750000.0, 1, 3),
 ('A2/HBET:4-6', 'urban', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.30, 10.5, 2500000.0, 4, 6),
-('A3/HBET:7-12', 'urban', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.10, 0.0, 0.0, 7, 12),
-('B1/HBET:1-3', 'rural', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.25, 0.0, 0.0, 1, 3),
-('B2/H:4', 'rural', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.15, 0.0, 0.0, 4, 4),
+('A3/HBET:7-12', 'urban', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.10, 30.0, 5000000.0, 7, 12),
+('B1/HBET:1-3', 'rural', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.25, 6.0, 1200000.0, 1, 3),
+('B2/H:4', 'rural', 'all', 2, 'ABC', 'residential', 'ABC_10269', 0.15, 8.0, 1900000.0, 4, 4),
 ('C1/HBET:1-2', 'urban', 'Hotels', 2, 'ABC', 'commercial', 'ABC_10269', 0.10, 0.0, 0.0, 1, 2),
 ('C2/HBET:3-', 'urban', 'Hotels', 2, 'ABC', 'commercial', 'ABC_10269', 0.25, 0.0, 0.0, 3, 9999),
 ('C3/H:1', 'urban', 'Trade', 2, 'ABC', 'commercial', 'ABC_10269', 0.05, 0.0, 0.0, 1, 1),                   
 ('C4/HBET:2-3', 'urban', 'Trade', 2, 'ABC', 'commercial', 'ABC_10269', 0.10, 0.0, 0.0, 2, 3),
 ('C5/HBET:1-2', 'urban', 'Offices', 2, 'ABC', 'commercial', 'ABC_10269', 0.20, 0.0, 0.0, 1, 2),
-('C6/HBET:3-5', 'urban', 'Offices', 2, 'ABC', 'commercial', 'ABC_10269', 0.30, 0.0, 0.0, 3, 5);
+('C6/HBET:3-5', 'urban', 'Offices', 2, 'ABC', 'commercial', 'ABC_10269', 0.30, 0.0, 0.0, 3, 5),
+('D1/HBET:1-3', 'urban', 'all', 2, 'DEF', 'residential', 'DEF_00000', 0.55, 5.0, 1600000.0, 1, 3),
+('D2/HBET:4-6', 'urban', 'all', 2, 'DEF', 'residential', 'DEF_00000', 0.40, 10.5, 2400000.0, 4, 6),
+('D3/HBET:7-12', 'urban', 'all', 2, 'DEF', 'residential', 'DEF_00000', 0.05, 30.0, 4800000.0, 7, 12);
 
 CREATE TABLE exposure_entities_costs_assumptions
 (
@@ -185,7 +188,8 @@ INSERT INTO exposure_entities_costs_assumptions(aggregated_source_id,
                                                 non_structural,
                                                 contents)
 VALUES (2, 'ABC', 'residential', 0.30, 0.50, 0.20),
-(2, 'ABC', 'commercial', 0.20, 0.30, 0.50);
+(2, 'ABC', 'commercial', 0.20, 0.30, 0.50),
+(2, 'DEF', 'residential', 0.20, 0.45, 0.35);
                     
 CREATE TABLE exposure_entities_population_time_distribution
 (
@@ -206,7 +210,8 @@ INSERT INTO exposure_entities_population_time_distribution(aggregated_source_id,
                                                            night,
                                                            transit)
 VALUES (2, 'ABC', 'residential', 0.2457, 0.9621, 0.6028),
-(2, 'ABC', 'commercial', 0.2863, 0.9736, 0.5662);
+(2, 'ABC', 'commercial', 0.2863, 0.9736, 0.5662),
+(2, 'DEF', 'residential', 0.2713, 0.9522, 0.5794);
 
 CREATE TABLE gde_buildings
 (
@@ -247,4 +252,109 @@ VALUES (-101010, 2, 'industrial', 'ABC_10269', '333333333333333333',
  '{"all", "all", "all"}',
  '{0.5, 0.2, 0.3}',
  ST_GeomFromText('POLYGON((0.0 0.0,0.002 0.0,0.002 0.003,0.0 0.003,0.0 0.0))')
+),
+(123456, 2, 'residential', 'ABC_10269', '122010321033023130',
+ '{"A1/HBET:1-3", "A2/HBET:4-6", "A3/HBET:7-12", "B1/HBET:1-3", "B2/H:4"}',
+ '{"urban", "urban", "urban", "rural", "rural"}',
+ '{"all", "all", "all", "all", "all"}',
+ '{0.2, 0.3, 0.1, 0.25, 0.15}',
+ ST_GeomFromText('POLYGON((15.0487 37.4812, 15.0489 37.481, 15.0486 37.4808, 15.0484 37.481, 15.0487 37.4812))')
+),
+(234567, 2, 'residential', 'ABC_10269', '122010321033023130',
+ '{"A1/HBET:1-3", "B1/HBET:1-3"}',
+ '{"urban", "rural"}',
+ '{"all", "all"}',
+ '{0.44444444444, 0.55555555556}',
+ ST_GeomFromText('POLYGON((15.0492 37.4808, 15.0492 37.4806, 15.049 37.4806, 15.049 37.4808, 15.0492 37.4808))')
+),
+(345678, 2, 'residential', 'ABC_10269', '122010321033023130',
+ '{"A2/HBET:4-6", "B2/H:4"}',
+ '{"urban", "rural"}',
+ '{"all", "all"}',
+ '{0.66666666667, 0.33333333333}',
+ ST_GeomFromText('POLYGON((15.0495 37.481, 15.0498 37.481, 15.0498 37.4808, 15.0495 37.4808, 15.0495 37.481))')
+),
+(456789, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A1/HBET:1-3", "A2/HBET:4-6", "A3/HBET:7-12", "B1/HBET:1-3", "B2/H:4"}',
+ '{"urban", "urban", "urban", "rural", "rural"}',
+ '{"all", "all", "all", "all", "all"}',
+ '{0.2, 0.3, 0.1, 0.25, 0.15}',
+ ST_GeomFromText('POLYGON((15.049061443811876 37.47989923645305, 15.049405425784684 37.48000508293291, 15.049454566066512 37.47978781894386, 15.0490754838924 37.47975439365871, 15.049061443811876 37.47989923645305))')
+),
+(567890, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A1/HBET:1-3", "B1/HBET:1-3"}',
+ '{"urban", "rural"}',
+ '{"all", "all"}',
+ '{0.44444444444, 0.55555555556}',
+ ST_GeomFromText('POLYGON((15.048682361637761 37.48000508293291, 15.04892104300665 37.47997722861064, 15.048864882684555 37.47978781894386, 15.048640241396193 37.47983238596748, 15.048682361637761 37.48000508293291))')
+),
+(678901, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A2/HBET:4-6", "B2/H:4"}',
+ '{"urban", "rural"}',
+ '{"all", "all"}',
+ '{0.66666666667, 0.33333333333}',
+ ST_GeomFromText('POLYGON((15.04955284663017 37.480133212681615, 15.04974940775749 37.48011092926284, 15.049728347636705 37.48000508293291, 15.049524766469125 37.480027366383254, 15.04955284663017 37.480133212681615))')
+),
+(789012, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.048671831577371 37.479701470259975, 15.048759582080637 37.479698684816896, 15.048759582080637 37.47964019048829, 15.0486753415975 37.47964019048829, 15.048671831577371 37.479701470259975))')
+),
+(890123, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.049057933791744 37.479651332268695, 15.049054423771615 37.479707041145815, 15.04918780453658 37.4797153974738, 15.049191314556712 37.47965968860291, 15.049057933791744 37.479651332268695))')
+),
+(901234, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.04882627246312 37.479690328487045, 15.048942103127436 37.47969311393042, 15.048935083087171 37.47963740504292, 15.04882978248325 37.47963740504292, 15.04882627246312 37.479690328487045))')
+),
+(111111, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.048626201315672 37.479403427261886, 15.04873150191959 37.47948977629018, 15.048833292503382 37.479445209062234, 15.048678851617632 37.479305936303554, 15.048626201315672 37.479403427261886))')
+),
+(222222, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.048857862644295 37.479408998169944, 15.04898071334887 37.47937557271534, 15.048956143207956 37.47930872176127, 15.048836802523514 37.47931707813379, 15.048857862644295 37.479408998169944))')
+),
+(333333, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.04892104300665 37.47948699083922, 15.04908952397292 37.479523201693695, 15.04908601395279 37.47945077996717, 15.048945613147565 37.47943963815686, 15.04892104300665 37.47948699083922))')
+),
+(444444, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.049114094113838 37.479381143625474, 15.04928959512037 37.47937835817046, 15.04928959512037 37.47930036538781, 15.049114094113838 37.479292009013406, 15.049114094113838 37.479381143625474))')
+),
+(555555, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.049321185301547 37.479523201693695, 15.049563376690562 37.479553841633795, 15.04959496687174 37.479467492679525, 15.0493387354022 37.479436852704026, 15.049321185301547 37.479523201693695))')
+),
+(666666, 2, 'residential', 'ABC_10269', '122010321033023132',
+ '{"A3/HBET:7-12"}',
+ '{"urban"}',
+ '{"all"}',
+ '{1.0}',
+ ST_GeomFromText('POLYGON((15.049591456851608 37.47941456907759, 15.049745897737358 37.47935885998243, 15.049661657254225 37.479272510802836, 15.04953529652952 37.47935328907063, 15.049591456851608 37.47941456907759))')
 );
diff --git a/tests/data/test_handler_expected_remainder_buildings.csv b/tests/data/test_handler_expected_remainder_buildings.csv
new file mode 100644
index 0000000000000000000000000000000000000000..8a7cf6e8a9c76d131adfdf5e70e1fa8e8b0e5510
--- /dev/null
+++ b/tests/data/test_handler_expected_remainder_buildings.csv
@@ -0,0 +1,14 @@
+id,lon,lat,taxonomy,number,structural,day,night,transit,occupancy,data_unit_id,quadkey
+quadkeys_list_residential_remainder_1,15.04921,37.48085,A1/HBET:1-3,2.54,4445000,3.12039,12.21867,7.65556,residential,ABC_10269,122010321033023130
+quadkeys_list_residential_remainder_2,15.04921,37.48085,A2/HBET:4-6,3.81,9525000,9.8292285,38.4888105,24.115014,residential,ABC_10269,122010321033023130
+quadkeys_list_residential_remainder_3,15.04921,37.48085,A3/HBET:7-12,1.27,6350000,9.36117,36.65601,22.96668,residential,ABC_10269,122010321033023130
+quadkeys_list_residential_remainder_4,15.04921,37.48085,B1/HBET:1-3,3.175,3810000,4.680585,18.328005,11.48334,residential,ABC_10269,122010321033023130
+quadkeys_list_residential_remainder_5,15.04921,37.48085,B2/H:4,1.905,3619500,3.744468,14.662404,9.186672,residential,ABC_10269,122010321033023130
+quadkeys_list_residential_remainder_6,15.04784,37.48085,A1/HBET:1-3,5.24,9170000,6.43734,25.20702,15.79336,residential,ABC_10269,122010321033023121
+quadkeys_list_residential_remainder_7,15.04784,37.48085,A2/HBET:4-6,7.86,19650000,20.277621,79.402113,49.749084,residential,ABC_10269,122010321033023121
+quadkeys_list_residential_remainder_8,15.04784,37.48085,A3/HBET:7-12,2.62,13100000,19.31202,75.62106,47.38008,residential,ABC_10269,122010321033023121
+quadkeys_list_residential_remainder_9,15.04784,37.48085,B1/HBET:1-3,6.55,7860000,9.65601,37.81053,23.69004,residential,ABC_10269,122010321033023121
+quadkeys_list_residential_remainder_10,15.04784,37.48085,B2/H:4,3.93,7467000,7.724808,30.248424,18.952032,residential,ABC_10269,122010321033023121
+quadkeys_list_residential_remainder_11,15.04921,37.48085,D1/HBET:1-3,4.73,7568000,6.416245,22.51953,13.70281,residential,DEF_00000,122010321033023130
+quadkeys_list_residential_remainder_12,15.04921,37.48085,D2/HBET:4-6,3.44,8256000,9.799356,34.393464,20.927928,residential,DEF_00000,122010321033023130
+quadkeys_list_residential_remainder_13,15.04921,37.48085,D3/HBET:7-12,0.43,2064000,3.49977,12.28338,7.47426,residential,DEF_00000,122010321033023130
diff --git a/tests/data/test_handler_expected_tile_geometries.csv b/tests/data/test_handler_expected_tile_geometries.csv
new file mode 100644
index 0000000000000000000000000000000000000000..2451445d5d4924488c73b6dffac174326f55126a
--- /dev/null
+++ b/tests/data/test_handler_expected_tile_geometries.csv
@@ -0,0 +1,5 @@
+quadkey;case;geometry;centroid_lon;centroid_lat
+122010321033023130;both;POLYGON ((15.04852294921875 37.480307245086024, 15.049896240234375 37.480307245086024, 15.049896240234375 37.48139702942733, 15.04852294921875 37.48139702942733, 15.04852294921875 37.480307245086024));15.049209594726562;37.480852137256676
+122010321033023121;only_remainder;POLYGON ((15.047149658203125 37.480307245086024, 15.04852294921875 37.480307245086024, 15.04852294921875 37.48139702942733, 15.047149658203125 37.48139702942733, 15.047149658203125 37.480307245086024));15.047836303710938;37.480852137256676
+122010321033023132;only_obm;POLYGON ((15.04852294921875 37.47921744485059, 15.049896240234375 37.47921744485059, 15.049896240234375 37.480307245086024, 15.04852294921875 37.480307245086024, 15.04852294921875 37.47921744485059));15.049209594726562;37.47976234496831
+122010321033023131;none;POLYGON ((15.049896240234375 37.480307245086024, 15.05126953125 37.480307245086024, 15.05126953125 37.48139702942733, 15.049896240234375 37.48139702942733, 15.049896240234375 37.480307245086024));15.050582885742188;37.480852137256676
diff --git a/tests/data/test_quadkeys.txt b/tests/data/test_quadkeys.txt
index 91d98d2dfc9f17a8857b0f7aa5d2780feae97c4b..35df096a79e7deed9485466fafc222e5107450a3 100644
--- a/tests/data/test_quadkeys.txt
+++ b/tests/data/test_quadkeys.txt
@@ -1,5 +1,3 @@
-120203220301101323,120220011012110003
-120232221130001023
-120210233222032112
-122100203301311323
-120210233222032112
+122010321033023130,122010321033023121
+122010321033023132
+122010321033023131
diff --git a/tests/test_configuration.py b/tests/test_configuration.py
index 8d2a2a27831b92f0f2846efce68f61dd83cabc28..0df0c88c77f88151edbef58fd5157837d0a26661 100644
--- a/tests/test_configuration.py
+++ b/tests/test_configuration.py
@@ -304,7 +304,7 @@ def test_Configuration_determine_quadkeys_to_process(test_db):
             "122010321033032123",
             "122010321033032301",
         ],
-        "DEF": ["122010321033211220"],
+        "DEF": ["122010321033023130"],
     }
 
     assert len(returned_config.quadkeys_to_process.keys()) == len(
@@ -332,11 +332,10 @@ def test_Configuration_determine_quadkeys_to_process(test_db):
 
     expected_quadkeys_to_process = {
         "quadkeys_list": [
-            "120203220301101323",
-            "120220011012110003",
-            "120232221130001023",
-            "120210233222032112",
-            "122100203301311323",
+            "122010321033023130",
+            "122010321033023121",
+            "122010321033023132",
+            "122010321033023131",
         ],
     }
 
@@ -362,7 +361,7 @@ def test_Configuration_determine_quadkeys_to_process(test_db):
 
     expected_quadkeys_to_process = {
         "ABC_10278": ["122010321033032123", "122010321033032301"],
-        "DEF_00000": ["122010321033211220"],
+        "DEF_00000": ["122010321033023130"],
     }
 
     assert len(returned_config.quadkeys_to_process.keys()) == len(
diff --git a/tests/test_database_queries.py b/tests/test_database_queries.py
index 8006261cdd57e804c562e13efaa8220493e2b217..b411a48f139774277770dc9fa8f5bf3d8407b054 100644
--- a/tests/test_database_queries.py
+++ b/tests/test_database_queries.py
@@ -150,14 +150,15 @@ def test_retrieve_data_unit_ids(test_db):
     )
 
     quadkeys = [
+        "122010321033023130",
         "122010321033023130",
         "122010321033023130",
         "122010321033023120",
         "122010321033023120",
     ]
-    exposure_entities = [["ABC"], ["DEF"], ["ABC", "GHI"], ["GHI"]]
-    occupancies = ["residential", "residential", "residential", "residential"]
-    expected = [["ABC_10269"], [], ["ABC_10269", "GHI_22222"], ["GHI_22222"]]
+    exposure_entities = [["ABC"], ["DEF"], ["GHI"], ["ABC", "GHI"], ["GHI"]]
+    occupancies = ["residential", "residential", "residential", "residential", "residential"]
+    expected = [["ABC_10269"], ["DEF_00000"], [], ["ABC_10269", "GHI_22222"], ["GHI_22222"]]
 
     for i, quadkey in enumerate(quadkeys):
         returned_data_unit_ids = DatabaseQueries.retrieve_data_unit_ids(
@@ -326,7 +327,7 @@ def test_get_exposure_entities_costs_assumptions(test_db):
     # Test case in which no matching entry is found in the database
     returned_cost_assumptions = DatabaseQueries.get_exposure_entities_costs_assumptions(
         cost_cases,
-        "DEF",
+        "GHI",
         "residential",
         2,
         config.database_gde_tiles,
@@ -385,7 +386,7 @@ def test_get_exposure_entities_population_time_distribution(test_db):
     returned_people_distribution = (
         DatabaseQueries.get_exposure_entities_population_time_distribution(
             people_cases,
-            "DEF",
+            "GHI",
             "commercial",
             2,
             config.database_gde_tiles,
diff --git a/tests/test_handler.py b/tests/test_handler.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f5d168f38c9cd47055890b196b3c4faa4c4ef94
--- /dev/null
+++ b/tests/test_handler.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022:
+#   Helmholtz-Zentrum Potsdam Deutsches GeoForschungsZentrum GFZ
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or (at
+# your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
+# General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see http://www.gnu.org/licenses/.
+
+import os
+import numpy
+import pandas
+import geopandas
+from shapely.wkt import loads
+from gdeexporter.configuration import Configuration
+from gdeexporter.handler import ExportHandler
+
+
+def test_ExportHandler_process_quadkey_occupancy_group(test_db):
+    """
+    This test uses a case similar to that used by 'test_to_openquake.py'. The main conceptual
+    difference between the two is that the present test relies on retrieving values from the
+    test database, while 'test_to_openquake.py' builds the attributes of the TileExposure
+    object just by reading from CSV files.
+
+    Main features of this test:
+    - It reads the overall configuration from config_for_testing_handler.yml.
+    - It requests that output be written in OpenQuake CSV format, even though the writing of
+    this format is tested in its own right in 'test_to_openquake.py'. Writing to a format is
+    necessary to verify the method thoroughly, as the summary values it returns are too general.
+    - Output files are written to a directory that is created by the test and later erased.
+    - It uses "residential" as the occupancy case.
+    - It consists of a larger full test followed by a second, shorter test.
+    - In the larger full test, geographic selection is done via a list of 4 quadkeys:
+      - 122010321033023130: associated with two data unit IDs, 'ABC_10269' of exposure entity
+      'ABC' and 'DEF_00000' of exposure entity 'DEF'. In the former it contains both OBM and
+      remainder buildings, while in the latter it only contains remainder buildings.
+      - 122010321033023121: associated with data unit ID 'ABC_10269' of exposure entity 'ABC',
+      which contains only remainder buildings.
+      - 122010321033023132: associated with data unit ID 'ABC_10269' of exposure entity 'ABC',
+      which contains only OBM buildings.
+      - 122010321033023131: associated with no data unit ID (i.e. there is no entry for this
+      quadkey in the 'data_unit_tiles' table of the test database).
+    - In the smaller test, geographic selection is done via one data unit ID, 'DEF_00000', which
+    results in only one quadkey being processed (122010321033023130).
+    """
+
+    # Tolerance for the percentual difference between returned and expected values
+    percent_diff_tolerance = 0.001  # %
+
+    # Database connection (the Configuration class will define the credentials based on whether
+    # the code is running in the CI or locally)
+    config = Configuration(
+        os.path.join(os.path.dirname(__file__), "data", "config_for_testing_handler.yml")
+    )
+    # Output path
+    output_path = os.path.join(
+        os.path.dirname(__file__), "data", "temp_test_process_quadkey_occupancy_group"
+    )
+    config.output_path = output_path
+    # Create temporary directory
+    os.mkdir(output_path)
+
+    # Quadkeys to process
+    config.geographic_selection["quadkeys_file"] = os.path.join(
+        os.path.dirname(__file__), "data", "test_quadkeys.txt"
+    )
+    config.determine_quadkeys_to_process(2, config.database_gde_tiles, "data_unit_tiles")
+
+    returned_summary_values = ExportHandler.process_quadkey_occupancy_group(
+        config, 2, ("quadkeys_list", "residential")
+    )
+
+    # Expected names of output files
+    prefix = "quadkeys_list_residential"
+    expected_name_remainder_buildings = "%s_remainder.csv" % (prefix)
+    expected_name_obm_buildings = "%s_OBM.csv" % (prefix)
+    expected_name_geometries_quadtiles = "%s_geometries_quadtiles.gpkg" % (prefix)
+    expected_name_geometries_obm = "%s_OBM_geometries_footprints.gpkg" % (prefix)
+
+    # Expected results (generated output files)
+    expected_results_path = os.path.join(os.path.dirname(__file__), "data")
+    expected_obm_buildings_output = "test_oq_expected_OBM_buildings_with_footprints_export.csv"
+    expected_remainder_buildings_output = "test_handler_expected_remainder_buildings.csv"
+    expected_quadtiles_geometries_output = "test_handler_expected_tile_geometries.csv"
+    expected_obm_geometries_output = "test_oq_input_OBM_geometries.csv"
+
+    # Check summary values
+    assert returned_summary_values["processed_quadkeys"] == 4
+    assert round(returned_summary_values["OBM_buildings"], 0) == 15
+    assert round(returned_summary_values["aggregated_buildings"], 1) == 84.9
+    assert round(returned_summary_values["remainder_buildings"], 1) == 47.5
+
+    # Check that output files that need to be created have been created
+    assert os.path.exists(os.path.join(output_path, expected_name_remainder_buildings))
+    assert os.path.exists(os.path.join(output_path, expected_name_obm_buildings))
+    assert os.path.exists(os.path.join(output_path, expected_name_geometries_quadtiles))
+    assert os.path.exists(os.path.join(output_path, expected_name_geometries_obm))
+
+    # Check contents of OpenQuake CSV file for remainder buildings
+    returned_remainder_buildings = pandas.read_csv(
+        os.path.join(output_path, expected_name_remainder_buildings),
+    )
+    expected_remainder_buildings = pandas.read_csv(
+        os.path.join(expected_results_path, expected_remainder_buildings_output),
+    )
+
+    assert returned_remainder_buildings.shape[0] == expected_remainder_buildings.shape[0]
+
+    for row_index_expected in range(expected_remainder_buildings.shape[0]):
+        # Find the corresponding row in 'returned_remainder_buildings'
+        row_index_returned = numpy.where(
+            numpy.logical_and(
+                returned_remainder_buildings["taxonomy"].to_numpy()
+                == expected_remainder_buildings.loc[row_index_expected, "taxonomy"],
+                returned_remainder_buildings["quadkey"].to_numpy()
+                == expected_remainder_buildings.loc[row_index_expected, "quadkey"],
+            )
+        )[0][0]
+
+        for column in expected_remainder_buildings.columns:
+            assert column in returned_remainder_buildings.columns
+
+            if column == "id":
+                # Incremental ID numbers can differ between the two
+                continue
+
+            if isinstance(
+                expected_remainder_buildings.loc[row_index_expected, column], str
+            ) or isinstance(expected_remainder_buildings.loc[row_index_expected, column], int):
+                assert (
+                    returned_remainder_buildings.loc[row_index_returned, column]
+                    == expected_remainder_buildings.loc[row_index_expected, column]
+                )
+            else:
+                percent_diff = 100.0 * abs(
+                    (
+                        returned_remainder_buildings.loc[row_index_returned, column]
+                        - expected_remainder_buildings.loc[row_index_expected, column]
+                    )
+                    / expected_remainder_buildings.loc[row_index_expected, column]
+                )
+                assert percent_diff < percent_diff_tolerance  # %
+
+    # Check contents of OpenQuake CSV file for OBM buildings
+    returned_obm_buildings = pandas.read_csv(
+        os.path.join(output_path, expected_name_obm_buildings),
+    )
+    expected_obm_buildings = pandas.read_csv(
+        os.path.join(expected_results_path, expected_obm_buildings_output),
+    )
+
+    assert returned_obm_buildings.shape[0] == expected_obm_buildings.shape[0]
+
+    for row_index_expected in range(expected_obm_buildings.shape[0]):
+        # Find the corresponding row in 'returned_obm_buildings'
+        row_index_returned = numpy.where(
+            numpy.logical_and(
+                returned_obm_buildings["taxonomy"].to_numpy()
+                == expected_obm_buildings.loc[row_index_expected, "taxonomy"],
+                returned_obm_buildings["osm_id"].to_numpy()
+                == expected_obm_buildings.loc[row_index_expected, "osm_id"],
+            )
+        )[0][0]
+
+        for column in expected_obm_buildings.columns:
+            assert column in returned_obm_buildings.columns
+
+            if column == "id":
+                # Incremental ID numbers can differ between the two
+                continue
+
+            if isinstance(
+                expected_obm_buildings.loc[row_index_expected, column], str
+            ) or isinstance(expected_obm_buildings.loc[row_index_expected, column], int):
+                assert (
+                    returned_obm_buildings.loc[row_index_returned, column]
+                    == expected_obm_buildings.loc[row_index_expected, column]
+                )
+            else:
+                percent_diff = 100.0 * abs(
+                    (
+                        returned_obm_buildings.loc[row_index_returned, column]
+                        - expected_obm_buildings.loc[row_index_expected, column]
+                    )
+                    / expected_obm_buildings.loc[row_index_expected, column]
+                )
+                assert percent_diff < percent_diff_tolerance  # %
+
+    # Check contents of file with quadtile geometries
+    returned_quadtiles_geometries = geopandas.read_file(
+        os.path.join(output_path, expected_name_geometries_quadtiles)
+    )
+    expected_quadtiles_geometries = pandas.read_csv(
+        os.path.join(expected_results_path, expected_quadtiles_geometries_output),
+        sep=";",
+        dtype={"quadkey": str},
+    )
+    assert returned_quadtiles_geometries.shape[0] == expected_quadtiles_geometries.shape[0]
+    for j, quadkey in enumerate(expected_quadtiles_geometries["quadkey"].values):
+        assert quadkey in returned_quadtiles_geometries["quadkey"].values
+        filter = returned_quadtiles_geometries["quadkey"] == quadkey
+        returned_bounds = returned_quadtiles_geometries[filter]["geometry"].values[0].bounds
+        expected_bounds = loads(expected_quadtiles_geometries["geometry"].values[j]).bounds
+        for bound in range(4):
+            assert round(returned_bounds[bound], 5) == round(expected_bounds[bound], 5)
+
+    # Check contents of file with OBM geometries
+    returned_obm_geometries = geopandas.read_file(
+        os.path.join(output_path, expected_name_geometries_obm)
+    )
+
+    expected_obm_geometries = pandas.read_csv(
+        os.path.join(expected_results_path, expected_obm_geometries_output),
+        sep=";",
+        dtype={"osm_id": str},
+    )
+    assert returned_obm_geometries.shape[0] == expected_obm_geometries.shape[0]
+    for j, osm_id in enumerate(expected_obm_geometries["osm_id"].values):
+        assert osm_id in returned_obm_geometries["osm_id"].values
+        filter = returned_obm_geometries["osm_id"] == osm_id
+        returned_bounds = returned_obm_geometries[filter]["geometry"].values[0].bounds
+        expected_bounds = loads(expected_obm_geometries["geometry"].values[j]).bounds
+        for bound in range(4):
+            assert round(returned_bounds[bound], 5) == round(expected_bounds[bound], 5)
+
+    # Delete created output files
+    os.remove(os.path.join(output_path, expected_name_remainder_buildings))
+    os.remove(os.path.join(output_path, expected_name_obm_buildings))
+    os.remove(os.path.join(output_path, expected_name_geometries_quadtiles))
+    os.remove(os.path.join(output_path, expected_name_geometries_obm))
+
+    # Re-run a shorter version of the test, simulating a geographic selection by data unit ID
+    config.geographic_selection["selection_mode"] = "data_unit_id"
+    config.geographic_selection["data_unit_ids"] = ["DEF_00000"]
+    config.determine_quadkeys_to_process(2, config.database_gde_tiles, "data_unit_tiles")
+
+    returned_summary_values = ExportHandler.process_quadkey_occupancy_group(
+        config, 2, ("DEF_00000", "residential")
+    )
+
+    # Expected names of output files
+    prefix = "DEF_00000_residential"
+    expected_name_remainder_buildings = "%s_remainder.csv" % (prefix)
+    expected_name_geometries_quadtiles = "%s_geometries_quadtiles.gpkg" % (prefix)
+
+    # Check summary values
+    assert returned_summary_values["processed_quadkeys"] == 1
+    assert round(returned_summary_values["OBM_buildings"], 0) == 0
+    assert round(returned_summary_values["aggregated_buildings"], 1) == 8.6
+    assert round(returned_summary_values["remainder_buildings"], 1) == 8.6
+
+    # Check that output files that need to be created have been created
+    assert os.path.exists(os.path.join(output_path, expected_name_remainder_buildings))
+    assert os.path.exists(os.path.join(output_path, expected_name_geometries_quadtiles))
+
+    # Delete created output files
+    os.remove(os.path.join(output_path, expected_name_remainder_buildings))
+    os.remove(os.path.join(output_path, expected_name_geometries_quadtiles))
+
+    # Delete temporary directory
+    os.rmdir(output_path)
diff --git a/tests/test_to_openquake.py b/tests/test_to_openquake.py
index 24e354e8f8199ddf8263a2596a265a88bbb98b10..ef608b65524e98f8e1de0102d78b42862065ff59 100644
--- a/tests/test_to_openquake.py
+++ b/tests/test_to_openquake.py
@@ -25,6 +25,31 @@ from gdeexporter.tileexposure import TileExposure
 
 
 def test_export_to_OpenQuake_CSV():
+    """
+    This test uses a case similar to that used by 'test_handler.py'. The main conceptual
+    difference between the two is that the present test builds the attributes of the
+    TileExposure object just by reading from CSV files, instead of retrieving values from the
+    test database (i.e. the present test focuses on exporting to the OpenQuake CSV format and
+    not on how the exported values are calculated).
+
+    Main features of this test:
+    - Output files are written to a directory that is created by the test and later erased.
+    - It uses "residential" as the occupancy case.
+    - Geographic selection is not done, as a Configuration object is not needed.
+    - It covers 3 quadkeys:
+      - 122010321033023130: associated with data unit ID 'ABC_10269' of exposure entity 'ABC',
+      which contains both OBM and remainder buildings.
+      - 122010321033023121: associated with data unit ID 'ABC_10269' of exposure entity 'ABC',
+      which contains only remainder buildings.
+      - 122010321033023132: associated with data unit ID 'ABC_10269' of exposure entity 'ABC',
+      which contains only OBM buildings.
+    - It considers one case in which 'export_OBM_footprints' is True (the file with OBM
+    geometries is created, the points allocated to the OBM buildings are the centroids of their
+    footprints and their real OSM ID is used) and another in which it is set to False (the file
+    with OBM geometries is not created, the points allocated to the OBM buildings are the
+    centroids of the quadtiles to which they belong and a fictitious OSM ID is used).
+    """
+
     # User-defined costs and people columns
     cost_cases = {"structural": "total"}
     people_cases = {"day": "day", "night": "night", "transit": "transit"}