Commit 415ff4eb authored by g-weatherill's avatar g-weatherill

Merge remote-tracking branch 'upstream/master'

parents cd06865f 8f20ffc5
Pipeline #21661 passed with stage
in 8 minutes and 48 seconds
......@@ -13,6 +13,7 @@ setup(
install_requires=[
"openquake.engine",
"geopandas",
"Rtree",
],
packages=find_packages(),
python_requires=">=3.7",
......
"""
Classes to manage the regionalisation of ground motion models and the selection of the
ground motion model set to be used for a given earthquake
"""
from __future__ import annotations
import os
import json
import rtree
import numpy as np
import pandas as pd
import geopandas as gpd
from typing import Union, Dict, Tuple, Optional, List
from pyproj import Transformer
from shapely import geometry
from openquake.hazardlib.gsim import get_available_gsims
from shakyground2 import valid
from shakyground2.earthquake import Earthquake
# For point in polygon tests need to transform geodetic coordinates into Cartesian. For this
# we use the World Equidistant Cylindrical Projection (EPSG 4087)
transformer_world_equidistant = Transformer.from_crs("EPSG:4326", "EPSG:4087", always_xy=True)
# Full GMPE set
GSIM_LIST = get_available_gsims()
# Default Regionalisation for shallow and deep regions
DEFAULT_REGIONALIZATION = {
"default shallow": [
("ASK14", GSIM_LIST["AbrahamsonEtAl2014"](), 0.25),
("BSSA14", GSIM_LIST["BooreEtAl2014"](), 0.25),
("CB14", GSIM_LIST["CampbellBozorgnia2014"](), 0.25),
("CY14", GSIM_LIST["ChiouYoungs2014"](), 0.25),
],
"default deep": [
("BCHydroSlabLow", GSIM_LIST["AbrahamsonEtAl2015SSlabLow"](), 0.2),
("BCHydroSlab", GSIM_LIST["AbrahamsonEtAl2015SSlab"](), 0.6),
("BCHydroSlabHigh", GSIM_LIST["AbrahamsonEtAl2015SSlabHigh"](), 0.2),
],
}
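# Each regionalization maps a region name to a list of (GMM ID, instantiated OpenQuake
# GSIM, weight) tuples; the weights for each region sum to 1.0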
class Regionalization(object):
"""
A regionalisation is defined as a set of polygons, each of which is associated with a
set of ground motion models and their respective weights. This class manages each
regionalisation and, in particular, the identification of the appropriate ground
motion model set given the location of an earthquake.
A regionalisation is a three-dimensional problem, so the regions must also be
associated with an upper and a lower depth.
The geometry of the regionalisation is assumed to be input as a set of polygons with
coordinates given in terms of the WGS84 global geodetic coordinate system
Attributes:
name: A unique name describing the regionalisation
regions: The regionalisation information as a geopandas GeoDataFrame containing the
columns [id, REGION, UPPER DEPTH, LOWER DEPTH, geometry]
gsims: Dictionary of ground motion models per region in the regionalisation and the
corresponding weights
cartesian_regions: The regionalisation reprojected into a Cartesian framework, as an
instance of :class:`geopandas.GeoDataFrame`
tree: For efficient selection of the region to which the earthquake belongs, an rtree
spatial index is used. Maps the polygons to a corresponding rtree.index.Index
object
"""
def __init__(self, name: str, regions: gpd.GeoDataFrame, gsim_mapping: Dict = {}):
"""
Instantiates the Regionalization from a complete set of regions and ground motion
model mapping
Args:
name: A unique name describing the regionalisation
regions: The regionalisation information as a geopandas GeoDataFrame containing the
columns [id, REGION, UPPER DEPTH, LOWER DEPTH, geometry]
gsim_mapping: Dictionary of ground motion models per region in the regionalisation
and the corresponding weights
"""
self.name = name
self.regions, self.gsims = valid.regionalization(regions, gsim_mapping)
self.cartesian_regions = regions.to_crs({"init": "epsg:4087"})
# Setup the rtree
self.tree = rtree.index.Index()
for i, geom in enumerate(self.cartesian_regions.geometry):
self.tree.insert(i, geom.bounds)
def __repr__(self):
# Returns a simple summary of the regionalization characteristics
return "{:s} ({:g} Polygons - BBOX [{:.4f}, {:.4f}, {:.4f}, {:.4f}])".format(
self.name,
len(self),
self.bounds[0],
self.bounds[1],
self.bounds[2],
self.bounds[3],
)
def __len__(self):
# Returns the number of regions in the regionalisation
return self.regions.shape[0]
def __getitem__(self, key: Union[int, str]) -> Union[pd.Series, gpd.GeoSeries]:
"""
Returns the column of the regions GeoDataFrame if called with a string, or the
specific row if called with an integer, otherwise raises a TypeError
Args:
key: Either the Region attribute (column) from the dataframe or an integer to
retrieve a specific region (row)
Returns:
Column or row from the dataframe
"""
if isinstance(key, int):
return self.regions.iloc[key, :]
elif isinstance(key, str):
return self.regions[key]
else:
raise TypeError("Unrecognised data type %s used for indexing" % type(key))
def __contains__(self, earthquake: Earthquake):
"""
Determines if an earthquake object is within the bounds of the
region set
Args:
earthquake: An earthquake represented by the
:class:`shakyground2.earthquake.Earthquake`
Returns:
True (if the earthquake is within the bounding box of the regionalisation) or
False otherwise
"""
llon, llat, ulon, ulat = self.bounds
return (
(earthquake.lon >= llon)
& (earthquake.lon <= ulon)
& (earthquake.lat >= llat)
& (earthquake.lat <= ulat)
)
def __call__(self, earthquake: Earthquake) -> Tuple[Optional[str], Optional[Dict]]:
"""
Returns the tectonic region and corresponding set of ground motion models and weights
to be used for the earthquake depending on the region in which the earthquake falls
Args:
earthquake: An earthquake represented by the
:class:`shakyground2.earthquake.Earthquake`
Returns:
region: The name of the region to which the earthquake belongs, or None if the
earthquake is not within the regionalization
gmm: The ground motion model set (with weights) of the region to which the
earthquake belongs, or None if the earthquake is not within the regionalization
"""
if earthquake not in self:
return None, None
# Transform event lon, lat into Cartesian x, y and store as a shapely Point object
eqx, eqy = transformer_world_equidistant.transform(earthquake.lon, earthquake.lat)
eqxy = geometry.Point(eqx, eqy)
for idx in self.tree.intersection(eqxy.bounds):
depth_idx = (earthquake.depth >= self.regions["UPPER DEPTH"][idx]) and (
earthquake.depth <= self.regions["LOWER DEPTH"][idx]
)
if depth_idx and self.cartesian_regions.geometry[idx].contains(eqxy):
# Earthquake within the depth range and within the zone
region = self[idx].REGION
return region, self.gsims[region]
# In theory this can only happen if the earthquake is within the
# bounding box of the region but outside of the depth range
return None, None
@property
def bounds(self) -> np.ndarray:
# Bounding box of the entire regionalisation
return self.regions.total_bounds
@classmethod
def from_json(cls, name: str, geojson_file: str, gsim_mapping_file: str) -> Regionalization:
"""
Construct the Regionalization from a json representation of the regionalization
and the ground motion model mapping
Args:
name: Name of regionalization
geojson_file: Path to the geojson file containing the region geometries and
related attributes
gsim_mapping_file: Path to the json file containing the ground motion model
mappings
"""
dframe = gpd.GeoDataFrame.from_file(geojson_file, driver="GeoJSON")
# Load the ground motion model mapping from file
with open(gsim_mapping_file, "r") as f:
gsim_mapping = json.load(f)
return cls(name, dframe, gsim_mapping)
class RegionalizationSet(object):
"""
A Regionalization defines a set of geographical regions and their associated set of
ground motion models and weights, but a comprehensive partition of a target region (which
may correspond to the entire globe) may require multiple regionalizations to be defined.
For example, a particular regionalization may be required for a country, defining
different region types or different ground motion model sets from those defined
elsewhere. The RegionalizationSet represents a set of regionalizations,
the order of which defines the priority in which they are applied.
As an example:
Consider three regionalizations: A) regions within a country, B) regions within a
continent to which country A belongs, C) a set of regions spanning the globe.
If the regionalizations are input in the order A B C, then an earthquake falling within
country A will be subject to the regionalization of A rather than B or C even though it
falls within the domain covered by all three. If the order were reversed (C B A) then the
global regionalization would be applied to the earthquake even if it fell within the
domains covered by A and B.
If the earthquake does not fall within the domain of any of the regions in the set then
a "default" regionalisation is applied, which depends on whether the earthquake is
"shallow" (depth <= 40 km) or "deep" (> 40 km).
For the "shallow" regionalization the four primary NGA West 2 ground motion models are
adopted with equal weighting (Abrahamson et al., 2014; Boore et al., 2014;
Campbell & Bozorgnia, 2014; Chiou & Youngs, 2014)
For the "deep" regionalisation the subduction inslab ground motion model of
Abrahamson et al. (2016) is adopted, with the additional epistemic uncertainty factors.
Attributes:
regionalizations: A set of regionalizations as a list of :class:`Regionalization`
"""
def __init__(self, regionalizations):
self.regionalizations = regionalizations
@classmethod
def from_json(
cls, names: List, regionalization_files: List, gsim_mapping_files: List
) -> RegionalizationSet:
# Check that the numbers of names, regionalization files and GSIM mapping files match
assert len(names) == len(regionalization_files) == len(gsim_mapping_files)
# Before importing the model, check that all files are present
for regionalization_file, gsim_mapping_file in zip(
regionalization_files, gsim_mapping_files
):
if not os.path.exists(regionalization_file):
raise IOError("Regionalization file %s not found" % regionalization_file)
if not os.path.exists(gsim_mapping_file):
raise IOError("GSIM mapping file %s not found" % gsim_mapping_file)
# Load in the regionalizations
regionalizations = []
for name, regionalization_file, mapping_file in zip(
names, regionalization_files, gsim_mapping_files
):
regionalizations.append(
Regionalization.from_json(name, regionalization_file, mapping_file)
)
return cls(regionalizations)
def __len__(self):
return len(self.regionalizations)
def __iter__(self):
for regionalization in self.regionalizations:
yield regionalization
def __call__(self, earthquake: Earthquake):
"""
Returns the tectonic region and corresponding set of ground motion models and weights
to be used for the earthquake depending on the region in which the earthquake falls.
If no region is defined then a default region type and ground motion model set is
assigned depending on whether the earthquake is "shallow" (depth <= 40 km) or "deep" (depth > 40 km)
Args:
earthquake: An earthquake represented by the
:class:`shakyground2.earthquake.Earthquake`
Returns:
region: The name of the region to which the earthquake belongs, or the name of the
default region ("default shallow" or "default deep") if the earthquake is not within
any regionalization
gmm: The ground motion model set (with weights) of the region to which the earthquake
belongs, or the corresponding default set if the earthquake is not within any
regionalization
"""
for regionalization in self:
region, gmms = regionalization(earthquake)
if region and gmms:
return region, gmms
# If earthquake is not assigned to any zone then use the default ground motion model
# set, depending on whether the earthquake depth is shallow or deep
if earthquake.depth > 40.0:
default_reg = "default deep"
else:
default_reg = "default shallow"
return default_reg, DEFAULT_REGIONALIZATION[default_reg]
# Path to data file directory
REGIONALIZATION_DIRECTORY = os.path.join(os.path.dirname(__file__), "regionalization_files")
# Path to default regionalization data files
DEFAULT_GLOBAL_REGIONALIZATION_REGIONS = [
os.path.join(REGIONALIZATION_DIRECTORY, "germany_v4.geojson"), # Germany
os.path.join(REGIONALIZATION_DIRECTORY, "eshm20_all.geojson"), # ESHM20
os.path.join(REGIONALIZATION_DIRECTORY, "volcanic.geojson"), # Global volcanic zones
os.path.join(REGIONALIZATION_DIRECTORY, "stable.geojson"), # Global stable regions
]
# Corresponding default GSIM mapping files
DEFAULT_GLOBAL_GSIM_MAPPING = [
os.path.join(REGIONALIZATION_DIRECTORY, "germany_gsim_mapping.json"),
os.path.join(REGIONALIZATION_DIRECTORY, "eshm20_gmm_mapping.json"),
os.path.join(REGIONALIZATION_DIRECTORY, "global_volcanic_mapping.json"),
os.path.join(REGIONALIZATION_DIRECTORY, "global_stable_mapping.json"),
]
# Default global regionalization
DEFAULT_GLOBAL_REGIONALIZATION = RegionalizationSet.from_json(
["Germany", "ESHM20", "Global Volcanic", "Global Stable"], # Name set
DEFAULT_GLOBAL_REGIONALIZATION_REGIONS, # Geographical region set
DEFAULT_GLOBAL_GSIM_MAPPING, # GSIM mapping set
)
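# Usage sketch (illustrative, not part of the original module): assuming `event` is an
# instance of shakyground2.earthquake.Earthquake, the region and its ground motion model
# set are retrieved by calling the regionalization set directly:
#
#     region, gmms = DEFAULT_GLOBAL_REGIONALIZATION(event)
#     for gmm_id, gsim, weight in gmms:
#         print(region, gmm_id, gsim.__class__.__name__, weight)
#
# If the event falls outside every region then the "default shallow" or "default deep"
# set from DEFAULT_REGIONALIZATION is returned, depending on the event depth.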
{"Germany": [{"id": "Akkar2014LowStress", "model": "AkkarEtAlRhyp2014", "adjustment_factor": 0.75, "weight": 0.02331}, {"id": "Akkar2014AveStress", "model": "AkkarEtAlRhyp2014", "weight": 0.05994}, {"id": "Akkar2014HighStress", "model": "AkkarEtAlRhyp2014", "adjustment_factor": 1.25, "weight": 0.05994}, {"id": "Akkar2014VHighStress", "model": "AkkarEtAlRhyp2014", "adjustment_factor": 1.5, "weight": 0.02331}, {"id": "Bindi2014LowStress", "model": "BindiEtAl2014Rhyp", "adjustment_factor": 0.75, "weight": 0.02338}, {"id": "Bindi2014AveStress", "model": "BindiEtAl2014Rhyp", "weight": 0.06012}, {"id": "Bindi2014HighStress", "model": "BindiEtAl2014Rhyp", "adjustment_factor": 1.25, "weight": 0.06012}, {"id": "Bindi2014VHighStress", "model": "BindiEtAl2014Rhyp", "adjustment_factor": 1.5, "weight": 0.02338}, {"id": "Cauzzi2015LowStress", "model": "CauzziEtAl2014RhypoGermany", "adjustment_factor": 0.75, "weight": 0.02331}, {"id": "Cauzzi2015AveStress", "model": "CauzziEtAl2014RhypoGermany", "weight": 0.05994}, {"id": "Cauzzi2015HighStress", "model": "CauzziEtAl2014RhypoGermany", "adjustment_factor": 1.25, "weight": 0.05994}, {"id": "Cauzzi2015VHighStress", "model": "CauzziEtAl2014RhypoGermany", "adjustment_factor": 1.5, "weight": 0.02331}, {"id": "Derras2014LowStress", "model": "DerrasEtAl2014RhypoGermany", "adjustment_factor": 0.75, "weight": 0.035}, {"id": "Derras2014AveStress", "model": "DerrasEtAl2014RhypoGermany", "weight": 0.09}, {"id": "Derras2014HighStress", "model": "DerrasEtAl2014RhypoGermany", "adjustment_factor": 1.25, "weight": 0.09}, {"id": "Derras2014VHighStress", "model": "DerrasEtAl2014RhypoGermany", "adjustment_factor": 1.5, "weight": 0.035}, {"id": "Bindi2017LowStress", "model": "BindiEtAl2017Rhypo", "adjustment_factor": 0.75, "weight": 0.035}, {"id": "Bindi2017AveStress", "model": "BindiEtAl2017Rhypo", "weight": 0.09}, {"id": "Bindi2017HighStress", "model": "BindiEtAl2017Rhypo", "adjustment_factor": 1.25, "weight": 0.09}, {"id": "Bindi2017VHighStress", "model": "BindiEtAl2017Rhypo", "adjustment_factor": 1.5, "weight": 0.035}]}
{"GLOBAL STABLE": [{"id": "GlobalCratonLowStressLowSite", "model": "ESHM20Craton", "epsilon": -1.732051, "site_epsilon": -1.732051, "weight": 0.027889}, {"id": "GlobalCratonLowStressAveSite", "model": "ESHM20Craton", "epsilon": -1.732051, "weight": 0.111222}, {"id": "GlobalCratonLowStressHighSite", "model": "ESHM20Craton", "epsilon": -1.732051, "site_epsilon": 1.732051, "weight": 0.027889}, {"id": "GlobalCratonAveStressLowSite", "model": "ESHM20Craton", "site_epsilon": -1.732051, "weight": 0.111222}, {"id": "GlobalCratonAveStressAveSite", "model": "ESHM20Craton", "weight": 0.443556}, {"id": "GlobalCratonAveStressHighSite", "model": "ESHM20Craton", "site_epsilon": 1.732051, "weight": 0.111222}, {"id": "GlobalCratonHighStressLowSite", "model": "ESHM20Craton", "epsilon": 1.732051, "site_epsilon": -1.732051, "weight": 0.027889}, {"id": "GlobalCratonHighStressAveSite", "model": "ESHM20Craton", "epsilon": 1.732051, "weight": 0.111222}, {"id": "GlobalCratonHighStressHighSite", "model": "ESHM20Craton", "epsilon": 1.732051, "site_epsilon": 1.732051, "weight": 0.027889}]}
{"GLOBAL VOLCANIC": [{"id": "Faccioli2010", "model": "FaccioliEtAl2010", "weight": 1.0}]}
......@@ -31,7 +31,7 @@ class Shakemap(object):
self,
earthquake: Earthquake,
site_model: SiteModel,
ground_motion_model: Dict,
ground_motion_model: List,
tectonic_region: str,
cache_file: Optional[str] = None,
num_rupture_samples: int = 100,
......@@ -48,8 +48,9 @@ class Shakemap(object):
Target sites used for calculation of the ground motion values, as instances of
:class:shakyground2.site_model.SiteModel
ground_motion_model:
Set of ground motion models and their respective weights as a dictionary
{"GMMs": [...], "weights": [...]}
Set of ground motion models and their respective weights as a list of tuples
of (ID, GMM, weight), where ID is the unique identifier of the GMM, GMM is the ground
motion model instance and weight is its associated weight
tectonic_region:
Tectonic region to which the earthquake is assigned
cache_file:
......@@ -116,7 +117,9 @@ class Shakemap(object):
Construct the rupture, site and distances contexts from the earthquake, site and
ground motion models
"""
cmaker = ContextMaker(self.tectonic_region, self.ground_motion_model["GMMs"])
cmaker = ContextMaker(
self.tectonic_region, [gmm[1] for gmm in self.ground_motion_model]
)
if not self.earthquake.rupture:
# Use the finite rupture sampler to generate the rupture and corresponding
......@@ -143,7 +146,7 @@ class Shakemap(object):
ctxt = grp.create_group("contexts")
rup_ctxt = ctxt.create_group("rupture")
dist_ctxt = ctxt.create_group("distances")
for gmm in self.ground_motion_model["GMMs"]:
for gmm in [gmm[1] for gmm in self.ground_motion_model]:
for rup_attr in gmm.REQUIRES_RUPTURE_PARAMETERS:
if rup_attr not in rup_ctxt.attrs:
rup_ctxt.attrs[rup_attr] = getattr(self.rctx, rup_attr)
......@@ -153,7 +156,9 @@ class Shakemap(object):
dist_dset = dist_ctxt.create_dataset(attr, distance.shape, dtype="f")
dist_dset[:] = distance
site_ctxt = ctxt.create_dataset(
"sites", self.site_model.site_array.shape, dtype=self.site_model.site_array.dtype
"sites",
self.site_model.site_array.shape,
dtype=self.site_model.site_array.dtype,
)
site_ctxt[:] = self.site_model.site_array
if self.site_model.bbox_properties:
......@@ -209,11 +214,8 @@ class Shakemap(object):
# Pre-allocate the aggregated shakemaps with zeros
aggregated_means = np.zeros(self.site_model.shape, dtype=shakemap_dtypes)
aggregated_stddevs = np.zeros(self.site_model.shape, dtype=shakemap_dtypes)
for weight, gmm in zip(
self.ground_motion_model["weights"], self.ground_motion_model["GMMs"]
):
gmm_str = gmm.__class__.__name__
shakemaps[gmm_str] = {
for gmm_id, gmm, weight in self.ground_motion_model:
shakemaps[gmm_id] = {
"weight": weight,
"mean": np.zeros(self.site_model.shape, dtype=shakemap_dtypes),
"stddev": np.zeros(self.site_model.shape, dtype=shakemap_dtypes),
......@@ -232,8 +234,8 @@ class Shakemap(object):
continue
aggregated_means[intensity_measure_type] += weight * mean
aggregated_stddevs[intensity_measure_type] += weight * stddev
shakemaps[gmm_str]["mean"][intensity_measure_type] = mean
shakemaps[gmm_str]["stddev"][intensity_measure_type] = stddev
shakemaps[gmm_id]["mean"][intensity_measure_type] = mean
shakemaps[gmm_id]["stddev"][intensity_measure_type] = stddev
if self.caching:
self._cache_shakemap(
fle_eq, shakemaps, aggregated_means, aggregated_stddevs, shakemap_dtypes
......@@ -263,17 +265,17 @@ class Shakemap(object):
shakemap_dtypes: IMT-dependent data type of the shakemaps
"""
shakemap_grp = fle.create_group("shakemaps")
for gmm_string in shakemaps:
gmm_grp = shakemap_grp.create_group(gmm_string)
gmm_grp.attrs["weight"] = shakemaps[gmm_string]["weight"]
for gmm_id in shakemaps:
gmm_grp = shakemap_grp.create_group(gmm_id)
gmm_grp.attrs["weight"] = shakemaps[gmm_id]["weight"]
mean_dset = gmm_grp.create_dataset(
"mean", shakemaps[gmm_string]["mean"].shape, dtype=shakemap_dtypes
"mean", shakemaps[gmm_id]["mean"].shape, dtype=shakemap_dtypes
)
mean_dset[:] = shakemaps[gmm_string]["mean"]
mean_dset[:] = shakemaps[gmm_id]["mean"]
stddev_dset = gmm_grp.create_dataset(
"stddev", shakemaps[gmm_string]["stddev"].shape, dtype=shakemap_dtypes
"stddev", shakemaps[gmm_id]["stddev"].shape, dtype=shakemap_dtypes
)
stddev_dset[:] = shakemaps[gmm_string]["stddev"]
stddev_dset[:] = shakemaps[gmm_id]["stddev"]
# Store the aggregated results
aggregated_grp = fle.create_group("aggregated")
aggregated_mean_dset = aggregated_grp.create_dataset(
......
......@@ -101,9 +101,13 @@ class FiniteRuptureSampler(object):
the rupture surface
"""
# Sample the rupture surfaces
rupture_surfaces, strikes, dips, rakes, hypo_locs = self.sample_rupture_surfaces(
nsamples, earthquake
)
(
rupture_surfaces,
strikes,
dips,
rakes,
hypo_locs,
) = self.sample_rupture_surfaces(nsamples, earthquake)
if site_model:
target_lons = site_model["lon"]
......@@ -253,7 +257,11 @@ class FiniteRuptureSampler(object):
areas = np.empty(nsamples)
thickness = earthquake.lsd - earthquake.usd
for i, (dip, rake, epsilon) in enumerate(zip(dips, rakes, msr_epsilons)):
areas[i], lengths[i], widths[i] = earthquake.mag_scale_rel.get_rupture_dimensions(
(
areas[i],
lengths[i],
widths[i],
) = earthquake.mag_scale_rel.get_rupture_dimensions(
earthquake.mag,
rake,
dip,
......
......@@ -2,9 +2,20 @@
Defines a set of input validation methods to check physically correct or
consistent quantities
"""
import numpy as np
from copy import deepcopy
from geopandas import GeoDataFrame
from typing import Tuple, Optional, Union, Dict
from openquake.hazardlib.geo.nodalplane import NodalPlane
from shakyground2.magnitude_scaling_relations import BaseScalingRelation, PEERScalingRelation
from openquake.hazardlib.gsim import get_available_gsims
from shakyground2.magnitude_scaling_relations import (
BaseScalingRelation,
PEERScalingRelation,
)
# OpenQuake GMPE List
GSIM_LIST = get_available_gsims()
def longitude(lon: float) -> float:
......@@ -95,7 +106,9 @@ def focal_mechanism(focal_mech: Optional[Dict]) -> Dict:
focal_mechanism = {}
for plane in ["nodal_plane_1", "nodal_plane_2"]:
focal_mechanism[plane] = mechanism(
focal_mech[plane]["strike"], focal_mech[plane]["dip"], focal_mech[plane]["rake"]
focal_mech[plane]["strike"],
focal_mech[plane]["dip"],
focal_mech[plane]["rake"],
)
return focal_mechanism
......@@ -144,3 +157,54 @@ def scaling_relation(msr: Optional[BaseScalingRelation]):
"Magnitude Scaling Relation %s not instance of BaseScalingRelation" % str(msr)
)
return msr
def regionalization_mapping(mapping: Dict) -> Dict:
"""
Validates a ground motion model mapping, parsing the ground motion model strings into
instances of the ground motion models. Also checks that the weights sum to 1.0
"""
new_mapping = {}
for key in mapping:
new_mapping[key] = []
# Verify that weights sum to 1.0
weight_sum = sum([gmm["weight"] for gmm in mapping[key]])
weight_check = np.isclose(weight_sum, 1.0)
assert (
weight_check
), "Ground motion model weights for region %s do not sum to 1 " "(sum = %.6f)" % (
key,
weight_sum,
)
for gmm in deepcopy(mapping[key]):
gmm_id = gmm.pop("id")
gmm_weight = gmm.pop("weight")
gmm_name = gmm.pop("model")
new_mapping[key].append((gmm_id, GSIM_LIST[gmm_name](**gmm), gmm_weight))
return new_mapping
def regionalization(regions: GeoDataFrame, mapping: Dict) -> Tuple[GeoDataFrame, Dict]:
"""
A regionalisation is represented by a geometry set (as a geopandas.GeoDataFrame) and a
corresponding dictionary to map the regions in the geometry set to a set of ground motion
models (as strings of the OpenQuake input names) and respective weights. The function
verifies that the region file has the necessary information and that a mapping for each
region is present. Returns the region set and the mapping with instantiated ground
motion models.
"""
if not regions.crs:
# If no coordinate reference system is defined then assume WGS84
regions.crs = {"init": "epsg:4326"}
if str(regions.crs) != "+init=epsg:4326 +type=crs":
regions = regions.to_crs({"init": "epsg:4326"})
# Verify that the region set has the necessary columns
for col in ["REGION", "UPPER DEPTH", "LOWER DEPTH", "geometry"]:
if col not in regions.columns:
raise IOError("Regionalization has missing attribute %s" % col)
# Verify that every region in the regionalization has a corresponding mapping
for region in regions.REGION.unique():
if region not in mapping:
raise IOError(
"Region %s has no corresponding ground motion model in mapping" % region
)
return regions, regionalization_mapping(mapping)
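# Illustrative sketch (not part of the module): the mapping consumed by
# regionalization_mapping() mirrors the structure of the JSON mapping files, e.g.
#
#     example_mapping = {
#         "GLOBAL VOLCANIC": [
#             {"id": "Faccioli2010", "model": "FaccioliEtAl2010", "weight": 1.0},
#         ]
#     }
#     parsed = regionalization_mapping(example_mapping)
#     # parsed["GLOBAL VOLCANIC"] -> [("Faccioli2010", <FaccioliEtAl2010 instance>, 1.0)]
#
# Any keys other than "id", "model" and "weight" (e.g. "adjustment_factor") are passed as
# keyword arguments to the OpenQuake GSIM constructor.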
{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "id": "Z1L", "REGION": "Zone 1 Left", "UPPER DEPTH": 0.0, "LOWER DEPTH": 40.0 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -1.0, 0.0 ], [ -1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ], [ -1.0, 0.0 ] ] ] } },
{ "type": "Feature", "properties": { "id": "Z1R", "REGION": "Zone 1 Right", "UPPER DEPTH": 0.0, "LOWER DEPTH": 40.0 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.0, 0.0 ], [ 0.0, 1.0 ], [ 1.0, 1.0 ], [ 1.0, 0.0 ], [ 0.0, 0.0 ] ] ] } },
{ "type": "Feature", "properties": { "id": "ZD1", "REGION": "Zone 1 Deep", "UPPER DEPTH": 40.0, "LOWER DEPTH": 80.0 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 0.2, 0.2 ], [ 0.2, 0.4 ], [ 0.4, 0.4 ], [ 0.4, 0.2 ], [ 0.2, 0.2 ] ] ] } }
]
}
{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "id": "Z2L", "REGION": "Zone 2 Left", "UPPER DEPTH": 0.0, "LOWER DEPTH": 40.0 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -2.0, 0.0 ], [ -2.0, 2.0 ], [ 0.0, 2.0