Commit 5b4b47ea authored by Graeme Weatherill

Implements Ground Motion Intensity Conversion Equation module

parent 8487414b
Pipeline #24429 passed in 14 minutes and 37 seconds
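The shakyground2.gmice module introduced by this commit is not expanded in this view; the sketch below illustrates how the new GMICE classes appear to be used, inferred from the workflow changes and unit tests shown further down. All context attribute values are placeholders.

import numpy as np
from openquake.hazardlib.gsim.base import RuptureContext, SitesContext, DistancesContext
from shakyground2.gmice import WordenEtAl2012MRDependent

# Scenario described by the standard OpenQuake context objects (placeholder values)
sctx = SitesContext()
rctx = RuptureContext()
rctx.mag = 6.0  # magnitude, used by the magnitude/distance-dependent conversion
dctx = DistancesContext()
dctx.rrup = np.array([10.0, 50.0])  # rupture distances, also used by the conversion

gmice = WordenEtAl2012MRDependent(sctx, rctx, dctx)
# Convert natural-log ground motion values and their standard deviations to MMI
# (clip=True as used in the workflow below)
mean_mmi, stddev_mmi = gmice.get_mean_intensity_and_stddev(
    "PGV", np.log(np.array([5.0, 1.0])), np.array([0.6, 0.6]), clip=True
)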
This diff is collapsed.
......@@ -126,7 +126,9 @@ class Shakemap(object):
cmaker = ContextMaker(
self.tectonic_region, [gmm[1] for gmm in self.ground_motion_model]
)
# Always calculate rupture distance regardless of whether it is needed by the GMM
if "rrup" not in cmaker.REQUIRES_DISTANCES: # pylint: disable=no-member
cmaker.REQUIRES_DISTANCES.add("rrup") # pylint: disable=no-member
if not self.earthquake.rupture:
# Use the finite rupture sampler to generate the rupture and corresponding
# distances from the available information
......
......@@ -5,10 +5,12 @@ import os
import numpy as np
from scipy import stats
from typing import Optional, Dict, List, Tuple, Union
from openquake.hazardlib.gsim.base import RuptureContext, SitesContext, DistancesContext
from shakyground2.earthquake import Earthquake
from shakyground2.site_model import SiteModel
from shakyground2.regionalization import DEFAULT_GLOBAL_REGIONALIZATION, RegionalizationSet
from shakyground2.shakemap import Shakemap
from shakyground2.gmice import AtkinsonKaka2007MRDependent, WordenEtAl2012MRDependent
DEFAULT_CONFIGURATION = {
......@@ -55,9 +57,75 @@ class ShakemapWorkflowResult(object):
return getattr(self, key)
# Initially we consider only two different GMICEs: AtkinsonKaka2007 for Stable Continental
# Regions and Worden et al. (2012) everywhere else. Further GMICEs may be added in future.
GMICE_REGIONALIZATION = {
"Global Stable": AtkinsonKaka2007MRDependent,
}
def get_mmi_shakemaps(
region: str,
mean_shakemaps: np.ndarray,
stddev_shakemaps: np.ndarray,
sctx: SitesContext,
rctx: RuptureContext,
dctx: DistancesContext,
) -> Tuple[np.ndarray, np.ndarray]:
"""
Retrieves shakemaps in terms of macroseismic intensity by applying ground motion intensity
conversion equations (GMICEs) to the mean and standard deviation shakemaps.
GMICEs are usually defined for only a small number of ground motion parameters (e.g.
PGA, PGV, SA(0.3), SA(1.0), SA(3.0)), and the variance of the resulting intensity can be
smaller for some parameters (such as PGV or SA(1.0)) than for others. The conversions are
therefore applied in a defined order of preference: if the preferred IMT is present in the
shakemaps it is used, otherwise the next IMT in the list is tried, and so on. If none of
the applicable IMTs are found then no conversion is undertaken and an error is raised
instead.
Args:
region: Tectonic region of the event (as defined by the GMPE regionalization)
mean_shakemaps: The set of shakemaps for mean logarithm of ground motion
stddev_shakemaps: The set of shakemaps for the standard deviation of the logarithm of
ground motion
sctx: The shakemap SitesContext object
rctx: The shakemap RuptureContext object
dctx: The shakemap DistancesContext object
Returns:
mmi: The shakemap in terms of mean macroseismic intensity
stddev_mmi: The shakemap of standard deviation of macroseismic intensity
"""
# Select the appropriate GMICE from the regionalization
if region in GMICE_REGIONALIZATION:
gmice = GMICE_REGIONALIZATION[region](sctx, rctx, dctx)
else:
# If not defined in the regionalization then use Worden et al. (2012) as the default
gmice = WordenEtAl2012MRDependent(sctx, rctx, dctx)
# Identify the preferred intensity measure type for the GMICE
preferred_imt = None
for imt in gmice.ORDER_OF_PREFERENCE:
if imt in mean_shakemaps.dtype.names:
preferred_imt = imt
break
if not preferred_imt:
raise ValueError(
"MMI conversion with %s failed as none of the IMTs needed for "
"conversion were found in the shakemaps" % gmice.__class__.__name__
)
mmi = np.zeros(mean_shakemaps.shape[0], dtype=np.dtype([("MMI", np.float64)]))
stddev_mmi = np.zeros(mean_shakemaps.shape[0], dtype=np.dtype([("MMI", np.float64)]))
# Get the MMI and standard deviation
mmi["MMI"], stddev_mmi["MMI"] = gmice.get_mean_intensity_and_stddev(
preferred_imt, mean_shakemaps[preferred_imt], stddev_shakemaps[preferred_imt], clip=True
)
return mmi, stddev_mmi
def shakemaps_from_quakeml(
event_id: str,
imts: List = ["PGA", "SA(0.3)", "SA(1.0)"],
imts: List = ["PGA", "SA(0.3)", "SA(1.0)", "MMI"],
config: Optional[Dict] = None,
export_folder: Optional[str] = None,
cache_file: Optional[str] = None,
......@@ -146,23 +214,52 @@ def shakemaps_from_quakeml(
results.contours = {"mean": {}, "stddevs": {}}
results.statistics = {"mean": {}}
for imt in imts:
results.shakemaps["mean"][imt] = shakemap.to_geotiff(mean_shakemaps, imt)
results.shakemaps["stddevs"][imt] = shakemap.to_geotiff(
stddev_shakemaps, imt, is_stddev=True
)
results.contours["mean"][imt] = shakemap.get_contours(
imt, mean_shakemaps, contour_levels_mean
)
results.contours["stddevs"][imt] = shakemap.get_contours(
imt, stddev_shakemaps, contour_levels_stddev, is_stddev=True
)
# Retrieve summary statistics
results.statistics["mean"][imt] = {
"maximum": np.exp(np.max(mean_shakemaps[imt])),
"median": np.exp(np.mean(mean_shakemaps[imt])),
"minimum": np.exp(np.min(mean_shakemaps[imt])),
"IQR": stats.iqr(mean_shakemaps[imt]),
}
if imt == "MMI":
# Get MMI and its standard deviation from the mean and stddev shakemaps
mmi, stddev_mmi = get_mmi_shakemaps(
results.tectonic_region,
mean_shakemaps,
stddev_shakemaps,
shakemap.sctx,
shakemap.rctx,
shakemap.dctx,
)
# MMI values are not in the logarithmic domain, so is_stddev is set to True to
# avoid exponentiation on export
results.shakemaps["mean"][imt] = shakemap.to_geotiff(mmi, imt, is_stddev=True)
results.shakemaps["stddevs"][imt] = shakemap.to_geotiff(
stddev_mmi, imt, is_stddev=True
)
results.contours["mean"][imt] = shakemap.get_contours(
imt, mmi, np.arange(1, 11, 1), is_stddev=True
)
results.contours["stddevs"][imt] = shakemap.get_contours(
imt, stddev_mmi, contour_levels_stddev, is_stddev=True
)
# Retrieve summary statistics - not in logarithmic domain
results.statistics["mean"][imt] = {
"maximum": np.max(mmi[imt]),
"median": np.mean(mmi[imt]),
"minimum": np.min(mmi[imt]),
"IQR": stats.iqr(mmi[imt]),
}
else:
results.shakemaps["mean"][imt] = shakemap.to_geotiff(mean_shakemaps, imt)
results.shakemaps["stddevs"][imt] = shakemap.to_geotiff(
stddev_shakemaps, imt, is_stddev=True
)
results.contours["mean"][imt] = shakemap.get_contours(
imt, mean_shakemaps, contour_levels_mean
)
results.contours["stddevs"][imt] = shakemap.get_contours(
imt, stddev_shakemaps, contour_levels_stddev, is_stddev=True
)
# Retrieve summary statistics
results.statistics["mean"][imt] = {
"maximum": np.exp(np.max(mean_shakemaps[imt])),
"median": np.exp(np.mean(mean_shakemaps[imt])),
"minimum": np.exp(np.min(mean_shakemaps[imt])),
"IQR": stats.iqr(mean_shakemaps[imt]),
}
if export_folder:
filestem = "{:s}_{:s}".format(results.earthquake.id, imt)
# Export the bytes to raster files
......
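For reference, a minimal illustration of calling the new get_mmi_shakemaps function, mirroring the structured-array layout used in the unit tests further down; the array sizes and all values are illustrative only.

import numpy as np
from openquake.hazardlib.gsim.base import RuptureContext, SitesContext, DistancesContext
from shakyground2.workflows import get_mmi_shakemaps

# Shakemaps are structured arrays whose field names are the IMT strings and which hold
# the natural logarithm of ground motion and its standard deviation, one record per site
imts = ["PGA", "PGV", "SA(1.0)"]
dtype = np.dtype([(imt, np.float64) for imt in imts])
mean_shakemaps = np.zeros(10, dtype)
stddev_shakemaps = np.zeros(10, dtype)

# In the workflow these contexts come from the Shakemap object
# (shakemap.sctx, shakemap.rctx, shakemap.dctx); placeholders here
sctx, rctx, dctx = SitesContext(), RuptureContext(), DistancesContext()
rctx.mag = 6.0
dctx.rrup = 10.0 * np.ones(10)

mmi, stddev_mmi = get_mmi_shakemaps(
    "default", mean_shakemaps, stddev_shakemaps, sctx, rctx, dctx
)
# mmi["MMI"] and stddev_mmi["MMI"] contain the mean intensity and its standard deviation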
Four further file diffs in this commit are collapsed.
"""
Ground Motion Intensity Conversion Equations Test Cases
"""
import os
import unittest
import numpy as np
import pandas as pd
from typing import Dict, Tuple
from openquake.hazardlib.gsim.base import RuptureContext, SitesContext, DistancesContext
from shakyground2.gmice import WordenEtAl2012MRDependent, AtkinsonKaka2007MRDependent
DATA_PATH = os.path.join(os.path.dirname(__file__), os.path.join("data", "gmice"))
class GMICEMRDependentTestCase(unittest.TestCase):
# GMICE class
GMICE = WordenEtAl2012MRDependent
# PATH TO MEAN MMI TEST TABLE FILE
MEAN_FILE = ""
# PATH TO STDDEV MMI TEST TABLE FILE
STDDEV_FILE = ""
# MAXIMUM TOLERABLE DISCREPANCY MEAN (in %)
MAX_DISCR_MEAN = 0.01
# MAXIMUM TOLERABLE DISCREPANCY SIGMA (in %)
MAX_DISCR_SIGMA = 0.01
def compare_mean(self):
"""
Compares the mean MMI with those from the test table
"""
mean_data = pd.read_csv(os.path.join(DATA_PATH, self.MEAN_FILE), sep=",")
grps = mean_data.groupby("evid")
for evid, grp in grps:
sctx, rctx, dctx, gmvs, sigmas = self._sort_group_to_contexts(grp)
gmice = self.GMICE(sctx, rctx, dctx)
# Run the comparisons
for col in grp.columns:
if col in ("PGA", "PGV") or col.startswith("SA"):
imt = col.upper()
else:
continue
if imt == "PGV":
gmv = gmvs["vel"]
sigma = sigmas["vel"]
else:
gmv = gmvs["accel"]
sigma = sigmas["accel"]
mmi = gmice.get_mean_intensity_and_stddev(imt, np.log(gmv), sigma)[0]
expected_mmi = grp[col].to_numpy()
diff_mmi = ((mmi / expected_mmi) - 1.0) * 100.0
for a, b, c in zip(mmi, expected_mmi, diff_mmi):
print("%s %12.7f %12.7f %12.7f" % (col, a, b, c))
self.assertTrue(np.all(np.fabs(diff_mmi) <= self.MAX_DISCR_MEAN))
def compare_stddev(self):
"""
Compares the standard deviation MMI with those from the test table
"""
sigma_data = pd.read_csv(os.path.join(DATA_PATH, self.STDDEV_FILE), sep=",")
grps = sigma_data.groupby("evid")
for evid, grp in grps:
sctx, rctx, dctx, gmvs, sigmas = self._sort_group_to_contexts(grp)
gmice = self.GMICE(sctx, rctx, dctx)
# Run the comparisons
for col in grp.columns:
if col in ("PGA", "PGV") or col.startswith("SA"):
imt = col.upper()
else:
continue
if imt == "PGV":
gmv = gmvs["vel"]
sigma = sigmas["vel"]
else:
gmv = gmvs["accel"]
sigma = sigmas["accel"]
sigma_mmi = gmice.get_mean_intensity_and_stddev(imt, np.log(gmv), sigma)[1]
expected_sigma_mmi = grp[col].to_numpy()
diff_sigma_mmi = ((sigma_mmi / expected_sigma_mmi) - 1.0) * 100.0
for a, b, c in zip(sigma_mmi, expected_sigma_mmi, diff_sigma_mmi):
print("%s %12.7f %12.7f %12.7f" % (col, a, b, c))
self.assertTrue(np.all(np.fabs(diff_sigma_mmi) <= self.MAX_DISCR_SIGMA))
def check_raise_unsupported_imt_error(self):
sctx = SitesContext()
dctx = DistancesContext()
dctx.rrup = np.array([1.0])
rctx = RuptureContext()
rctx.mag = 6.0
gmvs = np.array([0.0])
sigma = np.array([0.5])
gmice = self.GMICE(sctx, rctx, dctx)
with self.assertRaises(ValueError) as ve:
gmice.get_mean_intensity_and_stddev("BAD_IMT", gmvs, sigma)
print(str(ve.exception))
self.assertEqual(
str(ve.exception),
"Conversion to intensity for IMT BAD_IMT unsupported for %s"
% gmice.__class__.__name__,
)
def _sort_group_to_contexts(
self, grp: pd.DataFrame
) -> Tuple[SitesContext, RuptureContext, DistancesContext, Dict, Dict]:
"""Build the context objects corresponding to a particular scenario
and return both these and the input ground motion values and standard deviations
"""
# Get contexts
rctx = RuptureContext()
dctx = DistancesContext()
sctx = SitesContext()
gmvs = {}
sigmas = {}
for key in grp.columns:
if key.startswith("rup"):
attr = key.split("_")[1]
setattr(rctx, attr, grp[key].iloc[0])
elif key.startswith("dists"):
attr = key.split("_")[1]
setattr(dctx, attr, grp[key].to_numpy())
elif key.startswith("sites"):
attr = key.split("_")[1]
setattr(sctx, attr, grp[key].to_numpy())
elif key.startswith("gmvs"):
attr = key.split("_")[1]
gmvs[attr] = grp[key].to_numpy()
elif key.startswith("stddevs"):
attr = key.split("_")[1]
sigmas[attr] = grp[key].to_numpy()
return sctx, rctx, dctx, gmvs, sigmas
class WordenEtAl2012MRDependentTestCase(GMICEMRDependentTestCase):
"""
Test tables generated from original implementation in USGS Shakemap software
https://github.com/usgs/shakemap/blob/master/shakelib/gmice/wgrw12.py
"""
# GMICE class
GMICE = WordenEtAl2012MRDependent
# PATH TO MEAN MMI TEST TABLE FILE
MEAN_FILE = "worden_2012_mean.csv"
# PATH TO STDDEV MMI TEST TABLE FILE
STDDEV_FILE = "worden_2012_stddev.csv"
# Maximum tolerable discrepancy for the mean is raised to account for
# different values of g used by the different software.
MAX_DISCR_MEAN = 0.5
def test_mean(self):
self.compare_mean()
def test_stddev(self):
self.compare_stddev()
def test_error(self):
self.check_raise_unsupported_imt_error()
class AtkinsonKaka2007MRDependentTestCase(WordenEtAl2012MRDependentTestCase):
"""
Test tables generated from original implementation in USGS Shakemap software
https://github.com/usgs/shakemap/blob/master/shakelib/gmice/ak07.py
"""
# GMICE class
GMICE = AtkinsonKaka2007MRDependent
# PATH TO MEAN MMI TEST TABLE FILE
MEAN_FILE = "atkinson_kaka_2007_mean.csv"
# PATH TO STDDEV MMI TEST TABLE FILE
STDDEV_FILE = "atkinson_kaka_2007_stddev.csv"
# Maximum tolerable discrepancy for the mean is raised to account for
# different values of g used by the different software.
MAX_DISCR_MEAN = 0.5
......@@ -4,13 +4,105 @@ Test cases for end-to-end workflows
import os
import unittest
import numpy as np
from typing import List
from geopandas import GeoDataFrame
from shakyground2.workflows import shakemaps_from_quakeml, ShakemapWorkflowResult
from openquake.hazardlib.gsim.base import RuptureContext, DistancesContext, SitesContext
from shakyground2.workflows import (
shakemaps_from_quakeml,
ShakemapWorkflowResult,
get_mmi_shakemaps,
)
DATA_PATH = os.path.join(os.path.dirname(__file__), "data")
class MMIConversionTestCase(unittest.TestCase):
"""
Tests the function to produce MMI shakemaps from ground motion inputs
"""
def setUp(self):
self.nsites = 10
self.sctx = SitesContext()
self.dctx = DistancesContext()
self.dctx.rrup = 10.0 * np.ones(self.nsites, dtype=float)
self.rctx = RuptureContext()
self.rctx.mag = 6.0
def _get_null_shakemap(self, imts: List):
"""
Returns shakemaps full of zeros for specified imts
Args:
imts: List of intensity measure types
Returns:
mean_shakemap: Shakemap of mean of logarithm of ground motions
stddev_shakemap: Shakemap of standard deviation of logarithm of ground motions
"""
shakemap_dtypes = np.dtype([(imt, np.float64) for imt in imts])
mean_shakemap = np.zeros(self.nsites, shakemap_dtypes)
stddev_shakemap = np.zeros(self.nsites, shakemap_dtypes)
return mean_shakemap, stddev_shakemap
def test_mmi_shakemaps_pgv_present(self):
# Tests the case when PGV is in the imt list
mean_shakemap, stddev_shakemap = self._get_null_shakemap(["PGA", "PGV", "SA(1.0)"])
# Assume default tectonic region
mmi, stddev_mmi = get_mmi_shakemaps(
"default", mean_shakemap, stddev_shakemap, self.sctx, self.rctx, self.dctx
)
expected_mmi = 3.6 # Converts from PGV using Worden et al. (2012)
expected_stddev_mmi = 0.63
np.testing.assert_array_almost_equal(mmi["MMI"], expected_mmi * np.ones(self.nsites))
np.testing.assert_array_almost_equal(
stddev_mmi["MMI"], expected_stddev_mmi * np.ones(self.nsites)
)
def test_mmi_shakemaps_no_pgv(self):
# Tests the case when PGV is not in the IMT list - should convert from PGA instead
mean_shakemap, stddev_shakemap = self._get_null_shakemap(["PGA", "SA(1.0)"])
# Assume default tectonic region
mmi, stddev_mmi = get_mmi_shakemaps(
"default", mean_shakemap, stddev_shakemap, self.sctx, self.rctx, self.dctx
)
expected_mmi = 8.5586265 # Converts from PGA using Worden et al. (2012)
expected_stddev_mmi = 0.66
np.testing.assert_array_almost_equal(mmi["MMI"], expected_mmi * np.ones(self.nsites))
np.testing.assert_array_almost_equal(
stddev_mmi["MMI"], expected_stddev_mmi * np.ones(self.nsites)
)
def test_mmi_shakemaps_stable_region(self):
# Tests the case when the earthquake is in the 'Global Stable' tectonic region
mean_shakemap, stddev_shakemap = self._get_null_shakemap(["PGA", "SA(1.0)"])
# Use the 'Global Stable' tectonic region
mmi, stddev_mmi = get_mmi_shakemaps(
"Global Stable", mean_shakemap, stddev_shakemap, self.sctx, self.rctx, self.dctx
)
expected_mmi = 9.01498599 # Converts from SA(1.0) using Atkinson & Kaka (2007)
expected_stddev_mmi = 0.73
np.testing.assert_array_almost_equal(mmi["MMI"], expected_mmi * np.ones(self.nsites))
np.testing.assert_array_almost_equal(
stddev_mmi["MMI"], expected_stddev_mmi * np.ones(self.nsites)
)
def test_raise_error_no_supported_imt(self):
mean_shakemap, stddev_shakemap = self._get_null_shakemap(
[
"SA(0.5)",
]
)
with self.assertRaises(ValueError) as ve:
mmi, stddev_mmi = get_mmi_shakemaps(
"default", mean_shakemap, stddev_shakemap, self.sctx, self.rctx, self.dctx
)
self.assertEqual(
str(ve.exception),
"MMI conversion with WordenEtAl2012MRDependent failed as none of the IMTs needed "
"for conversion were found in the shakemaps",
)
class GeofonWorkflowTestCase(unittest.TestCase):
"""
Tests the Geofon workflow with the default configurations. Note that this is not
......@@ -18,7 +110,7 @@ class GeofonWorkflowTestCase(unittest.TestCase):
"""
def setUp(self):
self.imts = ["PGA", "SA(1.0)"]
self.imts = ["PGA", "SA(1.0)", "MMI"]
def test_complete_workflow_no_export_default_config(self):
event_id = os.path.join(DATA_PATH, "gfz2021eksc.xml")
......@@ -71,10 +163,11 @@ class GeofonWorkflowTestCase(unittest.TestCase):
self.assertTrue(
os.path.exists(os.path.join(test_folder, "gfz2021eksc_SA(1.0)_mean.tif"))
)
self.assertTrue(os.path.exists(os.path.join(test_folder, "gfz2021eksc_PGA_stddev.tif")))
self.assertTrue(
os.path.exists(os.path.join(test_folder, "gfz2021eksc_SA(1.0)_stddev.tif"))
)
self.assertTrue(os.path.exists(os.path.join(test_folder, "gfz2021eksc_MMI_mean.tif")))
self.assertTrue(os.path.exists(os.path.join(test_folder, "gfz2021eksc_MMI_stddev.tif")))
self.assertTrue(
os.path.exists(os.path.join(test_folder, "gfz2021eksc_PGA_contour_mean.geojson"))
)
......@@ -83,6 +176,9 @@ class GeofonWorkflowTestCase(unittest.TestCase):
os.path.join(test_folder, "gfz2021eksc_SA(1.0)_contour_mean.geojson")
)
)
self.assertTrue(
os.path.exists(os.path.join(test_folder, "gfz2021eksc_MMI_contour_mean.geojson"))
)
# Now re-run but with the same directory
with self.assertRaises(IOError) as ioe:
shakemaps_from_quakeml(
......