Commit 6050ae84 authored by Graeme Weatherill's avatar Graeme Weatherill
Browse files

Implements single workflow function for GEOFON (Earth Explorer) use case

parent 6a3991d7
""" """
Contains the core classes to represent an earthquake Contains the core classes to represent an earthquake
""" """
from __future__ import annotations
import datetime import datetime
from typing import Tuple, Optional, List from typing import Tuple, Optional, List
from math import radians, sin, tan from math import radians, sin, tan
...@@ -9,6 +10,7 @@ from openquake.hazardlib.geo.surface.base import BaseSurface ...@@ -9,6 +10,7 @@ from openquake.hazardlib.geo.surface.base import BaseSurface
from openquake.hazardlib.source.rupture import ParametricProbabilisticRupture from openquake.hazardlib.source.rupture import ParametricProbabilisticRupture
from shakyground2 import valid from shakyground2 import valid
from shakyground2.rupture_mechanism import RuptureMechanism from shakyground2.rupture_mechanism import RuptureMechanism
from shakyground2.io import fetch_quakeml
class Earthquake(object): class Earthquake(object):
...@@ -195,6 +197,47 @@ class Earthquake(object): ...@@ -195,6 +197,47 @@ class Earthquake(object):
self.id, self.lon, self.lat, self.depth, self.mag self.id, self.lon, self.lat, self.depth, self.mag
) )
@classmethod
def from_quakeml(cls, path: str) -> Earthquake:
    """
    Creates the Earthquake object from an xml file or geofon event ID

    Args:
        path: Path to QuakeML (xml) file containing GEOFON event information, or GEOFON
            event ID (to retrieve the information from the GEOFON FDSN web service)

    Returns:
        Corresponding Earthquake object

    Raises:
        IOError: When no usable event information can be retrieved from `path`
    """
    event = fetch_quakeml(path)
    if not event:
        raise IOError(
            "Incorrect or insufficient information to create the Earthquake "
            "object found in %s" % path
        )
    # Time is not necessarily in ISO format, so fix this for use with datetime object.
    # Guard against timestamps without a fractional-seconds component, which would
    # make the original two-element unpacking of split(".") fail
    if "." in event["time"]:
        hms, micsec = event["time"].split(".", 1)
    else:
        hms, micsec = event["time"], ""
    # Drop the UTC designator and normalise to exactly six digits (microseconds),
    # as datetime.fromisoformat rejects other fractional-second lengths
    micsec = micsec.replace("Z", "").ljust(6, "0")[:6]
    hms = hms.rstrip("Z")
    event_time = ".".join([hms, micsec])
    d_t = datetime.datetime.fromisoformat(" ".join([event["date"], event_time]))
    # If the event has a focal mechanism then parse this into the correct format
    if event["focalmechanism"]:
        focal_mechanism = {
            "nodal_plane_1": event["focalmechanism"][0],
            "nodal_plane_2": event["focalmechanism"][1],
        }
    else:
        focal_mechanism = None
    # When a file path was given, strip the extension to form the event ID
    if event["id"].endswith(".xml"):
        event["id"] = event["id"].replace(".xml", "")
    return cls(
        event["id"],
        event["origin"]["longitude"],
        event["origin"]["latitude"],
        event["origin"]["depth"],
        event["magnitude"],
        focal_mechanism=focal_mechanism,
        date=d_t.date(),
        time=d_t.time(),
    )
def get_maximum_distance_bbox(self, max_distance: Optional[float] = None) -> List: def get_maximum_distance_bbox(self, max_distance: Optional[float] = None) -> List:
""" """
Defines a bounding box around the event up to a maximum distance. Defines a bounding box around the event up to a maximum distance.
...@@ -308,8 +351,8 @@ class Earthquake(object): ...@@ -308,8 +351,8 @@ class Earthquake(object):
right_length = (1.0 - hypo_loc[0]) * length right_length = (1.0 - hypo_loc[0]) * length
# Build corner points # Build corner points
downdip_dir = (dip + 90.0) % 360 downdip_dir = (strike + 90.0) % 360
updip_dir = (dip - 90.0) % 360 updip_dir = (strike - 90.0) % 360
mid_left = centroid.point_at(left_length, 0.0, (strike + 180.0) % 360.0) mid_left = centroid.point_at(left_length, 0.0, (strike + 180.0) % 360.0)
mid_right = centroid.point_at(right_length, 0.0, strike) mid_right = centroid.point_at(right_length, 0.0, strike)
top_left = mid_left.point_at(updip_surface_length, -updip_depth_change, updip_dir) top_left = mid_left.point_at(updip_surface_length, -updip_depth_change, updip_dir)
......
# Public API of the shakyground2.io subpackage: re-export the QuakeML fetching
# utility so that callers can simply use `from shakyground2.io import fetch_quakeml`
from shakyground2.io.import_fdsnws_eq import fetch_quakeml

__all__ = [
    "fetch_quakeml",
]
...@@ -152,7 +152,6 @@ def fetch_quakeml_ws(evid: str) -> str: ...@@ -152,7 +152,6 @@ def fetch_quakeml_ws(evid: str) -> str:
) )
) )
) )
req = ul.Request(url) req = ul.Request(url)
u = ul.urlopen(req) u = ul.urlopen(req)
buf = u.read().decode("utf8") buf = u.read().decode("utf8")
......
"""
Defines complete end-to-end shakemap calculation workflows
"""
import os
from typing import Optional, Dict, List, Tuple, Union
from shakyground2.earthquake import Earthquake
from shakyground2.site_model import SiteModel
from shakyground2.regionalization import DEFAULT_GLOBAL_REGIONALIZATION, RegionalizationSet
from shakyground2.shakemap import Shakemap
# Default configuration for the end-to-end shakemap workflows. Any key omitted
# from a user-supplied `config` dictionary falls back to the value defined here
# (see `shakemaps_from_quakeml`).
DEFAULT_CONFIGURATION = {
    # Bounding Box
    "spcx": 1.0 / 120.0,  # 30 arc-seconds
    "spcy": 1.0 / 120.0,  # 30 arc-seconds
    # None -> defer to the earthquake's own default maximum distance
    # (passed to Earthquake.get_maximum_distance_bbox)
    "max_distance_bbox": None,
    # Site Properties
    "default_vs30": 600.0,  # Vs30 handed to SiteModel.from_bbox for all sites
    "vs30measured": True,
    "z1pt0": None,
    "z2pt5": None,
    "xvf": 0.0,
    "backarc": False,
    "region": 0,
    "geology": "UNCLASSIFIED",
    # Shakemap
    "num_rupture_samples": 100,
    "rdim": 0.0,
    "synth_dist_weights": [0.25, 0.25, 0.25, 0.25],
    "synthetic_rupture_max_site_distance": 200.0,
    "synthetic_rupture_site_spacing": 0.05,
}
class ShakemapWorkflowResult(object):
    """
    Container for the outputs of a shakemap calculation together with the
    configuration metadata describing how those outputs were produced.

    Attributes are fixed via ``__slots__`` and can also be read using
    dictionary-style access, e.g. ``result["shakemaps"]``.
    """

    # Restrict instances to exactly the expected output fields
    __slots__ = [
        "shakemaps",
        "contours",
        "num_sites",
        "bbox_properties",
        "ground_motion_models",
        "tectonic_region",
        "earthquake",
    ]

    def __getitem__(self, key):
        # Dictionary-style read access delegating to normal attribute lookup
        return getattr(self, key)
def shakemaps_from_quakeml(
    event_id: str,
    imts: Optional[List] = None,
    config: Optional[Dict] = None,
    export_folder: Optional[str] = None,
    cache_file: Optional[str] = None,
    contour_levels_mean: Union[int, List] = 10,
    contour_levels_stddev: Union[int, List] = 10,
    regionalization: RegionalizationSet = DEFAULT_GLOBAL_REGIONALIZATION,
) -> ShakemapWorkflowResult:
    """
    Implements the complete shakemap workflow for use with the GEOFON FDSN Web Service

    Args:
        event_id: GEOFON event ID or path to QuakeML file for event
        imts: List of intensity measure types e.g., PGA, SA(0.1), SA(0.2), ... etc.
            (defaults to ["PGA", "SA(0.3)", "SA(1.0)"] when not supplied)
        config: Dictionary of configuration properties (will use the GEOFON default
            configuration if not supplied)
        export_folder: Path to export the geotiff and contour data (if storing locally)
        cache_file: File to cache the shakemap data (optional)
        contour_levels_mean: Number of levels for contouring the mean shakemaps, or list of
            pre-defined levels
        contour_levels_stddev: Number of levels for contouring the standard deviation
            shakemaps, or list of pre-defined levels
        regionalization: Regionalization set used to assign the tectonic region and
            ground motion models for the event

    Returns:
        ShakemapWorkflowResult holding the mean and standard deviation shakemaps (as
        bytes), the contour datasets (organised by intensity measure type) and the
        calculation metadata

    Raises:
        IOError: If the designated export folder already exists
    """
    if imts is None:
        # Avoid a mutable default argument; fall back to the standard IMT set
        imts = ["PGA", "SA(0.3)", "SA(1.0)"]
    if not config:
        config = DEFAULT_CONFIGURATION
    if export_folder:
        if os.path.exists(export_folder):
            raise IOError("Designated export folder %s already exists!" % export_folder)
        os.mkdir(export_folder)
    # Create the event from the GEOFON event ID (or the path to the QuakeML file)
    results = ShakemapWorkflowResult()
    results.earthquake = Earthquake.from_quakeml(event_id)
    # Split the configuration into those parts relating to the bounding box,
    # the site model and the shakemap; missing keys fall back to the defaults
    bbox_config = {
        key: config.get(key, DEFAULT_CONFIGURATION[key])
        for key in ("spcx", "spcy", "max_distance_bbox")
    }
    site_config = {
        key: config.get(key, DEFAULT_CONFIGURATION[key])
        for key in (
            "default_vs30",
            "vs30measured",
            "z1pt0",
            "z2pt5",
            "xvf",
            "backarc",
            "region",
            "geology",
        )
    }
    # Build the site model
    bbox = results.earthquake.get_maximum_distance_bbox(bbox_config["max_distance_bbox"])
    vs30 = site_config.pop("default_vs30")
    site_model = SiteModel.from_bbox(
        bbox, bbox_config["spcx"], bbox_config["spcy"], vs30, **site_config
    )
    results.num_sites = len(site_model)
    results.bbox_properties = site_model.bbox_properties
    # Get the ground motion models
    results.tectonic_region, results.ground_motion_models = regionalization(results.earthquake)
    shakemap_config = {
        key: config.get(key, DEFAULT_CONFIGURATION[key])
        for key in (
            "num_rupture_samples",
            "rdim",
            "synth_dist_weights",
            "synthetic_rupture_max_site_distance",
            "synthetic_rupture_site_spacing",
        )
    }
    # Run the shakemap
    shakemap = Shakemap(
        results.earthquake,
        site_model,
        results.ground_motion_models,
        results.tectonic_region,
        cache_file=cache_file,
        **shakemap_config
    )
    mean_shakemaps, stddev_shakemaps, _ = shakemap.get_shakemap(imts)
    # Export to file (if an export directory is given) or to a dictionary of byte arrays
    results.shakemaps = {"mean": {}, "stddevs": {}}
    results.contours = {"mean": {}, "stddevs": {}}
    for imt in imts:
        results.shakemaps["mean"][imt] = shakemap.to_geotiff(mean_shakemaps, imt)
        results.shakemaps["stddevs"][imt] = shakemap.to_geotiff(
            stddev_shakemaps, imt, is_stddev=True
        )
        results.contours["mean"][imt] = shakemap.get_contours(
            imt, mean_shakemaps, contour_levels_mean
        )
        results.contours["stddevs"][imt] = shakemap.get_contours(
            imt, stddev_shakemaps, contour_levels_stddev, is_stddev=True
        )
        if export_folder:
            filestem = "{:s}_{:s}".format(results.earthquake.id, imt)
            # Export the bytes to raster files
            fname_raster_mean = os.path.join(export_folder, filestem + "_mean.tif")
            with open(fname_raster_mean, "wb") as f:
                f.write(results.shakemaps["mean"][imt])
            fname_raster_stddev = os.path.join(export_folder, filestem + "_stddev.tif")
            with open(fname_raster_stddev, "wb") as f:
                f.write(results.shakemaps["stddevs"][imt])
            # Export the contour dataframes to geojson
            fname_contour_mean = os.path.join(export_folder, filestem + "_contour_mean.geojson")
            results.contours["mean"][imt].to_file(fname_contour_mean, driver="GeoJSON")
            if results.contours["stddevs"][imt].shape[0]:
                # If all the sites have the same standard deviation then skip this as the
                # contours will yield an empty dataframe
                fname_contour_stddev = os.path.join(
                    export_folder, filestem + "_contour_stddev.geojson"
                )
                results.contours["stddevs"][imt].to_file(fname_contour_stddev, driver="GeoJSON")
    return results
<?xml version="1.0" encoding="UTF-8"?><q:quakeml xmlns="http://quakeml.org/xmlns/bed/1.2" xmlns:q="http://quakeml.org/xmlns/quakeml/1.2"><eventParameters publicID="smi:org.gfz-potsdam.de/geofon/EventParameters"><event publicID="smi:org.gfz-potsdam.de/geofon/gfz2021eksc"><description><text>Greece</text><type>region name</type></description><creationInfo><agencyID>GFZ</agencyID><creationTime>2021-03-04T18:39:25.971737Z</creationTime></creationInfo><focalMechanism publicID="smi:org.gfz-potsdam.de/geofon/FocalMechanism/20210304185910.015761.2039019"><momentTensor publicID="smi:org.gfz-potsdam.de/geofon/MomentTensor/20210304185910.016093.2039022"><scalarMoment><value>3.293169658e+18</value></scalarMoment><tensor><Mrr><value>-3.258257666e+18</value></Mrr><Mtt><value>9.159863253e+17</value></Mtt><Mpp><value>2.342271341e+18</value></Mpp><Mrt><value>-2.919819388e+17</value></Mrt><Mrp><value>3.229233072e+17</value></Mrp><Mtp><value>-1.478065961e+18</value></Mtp></tensor><varianceReduction>0.7434766705</varianceReduction><doubleCouple>0.9937815948</doubleCouple><clvd>0.006218405199</clvd><creationInfo><agencyID>GFZ</agencyID><creationTime>2021-03-04T18:59:10.015448Z</creationTime></creationInfo><derivedOriginID>smi:org.gfz-potsdam.de/geofon/Origin/20210304185910.015938.2039020</derivedOriginID><momentMagnitudeID>smi:org.gfz-potsdam.de/geofon/Magnitude/20210304185910.01602.2039021</momentMagnitudeID><greensFunctionID>smi:org.gfz-potsdam.de/geofon/saul_/gemini-prem</greensFunctionID><filterID>smi:org.gfz-potsdam.de/geofon/BP_60s-150s</filterID><methodID>smi:org.gfz-potsdam.de/geofon/GEOFON_standard</methodID></momentTensor><nodalPlanes><nodalPlane1><strike><value>146.5577511</value></strike><dip><value>48.75643809</value></dip><rake><value>-91.74289876</value></rake></nodalPlane1><nodalPlane2><strike><value>329.2004058</value></strike><dip><value>41.27378495</value></dip><rake><value>-88.01325753</value></rake></nodalPlane2></nodalPlanes><principalAxes><tAxis><azimuth><value>237
.7927052</value></azimuth><plunge><value>3.742314607</value></plunge><length><value>3.298285201e+18</value></length></tAxis><pAxis><azimuth><value>38.44025697</value></azimuth><plunge><value>86.03426941</value></plunge><length><value>-3.288030165e+18</value></length></pAxis><nAxis><azimuth><value>147.7069772</value></azimuth><plunge><value>1.310421926</value></plunge><length><value>-1.025503692e+16</value></length></nAxis></principalAxes><azimuthalGap>112.6342606</azimuthalGap><misfit>0.2565233295</misfit><creationInfo><agencyID>GFZ</agencyID><creationTime>2021-03-04T18:59:10.015448Z</creationTime></creationInfo><triggeringOriginID>smi:org.gfz-potsdam.de/geofon/Origin/20210304184527.529384.2512777</triggeringOriginID></focalMechanism><magnitude publicID="smi:org.gfz-potsdam.de/geofon/Magnitude/20210304185910.01602.2039021"><stationCount>22</stationCount><azimuthalGap>112.6342606</azimuthalGap><creationInfo><agencyID>GFZ</agencyID><creationTime>2021-03-04T18:59:10.015448Z</creationTime></creationInfo><mag><value>6.278409404</value></mag><type>Mw</type><methodID>smi:org.gfz-potsdam.de/geofon/MT</methodID></magnitude><origin publicID="smi:org.gfz-potsdam.de/geofon/Origin/20210304192045.9955.2520068"><time><value>2021-03-04T18:38:20.19439Z</value><uncertainty>0.1043797851</uncertainty></time><longitude><value>22.28781891</value><uncertainty>1.989549573</uncertainty></longitude><latitude><value>39.79063416</value><uncertainty>2.088507623</uncertainty></latitude><depthType>operator 
assigned</depthType><quality><associatedPhaseCount>387</associatedPhaseCount><usedPhaseCount>115</usedPhaseCount><associatedStationCount>387</associatedStationCount><usedStationCount>115</usedStationCount><depthPhaseCount>0</depthPhaseCount><standardError>1.082192206</standardError><azimuthalGap>31.50198364</azimuthalGap><maximumDistance>95.47735596</maximumDistance><minimumDistance>0.3485697806</minimumDistance><medianDistance>74.75289917</medianDistance></quality><evaluationMode>manual</evaluationMode><creationInfo><agencyID>GFZ</agencyID><creationTime>2021-03-04T19:20:46.003038Z</creationTime></creationInfo><depth><value>10000</value><uncertainty>0</uncertainty></depth><originUncertainty><minHorizontalUncertainty>4193.433762</minHorizontalUncertainty><maxHorizontalUncertainty>4557.92284</maxHorizontalUncertainty><azimuthMaxHorizontalUncertainty>152.8412018</azimuthMaxHorizontalUncertainty><preferredDescription>horizontal uncertainty</preferredDescription></originUncertainty><methodID>smi:org.gfz-potsdam.de/geofon/LOCSAT</methodID><earthModelID>smi:org.gfz-potsdam.de/geofon/iasp91</earthModelID><evaluationStatus>confirmed</evaluationStatus></origin><preferredOriginID>smi:org.gfz-potsdam.de/geofon/Origin/20210304192045.9955.2520068</preferredOriginID><preferredMagnitudeID>smi:org.gfz-potsdam.de/geofon/Magnitude/20210304185910.01602.2039021</preferredMagnitudeID><preferredFocalMechanismID>smi:org.gfz-potsdam.de/geofon/FocalMechanism/20210304185910.015761.2039019</preferredFocalMechanismID><type>earthquake</type></event></eventParameters></q:quakeml>
...@@ -146,11 +146,11 @@ class EarthquakeTestCase(unittest.TestCase): ...@@ -146,11 +146,11 @@ class EarthquakeTestCase(unittest.TestCase):
self.assertAlmostEqual(width, 10.0 / sin(radians(dip))) self.assertAlmostEqual(width, 10.0 / sin(radians(dip)))
# Verify correct plane from top left and bottom right points - calculated by hand # Verify correct plane from top left and bottom right points - calculated by hand
self.assertAlmostEqual(plane0.top_left.longitude, -0.402398, 6) self.assertAlmostEqual(plane0.top_left.longitude, -0.389417678, 6)
self.assertAlmostEqual(plane0.top_left.latitude, 0.022483, 6) self.assertAlmostEqual(plane0.top_left.latitude, 0.025961, 6)
self.assertAlmostEqual(plane0.top_left.depth, 0.0, 7) self.assertAlmostEqual(plane0.top_left.depth, 0.0, 7)
self.assertAlmostEqual(plane0.bottom_right.longitude, 0.402398, 6) self.assertAlmostEqual(plane0.bottom_right.longitude, 0.389417678, 6)
self.assertAlmostEqual(plane0.bottom_right.latitude, -0.022483, 6) self.assertAlmostEqual(plane0.bottom_right.latitude, -0.025961, 6)
self.assertAlmostEqual(plane0.bottom_right.depth, 10.0, 7) self.assertAlmostEqual(plane0.bottom_right.depth, 10.0, 7)
def test_rupture_creation_surface(self): def test_rupture_creation_surface(self):
......
...@@ -8,11 +8,11 @@ ...@@ -8,11 +8,11 @@
# ---------------------------------------------------------------------- # ----------------------------------------------------------------------
import os import os
import unittest import unittest
from shakyground2.import_fdsnws_eq import fetch_quakeml from shakyground2.io.import_fdsnws_eq import fetch_quakeml
# Path for storing the cache file that will be generated # Path for storing the cache file that will be generated
DATA_PATH = os.path.join(os.path.dirname(__file__), "data") DATA_PATH = os.path.join(os.path.dirname(__file__), "..", "data")
class QuakeMLReadTestCase(unittest.TestCase): class QuakeMLReadTestCase(unittest.TestCase):
...@@ -25,7 +25,7 @@ class QuakeMLReadTestCase(unittest.TestCase): ...@@ -25,7 +25,7 @@ class QuakeMLReadTestCase(unittest.TestCase):
""" """
outfile = "data/testoutput.yaml" outfile = os.path.join(DATA_PATH, "testoutput.yaml")
def test_read_good_file(self): def test_read_good_file(self):
""" """
......
"""
Test cases for end-to-end workflows
"""
import os
import shutil
import unittest

from geopandas import GeoDataFrame

from shakyground2.workflows import shakemaps_from_quakeml, ShakemapWorkflowResult
DATA_PATH = os.path.join(os.path.dirname(__file__), "data")
class GeofonWorkflowTestCase(unittest.TestCase):
    """
    Tests the Geofon workflow with the default configurations. Note that this is not
    intended to verify correctness of the results, but rather ensures end-to-end execution
    """

    def setUp(self):
        self.imts = ["PGA", "SA(1.0)"]

    def _check_in_memory_outputs(self, results):
        # Shared assertions on the in-memory outputs of a workflow run
        self.assertIsInstance(results, ShakemapWorkflowResult)
        self.assertListEqual(list(results.shakemaps), ["mean", "stddevs"])
        self.assertListEqual(list(results.shakemaps["mean"]), self.imts)
        for imt in self.imts:
            for maptype in results.shakemaps:
                self.assertIsInstance(results.shakemaps[maptype][imt], bytes)
            self.assertIsInstance(results.contours["mean"][imt], GeoDataFrame)

    def test_complete_workflow_no_export_default_config(self):
        event_id = os.path.join(DATA_PATH, "gfz2021eksc.xml")
        results = shakemaps_from_quakeml(event_id, self.imts)
        self._check_in_memory_outputs(results)

    def test_complete_workflow_no_export(self):
        event_id = os.path.join(DATA_PATH, "gfz2021eksc.xml")
        results = shakemaps_from_quakeml(event_id, self.imts, config={"spcx": 0.1, "spcy": 0.1})
        self._check_in_memory_outputs(results)

    def test_workflow_with_exports(self):
        test_folder = os.path.join(DATA_PATH, "tmp_shakemaps_from_quakeml")
        # Ensure the export folder is removed even if an assertion fails part-way;
        # replaces the previous `os.system("rm -r ...")` shell call, which was
        # non-portable and skipped whenever the test aborted early
        self.addCleanup(shutil.rmtree, test_folder, ignore_errors=True)
        event_id = os.path.join(DATA_PATH, "gfz2021eksc.xml")
        results = shakemaps_from_quakeml(
            event_id, self.imts, config={"spcx": 0.1, "spcy": 0.1}, export_folder=test_folder
        )
        self.assertIsInstance(results, ShakemapWorkflowResult)
        # Verify that the mean/stddev rasters and mean contour files have been
        # created for every IMT (the original checks duplicated the PGA stddev
        # raster assertion instead of covering each IMT uniformly)
        for imt in self.imts:
            for suffix in ("_mean.tif", "_stddev.tif", "_contour_mean.geojson"):
                fname = os.path.join(
                    test_folder, "gfz2021eksc_{:s}{:s}".format(imt, suffix)
                )
                self.assertTrue(os.path.exists(fname))
        # Now re-run but with the same directory - the workflow must refuse to
        # overwrite an existing export folder
        with self.assertRaises(IOError) as ioe:
            shakemaps_from_quakeml(
                event_id,
                self.imts,
                config={"spcx": 0.1, "spcy": 0.1},
                export_folder=test_folder,
            )
        self.assertEqual(
            str(ioe.exception), "Designated export folder %s already exists!" % test_folder
        )
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment