Commit 419fb998 authored by Sebastian Heimann

Merge branch 'manualpicks'

Conflicts:
	src/core.py
	src/plot.py
parents 5d10a325 b60b6fc0
@@ -245,6 +245,10 @@ def command_scan(args):
'--show-movie', dest='show_movie', action='store_true',
help='show movie when showing detections')
parser.add_option(
'--show-window-traces', dest='show_window_traces', action='store_true',
help='show preprocessed traces for every processing time window')
parser.add_option(
'--stop-after-first', dest='stop_after_first', action='store_true',
help='stop processing after the first detection found')
@@ -290,6 +294,7 @@ def command_scan(args):
force=options.force,
show_detections=options.show_detections,
show_movie=options.show_movie,
show_window_traces=options.show_window_traces,
stop_after_first=options.stop_after_first,
nparallel=nparallel)
......
@@ -2,6 +2,7 @@ import logging
from pyrocko.guts import Object, String, Float, Timestamp, List, Bool, load
from pyrocko import model
from pyrocko.fdsn import station as fs
from lassie import receiver, ifc, grid, geo
@@ -16,6 +17,10 @@ class Config(Object):
optional=True,
help='stations file in Pyrocko format')
stations_stationxml_path = String.T(
optional=True,
help='stations file in StationXML format')
receivers = List.T(
receiver.Receiver.T(),
help='receiver coordinates if not read from file')
@@ -24,6 +29,14 @@ class Config(Object):
String.T(),
help='list of directory paths to search for data')
events_path = String.T(
optional=True,
help='limit processing to time windows around given events')
event_time_window_factor = Float.T(
default=2.,
help='controls length of time windows for event-wise processing')
blacklist = List.T(
String.T(),
help='codes in the form NET.STA.LOC of receivers to be excluded')
@@ -70,10 +83,16 @@ class Config(Object):
default=200.,
help='threshold on detector function')
fill_incomplete_with_zeros = Bool.T(
default=False,
help='fill incomplete trace time windows with zeros '
'(and let edge effects ruin your day)')
def __init__(self, *args, **kwargs):
Object.__init__(self, *args, **kwargs)
self._receivers = None
self._grid = None
self._events = None
def get_receivers(self):
'''Aggregate receivers from different sources.'''
@@ -88,8 +107,26 @@ class Config(Object):
lat=station.lat,
lon=station.lon))
if self.stations_stationxml_path:
sx = fs.load_xml(filename=self.stations_stationxml_path)
for station in sx.get_pyrocko_stations():
self._receivers.append(
receiver.Receiver(
codes=station.nsl(),
lat=station.lat,
lon=station.lon))
return self._receivers
def get_events(self):
if self.events_path is None:
return None
if self._events is None:
self._events = model.load_events(self.events_path)
return self._events
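As an aside, a minimal usage sketch for the new event-wise options (illustration only, not part of this commit; module path and file names are assumptions):

from pyrocko import util
from lassie.config import read_config   # module path is an assumption

config = read_config('lassie.yaml')          # hypothetical config file
config.events_path = 'picks/events.txt'      # hypothetical pyrocko event file
config.event_time_window_factor = 2.0

for ev in config.get_events():               # cached list of pyrocko events
    print(util.time_to_str(ev.time))
# In scan(), each event contributes the processing window
# ev.time +/- tshift_max * event_time_window_factor.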
def get_grid(self):
'''Get grid or make default grid.'''
@@ -108,7 +145,7 @@ class Config(Object):
spacing = vmin / fsmooth_max / self.autogrid_density_factor
lat0, lon0, north, east, depth = geo.bounding_box_square(
*receiver.receivers_coords(receivers),
*geo.points_coords(receivers),
scale=self.autogrid_radius_factor)
self._grid = grid.Carthesian3DGrid(
@@ -138,6 +175,7 @@ def read_config(file_path):
conf = load(filename=file_path)
return conf
__all__ = [
'Config',
'read_config',
......
import sys
import os
import logging
import os.path as op
import shutil
from collections import defaultdict
import numpy as num
from pyrocko import pile, trace, util, io
@@ -40,6 +41,35 @@ def check_data_consistency(p, receivers):
logger.warn(nsl_id)
def zero_fill(trs, tmin, tmax):
trs = trace.degapper(trs)
d = defaultdict(list)
for tr in trs:
d[tr.nslc_id].append(tr)
trs_out = []
for nslc, trs_group in d.iteritems():
if not all(tr.deltat == trs_group[0].deltat for tr in trs_group):
logger.warn('inconsistent sample rate, cannot merge traces')
continue
if not all(tr.ydata.dtype == trs_group[0].ydata.dtype
for tr in trs_group):
logger.warn('inconsistent data type, cannot merge traces')
continue
tr_combi = trs_group[0].copy()
tr_combi.extend(tmin, tmax, fillmethod='zeros')
for tr in trs_group[1:]:
tr_combi.add(tr)
trs_out.append(tr_combi)
return trs_out
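A small usage sketch for zero_fill (illustration only, not part of this commit; codes and values are made up):

import numpy as num
from pyrocko import trace

# Two gappy segments of the same channel.
tr_a = trace.Trace('XX', 'STA1', '', 'HHZ', tmin=0.0, deltat=0.01,
                   ydata=num.ones(500))
tr_b = trace.Trace('XX', 'STA1', '', 'HHZ', tmin=20.0, deltat=0.01,
                   ydata=num.ones(500))

# One merged trace spanning the requested window; the gap and the
# margins are padded with zeros.
merged = zero_fill([tr_a, tr_b], tmin=-5.0, tmax=30.0)
assert len(merged) == 1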
def scan(
config,
@@ -47,6 +77,7 @@ def scan(
override_tmax=None,
show_detections=False,
show_movie=False,
show_window_traces=False,
force=False,
stop_after_first=False,
nparallel=None):
@@ -61,7 +92,8 @@ def scan(
os.mkdir(config.stackmax_path)
else:
raise common.LassieError(
'detections file already exists: %s' % config.detections_path)
'detections file already exists: %s'
% config.detections_path)
util.ensuredirs(config.detections_path)
@@ -72,31 +104,52 @@ def scan(
shutil.rmtree(config.stackmax_path)
else:
raise common.LassieError(
'stackmax directory already exists: %s' % config.stackmax_path)
'stackmax directory already exists: %s' %
config.stackmax_path)
util.ensuredirs(config.stackmax_path)
grid = config.get_grid()
receivers = config.get_receivers()
norm_map = gridmod.geometrical_normalization(grid, receivers)
for data_path in config.data_paths:
if not op.exists(data_path):
raise common.LassieError(
'waveform data path does not exist: %s' % data_path)
p = pile.make_pile(config.data_paths, fileformat='detect')
if p.is_empty():
raise common.LassieError('no usable waveforms found')
ifcs = config.image_function_contributions
for ifc in ifcs:
ifc.prescan(p)
shift_tables = []
tshift_maxs = []
for ifc in ifcs:
shift_tables.append(ifc.shifter.get_table(grid, receivers))
shift_tables.append(ifc.get_table(grid, receivers))
tshift_maxs.append(shift_tables[-1].max())
fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs)
tshift_max = max(tshift_maxs) * 1.0
tinc = tshift_max * 10
tpeaksearch = tshift_max + 1.0 / fsmooth_min
tpad = max(ifc.get_tpad() for ifc in ifcs) + tshift_max
tpad = max(ifc.get_tpad() for ifc in ifcs) + tshift_max + tpeaksearch
fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs)
tinc = tshift_max * 10. + 3.0 * tpad
tavail = p.tmax - p.tmin
tinc = min(tinc, tavail - 2.0 * tpad)
if tinc <= 0:
raise common.LassieError(
'available waveforms too short \n'
'required: %g s\n'
'available: %g s\n' % (2.*tpad, tavail))
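To make the window bookkeeping concrete, a worked example with made-up values (illustration only, not part of this commit):

tshift_max = 30.0    # assumed: largest travel-time shift over the grid [s]
fsmooth_min = 0.2    # assumed: smallest smoothing frequency of any ifc [Hz]
tpad_ifc = 10.0      # assumed: max(ifc.get_tpad() for ifc in ifcs) [s]

tpeaksearch = tshift_max + 1.0 / fsmooth_min   # 35.0 s
tpad = tpad_ifc + tshift_max + tpeaksearch     # 75.0 s
tinc = tshift_max * 10. + 3.0 * tpad           # 525.0 s
# tinc is then capped at tavail - 2*tpad; if that is not positive,
# the LassieError above is raised.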
blacklist = set(tuple(s.split('.')) for s in config.blacklist)
@@ -123,162 +176,204 @@ def scan(
tmin = override_tmin or config.tmin
tmax = override_tmax or config.tmax
for trs in p.chopper(
tmin=tmin, tmax=tmax, tinc=tinc, tpad=tpad,
want_incomplete=False,
trace_selector=lambda tr: tr.nslc_id[:3] in station_index):
if not trs:
continue
logger.info('processing time window %s - %s' % (
util.time_to_str(trs[0].wmin),
util.time_to_str(trs[0].wmax)))
wmin = trs[0].wmin
wmax = trs[0].wmax
frames = None
pdata = []
trs_debug = []
shift_maxs = []
for iifc, ifc in enumerate(ifcs):
dataset = ifc.preprocess(trs, wmin, wmax, tshift_max)
if not dataset:
continue
nstations_selected = len(dataset)
events = config.get_events()
twindows = []
if events is not None:
for ev in events:
if tmin <= ev.time <= tmax:
twindows.append((
ev.time - tshift_max * config.event_time_window_factor,
ev.time + tshift_max * config.event_time_window_factor))
nsls_selected, trs_selected = zip(*dataset)
#trs_debug.extend(trs + list(trs_selected))
deltat_cf = trs_selected[0].deltat
t0 = (wmin / deltat_cf) * deltat_cf
istations_selected = num.array(
[station_index[nsl] for nsl in nsls_selected], dtype=num.int)
else:
twindows.append((tmin, tmax))
arrays = [tr.ydata.astype(num.float) for tr in trs_selected]
for tmin_win, tmax_win in twindows:
for trs in p.chopper(
tmin=tmin_win, tmax=tmax_win, tinc=tinc, tpad=tpad,
want_incomplete=config.fill_incomplete_with_zeros,
trace_selector=lambda tr: tr.nslc_id[:3] in station_index):
offsets = num.array(
[int(round((tr.tmin-t0) / deltat_cf)) for tr in trs_selected],
dtype=num.int32)
trs_ok = []
for tr in trs:
if tr.ydata.size == 0:
logger.warn(
'skipping empty trace: %s.%s.%s.%s' % tr.nslc_id)
w = num.array(
[station_weights.get(nsl, 1.0) for nsl in nsls_selected],
dtype=num.float)
continue
if not num.all(num.isfinite(tr.ydata)):
logger.warn(
'skipping trace because of invalid values: '
'%s.%s.%s.%s' % tr.nslc_id)
weights = num.ones((ngridpoints, nstations_selected))
weights *= w[num.newaxis, :]
weights *= ifc.weight
continue
shift_table = shift_tables[iifc]
trs_ok.append(tr)
shifts = -num.round(
shift_table[:, istations_selected] /
deltat_cf).astype(num.int32)
trs = trs_ok
pdata.append((list(trs_selected), shift_table, ifc))
if not trs:
continue
iwmin = int(round((wmin-t0) / deltat_cf))
iwmax = int(round((wmax-t0) / deltat_cf))
logger.info('processing time window %s - %s' % (
util.time_to_str(trs[0].wmin),
util.time_to_str(trs[0].wmax)))
lengthout = iwmax - iwmin + 1
wmin = trs[0].wmin
wmax = trs[0].wmax
shift_maxs.append(num.max(-shifts) * deltat_cf)
if config.fill_incomplete_with_zeros:
trs = zero_fill(trs, wmin - tpad, wmax + tpad)
frames, ioff = parstack(
arrays, offsets, shifts, weights, 0,
offsetout=iwmin,
lengthout=lengthout,
result=frames,
nparallel=nparallel,
impl='openmp')
frames = None
pdata = []
trs_debug = []
shift_maxs = []
for iifc, ifc in enumerate(ifcs):
dataset = ifc.preprocess(
trs, wmin-tpeaksearch, wmax+tpeaksearch, tshift_max)
shift_max = max(shift_maxs)
if not dataset:
continue
if config.sharpness_normalization:
frame_maxs = frames.max(axis=0)
frame_means = num.abs(frames).mean(axis=0)
frames *= (frame_maxs / frame_means)[num.newaxis, :]
frames *= norm_map[:, num.newaxis]
nstations_selected = len(dataset)
frame_maxs = frames.max(axis=0)
nsls_selected, trs_selected = zip(*dataset)
tmin_frames = t0 + ioff * deltat_cf
trs_debug.extend(trs + list(trs_selected))
tr_stackmax = trace.Trace(
'', 'SMAX', '', '',
tmin=tmin_frames,
deltat=deltat_cf,
ydata=frame_maxs)
deltat_cf = trs_selected[0].deltat
#trs_debug.append(tr_stackmax)
t0 = (wmin / deltat_cf) * deltat_cf
# trace.snuffle(trs_debug)
istations_selected = num.array(
[station_index[nsl] for nsl in nsls_selected],
dtype=num.int)
ydata_window = tr_stackmax.chop(wmin, wmax, inplace=False).get_ydata()
arrays = [tr.ydata.astype(num.float) for tr in trs_selected]
logger.info('CF stats: min %g, max %g, median %g' % (
num.min(ydata_window),
num.max(ydata_window),
num.median(ydata_window)))
offsets = num.array(
[int(round((tr.tmin-t0) / deltat_cf))
for tr in trs_selected],
dtype=num.int32)
tpeaks, apeaks = tr_stackmax.peaks(
config.detector_threshold, shift_max + 1.0/fsmooth_min)
w = num.array(
[station_weights.get(nsl, 1.0) for nsl in nsls_selected],
dtype=num.float)
tr_stackmax_indx = tr_stackmax.copy(data=False)
imaxs = num.argmax(frames, axis=0)
tr_stackmax_indx.set_ydata(imaxs.astype(num.int32))
tr_stackmax_indx.set_location('i')
weights = num.ones((ngridpoints, nstations_selected))
weights *= w[num.newaxis, :]
weights *= ifc.weight
for (tpeak, apeak) in zip(tpeaks, apeaks):
if not (wmin <= tpeak and tpeak < wmax):
continue
shift_table = shift_tables[iifc]
logger.info('detection: %s %g' % (
util.time_to_str(tpeak),
apeak))
shifts = -num.round(
shift_table[:, istations_selected] /
deltat_cf).astype(num.int32)
iframe = int(round(((tpeak-t0) - ioff*deltat_cf) / deltat_cf))
frame = frames[:, iframe]
imax = num.argmax(frame)
pdata.append((list(trs_selected), shift_table, ifc))
latpeak, lonpeak, xpeak, ypeak, zpeak = grid.index_to_location(imax)
iwmin = int(round((wmin-tpeaksearch-t0) / deltat_cf))
iwmax = int(round((wmax+tpeaksearch-t0) / deltat_cf))
idetection += 1
lengthout = iwmax - iwmin + 1
if config.detections_path:
f = open(config.detections_path, 'a')
f.write('%06i %s %g %g %g %g %g %g\n' % (
idetection,
util.time_to_str(tpeak, format='%Y-%m-%d %H:%M:%S.6FRAC'),
apeak,
latpeak, lonpeak, xpeak, ypeak, zpeak))
shift_maxs.append(num.max(-shifts) * deltat_cf)
f.close()
frames, ioff = parstack(
arrays, offsets, shifts, weights, 0,
offsetout=iwmin,
lengthout=lengthout,
result=frames,
nparallel=nparallel,
impl='openmp')
if show_detections:
fmin = min(ifc.fmin for ifc in ifcs)
fmax = min(ifc.fmax for ifc in ifcs)
plot.plot_detection(
grid, receivers, frames, tmin_frames,
deltat_cf, imax, iframe, fsmooth_min, xpeak, ypeak, zpeak,
tr_stackmax, tpeaks, apeaks, config.detector_threshold,
pdata, trs, fmin, fmax, idetection,
grid_station_shift_max=shift_max,
movie=show_movie)
shift_max = max(shift_maxs)
if stop_after_first:
return
if config.sharpness_normalization:
frame_maxs = frames.max(axis=0)
frame_means = num.abs(frames).mean(axis=0)
frames *= (frame_maxs / frame_means)[num.newaxis, :]
frames *= norm_map[:, num.newaxis]
tr_stackmax.chop(wmin, wmax)
frame_maxs = frames.max(axis=0)
if config.stackmax_path:
io.save([tr_stackmax, tr_stackmax_indx], os.path.join(config.stackmax_path, 'trace_%(tmin_ms)s.mseed'))
tmin_frames = t0 + ioff * deltat_cf
tr_stackmax = trace.Trace(
'', 'SMAX', '', '',
tmin=tmin_frames,
deltat=deltat_cf,
ydata=frame_maxs)
trs_debug.append(tr_stackmax)
if show_window_traces:
trace.snuffle(trs_debug)
ydata_window = tr_stackmax.chop(
wmin, wmax, inplace=False).get_ydata()
logger.info('CF stats: min %g, max %g, median %g' % (
num.min(ydata_window),
num.max(ydata_window),
num.median(ydata_window)))
tpeaks, apeaks = zip(*[(tpeak, apeak) for (tpeak, apeak) in zip(
*tr_stackmax.peaks(config.detector_threshold, tpeaksearch)) if
wmin <= tpeak and tpeak < wmax]) or ([], [])
for (tpeak, apeak) in zip(tpeaks, apeaks):
logger.info('detection: %s %g' % (
util.time_to_str(tpeak),
apeak))
iframe = int(round(((tpeak-t0) - ioff*deltat_cf) / deltat_cf))
frame = frames[:, iframe]
imax = num.argmax(frame)
latpeak, lonpeak, xpeak, ypeak, zpeak = \
grid.index_to_location(imax)
idetection += 1
if config.detections_path:
f = open(config.detections_path, 'a')
f.write('%06i %s %g %g %g %g %g %g\n' % (
idetection,
util.time_to_str(
tpeak,
format='%Y-%m-%d %H:%M:%S.6FRAC'),
apeak,
latpeak, lonpeak, xpeak, ypeak, zpeak))
f.close()
if show_detections:
fmin = min(ifc.fmin for ifc in ifcs)
fmax = min(ifc.fmax for ifc in ifcs)
plot.plot_detection(
grid, receivers, frames, tmin_frames,
deltat_cf, imax, iframe, fsmooth_min, xpeak, ypeak,
zpeak,
tr_stackmax, tpeaks, apeaks, config.detector_threshold,
wmin, wmax,
pdata, trs, fmin, fmax, idetection,
grid_station_shift_max=shift_max,
movie=show_movie)
if stop_after_first:
return
tr_stackmax.chop(wmin, wmax)
if config.stackmax_path:
io.save(
[tr_stackmax, tr_stackmax_indx],
os.path.join(
config.stackmax_path, 'trace_%(tmin_ms)s.mseed'))
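For reference, a hedged sketch (not part of this commit) of reading back one line of the detections file written above:

from pyrocko import util

def parse_detection_line(line):
    # columns as written by scan(): id, date, time, apeak, lat, lon, x, y, z
    toks = line.split()
    idetection = int(toks[0])
    tpeak = util.str_to_time(' '.join(toks[1:3]))
    apeak, lat, lon, x, y, z = map(float, toks[3:])
    return idetection, tpeak, apeak, lat, lon, x, y, z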
__all__ = [
......
import numpy as num
from pyrocko import orthodrome as od
from pyrocko.guts import Object, Float
class Point(Object):
lat = Float.T(default=0.0)
lon = Float.T(default=0.0)
x = Float.T(default=0.0)
y = Float.T(default=0.0)
z = Float.T(default=0.0)
@property
def vec(self):
return self.lat, self.lon, self.x, self.y, self.z
def points_coords(points, system=None):
lats, lons, xs, ys, zs = num.array(
[r.vec for r in points], dtype=num.float).T
if system is None:
return (lats, lons, xs, ys, zs)
elif system == 'latlon':
return od.ne_to_latlon(lats, lons, xs, ys)
elif system[0] == 'ne':
lat0, lon0 = system[1:3]
if num.all(lats == lat0) and num.all(lons == lon0):
return xs, ys
else:
elats, elons = od.ne_to_latlon(lats, lons, xs, ys)
return od.latlon_to_ne_numpy(lat0, lon0, elats, elons)
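A brief usage sketch for points_coords (illustration only, not part of this commit; coordinates are made up):

p1 = Point(lat=46.0, lon=11.0, x=0.0, y=0.0)
p2 = Point(lat=46.0, lon=11.0, x=1000.0, y=2000.0)

lats, lons, xs, ys, zs = points_coords([p1, p2])           # raw components
elats, elons = points_coords([p1, p2], system='latlon')    # effective lat/lon
norths, easts = points_coords([p1, p2], system=('ne', 46.0, 11.0))
# With the origin equal to the points' reference lat/lon, the last call
# simply returns the x and y offsets.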
def float_array_broadcast(*args):
......
@@ -3,7 +3,7 @@ import numpy as num
from pyrocko.guts import Object, Float
from pyrocko import orthodrome as od
from lassie import receiver
from lassie import geo
guts_prefix = 'lassie'
@@ -68,7 +68,7 @@ class Carthesian3DGrid(Grid):
nx, ny, nz = self._shape()
x, y, z = self._get_coords()
rx, ry = receiver.receivers_coords(