Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
Sebastian Heimann
grond
Commits
d0337554
Commit
d0337554
authored
Feb 24, 2017
by
Marius Isken
Browse files
Added RectangularProblem and SatelliteTarget // Refactored file structure
parent
1dfb64b6
Changes
12
Expand all
Hide whitespace changes
Inline
Side-by-side
apps/grond
View file @
d0337554
...
...
@@ -3,6 +3,7 @@
import
math
import
sys
import
logging
import
os
import
os.path
as
op
from
optparse
import
OptionParser
...
...
@@ -12,8 +13,7 @@ from pyrocko.gf import Range
import
grond
logger
=
logging
.
getLogger
(
'main'
)
# One kilometre in metres; used to scale distance ranges (e.g. 10*km).
km = 1e3
def
d2u
(
d
):
...
...
@@ -136,26 +136,32 @@ def command_init(args):
def
setup
(
parser
):
parser
.
add_option
(
'--waveform'
,
dest
=
'waveform'
,
action
=
'store_true'
,
default
=
True
,
help
=
'Create an example configuration for waveform inversion.'
)
help
=
'Create an example configuration for waveform inversion. '
'(default)'
)
parser
.
add_option
(
'--static'
,
dest
=
'static'
,
action
=
'store_true'
,
help
=
'Create an example configuration for static displacement'
'
targets
using kite scene containers.'
)
help
=
'Create an example configuration for static displacement
s
'
' using kite scene containers.'
)
parser
,
options
,
args
=
cl_parse
(
'init'
,
args
,
setup
)
dir_struct
=
[
'gf_store'
]
if
options
.
waveform
and
not
options
.
static
:
dir_struct
+=
[
'data'
]
dataset_config
=
grond
.
DatasetConfig
(
stations_path
=
'stations.txt'
,
events_path
=
'events.txt'
,
waveform_paths
=
[
'data'
])
target_configs
=
[
grond
.
TargetConfig
(
super_group
=
'time_domain'
,
group
=
'all'
,
distance_min
=
10
*
km
,
distance_max
=
1000
*
km
,
channels
=
[
'Z'
,
'R'
,
'T'
],
interpolation
=
'multilinear'
,
store_id
=
'g
lobal_2s
'
,
store_id
=
'g
f_store
'
,
inner_misfit_config
=
grond
.
InnerMisfitConfig
(
fmin
=
0.01
,
fmax
=
0.1
))]
...
...
@@ -182,15 +188,19 @@ def command_init(args):
)
elif
options
.
static
:
dir_struct
+=
[
'scenes'
]
dataset_config
=
grond
.
DatasetConfig
(
kite_scene_paths
=
[
'scenes'
]
events_path
=
'events.txt'
,
kite_scene_paths
=
[
'scenes'
],
)
target_configs
=
[
grond
.
TargetConfig
(
super_group
=
'insar_target'
,
group
=
'all'
,
interpolation
=
'multilinear'
,
store_id
=
'g
lobal_2s
'
,
store_id
=
'g
f_store
'
,
inner_satellite_misfit_config
=
grond
.
InnerSatelliteMisfitConfig
(
use_
kite_covariance
=
Tru
e
))]
use_
weight_focal
=
Fals
e
))]
problem_config
=
grond
.
RectangularProblemConfig
(
name_template
=
'rect_source'
,
...
...
@@ -206,11 +216,40 @@ def command_init(args):
slip
=
Range
(
1
,
3
))
)
engine_config
=
grond
.
EngineConfig
(
gf_store_superdirs
=
[
'.'
])
config
=
grond
.
Config
(
rundir_template
=
op
.
abspath
(
op
.
curdir
),
rundir_template
=
op
.
join
(
op
.
abspath
(
op
.
curdir
),
'rundir'
),
dataset_config
=
dataset_config
,
target_configs
=
target_configs
,
problem_config
=
problem_config
)
problem_config
=
problem_config
,
engine_config
=
engine_config
)
events
=
'''name = 2011-myanmar
time = 2011-03-24 13:55:12.010
latitude = 20.687
longitude = 99.822
magnitude = 6.9
moment = 1.9228e+19
depth = 8000
region = Myanmar
--------------------------------------------'''
def
create_struct
(
dirname
):
def
p
(
fn
):
op
.
abspath
(
op
.
join
(
dirname
,
fn
))
os
.
mkdir
(
op
.
abspath
(
dirname
))
with
open
(
p
(
'config.yml'
),
'w'
)
as
cf
:
cf
.
write
(
str
(
config
))
with
open
(
p
(
'events.txt'
),
'w'
)
as
ef
:
ef
.
write
(
events
)
open
(
'stations.txt'
,
'a'
).
close
()
for
d
in
dir_struct
:
os
.
mkdir
(
p
(
d
))
print
config
...
...
setup.py
View file @
d0337554
...
...
@@ -8,7 +8,7 @@ setup(
version
=
'0.1'
,
author
=
'Sebastian Heimann'
,
author_email
=
'sebastian.heimann@gfz-potsdam.de'
,
packages
=
[
'grond'
,
'grond.problems'
],
packages
=
[
'grond'
],
package_dir
=
{
'grond'
:
'src'
},
scripts
=
[
'apps/grond'
],
package_data
=
{
'grond'
:
[]},
...
...
src/__init__.py
View file @
d0337554
from
core
import
*
# noqa
from
dataset
import
*
# noqa
from
.
core
import
*
# noqa
from
.
dataset
import
*
# noqa
from
.problems
import
*
# noqa
from
.targets
import
*
# noqa
src/core.py
View file @
d0337554
This diff is collapsed.
Click to expand it.
src/dataset.py
View file @
d0337554
import
glob
import
copy
import
logging
from
collections
import
defaultdict
import
numpy
as
num
from
collections
import
defaultdict
from
pyrocko
import
util
,
pile
,
model
,
config
,
trace
,
\
marker
as
pmarker
from
pyrocko.fdsn
import
enhanced_sacpz
,
station
as
fs
from
pyrocko.guts
import
Object
,
Tuple
,
String
,
Float
,
dump_all
,
load_all
from
pyrocko.guts
import
(
Object
,
Tuple
,
String
,
Float
,
List
,
Bool
,
dump_all
,
load_all
)
guts_prefix
=
'grond'
from
.meta
import
Path
,
HasPaths
,
expand_template
,
xjoin
from
.synthetic_tests
import
SyntheticTest
# NOTE(review): guts_prefix presumably namespaces guts-serialized objects
# defined in this module -- confirm against pyrocko.guts conventions.
guts_prefix = 'grond'

logger = logging.getLogger('grond.dataset')
...
...
@@ -219,14 +224,19 @@ class Dataset(object):
self
.
_picks
=
None
def add_kite_scene(self, filename):
    """Load a single kite InSAR scene from *filename* and register it.

    The loaded scene is appended to ``self.kite_scenes``.  Raises
    ImportError when the optional ``kite`` package is not installed.
    """
    try:
        from kite import Scene
    except ImportError:
        raise ImportError(
            'Module kite could not be imported, '
            'please install from http://pyrocko.org')

    logger.debug('Loading kite scene from %s' % filename)

    scene = Scene()
    # Align the scene's internal logger verbosity with ours.
    scene._log.setLevel(logger.level)
    scene.load(filename)

    self.kite_scenes.append(scene)
def
is_blacklisted
(
self
,
obj
):
try
:
...
...
@@ -319,9 +329,12 @@ class Dataset(object):
if
not
self
.
is_blacklisted
(
self
.
stations
[
k
])
and
self
.
is_whitelisted
(
self
.
stations
[
k
])]
def
get_kite_displacement
(
self
,
scene_id
=
None
):
def get_kite_scenes(self):
    """Return the list of all kite scenes registered with this dataset."""
    scenes = self.kite_scenes
    return scenes
def
get_kite_scene
(
self
,
scene_id
=
None
):
if
scene_id
is
None
:
if
len
(
self
.
kite_scene
)
==
0
:
if
len
(
self
.
kite_scene
s
)
==
0
:
raise
AttributeError
(
'No kite displacements defined'
)
return
self
.
kite_scenes
[
0
]
else
:
...
...
@@ -738,12 +751,122 @@ class Dataset(object):
return
self
.
get_picks
().
get
((
nsl
,
phasename
,
eventname
),
None
)
class DatasetConfig(HasPaths):
    """Configuration object describing where a :class:`Dataset`'s input
    files live and how they are filtered.

    Paths may contain a ``${event_name}`` placeholder which is expanded
    per event.  Built datasets are cached per event name in ``self._ds``.
    """

    stations_path = Path.T(optional=True)
    stations_stationxml_paths = List.T(Path.T(), optional=True)
    events_path = Path.T(optional=True)
    waveform_paths = List.T(Path.T(), optional=True)
    clippings_path = Path.T(optional=True)
    responses_sacpz_path = Path.T(optional=True)
    responses_stationxml_paths = List.T(Path.T(), optional=True)
    station_corrections_path = Path.T(optional=True)
    apply_correction_factors = Bool.T(optional=True, default=True)
    apply_correction_delays = Bool.T(optional=True, default=True)
    picks_paths = List.T(Path.T())
    blacklist_paths = List.T(Path.T())
    blacklist = List.T(
        String.T(),
        help='stations/components to be excluded according to their STA, '
             'NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes.')
    whitelist_paths = List.T(Path.T())
    whitelist = List.T(
        String.T(),
        optional=True,
        help='if not None, list of stations/components to included according '
             'to their STA, NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes. '
             'Note: ''when whitelisting on channel level, both, the raw and '
             'the processed channel codes have to be listed.')
    synthetic_test = SyntheticTest.T(optional=True)
    kite_scene_paths = List.T(Path.T(), optional=True)

    def __init__(self, *args, **kwargs):
        HasPaths.__init__(self, *args, **kwargs)
        # Per-event cache of constructed Dataset objects.
        self._ds = {}

    def get_event_names(self):
        """Return the names of all events found in ``events_path``.

        ``${event_name}`` in the path is globbed with ``*``.
        """
        def extra(path):
            return expand_template(path, dict(event_name='*'))

        def fp(path):
            return self.expand_path(path, extra=extra)

        events = []
        for fn in glob.glob(fp(self.events_path)):
            events.extend(model.load_events(filename=fn))

        event_names = [ev.name for ev in events]
        return event_names

    def get_dataset(self, event_name):
        """Build (or fetch from cache) the fully configured
        :class:`Dataset` for *event_name*."""
        if event_name not in self._ds:
            def extra(path):
                return expand_template(path, dict(event_name=event_name))

            def fp(path):
                return self.expand_path(path, extra=extra)

            ds = Dataset(event_name)
            ds.add_stations(
                pyrocko_stations_filename=fp(self.stations_path),
                stationxml_filenames=fp(self.stations_stationxml_paths))

            ds.add_events(filename=fp(self.events_path))

            if self.waveform_paths:
                ds.add_waveforms(paths=fp(self.waveform_paths))

            if self.kite_scene_paths:
                # Every *.npz file below each configured directory is
                # treated as one kite scene.
                for path in self.kite_scene_paths:
                    for fn in glob.glob(xjoin(fp(path), '*.npz')):
                        ds.add_kite_scene(filename=fn)

            if self.clippings_path:
                ds.add_clippings(markers_filename=fp(self.clippings_path))

            if self.responses_sacpz_path:
                ds.add_responses(
                    sacpz_dirname=fp(self.responses_sacpz_path))

            if self.responses_stationxml_paths:
                ds.add_responses(
                    stationxml_filenames=fp(self.responses_stationxml_paths))

            if self.station_corrections_path:
                ds.add_station_corrections(
                    filename=fp(self.station_corrections_path))

            ds.apply_correction_factors = self.apply_correction_factors
            ds.apply_correction_delays = self.apply_correction_delays

            for picks_path in self.picks_paths:
                ds.add_picks(filename=fp(picks_path))

            ds.add_blacklist(self.blacklist)
            ds.add_blacklist(filenames=fp(self.blacklist_paths))

            if self.whitelist:
                ds.add_whitelist(self.whitelist)
            if self.whitelist_paths:
                ds.add_whitelist(filenames=fp(self.whitelist_paths))

            ds.set_synthetic_test(copy.deepcopy(self.synthetic_test))
            self._ds[event_name] = ds

        return self._ds[event_name]
# Public names exported via ``from grond.dataset import *``.
# Fixed: 'Dataset' was listed twice.
__all__ = '''
    Dataset
    DatasetConfig
    DatasetError
    InvalidObject
    NotFound
    StationCorrection
    load_station_corrections
    dump_station_corrections
'''.split()
src/meta.py
0 → 100644
View file @
d0337554
import
os.path
as
op
from
string
import
Template
from
pyrocko.guts
import
Object
,
String
def xjoin(basepath, path):
    """Join *path* onto *basepath*, tolerating ``None`` components.

    A ``None`` *path* yields *basepath*; an absolute *path* (or a
    ``None`` *basepath*) yields *path* unchanged; otherwise the two are
    joined with :func:`os.path.join`.
    """
    if path is None and basepath is not None:
        return basepath

    if op.isabs(path) or basepath is None:
        return path

    return op.join(basepath, path)
def xrelpath(path, start):
    """Like :func:`os.path.relpath`, but leave absolute paths untouched."""
    return path if op.isabs(path) else op.relpath(path, start)
class Forbidden(Exception):
    """Raised to veto an operation or proposed model.

    NOTE(review): raised e.g. by problem ``preconstrain`` implementations
    elsewhere in the project -- confirm exact usage.
    """


class Path(String):
    """String subtype marking values that are filesystem paths."""


class GrondError(Exception):
    """Base exception for grond-specific errors (e.g. bad templates)."""
def expand_template(template, d):
    """Substitute ``${name}`` placeholders in *template* from mapping *d*.

    Raises GrondError when a placeholder is unknown (KeyError from
    ``Template.substitute``) or the template syntax is malformed
    (ValueError).
    """
    try:
        expanded = Template(template).substitute(d)
    except KeyError as e:
        raise GrondError(
            'invalid placeholder "%s" in template: "%s"'
            % (str(e), template))
    except ValueError:
        raise GrondError(
            'malformed placeholder in template: "%s"' % template)

    return expanded
class HasPaths(Object):
    """Mixin for guts Objects holding :class:`Path` attributes.

    Tracks a base directory (``_basepath``) plus an optional
    ``path_prefix`` so that relative paths can be expanded or re-rooted
    recursively through nested HasPaths children.
    """

    path_prefix = Path.T(optional=True)

    def __init__(self, *args, **kwargs):
        Object.__init__(self, *args, **kwargs)
        # Set lazily via set_basepath(); expand_path() asserts on these.
        self._basepath = None
        self._parent_path_prefix = None

    def set_basepath(self, basepath, parent_path_prefix=None):
        """Set the base directory and propagate it to nested HasPaths."""
        self._basepath = basepath
        self._parent_path_prefix = parent_path_prefix
        for (prop, val) in self.T.ipropvals(self):
            if isinstance(val, HasPaths):
                # Children inherit our prefix unless they define their own.
                val.set_basepath(
                    basepath, self.path_prefix or self._parent_path_prefix)

    def get_basepath(self):
        """Return the base directory; must have been set before."""
        assert self._basepath is not None
        return self._basepath

    def change_basepath(self, new_basepath, parent_path_prefix=None):
        """Re-root all relative paths onto *new_basepath*, recursively."""
        assert self._basepath is not None

        self._parent_path_prefix = parent_path_prefix
        if self.path_prefix or not self._parent_path_prefix:
            # Fold the old-base -> new-base relation into our own prefix.
            self.path_prefix = op.normpath(xjoin(
                xrelpath(self._basepath, new_basepath), self.path_prefix))

        for val in self.T.ivals(self):
            if isinstance(val, HasPaths):
                val.change_basepath(
                    new_basepath,
                    self.path_prefix or self._parent_path_prefix)

        self._basepath = new_basepath

    def expand_path(self, path, extra=None):
        """Expand *path* (a string, list of strings, or None) relative to
        the base path and prefix.

        *extra*, when given, is applied to each expanded path (used e.g.
        for ``${event_name}`` template substitution by callers).
        """
        assert self._basepath is not None

        if extra is None:
            def extra(path):
                return path

        path_prefix = self.path_prefix or self._parent_path_prefix

        if path is None:
            return None
        # NOTE: 'basestring' is Python 2 only -- consistent with the rest
        # of this code base (e.g. 'print config' in apps/grond).
        elif isinstance(path, basestring):
            return extra(
                op.normpath(xjoin(self._basepath, xjoin(path_prefix, path))))
        else:
            return [
                extra(
                    op.normpath(xjoin(self._basepath, xjoin(path_prefix, p))))
                for p in path]
src/problems
/cmt
.py
→
src/problems.py
View file @
d0337554
This diff is collapsed.
Click to expand it.
src/problems/__init__.py
deleted
100644 → 0
View file @
1dfb64b6
from
cmt
import
*
# noqa
from
double_dc
import
*
# noqa
from
rectangular
import
*
# noqa
src/problems/double_dc.py
deleted
100644 → 0
View file @
1dfb64b6
import
logging
import
numpy
as
num
from
pyrocko
import
gf
,
util
from
pyrocko.guts
import
Float
,
String
,
Dict
,
List
,
Int
from
grond
import
core
# NOTE(review): guts_prefix presumably namespaces guts-serialized objects
# defined in this module -- confirm against pyrocko.guts conventions.
guts_prefix = 'grond'

logger = logging.getLogger('grond.double_dc')

km = 1000.

# Keyword bundle for Parameter(...): display metre quantities in km.
as_km = dict(scale_factor=km, scale_unit='km')
class
DoubleDCProblem
(
core
.
Problem
):
parameters
=
[
core
.
Parameter
(
'time'
,
's'
,
label
=
'Time'
),
core
.
Parameter
(
'north_shift'
,
'm'
,
label
=
'Northing'
,
**
as_km
),
core
.
Parameter
(
'east_shift'
,
'm'
,
label
=
'Easting'
,
**
as_km
),
core
.
Parameter
(
'depth'
,
'm'
,
label
=
'Depth'
,
**
as_km
),
core
.
Parameter
(
'magnitude'
,
label
=
'Magnitude'
),
core
.
Parameter
(
'strike1'
,
'deg'
,
label
=
'Strike 1'
),
core
.
Parameter
(
'dip1'
,
'deg'
,
label
=
'Dip 1'
),
core
.
Parameter
(
'rake1'
,
'deg'
,
label
=
'Rake 1'
),
core
.
Parameter
(
'strike2'
,
'deg'
,
label
=
'Strike 2'
),
core
.
Parameter
(
'dip2'
,
'deg'
,
label
=
'Dip 2'
),
core
.
Parameter
(
'rake2'
,
'deg'
,
label
=
'Rake 2'
),
core
.
Parameter
(
'delta_time'
,
's'
,
label
=
'$\Delta$ Time'
),
core
.
Parameter
(
'delta_depth'
,
'm'
,
label
=
'$\Delta$ Depth'
),
core
.
Parameter
(
'azimuth'
,
'deg'
,
label
=
'Azimuth'
),
core
.
Parameter
(
'distance'
,
'm'
,
label
=
'Distance'
),
core
.
Parameter
(
'mix'
,
label
=
'Mix'
),
core
.
Parameter
(
'duration1'
,
's'
,
label
=
'Duration 1'
),
core
.
Parameter
(
'duration2'
,
's'
,
label
=
'Duration 2'
)]
dependants
=
[]
targets
=
List
.
T
(
core
.
MisfitTarget
.
T
())
ranges
=
Dict
.
T
(
String
.
T
(),
gf
.
Range
.
T
())
distance_min
=
Float
.
T
(
default
=
0.0
)
nbootstrap
=
Int
.
T
(
default
=
100
)
def
unpack
(
self
,
x
):
d
=
self
.
parameter_dict
(
x
)
p
=
{}
for
k
in
self
.
base_source
.
keys
():
if
k
in
d
:
p
[
k
]
=
float
(
self
.
ranges
[
k
].
make_relative
(
self
.
base_source
[
k
],
d
[
k
]))
stf1
=
gf
.
HalfSinusoidSTF
(
duration
=
float
(
d
.
duration1
))
stf2
=
gf
.
HalfSinusoidSTF
(
duration
=
float
(
d
.
duration2
))
source
=
self
.
base_source
.
clone
(
stf1
=
stf1
,
stf2
=
stf2
,
**
p
)
return
source
def
make_dependant
(
self
,
xs
,
pname
):
if
xs
.
ndim
==
1
:
return
self
.
make_dependant
(
xs
[
num
.
newaxis
,
:],
pname
)[
0
]
raise
KeyError
(
pname
)
def
extract
(
self
,
xs
,
i
):
if
xs
.
ndim
==
1
:
return
self
.
extract
(
xs
[
num
.
newaxis
,
:],
i
)[
0
]
if
i
<
self
.
nparameters
:
return
xs
[:,
i
]
else
:
return
self
.
make_dependant
(
xs
,
self
.
dependants
[
i
-
self
.
nparameters
].
name
)
def
pack
(
self
,
source
):
x
=
num
.
array
([
source
.
time
-
self
.
base_source
.
time
,
source
.
north_shift
,
source
.
east_shift
,
source
.
depth
,
source
.
magnitude
,
source
.
strike1
,
source
.
dip1
,
source
.
rake1
,
source
.
strike2
,
source
.
dip2
,
source
.
rake2
,
source
.
delta_time
,
source
.
delta_depth
,
source
.
azimuth
,
source
.
distance
,
source
.
mix
,
source
.
stf1
.
duration
,
source
.
stf2
.
duration
],
dtype
=
num
.
float
)
return
x
def
random_uniform
(
self
,
xbounds
):
x
=
num
.
zeros
(
self
.
nparameters
)
for
i
in
xrange
(
self
.
nparameters
):
x
[
i
]
=
num
.
random
.
uniform
(
xbounds
[
i
,
0
],
xbounds
[
i
,
1
])
return
x
.
tolist
()
def
preconstrain
(
self
,
x
):
source
=
self
.
unpack
(
x
)
if
any
(
self
.
distance_min
>
source
.
distance_to
(
t
)
for
t
in
self
.
targets
):
raise
core
.
Forbidden
()
return
num
.
array
(
x
,
dtype
=
num
.
float
)
def
bounds
(
self
):
out
=
[]
for
p
in
self
.
parameters
:
r
=
self
.
ranges
[
p
.
name
]
out
.
append
((
r
.
start
,
r
.
stop
))
return
out
def
dependant_bounds
(
self
):
out
=
[]
return
out
def
evaluate
(
self
,
x
,
result_mode
=
'sparse'
):
source
=
self
.
unpack
(
x
)
engine
=
self
.
get_engine
()
for
target
in
self
.
targets
:
target
.
set_result_mode
(
result_mode
)
resp
=
engine
.
process
(
source
,
self
.
targets
)
data
=
[]
results
=
[]
for
target
,
result
in
zip
(
self
.
targets
,
resp
.
results_list
[
0
]):
if
isinstance
(
result
,
gf
.
SeismosizerError
):
logger
.
debug
(
'%s.%s.%s.%s: %s'
%
(
target
.
codes
+
(
str
(
result
),)))
data
.
append
((
None
,
None
))
results
.
append
(
result
)
else
:
data
.
append
((
result
.
misfit_value
,
result
.
misfit_norm
))
results
.
append
(
result
)
ms
,
ns
=
num
.
array
(
data
,
dtype
=
num
.
float
).
T
if
result_mode
==
'full'
:
return
ms
,
ns
,
results
else
:
return
ms
,
ns
def
forward
(
self
,
x
):
source
=
self
.
unpack
(
x
)
engine
=
self
.
get_engine
()
plain_targets
=
[
target
.
get_plain_target
()
for
target
in
self
.
targets
]
resp
=
engine
.
process
(
source
,
plain_targets
)
results
=
[]
for
target
,
result
in
zip
(
self
.
targets
,
resp
.
results_list
[
0
]):
if
isinstance
(
result
,
gf
.
SeismosizerError
):
logger
.
debug
(
'%s.%s.%s.%s: %s'
%
(
target
.
codes
+
(
str
(
result
),)))
results
.
append
(
None
)
else
:
results
.
append
(
result
)
return
results
def
get_target_weights
(
self
):
if
self
.
_target_weights
is
None
:
self
.
_target_weights
=
num
.
array
(
[
target
.
get_combined_weight
(
apply_balancing_weights
=
self
.
apply_balancing_weights
)
for
target
in
self
.
targets
],
dtype
=
num
.
float
)