Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Station metadata now gets passed by default to get_synthetics #279

Merged
merged 7 commits into from
Nov 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/install/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ Unpack seismic waveforms used by examples:
.. code::

bash ./data/examples/unpack.bash
bash ./data/tests/unpack.bash
bash ./data/tests/download.bash


Finally, install PyGMT:
Expand Down
44 changes: 31 additions & 13 deletions mtuq/greens_tensor/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ class GreensTensor(Stream):
`ObsPy documentation <https://docs.obspy.org/packages/autogen/obspy.core.stream.Stream.html>`_
for more information.

"""
"""

def __init__(self,
traces=None,
station=None,
Expand Down Expand Up @@ -102,13 +103,6 @@ def _set_components(self, components):

def _preallocate(self):
""" Preallocates structures used by `get_synthetics`

.. note:

Every time ``get_synthetics(inplace=True)`` is called, the numeric
trace data get overwritten. Every time ``_set_components`` is
called, the traces get overwritten. The stream itself never gets
overwritten.
"""
nc, nr, nt = self._get_shape()

Expand Down Expand Up @@ -148,10 +142,10 @@ def _allocate_stream(self, stats=None):
"""
nc, nr, nt = self._get_shape()

if not stats:
stats = []
if stats is None:
stats = list()
for component in self.components:
stats += [self[0].stats.copy()]
stats.append(deepcopy(self.station))
stats[-1].update({'npts': nt, 'channel': component})

stream = Stream()
Expand Down Expand Up @@ -291,7 +285,7 @@ def select(self, selector):
return selected


def get_synthetics(self, source, components=None, stats=None, mode='apply', **kwargs):
def get_synthetics(self, source, components=['Z','R','T'], stats=None, mode='apply', **kwargs):
""" Generates synthetics through a linear combination of time series

Returns an MTUQ `Dataset`
Expand All @@ -307,7 +301,25 @@ def get_synthetics(self, source, components=None, stats=None, mode='apply', **kw

``stats`` (`obspy.Trace.Stats` object):
ObsPy Stats object that will be attached to the synthetics

(Defaults to `GreensTensor` `station` attributes.)


.. note::

Different ways of generating synthetics are possible (including in-place
methods suitable for use in mtuq/grid_search.py):

- When ``get_synthetics(inplace=True)`` is called, the existing
stream and trace objects are reused, and only the numeric trace
data gets overwritten

- When ``get_synthetics(inplace=False)`` is called, new stream and trace
objects are allocated

- When ``_set_components()`` is called prior to
``get_synthetics(inplace=True)``, the existing stream is reused but
new trace objects are allocated

"""
if mode=='map':
if components is None:
Expand All @@ -322,6 +334,12 @@ def get_synthetics(self, source, components=None, stats=None, mode='apply', **kw
return synthetics

elif mode=='apply':
if stats is not None:
print("get_synthetics() stats keyword argument can only be "
"used with mode='map'")

warnings.warn("Ignoring stats keyword argument.")

synthetics = Dataset()
for tensor in self:
synthetics.append(tensor.get_synthetics(
Expand Down
4 changes: 0 additions & 4 deletions mtuq/io/clients/CPS_SAC.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,6 @@
]


# display prominent warning
print('CPS client still under testing')


class Client(ClientBase):
""" CPS database client

Expand Down
53 changes: 28 additions & 25 deletions mtuq/io/readers/SAC.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,11 @@ def read(filenames, station_id_list=None, event_id=None, tags=[]):
for station_id in data_sorted:
streams += [data_sorted[station_id]]

# check for duplicate components
for stream in streams:
# check for duplicate components
check_components(stream)

# check that all traces have the same time sampling
check_components(stream)

# collect event metadata
Expand All @@ -81,7 +84,7 @@ def read(filenames, station_id_list=None, event_id=None, tags=[]):

# collect station metadata
for stream in streams:
stream.station = _get_station(stream, preliminary_origin)
stream.station = _get_station(stream)

# create MTUQ Dataset
return Dataset(streams, id=event_id, tags=tags)
Expand Down Expand Up @@ -133,26 +136,28 @@ def _get_origin(stream, event_id):
})


def _get_station(stream, origin, attach_sac_headers=True):
def _get_station(stream):
""" Extracts station metadata from SAC headers
"""
#
# extract metadata from ObsPy structures
#
meta = deepcopy(stream[0].meta.__dict__)

sac_headers = meta.pop('sac')

# remove channel-specific attributes
for attr in ['channel', 'component']:
if attr in meta:
meta.pop(attr)
"""
stats = stream[0].stats
sac_headers = stream[0].stats.sac

#
#
# populate station object
#
station = Station(meta)

#
station = Station()

station.update({
'network': stream[0].stats.network,
'station': stream[0].stats.station,
'location': stream[0].stats.location,
'sampling_rate': stream[0].stats.sampling_rate,
'npts': stream[0].stats.npts,
'delta': stream[0].stats.delta,
'starttime': stream[0].stats.starttime,
'endtime': stream[0].stats.endtime,
})

station.update({
'id': '.'.join([
stream[0].stats.network,
Expand All @@ -168,20 +173,18 @@ def _get_station(stream, origin, attach_sac_headers=True):
except:
raise Exception(
"Could not determine station location from SAC headers.")

try:
station.update({
'station_elevation_in_m': sac_headers.stel,
'station_depth_in_m': sac_headers.stdp})
except:
except:
pass

if attach_sac_headers:
station.sac = sac_headers


return station



def _glob(filenames):
# glob any wildcards
_list = list()
Expand Down
Loading