Skip to content

Commit

Permalink
picker changes from trigger_detection branch to main
Browse files Browse the repository at this point in the history
  • Loading branch information
mrosskopf committed Jan 11, 2024
1 parent ef20944 commit 7aa679a
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 10 deletions.
14 changes: 13 additions & 1 deletion dug_seis/event_processing/detection/coincidence_trigger.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ def coincidence_trigger(
thr_off,
stream,
thr_coincidence_sum,
active_channels,
trace_ids=None,
max_trigger_length=1e6,
delete_long_trigger=False,
Expand Down Expand Up @@ -85,6 +86,7 @@ def coincidence_trigger(
:param stream: Stream containing waveform data for all stations. These
data are changed inplace, make a copy to keep the raw waveform data.
:type thr_coincidence_sum: int or float
:type active_channels: list of str
:param active_channels: Channel/trace IDs on which the characteristic
    function is replaced by the smoothed-derivative processing below.
:param thr_coincidence_sum: Threshold for coincidence sum. The network
coincidence sum has to be at least equal to this value for a trigger to
be included in the returned trigger list.
Expand Down Expand Up @@ -178,9 +180,19 @@ def util_val_of_scalar_or_list(elem, idx):
new_options = {}
for key, val in options.items():
new_options[key] = util_val_of_scalar_or_list(val, idx)
if not any(x in tr.id for x in active_channels):
# Inserted 15.03.2023 (Linus): for active trigger channel(s) no
# characteristic function is computed; instead the trace is smoothed,
# differentiated, smoothed again, then scaled and negated.
tr.trigger(trigger_type, **new_options)
else:
kernel_size = 2000
kernel = np.ones(kernel_size) / kernel_size
data_filtered = np.convolve(tr.data, kernel, mode='same')
time = np.arange(0, len(data_filtered)) * 1 / st.traces[0].stats.sampling_rate
data_filtered_dif = np.gradient(data_filtered) / np.gradient(time)
data_filtered_dif_filtered = np.convolve(data_filtered_dif, kernel, mode='same')
tr.data = -1 * data_filtered_dif_filtered/1000
# end of adjustments

kwargs["max_len"] = int(max_trigger_length * tr.stats.sampling_rate + 0.5)

# original
Expand Down
12 changes: 7 additions & 5 deletions dug_seis/event_processing/detection/dug_trigger.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,13 +48,13 @@ def dug_trigger(
the first and the last moment of triggering at different stations
for which this event is classified as electronic interference.
Usually set to 0.25e-2.
coincidence_trigger_opts: Keyword arguments passed on to the coincidence
conincidence_trigger_opts: Keyword arguments passed on to the coincidence
trigger.
"""
triggers = coincidence_trigger(stream=st, **conincidence_trigger_opts)

events = []
for trig in triggers:
event_mask_passive = [False for i in range(len(triggers))]
for idx, trig in enumerate(triggers):
event = {"time": min(trig["time"]), "triggered_channels": trig["trace_ids"]}
# Too close to previous event.
if (
Expand All @@ -66,7 +66,8 @@ def dug_trigger(

# Classification.
# Triggered on active triggering channel == active
if active_triggering_channel and active_triggering_channel in trig["trace_ids"]:
# old: if active_triggering_channel and active_triggering_channel in trig["trace_ids"]:
if any(item in trig["trace_ids"] for item in active_triggering_channel):
classification = "active"
# Single input trace == passive
elif len(st) == 1:
Expand All @@ -79,8 +80,9 @@ def dug_trigger(
# Otherwise just treat it as passive.
else:
classification = "passive"
event_mask_passive[idx] = True

event["classification"] = classification
events.append(event)

return events
return events, event_mask_passive
17 changes: 13 additions & 4 deletions dug_seis/event_processing/picking/dug_picker.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,26 +248,35 @@ def dug_picker(
return picks


def sta_lta(stream, st_window, lt_window, threshold_on, threshold_off):
def sta_lta(stream, st_window, lt_window, thresholds):
"""
Apply the STA LTA picker
"""
stream.detrend("constant")
# stream.filter("bandpass", freqmin=1000.0, freqmax=20000.0)
sampling_rate = stream[0].stats.sampling_rate

picks = []

for trace in stream:
for idx, trace in enumerate(stream):
# create characteristic function
cft = recursive_sta_lta(trace, st_window, lt_window)

# do picking
trig = trigger_onset(cft, threshold_on, threshold_off)
trig = trigger_onset(cft, thresholds[idx], thresholds[idx])

if len(trig):
t_pick_UTC = (
trace.stats.starttime
+ adjust_pick_time(trig[0][0], cft) / sampling_rate
)
# calculate snr
try:
snr = max(abs(trace.data[trig[0][0] + 10:trig[0][0] + 100])) / max(
abs(trace.data[trig[0][0] - 100:trig[0][0] - 10]))
except ValueError:
continue

picks.append(
Pick(
time=t_pick_UTC,
Expand All @@ -279,7 +288,7 @@ def sta_lta(stream, st_window, lt_window, threshold_on, threshold_off):
),
method_id="recursive_sta_lta",
phase_hint="P",
evaluation_mode="automatic",
evaluation_mode="automatic", # comments=[str(round(snr, 3))],
)
)

Expand Down

0 comments on commit 7aa679a

Please sign in to comment.