From 7aa679a3da69c74d7d94d5b379bc942f6a5d0dc9 Mon Sep 17 00:00:00 2001
From: mrosskopf
Date: Thu, 11 Jan 2024 12:27:29 +0100
Subject: [PATCH] picker changes from trigger_detection branch to main

---
 .../detection/coincidence_trigger.py            | 14 +++++++++++++-
 .../event_processing/detection/dug_trigger.py   | 12 +++++++-----
 dug_seis/event_processing/picking/dug_picker.py | 17 +++++++++++++----
 3 files changed, 33 insertions(+), 10 deletions(-)

diff --git a/dug_seis/event_processing/detection/coincidence_trigger.py b/dug_seis/event_processing/detection/coincidence_trigger.py
index 617b3d7..39e8540 100644
--- a/dug_seis/event_processing/detection/coincidence_trigger.py
+++ b/dug_seis/event_processing/detection/coincidence_trigger.py
@@ -35,6 +35,7 @@ def coincidence_trigger(
     thr_off,
     stream,
     thr_coincidence_sum,
+    active_channels,
     trace_ids=None,
     max_trigger_length=1e6,
     delete_long_trigger=False,
@@ -85,6 +86,7 @@
     :param stream: Stream containing waveform data for all stations. These data
         are changed inplace, make a copy to keep the raw waveform data.
     :type thr_coincidence_sum: int or float
+    :type active_channels: list of active channels
     :param thr_coincidence_sum: Threshold for coincidence sum. The network
         coincidence sum has to be at least equal to this value for a trigger to
         be included in the returned trigger list.
@@ -178,9 +180,19 @@ def util_val_of_scalar_or_list(elem, idx):
         new_options = {}
         for key, val in options.items():
             new_options[key] = util_val_of_scalar_or_list(val, idx)
+        if not any(x in tr.id for x in active_channels):
+            # Linus inserted this on 15.03.2023, no cf performed on trigger channel/s but
+            # filtering/derivative/filtering/scaling and negation performed
             tr.trigger(trigger_type, **new_options)
+        else:
+            kernel_size = 2000
+            kernel = np.ones(kernel_size) / kernel_size
+            data_filtered = np.convolve(tr.data, kernel, mode='same')
+            time = np.arange(0, len(data_filtered)) * 1 / st.traces[0].stats.sampling_rate
+            data_filtered_dif = np.gradient(data_filtered) / np.gradient(time)
+            data_filtered_dif_filtered = np.convolve(data_filtered_dif, kernel, mode='same')
+            tr.data = -1 * data_filtered_dif_filtered/1000
         # end of adjustments
-
         kwargs["max_len"] = int(max_trigger_length * tr.stats.sampling_rate + 0.5)  # original
diff --git a/dug_seis/event_processing/detection/dug_trigger.py b/dug_seis/event_processing/detection/dug_trigger.py
index dfd3106..0fb9716 100644
--- a/dug_seis/event_processing/detection/dug_trigger.py
+++ b/dug_seis/event_processing/detection/dug_trigger.py
@@ -48,13 +48,13 @@ def dug_trigger(
         the first and the last moment of triggering at different stations for
         which this event is classified as electronic interference. Usually set
         to 0.25e-2.
-    coincidence_trigger_opts: Keyword arguments passed on to the coincidence
+    conincidence_trigger_opts: Keyword arguments passed on to the coincidence
         trigger.
     """
     triggers = coincidence_trigger(stream=st, **conincidence_trigger_opts)

-    events = []
-    for trig in triggers:
+    event_mask_passive = [False for i in range(len(triggers))]
+    for idx, trig in enumerate(triggers):
         event = {"time": min(trig["time"]), "triggered_channels": trig["trace_ids"]}
         # Too close to previous event.
         if (
@@ -66,7 +66,8 @@

         # Classification.
         # Triggered on active triggering channel == active
-        if active_triggering_channel and active_triggering_channel in trig["trace_ids"]:
+        # old: if active_triggering_channel and active_triggering_channel in trig["trace_ids"]:
+        if any(item in trig["trace_ids"] for item in active_triggering_channel):
             classification = "active"
         # Single input trace == passive
         elif len(st) == 1:
@@ -79,8 +80,9 @@
         # Otherwise just treat is as passive.
         else:
             classification = "passive"
+            event_mask_passive[idx] = True
         event["classification"] = classification
         events.append(event)

-    return events
+    return events, event_mask_passive
diff --git a/dug_seis/event_processing/picking/dug_picker.py b/dug_seis/event_processing/picking/dug_picker.py
index 6152024..82a0b9c 100644
--- a/dug_seis/event_processing/picking/dug_picker.py
+++ b/dug_seis/event_processing/picking/dug_picker.py
@@ -248,26 +248,35 @@ def dug_picker(
     return picks


-def sta_lta(stream, st_window, lt_window, threshold_on, threshold_off):
+def sta_lta(stream, st_window, lt_window, thresholds):
     """
     Apply the STA LTA picker
     """
+    stream.detrend("constant")
+    # stream.filter("bandpass", freqmin=1000.0, freqmax=20000.0)
     sampling_rate = stream[0].stats.sampling_rate
     picks = []
-    for trace in stream:
+    for idx, trace in enumerate(stream):

         # create characteristic function
         cft = recursive_sta_lta(trace, st_window, lt_window)

         # do picking
-        trig = trigger_onset(cft, threshold_on, threshold_off)
+        trig = trigger_onset(cft, thresholds[idx], thresholds[idx])

         if len(trig):
             t_pick_UTC = (
                 trace.stats.starttime
                 + adjust_pick_time(trig[0][0], cft) / sampling_rate
             )
+            # calculate snr
+            try:
+                snr = max(abs(trace.data[trig[0][0] + 10:trig[0][0] + 100])) / max(
+                    abs(trace.data[trig[0][0] - 100:trig[0][0] - 10]))
+            except ValueError:
+                continue
+
             picks.append(
                 Pick(
                     time=t_pick_UTC,
@@ -279,7 +288,7 @@
                     ),
                     method_id="recursive_sta_lta",
                     phase_hint="P",
-                    evaluation_mode="automatic",
+                    evaluation_mode="automatic",  # comments=[str(round(snr, 3))],
                 )
             )