diff --git a/eqcorrscan/core/match_filter/helpers/processes.py b/eqcorrscan/core/match_filter/helpers/processes.py
index 7eb44e52..c0da8823 100644
--- a/eqcorrscan/core/match_filter/helpers/processes.py
+++ b/eqcorrscan/core/match_filter/helpers/processes.py
@@ -654,6 +654,7 @@ def _make_detections(
         threshold: float,
         threshold_type: str,
         save_progress: bool,
+        make_events: bool,
         output_queue: Queue,
         poison_queue: Queue,
 ):
@@ -677,6 +678,8 @@ def _make_detections(
     :param save_progress:
         Whether to save progress or not: If true, individual Party files
         will be written each time this is run.
+    :param make_events:
+        Whether to make events for all detections or not.
     :param output_queue:
         Queue of output Party filenames.
     :param poison_queue:
@@ -706,7 +709,7 @@ def _make_detections(
                 detections=detections, threshold=threshold,
                 threshold_type=threshold_type, templates=templates,
                 chunk_start=starttime, chunk_id=chunk_id,
-                save_progress=save_progress)
+                save_progress=save_progress, make_events=make_events)
             chunk_id += 1
             output_queue.put_nowait(chunk_file)
         except Exception as e:
diff --git a/eqcorrscan/core/match_filter/helpers/tribe.py b/eqcorrscan/core/match_filter/helpers/tribe.py
index 2b6ff2a1..c54c403b 100644
--- a/eqcorrscan/core/match_filter/helpers/tribe.py
+++ b/eqcorrscan/core/match_filter/helpers/tribe.py
@@ -617,7 +617,8 @@ def _make_party(
         templates: List[Template],
         chunk_start: UTCDateTime,
         chunk_id: int,
-        save_progress: bool
+        save_progress: bool,
+        make_events: bool,
 ) -> str:
     """
     Construct a Party from Detections.
@@ -629,6 +630,7 @@ def _make_party(
     :param chunk_start: Starttime of party epoch
     :param chunk_id: Internal index for party epoch
     :param save_progress: Whether to save progress or not
+    :param make_events: Whether to make events for all detections or not
 
     :return: The filename the party has been pickled to.
     """
@@ -657,7 +659,10 @@ def _make_party(
             detection_idx_dict[detection.template_name].append(n)
 
     # Convert to Families and build party.
-    Logger.info("Converting to party and making events")
+    if not make_events:
+        Logger.info("Converting to party")
+    else:
+        Logger.info("Converting to party and making events")
     chunk_party = Party()
 
     # Make a dictionary of templates keyed by name - we could be passed a dict
@@ -676,7 +681,8 @@ def _make_party(
             with open(template, "rb") as f:
                 template = pickle.load(f)
         for d in family_detections:
-            d._calculate_event(template=template)
+            if make_events:
+                d._calculate_event(template=template)
         family = Family(
             template=template, detections=family_detections)
         chunk_party += family
diff --git a/eqcorrscan/core/match_filter/tribe.py b/eqcorrscan/core/match_filter/tribe.py
index 60d2e8b3..d7db6b1b 100644
--- a/eqcorrscan/core/match_filter/tribe.py
+++ b/eqcorrscan/core/match_filter/tribe.py
@@ -657,8 +657,8 @@ def detect(self, stream, threshold, threshold_type, trig_int, plot=False,
                concurrent_processing=False, ignore_length=False,
                ignore_bad_data=False, group_size=None, overlap="calculate",
                full_peaks=False, save_progress=False, process_cores=None,
-               pre_processed=False, check_processing=True, min_stations=0,
-               **kwargs):
+               pre_processed=False, check_processing=True, make_events=True,
+               min_stations=0, **kwargs):
         """
         Detect using a Tribe of templates within a continuous stream.
 
@@ -875,7 +875,7 @@ def detect(self, stream, threshold, threshold_type, trig_int, plot=False,
             ignore_bad_data, group_size, groups, sampling_rate, threshold,
             threshold_type, save_progress, xcorr_func, concurrency, cores,
             export_cccsums, parallel, peak_cores, trig_int, full_peaks,
-            plot, plotdir, plot_format, min_stations,)
+            plot, plotdir, plot_format, make_events, min_stations,)
 
         if concurrent_processing:
             party = self._detect_concurrent(*args, **inner_kwargs)
@@ -903,7 +903,7 @@ def _detect_serial(
             group_size, groups, sampling_rate, threshold, threshold_type,
             save_progress, xcorr_func, concurrency, cores, export_cccsums,
             parallel, peak_cores, trig_int, full_peaks, plot, plotdir, plot_format,
-            min_stations, **kwargs
+            make_events, min_stations, **kwargs
     ):
         """ Internal serial detect workflow. """
         from eqcorrscan.core.match_filter.helpers.tribe import (
@@ -968,7 +968,8 @@ def _detect_serial(
                 detections=detections, threshold=threshold,
                 threshold_type=threshold_type,
                 templates=self.templates, chunk_start=starttime,
-                chunk_id=i, save_progress=save_progress)
+                chunk_id=i, save_progress=save_progress,
+                make_events=make_events)
             chunk_files.append(chunk_file)
         # Rebuild
         for _chunk_file in chunk_files:
@@ -993,7 +994,7 @@ def _detect_concurrent(
             group_size, groups, sampling_rate, threshold, threshold_type,
             save_progress, xcorr_func, concurrency, cores, export_cccsums,
             parallel, peak_cores, trig_int, full_peaks, plot, plotdir, plot_format,
-            min_stations, **kwargs
+            make_events, min_stations, **kwargs
     ):
         """ Internal concurrent detect workflow. """
        from eqcorrscan.core.match_filter.helpers.processes import (
@@ -1082,6 +1083,7 @@ def _detect_concurrent(
                 threshold=threshold,
                 threshold_type=threshold_type,
                 save_progress=save_progress,
+                make_events=make_events,
                 output_queue=party_file_queue,
                 poison_queue=poison_queue,
             ),
@@ -1232,7 +1234,8 @@ def client_detect(self, client, starttime, endtime, threshold,
                       ignore_bad_data=False, group_size=None,
                       return_stream=False, full_peaks=False,
                       save_progress=False, process_cores=None, retries=3,
-                      check_processing=True, min_stations=0, **kwargs):
+                      check_processing=True, make_events=True,
+                      min_stations=0, **kwargs):
         """
         Detect using a Tribe of templates within a continuous stream.
 
@@ -1438,7 +1441,7 @@ def client_detect(self, client, starttime, endtime, threshold,
             return_stream=return_stream, check_processing=False,
             poison_queue=poison_queue, shutdown=False,
             concurrent_processing=concurrent_processing, groups=groups,
-            min_stations=min_stations)
+            make_events=make_events, min_stations=min_stations)
 
         if not concurrent_processing:
             Logger.warning("Using concurrent_processing=True can be faster if"
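Usage sketch (not part of the diff): a minimal, hedged example of calling Tribe.detect with the new make_events flag. The tribe archive name, waveform file name, and threshold values below are illustrative assumptions, not taken from this patch.

    from obspy import read
    from eqcorrscan.core.match_filter import Tribe

    tribe = Tribe().read("tribe.tgz")   # hypothetical tribe archive
    st = read("continuous_day.ms")      # hypothetical continuous data

    # With make_events=False, _make_party skips Detection._calculate_event,
    # so the returned Party holds Detections without attached Events.
    party = tribe.detect(
        stream=st, threshold=8.0, threshold_type="MAD", trig_int=6.0,
        make_events=False)

    # Events can still be built later for the detections that are kept,
    # using the same method the patch gates behind make_events:
    for family in party:
        for detection in family:
            detection._calculate_event(template=family.template)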