From 52423c6d1df0cc176b084524e415557eaf879fa6 Mon Sep 17 00:00:00 2001
From: Curtis McCully
Date: Tue, 23 Apr 2024 12:51:42 -0400
Subject: [PATCH] Omit large spurious sources in our photometry stage.

---
 CHANGES.md           | 5 +++++
 banzai/photometry.py | 4 ++++
 2 files changed, 9 insertions(+)

diff --git a/CHANGES.md b/CHANGES.md
index 81c3f5e1..48faa0bf 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,8 @@
+1.17.0 (2023-04-24)
+-------------------
+- We now omit sources in the photometry stage that have an area larger than 1000 pixels as they lead to long
+  processing times and are almost invariably spurious.
+
 1.16.1 (2023-04-23)
 -------------------
 - Correction to aperture photometry. We were incorrectly using the radius instead of the diameter
diff --git a/banzai/photometry.py b/banzai/photometry.py
index 58bc7cce..01a6c70f 100755
--- a/banzai/photometry.py
+++ b/banzai/photometry.py
@@ -102,6 +102,10 @@ def do_stage(self, image):
         # Do an initial source detection
         segmentation_map = detect_sources(convolved_data, self.threshold, npixels=self.min_area)
 
+        # We now remove any sources with an area > 1000 pixels because they are almost invariably spurious
+        segmentation_map.remove_labels(segmentation_map.labels[segmentation_map.areas > 1000])
+        segmentation_map.relabel_consecutive(1)
+
         logger.info('Deblending sources', image=image)
         # Note that nlevels here is DEBLEND_NTHRESH in source extractor which is 32 by default
         deblended_seg_map = deblend_sources(convolved_data, segmentation_map,
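
The following is a minimal, self-contained sketch of what the new filtering step does, using the public photutils SegmentationImage API (labels, areas, remove_labels, relabel_consecutive). The synthetic image, threshold, and min_area values are illustrative assumptions only; banzai's actual stage runs detection on convolved data with its own configured threshold and minimum area before deblending.

    # Sketch of the large-source filtering added in this patch (assumed inputs).
    import numpy as np
    from photutils.segmentation import detect_sources

    rng = np.random.default_rng(seed=42)

    # Fake sky background with one compact "star" and one huge spurious blob.
    data = rng.normal(loc=0.0, scale=1.0, size=(200, 200))
    data[50:54, 50:54] += 50.0      # compact source (~16 pixels)
    data[100:160, 100:160] += 50.0  # large artifact (~3600 pixels)

    threshold = 5.0   # detection threshold in data units (assumed value)
    min_area = 9      # minimum connected pixels per source (assumed value)

    segmentation_map = detect_sources(data, threshold, npixels=min_area)

    # Drop any segment covering more than 1000 pixels; such detections are
    # almost always artifacts and slow down the downstream deblending.
    large_labels = segmentation_map.labels[segmentation_map.areas > 1000]
    segmentation_map.remove_labels(large_labels)

    # Renumber the surviving segments 1..N so downstream code sees a clean map.
    segmentation_map.relabel_consecutive(1)

    print(segmentation_map.labels, segmentation_map.areas)

Relabeling consecutively after removal keeps the segmentation map compatible with deblend_sources and the later catalog-building steps, which expect labels without gaps.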