Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixes based on commissioning #32

Merged
merged 3 commits into from
Nov 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGES.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
0.11.1 (2024-11-12)
-------------------
- Fixes to the quality of the reductions
- We now trim the edges of orders better to remove artifacts

0.11.0 (2024-11-05)
-------------------
- Added the ability to combine the extraction from both orders into a single spectrum
Expand Down
35 changes: 21 additions & 14 deletions banzai_floyds/extract.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,9 @@ def extract(binned_data, bin_key='order_wavelength_bin', data_keyword='data', ba
if np.max(data_to_sum[weights_key][data_to_sum['extraction_window']]) < 5e-3:
continue

data_to_sum = data_to_sum[data_to_sum['mask'] == 0]
if len(data_to_sum) == 0:
continue
wavelength_bin_width = data_to_sum[bin_key + '_width'][0]
# This should be equivalent to Horne 1986 optimal extraction
flux = data_to_sum[data_keyword] - data_to_sum[background_key]
Expand Down Expand Up @@ -110,22 +113,26 @@ def do_stage(self, image):
# Scale the background in the same way we scaled the data so we can still subtract it cleanly
image.binned_data['flux_background'] = image.binned_data['background'] * image.binned_data['flux']
image.binned_data['flux_background'] /= image.binned_data['data']
overlap_region = [max([domain[0] for domain in image.wavelengths.domains]),
min([domain[1] for domain in image.wavelengths.domains])]
order_2 = image.binned_data['order'] == 2
order_1 = image.binned_data['order'] == 1
in_overlap = np.logical_and(image.binned_data['wavelength'] > overlap_region[0],
image.binned_data['wavelength'] < overlap_region[1])

# Since we are now combining two orders, we need to divide the weights by 2 in the overlap region
image.binned_data['combined_weights'] = image.binned_data['weights']
image.binned_data['combined_weights'][in_overlap] /= 2
overlap_region = [max([domain[0] for domain in image.wavelengths.wavelength_domains]),
min([domain[1] for domain in image.wavelengths.wavelength_domains])]
# Normalize the orders to make sure they overlap
normalization = np.median(image.binned_data['flux'][np.logical_and(order_1, in_overlap)])
normalization /= np.median(image.binned_data['flux'][np.logical_and(order_2, in_overlap)])
# order_1 onto order_2 because order_1 has higher resolution
extracted_order1 = image.extracted['order'] == 1
extracted_order2 = image.extracted['order'] == 2
in_extracted_overlap = np.logical_and(image.extracted['wavelength'] > overlap_region[0],
image.extracted['wavelength'] < overlap_region[1])
waves_to_interp = image.extracted['wavelength'][np.logical_and(extracted_order1, in_extracted_overlap)]
overlap_order2 = np.logical_and(extracted_order2, in_extracted_overlap)
flux_to_ratio = np.interp(waves_to_interp, image.extracted['wavelength'][overlap_order2],
image.extracted['flux'][overlap_order2])
order_ratio = image.extracted['flux'][np.logical_and(extracted_order1, in_extracted_overlap)]
order_ratio /= flux_to_ratio
normalization = np.median(order_ratio)
order_2 = image.binned_data['order'] == 2
for key in ['flux', 'fluxerror', 'flux_background']:
image.binned_data[key][order_2] *= normalization
image.spectrum = extract(image.binned_data, data_keyword='flux', bin_key='wavelength_bin',
background_key='flux_background', background_out_key='background',
uncertainty_key='fluxerror', weights_key='combined_weights', include_order=False)
background_key='flux_background', background_out_key='background', flux_keyword='flux',
flux_error_key='fluxerror', uncertainty_key='fluxerror',
weights_key='weights', include_order=False)
return image
2 changes: 1 addition & 1 deletion banzai_floyds/orders.py
Original file line number Diff line number Diff line change
Expand Up @@ -406,7 +406,7 @@ class OrderSolver(Stage):
POLYNOMIAL_ORDER = 3

ORDER_REGIONS = {'ogg': [(0, 1550), (500, 1835)],
'coj': [(0, 1600), (615, 1920)]}
'coj': [(55, 1600), (615, 1920)]}

def do_stage(self, image):
if image.orders is None:
Expand Down
2 changes: 1 addition & 1 deletion banzai_floyds/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
'banzai_floyds.profile.ProfileFitter',
'banzai_floyds.background.BackgroundFitter',
'banzai_floyds.extract.Extractor',
# 'banzai_floyds.trim.Trimmer',
'banzai_floyds.trim.Trimmer',
'banzai_floyds.flux.StandardLoader',
'banzai_floyds.flux.FluxSensitivity',
'banzai_floyds.flux.FluxCalibrator',
Expand Down
28 changes: 27 additions & 1 deletion banzai_floyds/tests/test_extract.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import numpy as np
from banzai import context
from banzai_floyds.tests.utils import generate_fake_science_frame
from banzai_floyds.extract import Extractor, extract, set_extraction_region
from banzai_floyds.extract import Extractor, extract, set_extraction_region, CombinedExtractor
from banzai_floyds.utils.binning_utils import bin_data
from collections import namedtuple
from astropy.table import Table
Expand Down Expand Up @@ -68,3 +68,29 @@ def test_full_extraction_stage():
residuals = frame['EXTRACTED'].data['fluxraw'] - expected
residuals /= frame['EXTRACTED'].data['fluxrawerr']
assert (np.abs(residuals) < 3).sum() > 0.99 * len(frame['EXTRACTED'].data['fluxraw'])


def test_combined_extraction():
    # Regression test: combining the extractions from both orders should
    # recover the simulated input spectrum to within the propagated noise.
    np.random.seed(125325)
    stage_context = context.Context({})
    frame = generate_fake_science_frame(flat_spectrum=False, include_sky=True)
    frame.binned_data = bin_data(frame.data, frame.uncertainty, frame.wavelengths, frame.orders)

    # Constant-width fake profiles, one per simulated order center
    width_funcs = [Legendre(frame.input_profile_sigma,) for _ in frame.input_profile_centers]
    frame.profile = frame.input_profile_centers, width_funcs, None

    # Use the known simulated sky as a perfect background model
    y_pixels = frame.binned_data['y'].astype(int)
    x_pixels = frame.binned_data['x'].astype(int)
    frame.binned_data['background'] = frame.input_sky[y_pixels, x_pixels]

    frame.extraction_windows = [[-5.0, 5.0], [-5.0, 5.0]]
    set_extraction_region(frame)

    # Flat (unit) sensitivity and telluric corrections for both orders
    frame.sensitivity = Table({'wavelength': [0, 1e6, 0, 1e6], 'sensitivity': [1, 1, 1, 1], 'order': [1, 1, 2, 2]})
    frame.telluric = Table({'wavelength': [0, 1e6], 'telluric': [1, 1]})

    # A previously extracted flat spectrum covering both orders
    extracted_waves = np.arange(3000.0, 10000.0)
    n_waves = len(extracted_waves)
    frame.extracted = Table({'wavelength': np.concatenate([extracted_waves, extracted_waves]),
                             'flux': np.ones(2 * n_waves),
                             'order': np.repeat([1.0, 2.0], n_waves)})

    frame = CombinedExtractor(stage_context).do_stage(frame)

    # 99% of the combined spectrum should sit within 3 sigma of the truth
    expected = np.interp(frame['SPECTRUM'].data['wavelength'], frame.input_spectrum_wavelengths, frame.input_spectrum)
    residuals = frame['SPECTRUM'].data['flux'] - expected
    residuals /= frame['SPECTRUM'].data['fluxerror']
    assert (np.abs(residuals) < 3).sum() > 0.99 * len(frame['SPECTRUM'].data['flux'])
4 changes: 3 additions & 1 deletion banzai_floyds/tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,9 @@ def generate_fake_science_frame(include_sky=False, flat_spectrum=True, fringe=Fa

frame = FLOYDSObservationFrame([CCDData(data,
fits.Header({'DAY-OBS': '20230101',
'DATE-OBS': '2023-01-01 12:41:56.11'}),
'DATE-OBS': '2023-01-01 12:41:56.11',
'HEIGHT': 0,
'AIRMASS': 1.0}),
uncertainty=errors)],
'foo.fits')
frame.input_profile_centers = profile_centers
Expand Down
32 changes: 24 additions & 8 deletions banzai_floyds/trim.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,29 @@

class Trimmer(Stage):
    """Trim the edges of each extracted order to remove chip-edge artifacts."""

    def do_stage(self, image):
        # We need to trim ~10 pixels off each order to avoid artifacts with the background.
        # The tilt of the arc lines is about 8 degrees, so this roughly corresponds to
        # cutting wavelengths that don't fully fall on the chip.
        # NOTE(review): assumes each order has well over 20 extracted points — confirm upstream.
        cuts = {}
        for order in [1, 2]:
            order_wavelengths = image.extracted['wavelength'][image.extracted['order'] == order]
            order_wavelengths = order_wavelengths[np.argsort(order_wavelengths)]
            # Blue and red wavelength limits: the 10th point in from either end
            cuts[order] = order_wavelengths[10], order_wavelengths[-10]

        # Keep only extracted points strictly inside each order's wavelength cuts
        keep_extracted = np.zeros_like(image.extracted, dtype=bool)
        for order, cut in cuts.items():
            passes_cut = image.extracted['order'] == order
            passes_cut = np.logical_and(passes_cut, image.extracted['wavelength'] > cut[0])
            passes_cut = np.logical_and(passes_cut, image.extracted['wavelength'] < cut[1])
            keep_extracted = np.logical_or(keep_extracted, passes_cut)

        # Everything redward of 10500 Angstroms is removed regardless of order
        keep_extracted = np.logical_and(keep_extracted, image.extracted['wavelength'] < 10500.0)
        image.extracted = image.extracted[keep_extracted]

        # Mask the matching pixels in the binned data so later stages ignore them too
        for order, cut in cuts.items():
            in_order = image.binned_data['order'] == order
            # Use the already-bound `cut` rather than re-indexing cuts[order]
            should_mask = np.logical_or(image.binned_data['wavelength'] < cut[0],
                                        image.binned_data['wavelength'] > cut[1])
            should_mask = np.logical_and(should_mask, in_order)
            image.binned_data['mask'][should_mask] = 1
        return image
Loading