From fb63f6e441af61d8c1d8448572151b465c58d2d9 Mon Sep 17 00:00:00 2001 From: "Warren J. Hack" Date: Wed, 22 Feb 2023 11:22:48 -0500 Subject: [PATCH 1/3] Improve memory use for calwebb_spec3 processing --- jwst/cube_build/cube_build_io_util.py | 4 +++ jwst/cube_build/cube_build_step.py | 11 ++++++-- jwst/cube_build/data_types.py | 4 +++ jwst/cube_build/file_table.py | 13 +++++++--- jwst/cube_build/ifu_cube.py | 13 +++++++--- jwst/cube_build/src/cube_dq_utils.c | 10 +++++-- .../outlier_detection_ifu.py | 19 ++++++++++++++ jwst/pipeline/calwebb_spec3.py | 26 ++++++++++++++++++- jwst/resample/resample_step.py | 2 +- 9 files changed, 88 insertions(+), 14 deletions(-) diff --git a/jwst/cube_build/cube_build_io_util.py b/jwst/cube_build/cube_build_io_util.py index 71ddb6a1f1..b2fef93a58 100644 --- a/jwst/cube_build/cube_build_io_util.py +++ b/jwst/cube_build/cube_build_io_util.py @@ -124,6 +124,8 @@ def read_cubepars(par_filename, for tabdata in ptab.ifucubepars_multichannel_driz_wavetable: table_wave = tabdata['WAVELENGTH'] instrument_info.SetMultiChannelDrizTable(table_wave) + ptab.close() + del ptab # Read in NIRSPEC Values elif instrument == 'NIRSPEC': @@ -231,3 +233,5 @@ def read_cubepars(par_filename, table_scalerad = tabdata['SCALERAD'] instrument_info.SetHighEMSMTable(table_wave, table_sroi, table_wroi, table_scalerad) + ptab.close() + del ptab \ No newline at end of file diff --git a/jwst/cube_build/cube_build_step.py b/jwst/cube_build/cube_build_step.py index 90bda169fc..9dde66aa06 100755 --- a/jwst/cube_build/cube_build_step.py +++ b/jwst/cube_build/cube_build_step.py @@ -70,6 +70,9 @@ def process(self, input): input : list of objects or str list of datamodels or string name of input fits file or association. """ + from guppy import hpy + heapy = hpy() + heapy.setrelheap() self.log.info('Starting IFU Cube Building Step') # ________________________________________________________________________________ @@ -275,6 +278,7 @@ def process(self, input): if not self.single: self.log.info(f'Number of IFU cubes produced by this run = {num_cubes}') + import pdb;pdb.set_trace() # ModelContainer of ifucubes cube_container = ModelContainer() @@ -329,10 +333,13 @@ def process(self, input): # Else standard IFU cube building else: - cube_result = thiscube.build_ifucube() - result, status = cube_result + result, status = thiscube.build_ifucube() + # result, status = cube_result cube_container.append(result) + thiscube.close() + del thiscube + # check if cube_build failed # ************************** if status == 1: diff --git a/jwst/cube_build/data_types.py b/jwst/cube_build/data_types.py index 8003601d03..db8f226200 100644 --- a/jwst/cube_build/data_types.py +++ b/jwst/cube_build/data_types.py @@ -89,12 +89,16 @@ def __init__(self, input, single, output_file, output_dir): self.output_name = 'Temp' if not single: # find the name of the output file from the association self.output_name = input_try.meta.asn_table.products[0].name + # Verify that all the input models are IFUImageModels + # and create a list of those models which pass the check. 
for model in input_try: # check if input data is an IFUImageModel if not isinstance(model, datamodels.IFUImageModel): raise NotIFUImageModel( f"Input data is not a IFUImageModel, instead it is {model}") self.filenames.append(model.meta.filename) + model.close() + del model self.input_models = input_try else: diff --git a/jwst/cube_build/file_table.py b/jwst/cube_build/file_table.py index e195e8e598..3eb4f90d83 100644 --- a/jwst/cube_build/file_table.py +++ b/jwst/cube_build/file_table.py @@ -132,16 +132,16 @@ def set_file_table(self, if assign_wcs != 'COMPLETE': raise ErrorNoAssignWCS("Assign WCS has not been run on file %s", ifile) - # _____________________________________________________________________ - # MIRI instrument + # _____________________________________________________________________ + # MIRI instrument if instrument == 'MIRI': channel = input_model.meta.instrument.channel subchannel = input_model.meta.instrument.band.lower() clenf = len(channel) for k in range(clenf): self.FileMap['MIRI'][channel[k]][subchannel].append(input_model) - # _____________________________________________________________________ - # NIRSPEC instrument + # _____________________________________________________________________ + # NIRSPEC instrument elif instrument == 'NIRSPEC': fwa = input_model.meta.instrument.filter.lower() gwa = input_model.meta.instrument.grating.lower() @@ -149,6 +149,11 @@ def set_file_table(self, else: pass # log.info('Instrument not valid for cube') + + # close model and remove references to this object so memory can be freed + input_model.close() + del input_model + return instrument diff --git a/jwst/cube_build/ifu_cube.py b/jwst/cube_build/ifu_cube.py index fb06182435..8f49ec17a2 100644 --- a/jwst/cube_build/ifu_cube.py +++ b/jwst/cube_build/ifu_cube.py @@ -560,6 +560,7 @@ def build_ifucube(self): number_bands = len(self.list_par1) + import pdb;pdb.set_trace() for ib in range(number_bands): this_par1 = self.list_par1[ib] this_par2 = self.list_par2[ib] @@ -750,7 +751,7 @@ def build_ifucube_single(self): """ # loop over input models - single_ifucube_container = ModelContainer() + single_ifucube_container = ModelContainer(save_open=False) weight_type = 0 # default to emsm instead of msm if self.weighting == 'msm': @@ -849,10 +850,14 @@ def build_ifucube_single(self): # determine Cube Spaxel flux status = 0 - result = self.setup_final_ifucube_model(input_model) - ifucube_model, status = result + ifucube_model, status = self.setup_final_ifucube_model(input_model) + # ifucube_model, status = result + + import pdb;pdb.set_trace() + single_ifucube_container.append(ifucube_model.filename) + ifucube_model.close() + del ifucube_model - single_ifucube_container.append(ifucube_model) if status != 0: log.debug("Possible problem with single ifu cube, no valid data in cube") j = j + 1 diff --git a/jwst/cube_build/src/cube_dq_utils.c b/jwst/cube_build/src/cube_dq_utils.c index 063e6eefee..751d6b31a9 100644 --- a/jwst/cube_build/src/cube_dq_utils.c +++ b/jwst/cube_build/src/cube_dq_utils.c @@ -536,6 +536,8 @@ int dq_miri(int start_region, int end_region, int overlap_partial, int overlap_f double xi_corner[4], eta_corner[4]; int *idqv ; // int vector for spaxel + int *wave_slice_dq; + if (mem_alloc_dq(ncube, &idqv)) return 1; double corner1[2]; @@ -547,7 +549,9 @@ int dq_miri(int start_region, int end_region, int overlap_partial, int overlap_f // corner of the FOV for each wavelength nxy = nx * ny; - int wave_slice_dq[nxy]; + // int wave_slice_dq[nxy]; + wave_slice_dq = (int *)malloc (nxy * 
sizeof(int)); + // Loop over the wavelength planes and set DQ plane for (w = 0; w < nz; w++) { @@ -634,12 +638,15 @@ int dq_nirspec(int overlap_partial, double c1_min, c2_min, c1_max, c2_max; int *idqv ; // int vector for spaxel idqv = (int*)calloc(ncube, sizeof(int)); + int *wave_slice_dq; for (i = 0; i< ncube; i++){ idqv[i] = 0; } nxy = nx * ny; + // int wave_slice_dq[nxy]; + wave_slice_dq = (int *)malloc (nxy * sizeof(int)); for (w = 0; w < nz; w++) { long imatch = 0; @@ -658,7 +665,6 @@ int dq_nirspec(int overlap_partial, c1_max, c2_max, match_slice); - int wave_slice_dq[nxy]; for (j =0; j< nxy; j++){ wave_slice_dq[j] = 0; } diff --git a/jwst/outlier_detection/outlier_detection_ifu.py b/jwst/outlier_detection/outlier_detection_ifu.py index ed037be3fc..33575816db 100644 --- a/jwst/outlier_detection/outlier_detection_ifu.py +++ b/jwst/outlier_detection/outlier_detection_ifu.py @@ -106,16 +106,24 @@ def _find_ifu_coverage(self): self.ifu_band2 = self.gratings # not used in NIRSpec def _convert_inputs(self): + log.info("Converting IFU inputs using outlier_detection_ifu._convert_inputs()...") self.input_models = self.inputs self.converted = False def do_detection(self): """Flag outlier pixels in DQ of input images.""" + from guppy import hpy + hpifu = hpy() + hpifu.setrelheap() + self._convert_inputs() self._find_ifu_coverage() self.build_suffix(**self.outlierpars) + log.info("Initialization finished for do_detection()") + log.info(f"HEAPY:\n {hpifu.heap()}") + save_intermediate_results = \ self.outlierpars['save_intermediate_results'] @@ -125,6 +133,9 @@ def do_detection(self): for model in self.blot_models: # replace arrays with all zeros to accommodate blotted data model.data = np.zeros(model.data.shape, dtype=model.data.dtype) + log.info("Initialized self.blot_models with zeros") + log.info(f"{len(self.blot_models)} models with shape of {self.blot_models[0].data.shape}") + log.info(f"HEAPY:\n {hpifu.heap()}") # Create the resampled/mosaic images for each group of exposures # @@ -132,7 +143,10 @@ def do_detection(self): log.info("Performing IFU outlier_detection for exptype {}".format( exptype)) num_bands = len(self.ifu_band1) + log.info(f"{num_bands} bands to be processed.") + hpifu.setrelheap() for i in range(num_bands): + log.info(f"HEAPY: iteration {i}. Memory use of :\n {hpifu.heap()}") select1 = self.ifu_band1[i] select2 = self.ifu_band2[i] @@ -185,6 +199,9 @@ def do_detection(self): # original input... 
self.blot_median(median_model) + log.info("Finished blotting the median images for all bands.") + log.info(f"HEAPY: \n {hpifu.heap()}") + if save_intermediate_results: log.info("Writing out BLOT images...") @@ -198,6 +215,8 @@ def do_detection(self): # each original input image and the blotted version of the # median image of all channels self.detect_outliers(self.blot_models) + log.info("Finished detecting outliers.") + log.info(f"HEAPY: \n {hpifu.heap()}") # clean-up (just to be explicit about being finished # with these results) diff --git a/jwst/pipeline/calwebb_spec3.py b/jwst/pipeline/calwebb_spec3.py index 42178e9432..bec6a6d3f2 100644 --- a/jwst/pipeline/calwebb_spec3.py +++ b/jwst/pipeline/calwebb_spec3.py @@ -93,11 +93,15 @@ def process(self, input): self.combine_1d.suffix = 'c1d' self.combine_1d.save_results = self.save_results + from guppy import hpy + heapy = hpy() + heapy.setrelheap() + # Retrieve the inputs: # could either be done via LoadAsAssociation and then manually # load input members into models and ModelContainer, or just # do a direct open of all members in ASN file, e.g. - input_models = datamodels.open(input, asn_exptypes=asn_exptypes) + input_models = datamodels.open(input, asn_exptypes=asn_exptypes, save_open=False) # Immediately update the ASNTABLE keyword value in all inputs, # so that all outputs get the new value @@ -122,10 +126,16 @@ def process(self, input): for member in product['members']: members_by_type[member['exptype'].lower()].append(member['expname']) + self.log.info("Finished initializing ASN information.") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") + if is_moving_target(input_models): self.log.info("Assigning WCS to a Moving Target exposure.") input_models = self.assign_mtwcs(input_models) + self.log.info("Assigning WCS to a Moving Target exposure.") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") + # If background data are present, call the master background step if members_by_type['background']: source_models = self.master_background(input_models) @@ -142,6 +152,8 @@ def process(self, input): # The input didn't contain any background members, # so we use all the inputs in subsequent steps source_models = input_models + self.log.info("Finished master_background step.") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # `sources` is the list of astronomical sources that need be # processed. Each element is a ModelContainer, which contains @@ -198,9 +210,13 @@ def process(self, input): hotfixed_sources.append((str(src_id), model)) sources = hotfixed_sources + self.log.info("Converted sources...") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # Process each source for source in sources: + self.log.info("Processing input source.") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # If each source is a SourceModelContainer # the output name needs to be updated with the source name. @@ -220,6 +236,9 @@ def process(self, input): if exptype in ['MIR_MRS']: result = self.mrs_imatch(result) + self.log.info("Calling outlier_detection on source.") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") + # Call outlier detection if exptype not in SLITLESS_TYPES: # Update the asn table name to the level 3 instance so that @@ -229,6 +248,9 @@ def process(self, input): cal_array.meta.asn.table_name = op.basename(input_models.asn_table_name) result = self.outlier_detection(result) + self.log.info("Calling cube_build for this source.") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") + # Resample time. 
Dependent on whether the data is IFU or not. resample_complete = None if exptype in IFU_EXPTYPES: @@ -243,6 +265,8 @@ def process(self, input): resample_complete = result.meta.cal_step.resample except AttributeError: pass + self.log.info("Finished resampling spec source.") + self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # Do 1-D spectral extraction if exptype in SLITLESS_TYPES: diff --git a/jwst/resample/resample_step.py b/jwst/resample/resample_step.py index 158b17cf5e..a8d9d82e39 100755 --- a/jwst/resample/resample_step.py +++ b/jwst/resample/resample_step.py @@ -57,7 +57,7 @@ class ResampleStep(Step): single = boolean(default=False) blendheaders = boolean(default=True) allowed_memory = float(default=None) # Fraction of memory to use for the combined image. - in_memory = boolean(default=True) + in_memory = boolean(default=False) """ reference_file_types = ['drizpars'] From 9ff214fd9f04b04e5c773f86ca1dcd16cbc93a55 Mon Sep 17 00:00:00 2001 From: "Warren J. Hack" Date: Thu, 23 Feb 2023 09:56:53 -0500 Subject: [PATCH 2/3] Remove guppy and pdb --- jwst/cube_build/cube_build_step.py | 6 +-- jwst/cube_build/file_table.py | 4 +- jwst/cube_build/ifu_cube.py | 38 +++++++++++++------ .../outlier_detection_ifu.py | 9 ----- 4 files changed, 29 insertions(+), 28 deletions(-) diff --git a/jwst/cube_build/cube_build_step.py b/jwst/cube_build/cube_build_step.py index 9dde66aa06..7bb13f333c 100755 --- a/jwst/cube_build/cube_build_step.py +++ b/jwst/cube_build/cube_build_step.py @@ -70,9 +70,6 @@ def process(self, input): input : list of objects or str list of datamodels or string name of input fits file or association. """ - from guppy import hpy - heapy = hpy() - heapy.setrelheap() self.log.info('Starting IFU Cube Building Step') # ________________________________________________________________________________ @@ -278,7 +275,6 @@ def process(self, input): if not self.single: self.log.info(f'Number of IFU cubes produced by this run = {num_cubes}') - import pdb;pdb.set_trace() # ModelContainer of ifucubes cube_container = ModelContainer() @@ -337,7 +333,7 @@ def process(self, input): # result, status = cube_result cube_container.append(result) - thiscube.close() + thiscube.save() del thiscube # check if cube_build failed diff --git a/jwst/cube_build/file_table.py b/jwst/cube_build/file_table.py index 3eb4f90d83..3809492101 100644 --- a/jwst/cube_build/file_table.py +++ b/jwst/cube_build/file_table.py @@ -139,13 +139,13 @@ def set_file_table(self, subchannel = input_model.meta.instrument.band.lower() clenf = len(channel) for k in range(clenf): - self.FileMap['MIRI'][channel[k]][subchannel].append(input_model) + self.FileMap['MIRI'][channel[k]][subchannel].append(input_model.copy()) # _____________________________________________________________________ # NIRSPEC instrument elif instrument == 'NIRSPEC': fwa = input_model.meta.instrument.filter.lower() gwa = input_model.meta.instrument.grating.lower() - self.FileMap['NIRSPEC'][gwa][fwa].append(input_model) + self.FileMap['NIRSPEC'][gwa][fwa].append(input_model.copy()) else: pass # log.info('Instrument not valid for cube') diff --git a/jwst/cube_build/ifu_cube.py b/jwst/cube_build/ifu_cube.py index 8f49ec17a2..fd390caeda 100644 --- a/jwst/cube_build/ifu_cube.py +++ b/jwst/cube_build/ifu_cube.py @@ -560,14 +560,17 @@ def build_ifucube(self): number_bands = len(self.list_par1) - import pdb;pdb.set_trace() for ib in range(number_bands): + log.info(f"###\n Processing IFU band {ib+1} out of {number_bands} bands.\n###") + this_par1 = self.list_par1[ib] 
this_par2 = self.list_par2[ib] nfiles = len(self.master_table.FileMap[self.instrument][this_par1][this_par2]) # ________________________________________________________________________________ # loop over the files that cover the spectral range the cube is for for k in range(nfiles): + log.info(f"###\n Processing file {k+1} out of {nfiles} files\n###") + input_model = self.master_table.FileMap[self.instrument][this_par1][this_par2][k] self.input_models_this_cube.append(input_model) # set up input_model to be first file used to copy in basic header info @@ -636,7 +639,7 @@ def build_ifucube(self): self.spaxel_iflux = self.spaxel_iflux + np.asarray(result[3], np.float64) spaxel_dq.astype(np.uint) self.spaxel_dq = np.bitwise_or(self.spaxel_dq, spaxel_dq) - result = None + if self.weighting == 'drizzle' and build_cube: cdelt3_mean = np.nanmean(self.cdelt3_normal) xi1, eta1, xi2, eta2, xi3, eta3, xi4, eta4 = corner_coord @@ -654,14 +657,19 @@ def build_ifucube(self): self.cdelt1, self.cdelt2, cdelt3_mean, linear) spaxel_flux, spaxel_weight, spaxel_var, spaxel_iflux, spaxel_dq = result - self.spaxel_flux = self.spaxel_flux + np.asarray(spaxel_flux, np.float64) - self.spaxel_weight = self.spaxel_weight + np.asarray(spaxel_weight, np.float64) - self.spaxel_var = self.spaxel_var + np.asarray(spaxel_var, np.float64) - self.spaxel_iflux = self.spaxel_iflux + np.asarray(spaxel_iflux, np.float64) - spaxel_dq.astype(np.uint) - self.spaxel_dq = np.bitwise_or(self.spaxel_dq, spaxel_dq) - result = None - + self.spaxel_flux += np.asarray(spaxel_flux, np.float64) + self.spaxel_weight += np.asarray(spaxel_weight, np.float64) + self.spaxel_var += np.asarray(spaxel_var, np.float64) + self.spaxel_iflux += np.asarray(spaxel_iflux, np.float64) + # spaxel_dq.astype(np.uint) + self.spaxel_dq = np.bitwise_or(self.spaxel_dq, spaxel_dq.astype(np.uint)) + + # Clean up memory + result = None + del xi1, eta1, xi2, eta2, xi3, eta3, xi4, eta4 + del spaxel_flux, spaxel_weight, spaxel_var, spaxel_iflux, spaxel_dq, result + del coord1, coord2, corner_coord, wave, dwave, flux, err, slice_no, rois_pixel + del roiw_pixel, weight_pixel, softrad_pixel, scalerad_pixel, pixelresult # -------------------------------------------------------------------------------- # # AREA - 2d method only works for single files local slicer plane (internal_cal) # -------------------------------------------------------------------------------- @@ -698,7 +706,11 @@ def build_ifucube(self): self.spaxel_weight = self.spaxel_weight + np.asarray(spaxel_weight, np.float64) self.spaxel_var = self.spaxel_var + np.asarray(spaxel_var, np.float64) self.spaxel_iflux = self.spaxel_iflux + np.asarray(spaxel_iflux, np.float64) + + # Clean up memory result = None + del spaxel_flux, spaxel_weight, spaxel_var, spaxel_iflux, result + # -------------------------------------------------------------------------------- # NIRSPEC @@ -729,7 +741,10 @@ def build_ifucube(self): self.spaxel_weight = self.spaxel_weight + np.asarray(spaxel_weight, np.float64) self.spaxel_var = self.spaxel_var + np.asarray(spaxel_var, np.float64) self.spaxel_iflux = self.spaxel_iflux + np.asarray(spaxel_iflux, np.float64) + + # Clean up memory result = None + del spaxel_flux, spaxel_weight, spaxel_var, spaxel_iflux, result # _______________________________________________________________________ # done looping over files @@ -853,9 +868,8 @@ def build_ifucube_single(self): ifucube_model, status = self.setup_final_ifucube_model(input_model) # ifucube_model, status = result - import 
pdb;pdb.set_trace() single_ifucube_container.append(ifucube_model.filename) - ifucube_model.close() + ifucube_model.save() del ifucube_model if status != 0: diff --git a/jwst/outlier_detection/outlier_detection_ifu.py b/jwst/outlier_detection/outlier_detection_ifu.py index 33575816db..134ea92ce5 100644 --- a/jwst/outlier_detection/outlier_detection_ifu.py +++ b/jwst/outlier_detection/outlier_detection_ifu.py @@ -112,9 +112,6 @@ def _convert_inputs(self): def do_detection(self): """Flag outlier pixels in DQ of input images.""" - from guppy import hpy - hpifu = hpy() - hpifu.setrelheap() self._convert_inputs() self._find_ifu_coverage() @@ -122,7 +119,6 @@ def do_detection(self): self.build_suffix(**self.outlierpars) log.info("Initialization finished for do_detection()") - log.info(f"HEAPY:\n {hpifu.heap()}") save_intermediate_results = \ self.outlierpars['save_intermediate_results'] @@ -135,7 +131,6 @@ def do_detection(self): model.data = np.zeros(model.data.shape, dtype=model.data.dtype) log.info("Initialized self.blot_models with zeros") log.info(f"{len(self.blot_models)} models with shape of {self.blot_models[0].data.shape}") - log.info(f"HEAPY:\n {hpifu.heap()}") # Create the resampled/mosaic images for each group of exposures # @@ -144,9 +139,7 @@ def do_detection(self): exptype)) num_bands = len(self.ifu_band1) log.info(f"{num_bands} bands to be processed.") - hpifu.setrelheap() for i in range(num_bands): - log.info(f"HEAPY: iteration {i}. Memory use of :\n {hpifu.heap()}") select1 = self.ifu_band1[i] select2 = self.ifu_band2[i] @@ -200,7 +193,6 @@ def do_detection(self): self.blot_median(median_model) log.info("Finished blotting the median images for all bands.") - log.info(f"HEAPY: \n {hpifu.heap()}") if save_intermediate_results: log.info("Writing out BLOT images...") @@ -216,7 +208,6 @@ def do_detection(self): # median image of all channels self.detect_outliers(self.blot_models) log.info("Finished detecting outliers.") - log.info(f"HEAPY: \n {hpifu.heap()}") # clean-up (just to be explicit about being finished # with these results) From 7aa996bca8502ff0d07acfd283e315d8801bba28 Mon Sep 17 00:00:00 2001 From: "Warren J. 
Hack" Date: Thu, 23 Feb 2023 10:07:48 -0500 Subject: [PATCH 3/3] Remove more guppy calls --- jwst/pipeline/calwebb_spec3.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/jwst/pipeline/calwebb_spec3.py b/jwst/pipeline/calwebb_spec3.py index bec6a6d3f2..76b1a9c281 100644 --- a/jwst/pipeline/calwebb_spec3.py +++ b/jwst/pipeline/calwebb_spec3.py @@ -93,10 +93,6 @@ def process(self, input): self.combine_1d.suffix = 'c1d' self.combine_1d.save_results = self.save_results - from guppy import hpy - heapy = hpy() - heapy.setrelheap() - # Retrieve the inputs: # could either be done via LoadAsAssociation and then manually # load input members into models and ModelContainer, or just @@ -127,14 +123,12 @@ def process(self, input): members_by_type[member['exptype'].lower()].append(member['expname']) self.log.info("Finished initializing ASN information.") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") if is_moving_target(input_models): self.log.info("Assigning WCS to a Moving Target exposure.") input_models = self.assign_mtwcs(input_models) self.log.info("Assigning WCS to a Moving Target exposure.") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # If background data are present, call the master background step if members_by_type['background']: @@ -153,7 +147,6 @@ def process(self, input): # so we use all the inputs in subsequent steps source_models = input_models self.log.info("Finished master_background step.") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # `sources` is the list of astronomical sources that need be # processed. Each element is a ModelContainer, which contains @@ -211,12 +204,10 @@ def process(self, input): sources = hotfixed_sources self.log.info("Converted sources...") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # Process each source for source in sources: self.log.info("Processing input source.") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # If each source is a SourceModelContainer # the output name needs to be updated with the source name. @@ -237,7 +228,6 @@ def process(self, input): result = self.mrs_imatch(result) self.log.info("Calling outlier_detection on source.") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # Call outlier detection if exptype not in SLITLESS_TYPES: @@ -249,7 +239,6 @@ def process(self, input): result = self.outlier_detection(result) self.log.info("Calling cube_build for this source.") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # Resample time. Dependent on whether the data is IFU or not. resample_complete = None @@ -266,7 +255,6 @@ def process(self, input): except AttributeError: pass self.log.info("Finished resampling spec source.") - self.log.info(f"HEAPY: Memory usage \n{heapy.heap()}") # Do 1-D spectral extraction if exptype in SLITLESS_TYPES: