diff --git a/CHANGES.rst b/CHANGES.rst
index b29b79840..5cdd209b6 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,23 @@
+1.3.0 (2024-12-19)
+==================
+
+Web Application
+~~~~~~~~~~~~~~~
+* Exclude source-specific WFSS files from observation page by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1651
+* Switch URL for prog info scraping to use the OPO site by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1662
+
+Project & API Documentation
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+* Added logging configuration to config file, and use it when opening logging by @york-stsci in https://github.com/spacetelescope/jwql/pull/1635
+* Fix bad parens in dark monitor model definitions by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1644
+* Add radius keyword to bokeh.figure.circle calls by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1643
+* Remove bokeh templating code by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1647
+* Update Bad Pixel Monitor to use Django DB Models by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1497
+* Update Bias Monitor to use Django DB Models by @bsunnquist in https://github.com/spacetelescope/jwql/pull/1503
+
+
 1.2.11 (2024-08-26)
 ===================
diff --git a/jwql/bokeh_templating/__init__.py b/jwql/bokeh_templating/__init__.py
deleted file mode 100644
index 2cf1818b4..000000000
--- a/jwql/bokeh_templating/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .template import BokehTemplate
diff --git a/jwql/bokeh_templating/example/example_interface.yaml b/jwql/bokeh_templating/example/example_interface.yaml
deleted file mode 100644
index 4aec297c7..000000000
--- a/jwql/bokeh_templating/example/example_interface.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-- !Slider: &a_slider  # a slider for the a value
-    ref: "a_slider"
-    title: "A"
-    value: 4
-    range: !!python/tuple [1, 20, 0.1]
-    on_change: ['value', !self.controller ]
-- !Slider: &b_slider  # a slider for the b value
-    ref: "b_slider"
-    title: "B"
-    value: 2
-    range: !!python/tuple [1, 20, 0.1]
-    on_change: ['value', !self.controller ]
-- !ColumnDataSource: &figure_source  # the ColumnDataSource for the figure
-    ref: "figure_source"
-    data:
-        x: !self.x
-        y: !self.y
-- !Figure: &the_figure  # the Figure itself, which includes a single line element.
-    ref: 'the_figure'
-    elements:
-    - {'kind': 'line', 'source': *figure_source, 'line_color': 'orange', 'line_width': 2}
-- !Document:  # the Bokeh document layout: a single column with the figure and two sliders
-  - !column:
-    - *the_figure  # note the use of YAML anchors to add the Bokeh objects to the Document layout directly.
-    - *a_slider
-    - *b_slider
\ No newline at end of file
diff --git a/jwql/bokeh_templating/example/main.py b/jwql/bokeh_templating/example/main.py
deleted file mode 100644
index bd91d4e87..000000000
--- a/jwql/bokeh_templating/example/main.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-This is a minimal example demonstrating how to create a Bokeh app using
-the ``bokeh-templating`` package and the associated YAML template files.
-
-Author
--------
-
-    - Graham Kanarek
-
-Dependencies
-------------
-
-    The user must have PyYAML, Bokeh, and the ``bokeh-templating``
-    packages installed.
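As context for the deleted example app: its ``x`` and ``y`` properties trace a Lissajous curve driven by the two sliders. A minimal standalone sketch of the same arithmetic, hedged to NumPy only (the constants are taken from the code below)::

    import numpy as np

    a, b = 4, 2  # defaults of the two slider-controlled frequencies
    t = np.linspace(0, 2 * np.pi, 500)
    x = 4.0 * np.sin(a * t)  # what the app's x property returns
    y = 3.0 * np.sin(b * t)  # what the app's y property returns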
-""" - -import os -import numpy as np - -from jwql.bokeh_templating import BokehTemplate - -file_dir = os.path.dirname(os.path.realpath(__file__)) - - -class TestBokehApp(BokehTemplate): - """This is a minimal ``BokehTemplate`` app.""" - - _embed = True - - def pre_init(self): - """Before creating the Bokeh interface (by parsing the interface - file), we must initialize our ``a`` and ``b`` variables, and set - the path to the interface file. - """ - - self.a, self.b = 4, 2 - - self.format_string = None - self.interface_file = os.path.join(file_dir, "example_interface.yaml") - - # No post-initialization tasks are required. - post_init = None - - @property - def x(self): - """The x-value of the Lissajous curves.""" - return 4. * np.sin(self.a * np.linspace(0, 2 * np.pi, 500)) - - @property - def y(self): - """The y-value of the Lissajous curves.""" - return 3. * np.sin(self.b * np.linspace(0, 2 * np.pi, 500)) - - def controller(self, attr, old, new): - """This is the controller function which is used to update the - curves when the sliders are adjusted. Note the use of the - ``self.refs`` dictionary for accessing the Bokeh object - attributes.""" - self.a = self.refs["a_slider"].value - self.b = self.refs["b_slider"].value - - self.refs["figure_source"].data = {'x': self.x, 'y': self.y} - - -TestBokehApp() diff --git a/jwql/bokeh_templating/factory.py b/jwql/bokeh_templating/factory.py deleted file mode 100644 index 7c77bfa5d..000000000 --- a/jwql/bokeh_templating/factory.py +++ /dev/null @@ -1,270 +0,0 @@ -""" -This module defines YAML constructors and factory functions which are -used to create Bokeh objects parsed from YAML template files. - -The ``mapping_factory`` and ``sequence_factory`` functions are used to -create a constructor function for each of the mappings (i.e., classes) -and sequences (i.e., functions) included in the keyword map. The -``document_constructor`` and ``figure_constructor`` functions are -stand-alone constructors for the ``!Document`` and ``!Figure`` tag, -respectively. - -Author -------- - - - Graham Kanarek - -Use ---- - - The functions in this file are not intended to be called by the user - directly; users should subclass the ``BokehTemplate`` class found in - ``template.py`` instead. However, they can be used as a model for - creating new constructors for user-defined tags, which can then be - registered using the ``BokehTemplate.register_mapping_constructor`` - and ``BokehTemplate.register_sequence_constructor`` classmethods. - -Dependencies ------------- - - The user must have Bokeh installed. -""" - -from bokeh.io import curdoc - -from .keyword_map import bokeh_mappings as mappings, bokeh_sequences as sequences - -# Figures get their own constructor so we remove references to Figures from -# the keyword maps. -Figure = mappings.pop("figure") - - -def mapping_factory(tool, element_type): - """ - Create a mapping constructor for the given tool, used to parse the - given element tag. - - Parameters - ---------- - tool : BokehTemplate instance - The web app class instance to which the constructor will be - attached. This will become ``self`` when the factory is a method, - and is used to both store the Bokeh objects in the - ``BokehTemplate.refs`` dictionary, and allow for app-wide - formatting choices via ``BokehTemplate.format_string``. - - element_type : str - The Bokeh element name for which a constructor is desired. 
-        For example, an ``element_type`` of ``'Slider'`` will create a
-        constructor for a Bokeh ``Slider`` widget, designated by the
-        ``!Slider`` tag in the YAML template file.
-
-    Notes
-    -----
-    See the ``BokehTemplate`` class implementation in ``template.py``
-    for an example of how this function is used.
-    """
-
-    def mapping_constructor(loader, node):  # docstring added below
-        fmt = tool.formats.get(element_type, {})
-        value = loader.construct_mapping(node, deep=True)
-        ref = value.pop("ref", "")
-        callback = value.pop("on_change", [])
-        selection_callback = value.pop("selection_on_change", [])
-        onclick = value.pop("on_click", None)
-        fmt.update(value)
-        # convert "range" YAML keyword of slider into something Bokeh can read
-        if element_type == "Slider":
-            fmt["start"], fmt["end"], fmt["step"] = fmt.pop("range", [0, 1, 0.1])
-
-        # Many of these have hybrid signatures, with both positional and
-        # keyword arguments, so we need to convert an "args" keyword into
-        # positional arguments
-        arg = fmt.pop("arg", None)
-        if arg is not None:
-            obj = mappings[element_type](*arg, **fmt)
-        else:
-            obj = mappings[element_type](**fmt)
-
-        # Store the object in the tool's "refs" dictionary
-        if ref:
-            tool.refs[ref] = obj
-
-        # Handle callbacks and on_clicks
-        if callback:
-            obj.on_change(*callback)
-        if onclick:
-            obj.on_click(onclick)
-        if selection_callback:
-            obj.selected.on_change(*selection_callback)
-
-        yield obj
-
-    mapping_constructor.__name__ = element_type.lower() + '_' + mapping_constructor.__name__
-    mapping_constructor.__doc__ = """
-    A YAML constructor for the ``{et}`` Bokeh object. This will create a
-    ``{et}`` object wherever the ``!{et}`` tag appears in the YAML template
-    file. If a ``ref`` tag is specified, the object will then be stored in
-    the ``BokehTemplate.refs`` dictionary.
-
-    This constructor is used for mappings -- i.e., classes or functions
-    which primarily have keyword arguments in their signatures. If
-    positional arguments appear, they can be included in the YAML file
-    with the `args` keyword.
-    """.format(et=element_type)
-
-    return mapping_constructor
-
-
-def sequence_factory(tool, element_type):
-    """ Create a sequence constructor for the given tool, used to parse
-    the given element tag.
-
-    Parameters
-    ----------
-    tool : BokehTemplate instance
-        The web app class instance to which the constructor will be
-        attached. This will become ``self`` when the factory is a method,
-        and is used to both store the Bokeh objects in the
-        ``BokehTemplate.refs`` dictionary, and allow for app-wide
-        formatting choices via ``BokehTemplate.format_string``.
-
-    element_type : str
-        The Bokeh element name for which a constructor is desired. For
-        example, an ``element_type`` of ``'Slider'`` will create a
-        constructor for a Bokeh ``Slider`` widget, designated by the
-        ``!Slider`` tag in the YAML template file.
-
-    Notes
-    -----
-    See the ``BokehTemplate`` class implementation in ``template.py``
-    for an example of how this function is used.
-    """
-
-    def sequence_constructor(loader, node):
-        fmt = tool.formats.get(element_type, {})
-        value = loader.construct_sequence(node, deep=True)
-        obj = sequences[element_type](*value, **fmt)
-        yield obj
-
-    sequence_constructor.__name__ = element_type.lower() + '_' + sequence_constructor.__name__
-    sequence_constructor.__doc__ = """
-    A YAML constructor for the ``{et}`` Bokeh object. This will create a
-    ``{et}`` object wherever the ``!{et}`` tag appears in the YAML template
-    file.
-    If a ``ref`` tag is specified, the object will then be stored in
-    the ``BokehTemplate.refs`` dictionary.
-
-    This constructor is used for sequences -- i.e., classes or functions
-    which have only positional arguments in their signatures (which for
-    Bokeh is only functions, no classes).
-    """.format(et=element_type)
-
-    return sequence_constructor
-
-
-# These constructors need more specialized treatment
-
-def document_constructor(tool, loader, node):
-    """ A YAML constructor for the Bokeh document, which is grabbed via
-    the Bokeh ``curdoc()`` function. When laying out a Bokeh document
-    with a YAML template, the ``!Document`` tag should be used as the
-    top-level tag in the layout.
-    """
-
-    layout = loader.construct_sequence(node, deep=True)
-    for element in layout:
-        curdoc().add_root(element)
-    tool.document = curdoc()
-    yield tool.document
-
-
-def figure_constructor(tool, loader, node):
-    """ A YAML constructor for Bokeh Figure objects, which are
-    complicated enough to require their own (non-factory) constructor.
-    Each ``!Figure`` tag in the YAML template file will be turned into a
-    ``Figure`` object via this constructor (once it's been registered by
-    the ``BokehTemplate`` class).
-    """
-
-    fig = loader.construct_mapping(node, deep=True)
-    fmt = tool.formats.get('Figure', {})
-
-    elements = fig.pop('elements', [])
-    cmds = []
-    ref = fig.pop("ref", "")
-    callback = fig.pop("on_change", [])
-    axis = tool.formats.get("Axis", {})
-    axis.update(fig.pop("axis", {}))
-
-    for key in fig:
-        val = fig[key]
-        if key in ['text', 'add_tools', 'js_on_event']:
-            cmds.append((key, val))
-        else:
-            fmt[key] = val
-
-    figure = Figure(**fmt)
-
-    for key, cmd in cmds:
-        if key == 'add_tools':
-            figure.add_tools(*cmd)
-        elif key == 'text':
-            figure.text(*cmd.pop('loc'), **cmd)
-        elif key == 'js_on_event':
-            for event in cmd:
-                figure.js_on_event(*event)
-
-    for element in elements:
-        key = element.pop('kind', 'diamond')
-        shape = {'line': ('Line', figure.line),
-                 'circle': ('Circle', figure.circle),
-                 'step': ('Step', figure.step),
-                 'diamond': ('Diamond', figure.diamond),
-                 'triangle': ('Triangle', figure.triangle),
-                 'square': ('Square', figure.square),
-                 'asterisk': ('Asterisk', figure.asterisk),
-                 'x': ('XGlyph', figure.x),
-                 'vbar': ('VBar', figure.vbar)}
-        if key in shape:
-            fmt_key, glyph = shape[key]
-            shape_fmt = tool.formats.get(fmt_key, {})
-            shape_fmt.update(element)
-            x = shape_fmt.pop('x', 'x')
-            y = shape_fmt.pop('y', 'y')
-            glyph(x, y, **shape_fmt)
-        elif key == 'rect':
-            rect_fmt = tool.formats.get('Rect', {})
-            rect_fmt.update(element)
-            figure.rect('rx', 'ry', 'rw', 'rh', **rect_fmt)
-        elif key == 'quad':
-            quad_fmt = tool.formats.get('Quad', {})
-            quad_fmt.update(element)
-            figure.quad(**quad_fmt)
-        elif key == 'image':
-            image_fmt = tool.formats.get('Image', {})
-            image_fmt.update(element)
-            arg = image_fmt.pop("image", None)
-            figure.image(arg, **image_fmt)
-        elif key == 'image_rgba':
-            image_fmt = tool.formats.get('ImageRGBA', {})
-            image_fmt.update(element)
-            arg = image_fmt.pop("image", None)
-            figure.image_rgba(arg, **image_fmt)
-        elif key == 'multi_line':
-            multi_fmt = tool.formats.get('MultiLine', {})
-            multi_fmt.update(element)
-            figure.multi_line(**multi_fmt)
-        elif key == 'layout':
-            obj = element.pop('obj', None)
-            figure.add_layout(obj, **element)
-
-    for attr, val in axis.items():
-        # change axis attributes, hopefully
-        setattr(figure.axis, attr, val)
-
-    if ref:
-        tool.refs[ref] = figure
-    if callback:
-        figure.on_change(*callback)
-
-    yield figure
diff --git a/jwql/bokeh_templating/keyword_map.py b/jwql/bokeh_templating/keyword_map.py
deleted file mode 100644
index 8f1be71ce..000000000
--- a/jwql/bokeh_templating/keyword_map.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-A script to scrape the Bokeh package and collate dictionaries of
-classes and functions.
-
-The ``_parse_module`` function iterates over a module, and uses the
-``inspect`` package to sort everything in the module's namespace (as
-identified by ``inspect.getmembers``) into a dictionary of mappings
-(requiring primarily keyword arguments) and sequences (requiring
-primarily positional arguments).
-
-Note that the files ``surface3d.py`` and ``surface3d.ts``, used to
-create 3D surface plots, were downloaded from the Bokeh ``surface3d``
-example.
-
-Author
-------
-
-    - Graham Kanarek
-
-Use
----
-
-    To access the Bokeh elements, the user should import as follows:
-
-    ::
-
-        from jwql.bokeh_templating.keyword_map import bokeh_sequences, bokeh_mappings
-
-Dependencies
-------------
-
-    The user must have Bokeh installed.
-"""
-
-from bokeh import layouts, models, palettes, plotting, transform
-from inspect import getmembers, isclass, isfunction
-
-bokeh_sequences = {}
-bokeh_mappings = {}  # Note that abstract base classes *are* included
-
-
-def _parse_module(module):
-    """
-    Sort the members of a module into dictionaries of functions (sequences)
-    and classes (mappings).
-    """
-
-    def accessible_member(name, member):
-        return (not name.startswith("_")) and (module.__name__ in member.__module__)
-
-    seqs = {nm: mem for nm, mem in getmembers(module, isfunction) if accessible_member(nm, mem)}
-    maps = {nm: mem for nm, mem in getmembers(module, isclass) if accessible_member(nm, mem)}
-
-    # these need to be mappings
-    if 'gridplot' in seqs:
-        maps['gridplot'] = seqs.pop('gridplot')
-    if 'Donut' in seqs:
-        maps['Donut'] = seqs.pop('Donut')
-    return (seqs, maps)
-
-
-for module in [models, plotting, layouts, palettes, transform]:
-    seqs, maps = _parse_module(module)
-    bokeh_sequences.update(seqs)
-    bokeh_mappings.update(maps)
diff --git a/jwql/bokeh_templating/template.py b/jwql/bokeh_templating/template.py
deleted file mode 100644
index 4f854fd30..000000000
--- a/jwql/bokeh_templating/template.py
+++ /dev/null
@@ -1,302 +0,0 @@
-#! /usr/bin/env python
-
-"""This module defines the ``BokehTemplate`` class, which can be subclassed
-to create a Bokeh web app with a YAML templating file.
-
-
-Author
-------
-
-    - Graham Kanarek
-
-Use
----
-
-    The user should subclass the ``BokehTemplate`` class to create an
-    app, as demonstrated in ``example.py``.
-
-    (A full tutorial on developing Bokeh apps with ``BokehTemplate`` is
-    forthcoming.)
-
-
-Dependencies
-------------
-
-    The user must have Bokeh and PyYAML installed.
-"""
-
-import yaml
-import os
-from . import factory
-from bokeh.embed import components
-from inspect import signature
-
-
-class BokehTemplateParserError(Exception):
-    """
-    A custom error for problems with parsing the interface files.
-    """
-
-
-class BokehTemplateEmbedError(Exception):
-    """
-    A custom error for problems with embedding components.
-    """
-
-
-class BokehTemplate(object):
-    """The base class for creating Bokeh web apps using a YAML
-    templating framework.
-
-    Attributes
-    ----------
-    _embed : bool
-        A flag to indicate whether or not the individual widgets will be
-        embedded in a webpage. If ``False``, the YAML interface file
-        must include a !Document tag. Defaults to ``False``.
-    document : obj
-        The Bokeh Document object (if any), equivalent to the result of
-        calling ``curdoc()``.
-    formats: dict
-        A dictionary of widget formatting specifications, parsed from
-        ``format_string`` (if one exists).
-    format_string: str
-        A string of YAML formatting specifications, using the same
-        syntax as the interface file, for Bokeh widgets. Note that
-        formatting choices present in individual widget instances in the
-        interface file override these.
-    interface_file: str
-        The path to the YAML interface file.
-    refs : dict
-        A dictionary of Bokeh objects which are given ``ref`` strings in
-        the interface file. Use this to store and interact with the
-        Bokeh data sources and widgets in callback methods.
-
-    Methods
-    -------
-    _mapping_factory()
-        Used by the interface parser to construct Bokeh widgets
-    _sequence_factory()
-        Used by the interface parser to construct Bokeh widgets
-    _figure_constructor()
-        Used by the interface parser to construct Bokeh widgets
-    _document_constructor()
-        Used by the interface parser to construct Bokeh widgets
-    """
-
-    # Each of these functions has a ``tool`` argument, which becomes ``self``
-    # when they are stored as methods. This way, the YAML constructors can
-    # store the Bokeh objects in the ``tool.ref`` dictionary, and can access
-    # the formatting string, if any. See ``factory.py`` for more details.
-    _mapping_factory = factory.mapping_factory
-    _sequence_factory = factory.sequence_factory
-    _figure_constructor = factory.figure_constructor
-    _document_constructor = factory.document_constructor
-
-    _embed = False
-    document = None
-    format_string = ""
-    formats = {}
-    interface_file = ""
-    refs = {}
-
-    def _self_constructor(self, loader, tag_suffix, node):
-        """
-        A multi_constructor for the `!self` tag in the interface file.
-        """
-        yield eval("self" + tag_suffix, globals(), locals())
-
-    def _register_default_constructors(self):
-        """
-        Register all the default constructors with ``yaml.add_constructor``.
-        """
-        for m in factory.mappings:
-            yaml.add_constructor("!" + m + ":", self._mapping_factory(m))
-
-        for s in factory.sequences:
-            yaml.add_constructor("!" + s + ":", self._sequence_factory(s))
-
-        yaml.add_constructor("!Figure:", self._figure_constructor)
-        yaml.add_constructor("!Document:", self._document_constructor)
-        yaml.add_multi_constructor(u"!self", self._self_constructor)
-
-    def pre_init(self, **kwargs):
-        """
-        This should be implemented by the app subclass, to do any pre-
-        initialization steps that it requires (setting defaults, loading
-        data, etc).
-
-        If this is not required, subclass should set `pre_init = None`
-        in the class definition.
-        """
-
-        raise NotImplementedError
-
-    def post_init(self):
-        """
-        This should be implemented by the app subclass, to do any post-
-        initialization steps that the tool requires.
-
-        If this is not required, subclass should set `post_init = None`
-        in the class definition.
-        """
-
-        raise NotImplementedError
-
-    def __init__(self, **kwargs):
-        """
-        Keyword arguments are passed to self.pre_init().
-        """
-        self._register_default_constructors()
-
-        # Allow for pre-initialization code from the subclass.
-        if self.pre_init is not None:
-            if signature(self.pre_init).parameters:
-                # If we try to call pre_init with keyword parameters when none
-                # are included, it will throw an error
-                # thus, we use inspect.signature
-                self.pre_init(**kwargs)
-            else:
-                self.pre_init()
-
-        # Initialize attributes for YAML parsing
-        self.formats = {}
-        self.refs = {}
-
-        # Parse formatting string, if any, and the interface YAML file
-        self.include_formatting()
-        self.parse_interface()
-
-        # Allow for post-init code from the subclass.
-        if self.post_init is not None:
-            self.post_init()
-
-    def include_formatting(self):
-        """
-        This should simply be a dictionary of formatting keywords at the end.
-        """
-        if not self.format_string:
-            return
-
-        self.formats = yaml.load(self.format_string, Loader=yaml.SafeLoader)
-
-    def parse_interface(self):
-        """
-        This is the workhorse YAML parser, which creates the interface based
-        on the layout file.
-
-        `interface_file` is the path to the interface .yaml file to be parsed.
-        """
-
-        if not self.interface_file:
-            raise NotImplementedError("Interface file required.")
-
-        # Read the interface file into a string
-        filepath = os.path.abspath(os.path.expanduser(self.interface_file))
-        if not os.path.exists(filepath):
-            raise BokehTemplateParserError("Interface file path does not exist.")
-        with open(filepath) as f:
-            interface = f.read()
-
-        # If necessary, verify that the interface string contains !Document tag
-        if not self._embed and '!Document' not in interface:
-            raise BokehTemplateParserError("Interface file must contain a Document tag")
-
-        # Now, since we've registered all the constructors, we can parse the
-        # entire string with yaml. We don't need to assign the result to a
-        # variable, since the constructors store everything in self.refs
-        # (and self.document, for the document).
-        try:
-            self.full_stream = list(yaml.load_all(interface, Loader=yaml.FullLoader))
-        except yaml.YAMLError as exc:
-            raise BokehTemplateParserError(exc)

-    def embed(self, ref):
-        """A wrapper for ``bokeh.embed.components`` to return embeddable
-        code for the given widget reference."""
-        element = self.refs.get(ref, None)
-        if element is None:
-            raise BokehTemplateEmbedError("Undefined component reference")
-        return components(element)
-
-    @staticmethod
-    def parse_string(yaml_string):
-        """ A utility function to parse any YAML string using the
-        registered constructors. (Usually used for debugging.)"""
-        return list(yaml.load_all(yaml_string))
-
-    @classmethod
-    def register_sequence_constructor(cls, tag, parse_func):
-        """
-        Register a new sequence constructor with YAML.
-
-        Parameters
-        ----------
-        tag : str
-            The YAML tag string to be used for the constructor.
-        parse_func: object
-            The parsing function to be registered with YAML. This
-            function should accept a multi-line string, and return a
-            python object.
-
-        Notes
-        -----
-        This classmethod should be used to register a new constructor
-        *before* creating & instantiating a subclass of BokehTemplate :
-
-        ::
-
-            from bokeh_template import BokehTemplate
-            BokehTemplate.register_sequence_constructor("my_tag", my_parser)
-
-            class myTool(BokehTemplate):
-                pass
-
-            myTool()
-        """
-        if tag.startswith("!"):
-            tag = tag[1:]
-
-        def user_constructor(loader, node):
-            value = loader.construct_sequence(node, deep=True)
-            yield parse_func(value)
-        user_constructor.__name__ = tag.lower() + "_constructor"
-        yaml.add_constructor("!"
+ tag, user_constructor) - - @classmethod - def register_mapping_constructor(cls, tag, parse_func): - """ - Register a new mapping constructor with YAML. - - Parameters - ---------- - tag : str - The YAML tag string to be used for the constructor. - parse_func: object - The parsing function to be registered with YAML. This - function should accept a multi-line string, and return a - python object. - - Notes - ----- - This classmethod should be used to register a new constructor - *before* creating & instantiating a subclass of BokehTemplate : - - :: - - from bokeh_template import BokehTemplate - BokehTemplate.register_mapping_constructor("my_tag", my_parser) - - class myTool(BokehTemplate): - pass - - myTool() - """ - if tag.startswith("!"): - tag = tag[1:] - - def user_constructor(loader, node): - value = loader.construct_mapping(node, deep=True) - yield parse_func(value) - user_constructor.__name__ = tag.lower() + "_constructor" - yaml.add_constructor("!" + tag, user_constructor) diff --git a/jwql/edb/engineering_database.py b/jwql/edb/engineering_database.py index b4812d81b..ad51af223 100644 --- a/jwql/edb/engineering_database.py +++ b/jwql/edb/engineering_database.py @@ -799,7 +799,7 @@ def bokeh_plot_text_data(self, show_plot=False): dates = abscissa[index].astype(np.datetime64) y_values = list(np.ones(len(index), dtype=int) * i) p1.line(dates, y_values, line_width=1, line_color='blue', line_dash='dashed') - p1.circle(dates, y_values, color='blue') + p1.circle(dates, y_values, color='blue', radius=5, radius_dimension='y', radius_units='screen') p1.yaxis.ticker = list(override_dict.keys()) p1.yaxis.major_label_overrides = override_dict diff --git a/jwql/example_config.json b/jwql/example_config.json index 41e17be50..adcf87555 100644 --- a/jwql/example_config.json +++ b/jwql/example_config.json @@ -47,5 +47,50 @@ "cores" : "", "redis_host": "", "redis_port": "", - "transfer_dir": "" + "transfer_dir": "", + "logging": { + "version": 1, + "disable_existing_loggers": true, + "formatters": { + "simple": { + "format": "%(asctime)s %(levelname)s: %(message)s", + "datefmt": "%m/%d/%Y %H:%M:%S %p" + } + }, + "filters": { + "warnings_and_below": { + "()" : "jwql.utils.logging_functions.filter_maker", + "level": "WARNING" + } + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "level": "INFO", + "formatter": "simple", + "stream": "ext://sys.stdout", + "filters": ["warnings_and_below"] + }, + "stderr": { + "class": "logging.StreamHandler", + "level": "ERROR", + "formatter": "simple", + "stream": "ext://sys.stderr" + }, + "file": { + "class": "logging.FileHandler", + "formatter": "simple", + "filename": "app.log", + "mode": "a" + } + }, + "root": { + "level": "DEBUG", + "handlers": [ + "stderr", + "stdout", + "file" + ] + } + } } diff --git a/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py b/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py index d3ae2e795..55001117c 100755 --- a/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py +++ b/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py @@ -95,28 +95,44 @@ from jwst_reffiles.bad_pixel_mask import bad_pixel_mask import numpy as np -from jwql.database.database_interface import engine, session -from jwql.database.database_interface import NIRCamBadPixelQueryHistory, NIRCamBadPixelStats -from jwql.database.database_interface import NIRISSBadPixelQueryHistory, NIRISSBadPixelStats -from jwql.database.database_interface import MIRIBadPixelQueryHistory, MIRIBadPixelStats -from 
jwql.database.database_interface import NIRSpecBadPixelQueryHistory, NIRSpecBadPixelStats -from jwql.database.database_interface import FGSBadPixelQueryHistory, FGSBadPixelStats from jwql.instrument_monitors import pipeline_tools from jwql.shared_tasks.shared_tasks import only_one, run_pipeline, run_parallel_pipeline from jwql.utils import crds_tools, instrument_properties, monitor_utils -from jwql.utils.constants import DARKS_BAD_PIXEL_TYPES, DARK_EXP_TYPES, FLATS_BAD_PIXEL_TYPES, FLAT_EXP_TYPES -from jwql.utils.constants import JWST_INSTRUMENT_NAMES, JWST_INSTRUMENT_NAMES_MIXEDCASE, ON_GITHUB_ACTIONS -from jwql.utils.constants import ON_READTHEDOCS +from jwql.utils.constants import ( + DARKS_BAD_PIXEL_TYPES, + DARK_EXP_TYPES, + FLATS_BAD_PIXEL_TYPES, + FLAT_EXP_TYPES, +) +from jwql.utils.constants import JWST_INSTRUMENT_NAMES, JWST_INSTRUMENT_NAMES_MIXEDCASE +from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils.logging_functions import log_info, log_fail from jwql.utils.mast_utils import mast_query from jwql.utils.permissions import set_permissions -from jwql.utils.utils import copy_files, create_png_from_fits, ensure_dir_exists, get_config, filesystem_path +from jwql.utils.utils import ( + copy_files, + create_png_from_fits, + ensure_dir_exists, + get_config, + filesystem_path, +) if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: - from jwql.website.apps.jwql.monitor_pages.monitor_bad_pixel_bokeh import BadPixelPlots + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) -THRESHOLDS_FILE = os.path.join(os.path.split(__file__)[0], 'bad_pixel_file_thresholds.txt') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + + from jwql.website.apps.jwql.monitor_models.bad_pixel import * + from jwql.website.apps.jwql.monitor_pages.monitor_bad_pixel_bokeh import ( + BadPixelPlots, + ) + +THRESHOLDS_FILE = os.path.join( + os.path.split(__file__)[0], "bad_pixel_file_thresholds.txt" +) def bad_map_to_list(badpix_image, mnemonic): @@ -158,7 +174,9 @@ def bad_map_to_list(badpix_image, mnemonic): return x_location, y_location -def check_for_sufficient_files(uncal_files, instrument_name, aperture_name, threshold_value, file_type): +def check_for_sufficient_files( + uncal_files, instrument_name, aperture_name, threshold_value, file_type +): """From a list of files of a given type (flats or darks), check to see if there are enough files to call the bad pixel monitor. The number of files must be equal to or greater than the provided @@ -195,27 +213,40 @@ def check_for_sufficient_files(uncal_files, instrument_name, aperture_name, thre Whether or not the bad pixel monitor will be called on these files. """ - if file_type not in ['darks', 'flats']: + if file_type not in ["darks", "flats"]: raise ValueError('Input file_type must be "darks" or "flats"') - file_type_singular = file_type.strip('s') + file_type_singular = file_type.strip("s") if len(uncal_files) > 0: uncal_files = sorted(list(set(uncal_files))) if len(uncal_files) < threshold_value: - logging.info(('\tBad pixels from {} skipped. {} new {} files for {},' - '{} found. {} new files are required to run bad pixels' - 'from {} portion of monitor.') - .format(file_type, len(uncal_files), file_type_singular, - instrument_name, aperture_name, threshold_value, file_type)) + logging.info( + ( + "\tBad pixels from {} skipped. {} new {} files for {}," + "{} found. 
{} new files are required to run bad pixels" + "from {} portion of monitor." + ).format( + file_type, + len(uncal_files), + file_type_singular, + instrument_name, + aperture_name, + threshold_value, + file_type, + ) + ) uncal_files = None run_data = False else: - logging.info('\tSufficient new files found for {}, {} to run the' - 'bad pixel from {} portion of the monitor.' - .format(instrument_name, aperture_name, file_type)) - logging.info('\tNew entries: {}'.format(len(uncal_files))) + logging.info( + "\tSufficient new files found for {}, {} to run the" + "bad pixel from {} portion of the monitor.".format( + instrument_name, aperture_name, file_type + ) + ) + logging.info("\tNew entries: {}".format(len(uncal_files))) run_data = True return uncal_files, run_data @@ -273,18 +304,18 @@ def locate_rate_files(uncal_files): rate_files = [] rate_files_to_copy = [] for uncal in uncal_files: - base = uncal.split('_uncal.fits')[0] - constructed_ratefile = '{}_rateints.fits'.format(base) + base = uncal.split("_uncal.fits")[0] + constructed_ratefile = "{}_rateints.fits".format(base) try: rate_files.append(filesystem_path(constructed_ratefile)) rate_files_to_copy.append(filesystem_path(constructed_ratefile)) except FileNotFoundError: - constructed_ratefile = '{}_rate.fits'.format(base) + constructed_ratefile = "{}_rate.fits".format(base) try: rate_files.append(filesystem_path(constructed_ratefile)) rate_files_to_copy.append(filesystem_path(constructed_ratefile)) except FileNotFoundError: - rate_files.append('None') + rate_files.append("None") return rate_files, rate_files_to_copy @@ -304,20 +335,23 @@ def locate_uncal_files(query_result): """ uncal_files = [] for entry in query_result: - filename = entry['filename'] - suffix = filename.split('_')[-1].replace('.fits', '') - uncal_file = filename.replace(suffix, 'uncal') + filename = entry["filename"] + suffix = filename.split("_")[-1].replace(".fits", "") + uncal_file = filename.replace(suffix, "uncal") # Look for uncal file try: uncal_files.append(filesystem_path(uncal_file)) except FileNotFoundError: - logging.warning('\t\tUnable to locate {} in filesystem. Not including in processing.' - .format(uncal_file)) + logging.warning( + "\t\tUnable to locate {} in filesystem. Not including in processing.".format( + uncal_file + ) + ) return uncal_files -class BadPixels(): +class BadPixels: """Class for executing the bad pixel monitor. 
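The two locator functions above derive rate-file names purely by suffix substitution on the uncal name, preferring per-integration products. A hedged sketch of that convention (the basename is hypothetical)::

    uncal = "jw01022001001_01101_00001_nrca1_uncal.fits"  # illustrative name
    base = uncal.split("_uncal.fits")[0]
    rateints = "{}_rateints.fits".format(base)  # tried first: per-integration slopes
    rate = "{}_rate.fits".format(base)          # fallback when rateints is absent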
This class will search for new (since the previous instance of the @@ -392,7 +426,16 @@ class BadPixels(): def __init__(self): """Initialize an instance of the ``BadPixels`` class.""" - def add_bad_pix(self, coordinates, pixel_type, files, obs_start_time, obs_mid_time, obs_end_time, baseline_file): + def add_bad_pix( + self, + coordinates, + pixel_type, + files, + obs_start_time, + obs_mid_time, + obs_end_time, + baseline_file, + ): """Add a set of bad pixels to the bad pixel database table Parameters @@ -422,21 +465,25 @@ def add_bad_pix(self, coordinates, pixel_type, files, obs_start_time, obs_mid_ti pixel population was compared """ - logging.info('Adding {} {} pixels to database.'.format(len(coordinates[0]), pixel_type)) + logging.info( + "Adding {} {} pixels to database.".format(len(coordinates[0]), pixel_type) + ) source_files = [os.path.basename(item) for item in files] - entry = {'detector': self.detector, - 'x_coord': coordinates[0], - 'y_coord': coordinates[1], - 'type': pixel_type, - 'source_files': source_files, - 'obs_start_time': obs_start_time, - 'obs_mid_time': obs_mid_time, - 'obs_end_time': obs_end_time, - 'baseline_file': baseline_file, - 'entry_date': datetime.datetime.now()} - with engine.begin() as connection: - connection.execute(self.pixel_table.__table__.insert(), entry) + entry = { + "detector": self.detector, + "x_coord": coordinates[0], + "y_coord": coordinates[1], + "type": pixel_type, + "source_files": source_files, + "obs_start_time": obs_start_time, + "obs_mid_time": obs_mid_time, + "obs_end_time": obs_end_time, + "baseline_file": baseline_file, + "entry_date": datetime.datetime.now(datetime.timezone.utc), + } + entry = self.pixel_table(**entry) + entry.save() def filter_query_results(self, results, datatype): """Filter MAST query results. For input flats, keep only those @@ -460,29 +507,33 @@ def filter_query_results(self, results, datatype): # Need to filter all instruments' results by filter. # Choose filter with the most files # Only for flats - if ((datatype == 'flat') and (self.instrument != 'fgs')): - if self.instrument in ['nircam', 'niriss']: - filter_on = 'pupil' - elif self.instrument == 'nirspec': - filter_on = 'grating' - elif self.instrument == 'miri': - filter_on = 'filter' - - filter_list = ['{}:{}'.format(entry['filter'], entry[filter_on]) for entry in results] + if (datatype == "flat") and (self.instrument != "fgs"): + if self.instrument in ["nircam", "niriss"]: + filter_on = "pupil" + elif self.instrument == "nirspec": + filter_on = "grating" + elif self.instrument == "miri": + filter_on = "filter" + + filter_list = [ + "{}:{}".format(entry["filter"], entry[filter_on]) for entry in results + ] filter_set = list(set(filter_list)) # Find the filter with the largest number of entries maxnum = 0 - maxfilt = '' + maxfilt = "" for filt in filter_set: if filter_list.count(filt) > maxnum: maxnum = filter_list.count(filt) maxfilt = filt - filter_name, other_name = maxfilt.split(':') + filter_name, other_name = maxfilt.split(":") filtered = [] for entry in results: - if ((str(entry['filter']) == filter_name) and (str(entry[filter_on]) == other_name)): + if (str(entry["filter"]) == filter_name) and ( + str(entry[filter_on]) == other_name + ): filtered.append(entry) results = deepcopy(filtered) @@ -490,20 +541,20 @@ def filter_query_results(self, results, datatype): # All instruments: need to filter by readout pattern. 
         # Any pattern name not containing "IRS2" is ok
         # choose readout pattern with the most entries
-        readpatt_list = [entry['readpatt'] for entry in results]
+        readpatt_list = [entry["readpatt"] for entry in results]

         readpatt_set = list(set(readpatt_list))

         maxnum = 0
-        maxpatt = ''
+        maxpatt = ""
         for patt in readpatt_set:
-            if ((readpatt_list.count(patt) > maxnum) and ('IRS2' not in patt)):
+            if (readpatt_list.count(patt) > maxnum) and ("IRS2" not in patt):
                 maxnum = readpatt_list.count(patt)
                 maxpatt = patt

         # Find the readpattern with the largest number of entries
         readpatt_filtered = []
         for entry in results:
-            if entry['readpatt'] == maxpatt:
+            if entry["readpatt"] == maxpatt:
                 readpatt_filtered.append(entry)

         return readpatt_filtered
@@ -520,8 +571,8 @@ def get_metadata(self, filename):
         header = fits.getheader(filename)

         try:
-            self.detector = header['DETECTOR']
-            self.nints = header['NINTS']
+            self.detector = header["DETECTOR"]
+            self.nints = header["NINTS"]
         except KeyError as e:
             logging.error(e)

@@ -535,25 +586,27 @@ def get_possible_apertures(self):
         possible_apertures : list
             List of acceptable apertures for self.instrument
         """
-        if self.instrument == 'nircam':
+        if self.instrument == "nircam":
             possible_apertures = []
             for i in range(1, 6):
-                possible_apertures.append('NRCA{}_FULL'.format(i))
-                possible_apertures.append('NRCB{}_FULL'.format(i))
-        if self.instrument == 'niriss':
-            possible_apertures = ['NIS_CEN']
-        if self.instrument == 'miri':
+                possible_apertures.append("NRCA{}_FULL".format(i))
+                possible_apertures.append("NRCB{}_FULL".format(i))
+        if self.instrument == "niriss":
+            possible_apertures = ["NIS_CEN"]
+        if self.instrument == "miri":
             # Since MIRI is organized a little bit differently than the
             # other instruments, you can't use aperture names to uniquely
             # identify the full frame darks/flats from a given detector.
             # Instead you must use detector names.
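``add_bad_pix`` above now writes through the Django ORM instead of a SQLAlchemy ``connection.execute``. A hedged sketch of that save pattern (the model name and all field values are illustrative, and Django must already be configured via ``django.setup()`` as in the import block at the top of this file)::

    import datetime

    from jwql.website.apps.jwql.monitor_models.bad_pixel import NIRCamBadPixelStats

    entry = NIRCamBadPixelStats(
        detector="NRCA1",                    # illustrative values throughout
        x_coord=[10, 200],
        y_coord=[34, 512],
        type="hot",
        source_files=["jw0001_uncal.fits"],
        entry_date=datetime.datetime.now(datetime.timezone.utc),
    )
    entry.save()  # one row inserted, replacing connection.execute(...)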
- possible_apertures = [('MIRIMAGE', 'MIRIM_FULL'), - ('MIRIFULONG', 'MIRIM_FULL'), - ('MIRIFUSHORT', 'MIRIM_FULL')] - if self.instrument == 'fgs': - possible_apertures = ['FGS1_FULL', 'FGS2_FULL'] - if self.instrument == 'nirspec': - possible_apertures = ['NRS1_FULL', 'NRS2_FULL'] + possible_apertures = [ + ("MIRIMAGE", "MIRIM_FULL"), + ("MIRIFULONG", "MIRIM_FULL"), + ("MIRIFUSHORT", "MIRIM_FULL"), + ] + if self.instrument == "fgs": + possible_apertures = ["FGS1_FULL", "FGS2_FULL"] + if self.instrument == "nirspec": + possible_apertures = ["NRS1_FULL", "NRS2_FULL"] return possible_apertures def exclude_existing_badpix(self, badpix, pixel_type): @@ -580,17 +633,16 @@ def exclude_existing_badpix(self, badpix, pixel_type): List of y coordinates of new bad pixels """ - if pixel_type not in ['hot', 'dead', 'noisy']: - raise ValueError('Unrecognized bad pixel type: {}'.format(pixel_type)) + if pixel_type not in ["hot", "dead", "noisy"]: + raise ValueError("Unrecognized bad pixel type: {}".format(pixel_type)) - db_entries = session.query(self.pixel_table) \ - .filter(self.pixel_table.type == pixel_type) \ - .filter(self.pixel_table.detector == self.detector) \ - .all() + filters = {"type__iexact": pixel_type, "detector__iexact": self.detector} + records = self.pixel_table.objects.filter(**filters).all() already_found = [] - if len(db_entries) != 0: - for _row in db_entries: + + if len(records) != 0: + for _row in records: x_coords = _row.x_coord y_coords = _row.y_coord for x, y in zip(x_coords, y_coords): @@ -606,8 +658,6 @@ def exclude_existing_badpix(self, badpix, pixel_type): new_pixels_x.append(x) new_pixels_y.append(y) - session.close() - return (new_pixels_x, new_pixels_y) def identify_tables(self): @@ -615,10 +665,12 @@ def identify_tables(self): monitor """ mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument] - self.query_table = eval('{}BadPixelQueryHistory'.format(mixed_case_name)) - self.pixel_table = eval('{}BadPixelStats'.format(mixed_case_name)) + self.query_table = eval(f"{mixed_case_name}BadPixelQueryHistory") + self.pixel_table = eval(f"{mixed_case_name}BadPixelStats") - def map_uncal_and_rate_file_lists(self, uncal_files, rate_files, rate_files_to_copy, obs_type): + def map_uncal_and_rate_file_lists( + self, uncal_files, rate_files, rate_files_to_copy, obs_type + ): """Copy uncal and rate files from the filesystem to the working directory. Any requested files that are not in the filesystem are noted and skipped. 
Return the file lists with skipped files @@ -653,14 +705,18 @@ def map_uncal_and_rate_file_lists(self, uncal_files, rate_files, rate_files_to_c the rate file failed) """ # Copy files from filesystem - uncal_copied_files, uncal_not_copied = copy_files(uncal_files, self.working_data_dir) - rate_copied_files, rate_not_copied = copy_files(rate_files_to_copy, self.working_data_dir) + uncal_copied_files, uncal_not_copied = copy_files( + uncal_files, self.working_data_dir + ) + rate_copied_files, rate_not_copied = copy_files( + rate_files_to_copy, self.working_data_dir + ) # Set any rate files that failed to copy to None so # that we can regenerate them if len(rate_not_copied) > 0: for badfile in rate_not_copied: - rate_files[rate_files.index(badfile)] = 'None' + rate_files[rate_files.index(badfile)] = "None" # Any uncal files that failed to copy must be removed # entirely from the uncal and rate lists @@ -670,20 +726,28 @@ def map_uncal_and_rate_file_lists(self, uncal_files, rate_files, rate_files_to_c del uncal_files[bad_index] del rate_files[bad_index] - logging.info('\tNew {} observations: '.format(obs_type)) - logging.info('\tData dir: {}'.format(self.working_data_dir)) - logging.info('\tCopied to data dir: {}'.format(uncal_copied_files)) - logging.info('\tNot copied (failed, or missing from filesystem): {}'.format(uncal_not_copied)) + logging.info("\tNew {} observations: ".format(obs_type)) + logging.info("\tData dir: {}".format(self.working_data_dir)) + logging.info("\tCopied to data dir: {}".format(uncal_copied_files)) + logging.info( + "\tNot copied (failed, or missing from filesystem): {}".format( + uncal_not_copied + ) + ) # After all this, the lists should be the same length # and have a 1-to-1 correspondence if len(uncal_files) != len(rate_files): - print('Lists of {} uncal and rate files have different lengths!!'.format(obs_type)) + print( + "Lists of {} uncal and rate files have different lengths!!".format( + obs_type + ) + ) raise ValueError return uncal_files, rate_files - def most_recent_search(self, file_type='dark'): + def most_recent_search(self, file_type="dark"): """Query the query history database and return the information on the most recent query for the given ``aperture_name`` where the dark monitor was executed. @@ -700,31 +764,34 @@ def most_recent_search(self, file_type='dark'): Date (in MJD) of the ending range of the previous MAST query where the dark monitor was run. """ - if file_type.lower() == 'dark': - run_field = self.query_table.run_bpix_from_darks - elif file_type.lower() == 'flat': - run_field = self.query_table.run_bpix_from_flats - - query = session.query(self.query_table).filter(self.query_table.aperture == self.aperture). 
\ - filter(run_field == True) # noqa: E712 (comparison to true) - - dates = np.zeros(0) - if file_type.lower() == 'dark': - for instance in query: - dates = np.append(dates, instance.dark_end_time_mjd) - elif file_type.lower() == 'flat': - for instance in query: - dates = np.append(dates, instance.flat_end_time_mjd) - - query_count = len(dates) - if query_count == 0: + if file_type.lower() == "dark": + run_field = "run_bpix_from_darks" + sort_field = "-dark_end_time_mjd" + elif file_type.lower() == "flat": + run_field = "run_bpix_from_flats" + sort_field = "-flat_end_time_mjd" + + filters = {"aperture__iexact": self.aperture, run_field: True} + + record = self.query_table.objects.filter(**filters).order_by(sort_field).first() + + # Record is django QuerySet object, when empty QuerySet object is returned () + # the result of record.first() is None + if record is None: query_result = 59607.0 # a.k.a. Jan 28, 2022 == First JWST images (MIRI) - logging.info(('\tNo query history for {}. Beginning search date will be set to {}.' - .format(self.aperture, query_result))) + logging.info( + ( + "\tNo query history for {}. Beginning search date will be set to {}.".format( + self.aperture, query_result + ) + ) + ) else: - query_result = np.max(dates) + if file_type.lower() == "dark": + query_result = record.dark_end_time_mjd + elif file_type.lower() == "flat": + query_result = record.flat_end_time_mjd - session.close() return query_result def make_crds_parameter_dict(self): @@ -736,20 +803,28 @@ def make_crds_parameter_dict(self): Dictionary of parameters, in the format expected by CRDS """ parameters = {} - parameters['INSTRUME'] = self.instrument.upper() - parameters['SUBARRAY'] = 'FULL' - parameters['DATE-OBS'] = datetime.date.today().isoformat() - current_date = datetime.datetime.now() - parameters['TIME-OBS'] = current_date.time().isoformat() - parameters['DETECTOR'] = self.detector.upper() - if self.instrument.upper() == 'NIRCAM': - if parameters['DETECTOR'] in ['NRCALONG', 'NRCBLONG']: - parameters['CHANNEL'] = 'LONG' + parameters["INSTRUME"] = self.instrument.upper() + parameters["SUBARRAY"] = "FULL" + parameters["DATE-OBS"] = datetime.date.today().isoformat() + current_date = datetime.datetime.now(datetime.timezone.utc) + parameters["TIME-OBS"] = current_date.time().isoformat() + parameters["DETECTOR"] = self.detector.upper() + if self.instrument.upper() == "NIRCAM": + if parameters["DETECTOR"] in ["NRCALONG", "NRCBLONG"]: + parameters["CHANNEL"] = "LONG" else: - parameters['CHANNEL'] = 'SHORT' + parameters["CHANNEL"] = "SHORT" return parameters - def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_count_threshold, dark_raw_files, dark_slope_files, dark_file_count_threshold): + def process( + self, + illuminated_raw_files, + illuminated_slope_files, + flat_file_count_threshold, + dark_raw_files, + dark_slope_files, + dark_file_count_threshold, + ): """The main method for processing darks. See module docstrings for further details. 
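The ``most_recent_search`` rewrite above collapses the old SQLAlchemy query-and-``np.max`` pattern into a single ordered ORM lookup. A sketch of the same pattern in isolation (the aperture value is illustrative, and the setup lines assume the Django apps are configured exactly as in the import block earlier in this file)::

    import os

    import django

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings")
    django.setup()

    from jwql.website.apps.jwql.monitor_models.bad_pixel import NIRCamBadPixelQueryHistory

    record = (
        NIRCamBadPixelQueryHistory.objects
        .filter(aperture__iexact="NRCA1_FULL", run_bpix_from_darks=True)
        .order_by("-dark_end_time_mjd")   # newest first
        .first()                          # None when no history exists
    )
    query_start = record.dark_end_time_mjd if record is not None else 59607.0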
@@ -785,71 +860,115 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun badpix_types = [] illuminated_obstimes = [] if illuminated_raw_files: - logging.info("Found {} uncalibrated flat fields".format(len(illuminated_raw_files))) + logging.info( + "Found {} uncalibrated flat fields".format(len(illuminated_raw_files)) + ) badpix_types.extend(FLATS_BAD_PIXEL_TYPES) - out_exts = defaultdict(lambda: ['jump', '0_ramp_fit']) + out_exts = defaultdict(lambda: ["jump", "0_ramp_fit"]) in_files = [] - for uncal_file, rate_file in zip(illuminated_raw_files, illuminated_slope_files): - logging.info("\tChecking illuminated raw file {} with rate file {}".format(uncal_file, rate_file)) + for uncal_file, rate_file in zip( + illuminated_raw_files, illuminated_slope_files + ): + logging.info( + "\tChecking illuminated raw file {} with rate file {}".format( + uncal_file, rate_file + ) + ) self.get_metadata(uncal_file) - if rate_file == 'None': - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) - logging.info('Calling pipeline for {}'.format(uncal_file)) + if rate_file == "None": + short_name = os.path.basename(uncal_file).replace("_uncal.fits", "") + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) + logging.info("Calling pipeline for {}".format(uncal_file)) logging.info("Copying raw file to {}".format(self.working_data_dir)) copy_files([uncal_file], self.working_data_dir) - if hasattr(self, 'nints') and self.nints > 1: - out_exts[short_name] = ['jump', '1_ramp_fit'] + if hasattr(self, "nints") and self.nints > 1: + out_exts[short_name] = ["jump", "1_ramp_fit"] needs_calibration = False for file_type in out_exts[short_name]: - if not os.path.isfile(local_uncal_file.replace("uncal", file_type)): + if not os.path.isfile( + local_uncal_file.replace("uncal", file_type) + ): needs_calibration = True if needs_calibration: in_files.append(local_uncal_file) else: - logging.info("\t\tCalibrated files already exist for {}".format(short_name)) + logging.info( + "\t\tCalibrated files already exist for {}".format( + short_name + ) + ) else: logging.info("\tRate file found for {}".format(uncal_file)) if os.path.isfile(rate_file): copy_files([rate_file], self.working_data_dir) else: - logging.warning("\tRate file {} doesn't actually exist".format(rate_file)) - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) - logging.info('Calling pipeline for {}'.format(uncal_file)) - logging.info("Copying raw file to {}".format(self.working_data_dir)) + logging.warning( + "\tRate file {} doesn't actually exist".format(rate_file) + ) + short_name = os.path.basename(uncal_file).replace( + "_uncal.fits", "" + ) + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) + logging.info("Calling pipeline for {}".format(uncal_file)) + logging.info( + "Copying raw file to {}".format(self.working_data_dir) + ) copy_files([uncal_file], self.working_data_dir) - if hasattr(self, 'nints') and self.nints > 1: - out_exts[short_name] = ['jump', '1_ramp_fit'] + if hasattr(self, "nints") and self.nints > 1: + out_exts[short_name] = ["jump", "1_ramp_fit"] needs_calibration = False for file_type in out_exts[short_name]: - if not os.path.isfile(local_uncal_file.replace("uncal", file_type)): + if not os.path.isfile( + 
local_uncal_file.replace("uncal", file_type) + ): needs_calibration = True if needs_calibration: in_files.append(local_uncal_file) else: - logging.info("\t\tCalibrated files already exist for {}".format(short_name)) + logging.info( + "\t\tCalibrated files already exist for {}".format( + short_name + ) + ) outputs = {} if len(in_files) > 0: logging.info("Running pipeline for {} files".format(len(in_files))) - outputs = run_parallel_pipeline(in_files, "uncal", out_exts, self.instrument, jump_pipe=True) + outputs = run_parallel_pipeline( + in_files, "uncal", out_exts, self.instrument, jump_pipe=True + ) index = 0 logging.info("Checking files post-calibration") - for uncal_file, rate_file in zip(illuminated_raw_files, illuminated_slope_files): - logging.info("\tChecking files {}, {}".format(os.path.basename(uncal_file), os.path.basename(rate_file))) - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) + for uncal_file, rate_file in zip( + illuminated_raw_files, illuminated_slope_files + ): + logging.info( + "\tChecking files {}, {}".format( + os.path.basename(uncal_file), os.path.basename(rate_file) + ) + ) + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) if local_uncal_file in outputs: logging.info("\t\tAdding calibrated file.") - illuminated_slope_files[index] = deepcopy(outputs[local_uncal_file][1]) + illuminated_slope_files[index] = deepcopy( + outputs[local_uncal_file][1] + ) else: logging.info("\t\tCalibration was skipped for file") self.get_metadata(illuminated_raw_files[index]) local_ramp_file = local_uncal_file.replace("uncal", "0_ramp_fit") local_rateints_file = local_uncal_file.replace("uncal", "rateints") - if hasattr(self, 'nints') and self.nints > 1: - local_ramp_file = local_ramp_file.replace("0_ramp_fit", "1_ramp_fit") + if hasattr(self, "nints") and self.nints > 1: + local_ramp_file = local_ramp_file.replace( + "0_ramp_fit", "1_ramp_fit" + ) if os.path.isfile(local_ramp_file): logging.info("\t\t\tFound local ramp file") illuminated_slope_files[index] = local_ramp_file @@ -862,11 +981,16 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun index += 1 # Get observation time for all files - illuminated_obstimes.append(instrument_properties.get_obstime(uncal_file)) + illuminated_obstimes.append( + instrument_properties.get_obstime(uncal_file) + ) logging.info("Trimming unfound files.") index = 0 while index < len(illuminated_raw_files): - if illuminated_slope_files[index] is None or illuminated_slope_files[index] == 'None': + if ( + illuminated_slope_files[index] is None + or illuminated_slope_files[index] == "None" + ): logging.info("\tRemoving {}".format(illuminated_raw_files[index])) del illuminated_raw_files[index] del illuminated_slope_files[index] @@ -874,9 +998,9 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun else: index += 1 - min_illum_time = 0. - max_illum_time = 0. - mid_illum_time = 0. 
+ min_illum_time = 0.0 + max_illum_time = 0.0 + mid_illum_time = 0.0 if len(illuminated_obstimes) > 0: min_illum_time = min(illuminated_obstimes) max_illum_time = max(illuminated_obstimes) @@ -896,21 +1020,33 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun # even if the rate file is present, because we also need the jump # and fitops files, which are not saved by default in_files = [] - out_exts = defaultdict(lambda: ['jump', 'fitopt', '0_ramp_fit']) + out_exts = defaultdict(lambda: ["jump", "fitopt", "0_ramp_fit"]) for uncal_file, rate_file in zip(dark_raw_files, dark_slope_files): - logging.info("Checking dark file {} with rate file {}".format(uncal_file, rate_file)) + logging.info( + "Checking dark file {} with rate file {}".format( + uncal_file, rate_file + ) + ) self.get_metadata(uncal_file) - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) + short_name = os.path.basename(uncal_file).replace("_uncal.fits", "") + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) if not os.path.isfile(local_uncal_file): - logging.info("\tCopying raw file to {}".format(self.working_data_dir)) + logging.info( + "\tCopying raw file to {}".format(self.working_data_dir) + ) copy_files([uncal_file], self.working_data_dir) - if hasattr(self, 'nints') and self.nints > 1: - out_exts[short_name] = ['jump', 'fitopt', '1_ramp_fit'] - local_processed_files = [local_uncal_file.replace("uncal", x) for x in out_exts[short_name]] + if hasattr(self, "nints") and self.nints > 1: + out_exts[short_name] = ["jump", "fitopt", "1_ramp_fit"] + local_processed_files = [ + local_uncal_file.replace("uncal", x) for x in out_exts[short_name] + ] calibrated_data = [os.path.isfile(x) for x in local_processed_files] if not all(calibrated_data): - logging.info('\tCalling pipeline for {} {}'.format(uncal_file, rate_file)) + logging.info( + "\tCalling pipeline for {} {}".format(uncal_file, rate_file) + ) in_files.append(local_uncal_file) dark_jump_files.append(None) dark_fitopt_files.append(None) @@ -926,14 +1062,18 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun outputs = {} if len(in_files) > 0: logging.info("Running pipeline for {} files".format(len(in_files))) - outputs = run_parallel_pipeline(in_files, "uncal", out_exts, self.instrument, jump_pipe=True) + outputs = run_parallel_pipeline( + in_files, "uncal", out_exts, self.instrument, jump_pipe=True + ) index = 0 logging.info("Checking files post-calibration") for uncal_file, rate_file in zip(dark_raw_files, dark_slope_files): logging.info("\tChecking files {}, {}".format(uncal_file, rate_file)) - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) + short_name = os.path.basename(uncal_file).replace("_uncal.fits", "") if local_uncal_file in outputs: logging.info("\t\tAdding calibrated files") dark_jump_files[index] = outputs[local_uncal_file][0] @@ -943,21 +1083,31 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun logging.info("\t\tCalibration skipped for file") self.get_metadata(local_uncal_file) local_ramp_file = local_uncal_file.replace("uncal", "0_ramp_fit") - if hasattr(self, 'nints') and self.nints > 1: - local_ramp_file = 
local_ramp_file.replace("0_ramp_fit", "1_ramp_fit") + if hasattr(self, "nints") and self.nints > 1: + local_ramp_file = local_ramp_file.replace( + "0_ramp_fit", "1_ramp_fit" + ) if not os.path.isfile(local_uncal_file.replace("uncal", "jump")): logging.info("\t\t\tJump file not found") dark_jump_files[index] = None else: - dark_jump_files[index] = local_uncal_file.replace("uncal", "jump") + dark_jump_files[index] = local_uncal_file.replace( + "uncal", "jump" + ) if not os.path.isfile(local_uncal_file.replace("uncal", "fitopt")): logging.info("\t\t\tFitopt file not found") dark_fitopt_files[index] = None else: - dark_fitopt_files[index] = local_uncal_file.replace("uncal", "fitopt") + dark_fitopt_files[index] = local_uncal_file.replace( + "uncal", "fitopt" + ) if not os.path.isfile(local_ramp_file): - if os.path.isfile(local_uncal_file.replace("uncal", "rateints")): - dark_slope_files[index] = local_uncal_file.replace("uncal", "rateints") + if os.path.isfile( + local_uncal_file.replace("uncal", "rateints") + ): + dark_slope_files[index] = local_uncal_file.replace( + "uncal", "rateints" + ) else: logging.info("\t\t\tRate file not found") dark_slope_files[index] = None @@ -968,7 +1118,11 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun index = 0 logging.info("Trimming unfound files.") while index < len(dark_raw_files): - if dark_jump_files[index] is None or dark_fitopt_files[index] is None or dark_slope_files[index] is None: + if ( + dark_jump_files[index] is None + or dark_fitopt_files[index] is None + or dark_slope_files[index] is None + ): logging.info("\tRemoving {}".format(dark_raw_files[index])) del dark_raw_files[index] del dark_jump_files[index] @@ -992,7 +1146,9 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun dark_length = 0 else: dark_length = len(dark_slope_files) - if (flat_length < flat_file_count_threshold) and (dark_length < dark_file_count_threshold): + if (flat_length < flat_file_count_threshold) and ( + dark_length < dark_file_count_threshold + ): logging.info("After removing failed files, not enough new files remian.") return @@ -1001,46 +1157,62 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun dead_flux_files = [] if illuminated_raw_files is not None: for illum_file in illuminated_raw_files: - ngroup = fits.getheader(illum_file)['NGROUPS'] + ngroup = fits.getheader(illum_file)["NGROUPS"] if ngroup >= 4: dead_flux_files.append(illum_file) if len(dead_flux_files) == 0: dead_flux_files = None # Instrument-specific preferences from jwst_reffiles meetings - if self.instrument in ['nircam', 'niriss', 'fgs']: - dead_search_type = 'sigma_rate' - elif self.instrument in ['miri', 'nirspec']: - dead_search_type = 'absolute_rate' + if self.instrument in ["nircam", "niriss", "fgs"]: + dead_search_type = "sigma_rate" + elif self.instrument in ["miri", "nirspec"]: + dead_search_type = "absolute_rate" - flat_mean_normalization_method = 'smoothed' + flat_mean_normalization_method = "smoothed" # Call the bad pixel search module from jwst_reffiles. Lots of # other possible parameters. Only specify the non-default params # in order to make things easier to read. 
- query_string = 'darks_{}_flats_{}_to_{}'.format(self.dark_query_start, self.flat_query_start, self.query_end) - output_file = '{}_{}_{}_bpm.fits'.format(self.instrument, self.aperture, query_string) + query_string = "darks_{}_flats_{}_to_{}".format( + self.dark_query_start, self.flat_query_start, self.query_end + ) + output_file = "{}_{}_{}_bpm.fits".format( + self.instrument, self.aperture, query_string + ) output_file = os.path.join(self.output_dir, output_file) -# logging.info("Calling bad_pixel_mask.bad_pixels") -# logging.info("\tflat_slope_files are: {}".format(illuminated_slope_files)) -# logging.info("\tdead__search_type={}".format(dead_search_type)) -# logging.info("\tflat_mean_normalization_method={}".format(flat_mean_normalization_method)) -# logging.info("\tdead_flux_check_files are: {}".format(dead_flux_files)) -# logging.info("\tdark_slope_files are: {}".format(dark_slope_files)) -# logging.info("\tdark_uncal_files are: {}".format(dark_raw_files)) -# logging.info("\tdark_jump_files are: {}".format(dark_jump_files)) -# logging.info("\tdark_fitopt_files are: {}".format(dark_fitopt_files)) -# logging.info("\toutput_file={}".format(output_file)) - - bad_pixel_mask.bad_pixels(flat_slope_files=illuminated_slope_files, dead_search_type=dead_search_type, - flat_mean_normalization_method=flat_mean_normalization_method, - run_dead_flux_check=True, dead_flux_check_files=dead_flux_files, flux_check=35000, - dark_slope_files=dark_slope_files, dark_uncal_files=dark_raw_files, - dark_jump_files=dark_jump_files, dark_fitopt_files=dark_fitopt_files, plot=False, - output_file=output_file, author='jwst_reffiles', description='A bad pix mask', - pedigree='GROUND', useafter='2222-04-01 00:00:00', - history='This file was created by JWQL', quality_check=False) + # logging.info("Calling bad_pixel_mask.bad_pixels") + # logging.info("\tflat_slope_files are: {}".format(illuminated_slope_files)) + # logging.info("\tdead_search_type={}".format(dead_search_type)) + # logging.info("\tflat_mean_normalization_method={}".format(flat_mean_normalization_method)) + # logging.info("\tdead_flux_check_files are: {}".format(dead_flux_files)) + # logging.info("\tdark_slope_files are: {}".format(dark_slope_files)) + # logging.info("\tdark_uncal_files are: {}".format(dark_raw_files)) + # logging.info("\tdark_jump_files are: {}".format(dark_jump_files)) + # logging.info("\tdark_fitopt_files are: {}".format(dark_fitopt_files)) + # logging.info("\toutput_file={}".format(output_file)) + + bad_pixel_mask.bad_pixels( + flat_slope_files=illuminated_slope_files, + dead_search_type=dead_search_type, + flat_mean_normalization_method=flat_mean_normalization_method, + run_dead_flux_check=True, + dead_flux_check_files=dead_flux_files, + flux_check=35000, + dark_slope_files=dark_slope_files, + dark_uncal_files=dark_raw_files, + dark_jump_files=dark_jump_files, + dark_fitopt_files=dark_fitopt_files, + plot=False, + output_file=output_file, + author="jwst_reffiles", + description="A bad pix mask", + pedigree="GROUND", + useafter="2222-04-01 00:00:00", + history="This file was created by JWQL", + quality_check=False, + ) # Read in the newly-created bad pixel file set_permissions(output_file) @@ -1048,17 +1220,23 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun # Locate and read in the current bad pixel mask parameters = self.make_crds_parameter_dict() - mask_dictionary = crds_tools.get_reffiles(parameters, ['mask'], download=True) - baseline_file = mask_dictionary['mask'] - - if 'NOT FOUND' in
baseline_file: - logging.warning(('\tNo baseline bad pixel file for {} {}. Any bad ' - 'pixels found as part of this search will be considered new'.format(self.instrument, self.aperture))) + mask_dictionary = crds_tools.get_reffiles(parameters, ["mask"], download=True) + baseline_file = mask_dictionary["mask"] + + if "NOT FOUND" in baseline_file: + logging.warning( + ( + "\tNo baseline bad pixel file for {} {}. Any bad " + "pixels found as part of this search will be considered new".format( + self.instrument, self.aperture + ) + ) + ) baseline_file = new_badpix_file yd, xd = badpix_mask.shape baseline_badpix_mask = np.zeros((yd, xd), dtype=int) else: - logging.info('\tBaseline bad pixel file is {}'.format(baseline_file)) + logging.info("\tBaseline bad pixel file is {}".format(baseline_file)) baseline_badpix_mask = fits.getdata(baseline_file) # Exclude hot and dead pixels in the current bad pixel mask @@ -1070,21 +1248,43 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun bad_location_list = bad_map_to_list(new_since_reffile, bad_type) # Add new hot and dead pixels to the database - logging.info('\tFound {} new {} pixels'.format(len(bad_location_list[0]), bad_type)) + logging.info( + "\tFound {} new {} pixels".format(len(bad_location_list[0]), bad_type) + ) if bad_type in FLATS_BAD_PIXEL_TYPES: - self.add_bad_pix(bad_location_list, bad_type, illuminated_slope_files, - min_illum_time, mid_illum_time, max_illum_time, baseline_file) - flat_png = create_png_from_fits(illuminated_slope_files[0], self.output_dir) + self.add_bad_pix( + bad_location_list, + bad_type, + illuminated_slope_files, + min_illum_time, + mid_illum_time, + max_illum_time, + baseline_file, + ) + flat_png = create_png_from_fits( + illuminated_slope_files[0], self.output_dir + ) elif bad_type in DARKS_BAD_PIXEL_TYPES: - self.add_bad_pix(bad_location_list, bad_type, dark_slope_files, - min_dark_time, mid_dark_time, max_dark_time, baseline_file) + self.add_bad_pix( + bad_location_list, + bad_type, + dark_slope_files, + min_dark_time, + mid_dark_time, + max_dark_time, + baseline_file, + ) dark_png = create_png_from_fits(dark_slope_files[0], self.output_dir) else: - raise ValueError("Unrecognized type of bad pixel: {}. Cannot update database table.".format(bad_type)) + raise ValueError( + "Unrecognized type of bad pixel: {}. Cannot update database table.".format( + bad_type + ) + ) # Remove raw files, rate files, and pipeline products in order to save disk space - files_to_remove = glob(f'{self.working_data_dir}/*.fits') + files_to_remove = glob(f"{self.working_data_dir}/*.fits") for filename in files_to_remove: os.remove(filename) @@ -1101,11 +1301,11 @@ def run(self): For each, we will query MAST, copy new files from the filesystem and pass the list of copied files into the ``process()`` method. """ - logging.info('Begin logging for bad_pixel_monitor') + logging.info("Begin logging for bad_pixel_monitor") # Get the output directory - self.working_dir = os.path.join(get_config()['working'], 'bad_pixel_monitor') - self.output_dir = os.path.join(get_config()['outputs'], 'bad_pixel_monitor') + self.working_dir = os.path.join(get_config()["working"], "bad_pixel_monitor") + self.output_dir = os.path.join(get_config()["outputs"], "bad_pixel_monitor") # Read in config file that defines the thresholds for the number # of dark files that must be present in order for the monitor to run @@ -1131,13 +1331,13 @@ def run(self): lamp = None # NIRSpec flats use the MIRROR grating.
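The loop below tailors the MAST query to each instrument: NIRSpec flats use the MIRROR grating, MIRI treats the detector name as the aperture, and NIRISS and NIRSpec darks use the LINE2 lamp. A hypothetical condensation of those choices, for orientation only (this helper does not exist in the monitor):

def query_settings(instrument, aperture):
    """Sketch of the per-instrument query settings chosen below."""
    grating = "MIRROR" if instrument == "nirspec" else None          # NIRSpec flats
    lamp = "LINE2" if instrument in ("niriss", "nirspec") else None  # in-flight darks
    if instrument == "miri":
        # No aperture name exists for a full-frame MRS exposure, so the
        # detector name stands in for the aperture.
        detector_name, aperture_name = aperture
    else:
        detector_name, aperture_name = None, aperture
    return grating, lamp, detector_name, aperture_name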
- if self.instrument == 'nirspec': - grating = 'MIRROR' + if self.instrument == "nirspec": + grating = "MIRROR" # MIRI is unlike the other instruments. We basically treat # the detector as the aperture name because there is no # aperture name for a full frame MRS exposure. - if self.instrument == 'miri': + if self.instrument == "miri": detector_name, aperture_name = aperture self.aperture = detector_name else: @@ -1145,41 +1345,64 @@ def run(self): aperture_name = aperture # In flight, NIRISS plans to take darks using the LINE2 lamp - if self.instrument == 'niriss': - lamp = 'LINE2' + if self.instrument == "niriss": + lamp = "LINE2" # What lamp is most appropriate for NIRSpec? - if self.instrument == 'nirspec': - lamp = 'LINE2' + if self.instrument == "nirspec": + lamp = "LINE2" # What lamp is most appropriate for FGS? # if self.instrument == 'fgs': # lamp = 'G2LAMP1' - logging.info('') - logging.info('Working on aperture {} in {}'.format(aperture, self.instrument)) + logging.info("") + logging.info( + "Working on aperture {} in {}".format(aperture, self.instrument) + ) # Find the appropriate threshold for number of new files needed - match = self.aperture == limits['Aperture'] - flat_file_count_threshold = limits['FlatThreshold'][match].data[0] - dark_file_count_threshold = limits['DarkThreshold'][match].data[0] + match = self.aperture == limits["Aperture"] + flat_file_count_threshold = limits["FlatThreshold"][match].data[0] + dark_file_count_threshold = limits["DarkThreshold"][match].data[0] # Locate the record of the most recent MAST search - self.flat_query_start = self.most_recent_search(file_type='flat') - self.dark_query_start = self.most_recent_search(file_type='dark') - logging.info('\tFlat field query times: {} {}'.format(self.flat_query_start, self.query_end)) - logging.info('\tDark current query times: {} {}'.format(self.dark_query_start, self.query_end)) + self.flat_query_start = self.most_recent_search(file_type="flat") + self.dark_query_start = self.most_recent_search(file_type="dark") + logging.info( + "\tFlat field query times: {} {}".format( + self.flat_query_start, self.query_end + ) + ) + logging.info( + "\tDark current query times: {} {}".format( + self.dark_query_start, self.query_end + ) + ) # Query MAST using the aperture and the time of the most # recent previous search as the starting time. flat_templates = FLAT_EXP_TYPES[instrument] dark_templates = DARK_EXP_TYPES[instrument] - new_flat_entries = mast_query(instrument, flat_templates, self.flat_query_start, self.query_end, - aperture=aperture_name, grating=grating, detector=detector_name, - lamp=lamp) - new_dark_entries = mast_query(instrument, dark_templates, self.dark_query_start, self.query_end, - aperture=aperture_name, detector=detector_name) + new_flat_entries = mast_query( + instrument, + flat_templates, + self.flat_query_start, + self.query_end, + aperture=aperture_name, + grating=grating, + detector=detector_name, + lamp=lamp, + ) + new_dark_entries = mast_query( + instrument, + dark_templates, + self.dark_query_start, + self.query_end, + aperture=aperture_name, + detector=detector_name, + ) # Filter the results # Filtering could be different for flats vs darks. 
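The per-aperture thresholds above are pulled from an astropy Table by masking on the aperture name. A self-contained illustration of that lookup idiom (the table contents are invented, not the monitor's configuration):

from astropy.table import Table

limits = Table({"Aperture": ["NRCA1_FULL", "MIRIM_FULL"],
                "FlatThreshold": [10, 8],
                "DarkThreshold": [10, 6]})

match = "MIRIM_FULL" == limits["Aperture"]                          # boolean mask over rows
flat_file_count_threshold = limits["FlatThreshold"][match].data[0]  # -> 8
dark_file_count_threshold = limits["DarkThreshold"][match].data[0]  # -> 6

The trailing [0] assumes exactly one row per aperture; an aperture missing from the table would raise an IndexError here rather than fail later.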
@@ -1210,58 +1433,141 @@ def run(self): if new_flat_entries: # Exclude ASIC tuning data len_new_flats = len(new_flat_entries) - new_flat_entries = monitor_utils.exclude_asic_tuning(new_flat_entries) + new_flat_entries = monitor_utils.exclude_asic_tuning( + new_flat_entries + ) len_no_asic = len(new_flat_entries) num_asic = len_new_flats - len_no_asic - logging.info("\tFiltering out ASIC tuning files removed {} flat files.".format(num_asic)) - - new_flat_entries = self.filter_query_results(new_flat_entries, datatype='flat') - apcheck_flat_entries = pipeline_tools.aperture_size_check(new_flat_entries, instrument, aperture) - lost_to_bad_metadata = len(new_flat_entries) - len(apcheck_flat_entries) - logging.info('\t{} flat field files ignored due to inconsistency in array size and metadata.'.format(lost_to_bad_metadata)) + logging.info( + "\tFiltering out ASIC tuning files removed {} flat files.".format( + num_asic + ) + ) + + new_flat_entries = self.filter_query_results( + new_flat_entries, datatype="flat" + ) + apcheck_flat_entries = pipeline_tools.aperture_size_check( + new_flat_entries, instrument, aperture + ) + lost_to_bad_metadata = len(new_flat_entries) - len( + apcheck_flat_entries + ) + logging.info( + "\t{} flat field files ignored due to inconsistency in array size and metadata.".format( + lost_to_bad_metadata + ) + ) flat_uncal_files = locate_uncal_files(apcheck_flat_entries) - flat_uncal_files, run_flats = check_for_sufficient_files(flat_uncal_files, instrument, aperture, flat_file_count_threshold, 'flats') - flat_rate_files, flat_rate_files_to_copy = locate_rate_files(flat_uncal_files) + flat_uncal_files, run_flats = check_for_sufficient_files( + flat_uncal_files, + instrument, + aperture, + flat_file_count_threshold, + "flats", + ) + flat_rate_files, flat_rate_files_to_copy = locate_rate_files( + flat_uncal_files + ) else: run_flats = False - flat_uncal_files, flat_rate_files, flat_rate_files_to_copy = None, None, None + flat_uncal_files, flat_rate_files, flat_rate_files_to_copy = ( + None, + None, + None, + ) if new_dark_entries: # Exclude ASIC tuning data len_new_darks = len(new_dark_entries) - new_dark_entries = monitor_utils.exclude_asic_tuning(new_dark_entries) + new_dark_entries = monitor_utils.exclude_asic_tuning( + new_dark_entries + ) len_no_asic = len(new_dark_entries) num_asic = len_new_darks - len_no_asic - logging.info("\tFiltering out ASIC tuning files removed {} dark files.".format(num_asic)) - - new_dark_entries = self.filter_query_results(new_dark_entries, datatype='dark') - apcheck_dark_entries = pipeline_tools.aperture_size_check(new_dark_entries, instrument, aperture) - lost_to_bad_metadata = len(new_dark_entries) - len(apcheck_dark_entries) - logging.info('\t{} dark files ignored due to inconsistency in array size and metadata.'.format(lost_to_bad_metadata)) + logging.info( + "\tFiltering out ASIC tuning files removed {} dark files.".format( + num_asic + ) + ) + + new_dark_entries = self.filter_query_results( + new_dark_entries, datatype="dark" + ) + apcheck_dark_entries = pipeline_tools.aperture_size_check( + new_dark_entries, instrument, aperture + ) + lost_to_bad_metadata = len(new_dark_entries) - len( + apcheck_dark_entries + ) + logging.info( + "\t{} dark files ignored due to inconsistency in array size and metadata.".format( + lost_to_bad_metadata + ) + ) dark_uncal_files = locate_uncal_files(apcheck_dark_entries) - dark_uncal_files, run_darks = check_for_sufficient_files(dark_uncal_files, instrument, aperture, dark_file_count_threshold, 'darks') - 
dark_rate_files, dark_rate_files_to_copy = locate_rate_files(dark_uncal_files) + dark_uncal_files, run_darks = check_for_sufficient_files( + dark_uncal_files, + instrument, + aperture, + dark_file_count_threshold, + "darks", + ) + dark_rate_files, dark_rate_files_to_copy = locate_rate_files( + dark_uncal_files + ) else: run_darks = False - dark_uncal_files, dark_rate_files, dark_rate_files_to_copy = None, None, None + dark_uncal_files, dark_rate_files, dark_rate_files_to_copy = ( + None, + None, + None, + ) # Set up directories for the copied data - ensure_dir_exists(os.path.join(self.working_dir, 'data')) - ensure_dir_exists(os.path.join(self.output_dir, 'data')) - self.working_data_dir = os.path.join(self.working_dir, 'data/{}_{}'.format(self.instrument.lower(), self.aperture.lower())) - self.output_data_dir = os.path.join(self.output_dir, 'data/{}_{}'.format(self.instrument.lower(), self.aperture.lower())) + ensure_dir_exists(os.path.join(self.working_dir, "data")) + ensure_dir_exists(os.path.join(self.output_dir, "data")) + self.working_data_dir = os.path.join( + self.working_dir, + "data/{}_{}".format(self.instrument.lower(), self.aperture.lower()), + ) + self.output_data_dir = os.path.join( + self.output_dir, + "data/{}_{}".format(self.instrument.lower(), self.aperture.lower()), + ) ensure_dir_exists(self.working_data_dir) ensure_dir_exists(self.output_data_dir) # Copy files from filesystem if run_flats: - flat_uncal_files, flat_rate_files = self.map_uncal_and_rate_file_lists(flat_uncal_files, flat_rate_files, flat_rate_files_to_copy, 'flat') + flat_uncal_files, flat_rate_files = ( + self.map_uncal_and_rate_file_lists( + flat_uncal_files, + flat_rate_files, + flat_rate_files_to_copy, + "flat", + ) + ) if run_darks: - dark_uncal_files, dark_rate_files = self.map_uncal_and_rate_file_lists(dark_uncal_files, dark_rate_files, dark_rate_files_to_copy, 'dark') + dark_uncal_files, dark_rate_files = ( + self.map_uncal_and_rate_file_lists( + dark_uncal_files, + dark_rate_files, + dark_rate_files_to_copy, + "dark", + ) + ) # Run the bad pixel monitor if run_flats or run_darks: - self.process(flat_uncal_files, flat_rate_files, flat_file_count_threshold, dark_uncal_files, dark_rate_files, dark_file_count_threshold) + self.process( + flat_uncal_files, + flat_rate_files, + flat_file_count_threshold, + dark_uncal_files, + dark_rate_files, + dark_file_count_threshold, + ) updated_instruments.append(self.instrument) # Update the query history @@ -1275,34 +1581,35 @@ def run(self): else: num_flat_files = len(flat_uncal_files) - new_entry = {'instrument': self.instrument.upper(), - 'aperture': self.aperture, - 'dark_start_time_mjd': self.dark_query_start, - 'dark_end_time_mjd': self.query_end, - 'flat_start_time_mjd': self.flat_query_start, - 'flat_end_time_mjd': self.query_end, - 'dark_files_found': num_dark_files, - 'flat_files_found': num_flat_files, - 'run_bpix_from_darks': run_darks, - 'run_bpix_from_flats': run_flats, - 'run_monitor': run_flats or run_darks, - 'entry_date': datetime.datetime.now()} - with engine.begin() as connection: - connection.execute(self.query_table.__table__.insert(), new_entry) - logging.info('\tUpdated the query history table') + new_entry = { + "instrument": self.instrument.upper(), + "aperture": self.aperture, + "dark_start_time_mjd": self.dark_query_start, + "dark_end_time_mjd": self.query_end, + "flat_start_time_mjd": self.flat_query_start, + "flat_end_time_mjd": self.query_end, + "dark_files_found": num_dark_files, + "flat_files_found": num_flat_files, + 
"run_bpix_from_darks": run_darks, + "run_bpix_from_flats": run_flats, + "run_monitor": run_flats or run_darks, + "entry_date": datetime.datetime.now(datetime.timezone.utc), + } + entry = self.query_table(**new_entry) + entry.save() + logging.info("\tUpdated the query history table") # Update the figures to be shown in the web app. Only update figures # for instruments where the monitor ran for instrument in updated_instruments: BadPixelPlots(instrument) - logging.info(f'Updating web pages for: {updated_instruments}') - logging.info('Bad Pixel Monitor completed successfully.') - + logging.info(f"Updating web pages for: {updated_instruments}") + logging.info("Bad Pixel Monitor completed successfully.") -if __name__ == '__main__': - module = os.path.basename(__file__).strip('.py') +if __name__ == "__main__": + module = os.path.basename(__file__).strip(".py") start_time, log_file = monitor_utils.initialize_instrument_monitor(module) monitor = BadPixels() diff --git a/jwql/instrument_monitors/common_monitors/bias_monitor.py b/jwql/instrument_monitors/common_monitors/bias_monitor.py index f47bc6213..b1247ac58 100755 --- a/jwql/instrument_monitors/common_monitors/bias_monitor.py +++ b/jwql/instrument_monitors/common_monitors/bias_monitor.py @@ -35,7 +35,6 @@ import datetime import logging import os -from time import sleep from astropy.io import fits from astropy.stats import sigma_clip, sigma_clipped_stats @@ -47,21 +46,26 @@ from mpl_toolkits.axes_grid1 import make_axes_locatable # noqa: E402 (module import not at top) import numpy as np # noqa: E402 (module import not at top) from pysiaf import Siaf # noqa: E402 (module import not at top) -from sqlalchemy.sql.expression import and_ # noqa: E402 (module import not at top) -from jwql.database.database_interface import session, engine # noqa: E402 (module import not at top) -from jwql.database.database_interface import NIRCamBiasQueryHistory, NIRCamBiasStats, NIRISSBiasQueryHistory # noqa: E402 (module import not at top) -from jwql.database.database_interface import NIRISSBiasStats, NIRSpecBiasQueryHistory, NIRSpecBiasStats # noqa: E402 (module import not at top) from jwql.instrument_monitors import pipeline_tools # noqa: E402 (module import not at top) -from jwql.shared_tasks.shared_tasks import only_one, run_pipeline, run_parallel_pipeline # noqa: E402 (module import not at top) +from jwql.shared_tasks.shared_tasks import only_one, run_parallel_pipeline # noqa: E402 (module import not at top) from jwql.utils import instrument_properties, monitor_utils # noqa: E402 (module import not at top) -from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE # noqa: E402 (module import not at top) +from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE, ON_GITHUB_ACTIONS, ON_READTHEDOCS # noqa: E402 (module import not at top) from jwql.utils.logging_functions import log_info, log_fail # noqa: E402 (module import not at top) -from jwql.utils.monitor_utils import update_monitor_table # noqa: E402 (module import not at top) from jwql.utils.permissions import set_permissions # noqa: E402 (module import not at top) -from jwql.utils.utils import copy_files, ensure_dir_exists, filesystem_path, get_config # noqa: E402 (module import not at top) +from jwql.utils.utils import ensure_dir_exists, filesystem_path, get_config # noqa: E402 (module import not at top) from jwql.website.apps.jwql.monitor_pages.monitor_bias_bokeh import BiasMonitorPlots # noqa: E402 (module import not at top) +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set 
up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + + # Import * is okay here because this module specifically only contains database models + # for this monitor + from jwql.website.apps.jwql.monitor_models.bias import * # noqa: E402 (module level import not at top of file) + class Bias(): """Class for executing the bias monitor. @@ -201,15 +205,13 @@ def file_exists_in_database(self, filename): ``True`` if filename exists in the bias stats database. """ - query = session.query(self.stats_table) - results = query.filter(self.stats_table.uncal_filename == filename).all() + records = self.stats_table.objects.filter(uncal_filename__iexact=filename).all() - if len(results) != 0: + if len(records) != 0: file_exists = True else: file_exists = False - session.close() return file_exists def get_amp_medians(self, image, amps): @@ -346,16 +348,16 @@ def most_recent_search(self): where the bias monitor was run. """ - query = session.query(self.query_table).filter(and_(self.query_table.aperture == self.aperture, - self.query_table.run_monitor == True)).order_by(self.query_table.end_time_mjd).all() # noqa: E348 (comparison to true) + filters = {'aperture__iexact': self.aperture, + 'run_monitor': True} + record = self.query_table.objects.filter(**filters).order_by('-end_time_mjd').first() - if len(query) == 0: + if record is None: query_result = 59607.0 # a.k.a. Jan 28, 2022 == First JWST images (MIRI) logging.info(('\tNo query history for {}. Beginning search date will be set to {}.'.format(self.aperture, query_result))) else: - query_result = query[-1].end_time_mjd + query_result = record.end_time_mjd - session.close() return query_result def process(self, file_list): @@ -420,18 +422,18 @@ def process(self, file_list): 'mean': float(mean), 'median': float(median), 'stddev': float(stddev), - 'collapsed_rows': collapsed_rows.astype(float), - 'collapsed_columns': collapsed_columns.astype(float), - 'counts': counts.astype(float), - 'bin_centers': bin_centers.astype(float), + 'collapsed_rows': list(collapsed_rows.astype(float)), + 'collapsed_columns': list(collapsed_columns.astype(float)), + 'counts': list(counts.astype(float)), + 'bin_centers': list(bin_centers.astype(float)), 'entry_date': datetime.datetime.now() } for key in amp_medians.keys(): bias_db_entry[key] = float(amp_medians[key]) # Add this new entry to the bias database table - with engine.begin() as connection: - connection.execute(self.stats_table.__table__.insert(), bias_db_entry) + entry = self.stats_table(**bias_db_entry) + entry.save() # Don't print long arrays of numbers to the log file log_dict = {} @@ -545,8 +547,8 @@ def run(self): 'files_found': len(new_files), 'run_monitor': monitor_run, 'entry_date': datetime.datetime.now()} - with engine.begin() as connection: - connection.execute(self.query_table.__table__.insert(), new_entry) + entry = self.query_table(**new_entry) + entry.save() logging.info('\tUpdated the query history table') # Update the bias monitor plots diff --git a/jwql/instrument_monitors/common_monitors/dark_monitor.py b/jwql/instrument_monitors/common_monitors/dark_monitor.py index 534fdfbf9..ab4ff673b 100755 --- a/jwql/instrument_monitors/common_monitors/dark_monitor.py +++ b/jwql/instrument_monitors/common_monitors/dark_monitor.py @@ -656,7 +656,8 @@ def overplot_bad_pix(self, pix_type, coords, values): # Overplot the bad pixel locations 
badpixplots[pix_type] = self.plot.circle(x=f'pixels_x', y=f'pixels_y', - source=sources[pix_type], color=colors[pix_type]) + source=sources[pix_type], color=colors[pix_type], radius=0.5, + radius_dimension='y', radius_units='data') # Add to the legend if numpix > 0: diff --git a/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py b/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py index f07e48875..12e593f7c 100755 --- a/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py +++ b/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py @@ -2097,7 +2097,8 @@ def plot_every_change_data(data, mnem_name, units, show_plot=False, savefig=True source = ColumnDataSource(data={'x': val_times, 'y': val_data, 'dep': dependency_val}) ldata = fig.line(x='x', y='y', line_width=1, line_color=Turbo256[color], source=source, legend_label=key) - cdata = fig.circle(x='x', y='y', fill_color=Turbo256[color], size=8, source=source, legend_label=key) + cdata = fig.circle(x='x', y='y', fill_color=Turbo256[color], source=source, legend_label=key, radius=4, + radius_dimension='y', radius_units='screen') hover_tool = HoverTool(tooltips=[('Value', '@dep'), ('Data', '@y{1.11111}'), diff --git a/jwql/shared_tasks/run_pipeline.py b/jwql/shared_tasks/run_pipeline.py index ffc197310..a29de1a74 100755 --- a/jwql/shared_tasks/run_pipeline.py +++ b/jwql/shared_tasks/run_pipeline.py @@ -359,6 +359,9 @@ def run_save_jump(input_file, short_name, work_directory, instrument, ramp_fit=T pipe_type = args.pipe outputs = args.outputs step_args = args.step_args + # ***** FUTURE FIX ***** + # This needs to be removed once it's possible to do multi-core pipelines again + args.max_cores = "none" status_file = os.path.join(working_path, short_name + "_status.txt") with open(status_file, 'w') as out_file: diff --git a/jwql/utils/logging_functions.py b/jwql/utils/logging_functions.py index abd82a45e..c20bfce3d 100644 --- a/jwql/utils/logging_functions.py +++ b/jwql/utils/logging_functions.py @@ -57,6 +57,7 @@ def my_main_function(): import getpass import importlib import logging +import logging.config import os import pwd import socket @@ -76,8 +77,40 @@ def my_main_function(): from jwql.utils.utils import get_config, ensure_dir_exists +def filter_maker(level): + """ + This creates a logging filter that takes a log level name (e.g. "WARNING"), looks up + its numeric value (with DEBUG being the lowest value and CRITICAL the highest), and + returns True if and only if the logged message's level is at or below the filter level. + + The filter is needed because the logging system is designed so that it outputs + messages of LogLevel *or higher*, because the assumption is you want to know if + something happens that's more serious than what you're looking at. + + In this case, though, we're dividing printed-out log messages between the built-in + STDOUT and STDERR output streams, and we have assigned ERROR and above to go to + STDERR, while INFO and above go to STDOUT. So, by default, anything at ERROR or at + CRITICAL would go to *both* STDOUT and STDERR. This function lets you add a filter + that returns false for anything with a level above WARNING, so that STDOUT won't + duplicate those messages. + """ + level = getattr(logging, level) + + def filter(record): + return record.levelno <= level + + return filter + + + def configure_logging(module): - """Configure the log file with a standard logging format. + """ + Configure the log file with a standard logging format.
The format in question is + set up as follows: + + - DEBUG messages are ignored + - INFO and WARNING messages go to both the log file and sys.stdout + - ERROR and CRITICAL messages go to both the log file and sys.stderr + - existing loggers are disabled before this configuration is applied Parameters ---------- @@ -98,15 +131,14 @@ def configure_logging(module): # Determine log file location log_file = make_log_file(module) - # Make sure no other root lhandlers exist before configuring the logger - for handler in logging.root.handlers[:]: - logging.root.removeHandler(handler) + # Get the logging configuration dictionary + logging_config = get_config()['logging'] + + # Set the log file to the file that we got above + logging_config["handlers"]["file"]["filename"] = log_file - # Create the log file and set the permissions - logging.basicConfig(filename=log_file, - format='%(asctime)s %(levelname)s: %(message)s', - datefmt='%m/%d/%Y %H:%M:%S %p', - level=logging.INFO) + # Configure the logging system and set permissions for the file + logging.config.dictConfig(logging_config) print('Log file initialized to {}'.format(log_file)) set_permissions(log_file) @@ -243,7 +275,7 @@ def wrapped(*args, **kwargs): # nosec comment added to ignore bandit security check try: - environment = subprocess.check_output('conda env export', universal_newlines=True, shell=True) # nosec + environment = subprocess.check_output('conda env export', universal_newlines=True, shell=True) # nosec logging.info('Environment:') for line in environment.split('\n'): logging.info(line) diff --git a/jwql/website/apps/jwql/bokeh_containers.py b/jwql/website/apps/jwql/bokeh_containers.py index f4569747a..aba023568 100644 --- a/jwql/website/apps/jwql/bokeh_containers.py +++ b/jwql/website/apps/jwql/bokeh_containers.py @@ -278,7 +278,7 @@ def generic_telemetry_plot(times, values, name, nominal_value=None, yellow_limit fig = figure(width=400, height=400, x_axis_label='Date', y_axis_label='Voltage', x_axis_type='datetime') - fig.circle(times, values, size=4, color='navy', alpha=0.5) + fig.circle(times, values, color='navy', alpha=0.5, radius=2, radius_dimension='y', radius_units='screen') if nominal_value is not None: fig.line(times, np.repeat(nominal_value, len(times)), line_dash='dashed') diff --git a/jwql/website/apps/jwql/bokeh_dashboard.py b/jwql/website/apps/jwql/bokeh_dashboard.py index 536ef9710..a4469be82 100644 --- a/jwql/website/apps/jwql/bokeh_dashboard.py +++ b/jwql/website/apps/jwql/bokeh_dashboard.py @@ -225,9 +225,9 @@ def dashboard_disk_usage(self): y_axis_label='Disk Space (TB)') plots[data['shortname']].line(x='date', y='available', source=source, legend_label='Available', line_dash='dashed', line_color='#C85108', line_width=3) - plots[data['shortname']].circle(x='date', y='available', source=source,color='#C85108', size=10) + plots[data['shortname']].circle(x='date', y='available', source=source,color='#C85108', radius=5, radius_dimension='y', radius_units='screen') plots[data['shortname']].line(x='date', y='used', source=source, legend_label='Used', line_dash='dashed', line_color='#355C7D', line_width=3) - plots[data['shortname']].circle(x='date', y='used', source=source, color='#355C7D', size=10) + plots[data['shortname']].circle(x='date', y='used', source=source, color='#355C7D', radius=5, radius_dimension='y', radius_units='screen') plots[data['shortname']].xaxis.formatter = DatetimeTickFormatter(hours="%H:%M %d %B %Y", days="%d %B %Y", @@ -289,7 +289,7 @@ def dashboard_central_store_data_volume(self): # Plot 
the results source = ColumnDataSource(results) plot.line(x='date', y='used', source=source, line_color=color, line_dash='dashed', legend_label=area, line_width=3) - plot.circle(x='date', y='used', source=source, color=color, size=10) + plot.circle(x='date', y='used', source=source, color=color, radius=5, radius_dimension='y', radius_units='screen') hover_tool = HoverTool(tooltips=[('Used:', f'@used TB'), ('Date:', '@date{%d %b %Y}') @@ -331,7 +331,7 @@ def dashboard_central_store_data_volume(self): # Plot the results legend_str = 'File volume' cen_store_plot.line(x='date', y='used', source=cen_store_source, legend_label=legend_str, line_dash='dashed', line_color='#355C7D', line_width=3) - cen_store_plot.circle(x='date', y='used', source=cen_store_source, color='#355C7D', size=10) + cen_store_plot.circle(x='date', y='used', source=cen_store_source, color='#355C7D', radius=5, radius_dimension='y', radius_units='screen') cen_store_plot.xaxis.formatter = DatetimeTickFormatter(hours="%H:%M %d %B %Y", days="%d %B %Y", months="%d %B %Y", diff --git a/jwql/website/apps/jwql/data_containers.py b/jwql/website/apps/jwql/data_containers.py index 218488c42..2eb0a00fd 100644 --- a/jwql/website/apps/jwql/data_containers.py +++ b/jwql/website/apps/jwql/data_containers.py @@ -2057,12 +2057,15 @@ def text_scrape(prop_id): program_meta : dict Dictionary containing information about program """ + # Ensure prop_id is a 5-digit string + prop_id = str(prop_id).zfill(5) # Generate url - url = 'http://www.stsci.edu/cgi-bin/get-proposal-info?id=' + str(prop_id) + '&submit=Go&observatory=JWST' + url = f'https://www.stsci.edu/jwst-program-info/program/?program={prop_id}' html = BeautifulSoup(requests.get(url).text, 'lxml') not_available = "not available via this interface" in html.text not_available |= "temporarily unable" in html.text + not_available |= "internal error" in html.text program_meta = {} program_meta['prop_id'] = prop_id @@ -2144,7 +2147,7 @@ def thumbnails_ajax(inst, proposal, obs_num=None): ---------- inst : str Name of JWST instrument - proposal : str (optional) + proposal : str Number of APT proposal to filter obs_num : str (optional) Observation number @@ -2193,6 +2196,12 @@ def thumbnails_ajax(inst, proposal, obs_num=None): # Gather data for each rootname, and construct a list of all observations # in the proposal for rootname in rootnames: + # Skip over unsupported filenames + # e.g. jw02279-o001_s000... are spec2 products for WFSS with one file per source + # Any filename with a dash after the proposal number is either this spec2 product + # or a level 3 product + if f'jw{proposal}-' in rootname: + continue # Parse filename filename_dict = filename_parser(rootname) @@ -2200,20 +2209,9 @@ def thumbnails_ajax(inst, proposal, obs_num=None): # Weed out file types that are not supported by generate_preview_images if 'stage_3' in filename_dict['filename_type']: continue - else: - # Temporary workaround for noncompliant files in filesystem - filename_dict = {'activity': rootname[17:19], - 'detector': rootname[26:], - 'exposure_id': rootname[20:25], - 'observation': rootname[7:10], - 'parallel_seq_id': rootname[16], - 'program_id': rootname[2:7], - 'visit': rootname[10:13], - 'visit_group': rootname[14:16], - 'group_root': rootname[:26]} - logging.warning((f'While running thumbnails_ajax() on rootname {rootname}, ' - 'filename_parser() failed to recognize the file pattern.')) + # Skip over files not recognized by the filename_parser + continue # Get list of available filenames and exposure start times. 
All files with a given # rootname will have the same exposure start time, so just keep the first. diff --git a/jwql/website/apps/jwql/migrations/0026_alter_fgsdarkdarkcurrent_amplifier_and_more.py b/jwql/website/apps/jwql/migrations/0026_alter_fgsdarkdarkcurrent_amplifier_and_more.py new file mode 100644 index 000000000..25d6b921b --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0026_alter_fgsdarkdarkcurrent_amplifier_and_more.py @@ -0,0 +1,744 @@ +# Generated by Django 5.0.7 on 2024-08-30 15:18 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0025_rootfileinfo_comment_rootfileinfo_exp_comment'), + ] + + operations = [ + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_width', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), 
+ ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), 
size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_width', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, 
null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_width', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + 
migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_peak1', + 
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='double_gauss_peak2',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='double_gauss_width1',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='double_gauss_width2',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='gauss_amplitude',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='gauss_chisq',
+            field=models.FloatField(blank=True, default=0.0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='gauss_peak',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='gauss_width',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='hist_amplitudes',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='hist_dark_values',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='mean',
+            field=models.FloatField(blank=True, default=0.0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='mean_dark_image_file',
+            field=models.CharField(default='empty', max_length=1000, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='readpattern',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='stdev',
+            field=models.FloatField(blank=True, default=0.0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkpixelstats',
+            name='baseline_file',
+            field=models.CharField(default='empty', max_length=1000, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkpixelstats',
+            name='detector',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkpixelstats',
+            name='mean_dark_image_file',
+            field=models.CharField(default='empty', max_length=1000, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkpixelstats',
+            name='type',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkqueryhistory',
+            name='aperture',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkqueryhistory',
+            name='files_found',
+            field=models.IntegerField(blank=True, default=0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkqueryhistory',
+            name='instrument',
+            field=models.CharField(blank=True, default='empty', max_length=7, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkqueryhistory',
+            name='readpattern',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='amplifier',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='aperture',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='double_gauss_amplitude1',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='double_gauss_amplitude2',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='double_gauss_chisq',
+            field=models.FloatField(blank=True, default=0.0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='double_gauss_peak1',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='double_gauss_peak2',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='double_gauss_width1',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='double_gauss_width2',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='gauss_amplitude',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='gauss_chisq',
+            field=models.FloatField(blank=True, default=0.0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='gauss_peak',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='gauss_width',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='hist_amplitudes',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='hist_dark_values',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='mean',
+            field=models.FloatField(blank=True, default=0.0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='mean_dark_image_file',
+            field=models.CharField(default='empty', max_length=1000, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='readpattern',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='stdev',
+            field=models.FloatField(blank=True, default=0.0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkpixelstats',
+            name='baseline_file',
+            field=models.CharField(default='empty', max_length=1000, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkpixelstats',
+            name='detector',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkpixelstats',
+            name='mean_dark_image_file',
+            field=models.CharField(default='empty', max_length=1000, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkpixelstats',
+            name='type',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkqueryhistory',
+            name='aperture',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkqueryhistory',
+            name='files_found',
+            field=models.IntegerField(blank=True, default=0, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkqueryhistory',
+            name='instrument',
+            field=models.CharField(blank=True, default='empty', max_length=7, null=True),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkqueryhistory',
+            name='readpattern',
+            field=models.CharField(blank=True, default='empty', max_length=40, null=True),
+        ),
+        migrations.AlterField(
+            model_name='rootfileinfo',
+            name='exp_comment',
+            field=models.TextField(blank=True, default='', help_text='Anomaly Comment Field'),
+        ),
+    ]
diff --git a/jwql/website/apps/jwql/migrations/0027_alter_fgsbadpixelstats_source_files_and_more.py b/jwql/website/apps/jwql/migrations/0027_alter_fgsbadpixelstats_source_files_and_more.py
new file mode 100644
index 000000000..e6f785d02
--- /dev/null
+++ b/jwql/website/apps/jwql/migrations/0027_alter_fgsbadpixelstats_source_files_and_more.py
@@ -0,0 +1,89 @@
+# Generated by Django 5.1 on 2024-11-12 19:09
+
+import django.contrib.postgres.fields
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('jwql', '0026_alter_fgsdarkdarkcurrent_amplifier_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='fgsbadpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='fgsdarkdarkcurrent',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='fgsdarkpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='miribadpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='miridarkdarkcurrent',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='miridarkpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nircambadpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nircamdarkdarkcurrent',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nircamdarkpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissbadpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkdarkcurrent',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirissdarkpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecbadpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkdarkcurrent',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+        migrations.AlterField(
+            model_name='nirspecdarkpixelstats',
+            name='source_files',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None),
+        ),
+    ]
diff --git a/jwql/website/apps/jwql/monitor_models/bad_pixel.py b/jwql/website/apps/jwql/monitor_models/bad_pixel.py
index 4d02a0c1c..f50a5a261 100644
--- a/jwql/website/apps/jwql/monitor_models/bad_pixel.py
+++ b/jwql/website/apps/jwql/monitor_models/bad_pixel.py
@@ -29,7 +29,7 @@
 from django.contrib.postgres.fields import ArrayField
 from django.db import models
 
-from jwql.utils.constants import MAX_LEN_APERTURE, MAX_LEN_DETECTOR, MAX_LEN_FILENAME, MAX_LEN_INSTRUMENT, MAX_LEN_TYPE
+from jwql.utils.constants import MAX_LEN_APERTURE, MAX_LEN_DETECTOR, MAX_LEN_FILENAME, MAX_LEN_INSTRUMENT, MAX_LEN_TYPE, DEFAULT_MODEL_CHARFIELD
 
 
 class FGSBadPixelQueryHistory(models.Model):
@@ -58,7 +58,7 @@ class FGSBadPixelStats(models.Model):
     x_coord = ArrayField(models.IntegerField())
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True)
-    source_files = models.TextField(blank=True, null=True)  # This field type is a guess.
+    source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -96,7 +96,7 @@ class MIRIBadPixelStats(models.Model):
     x_coord = ArrayField(models.IntegerField())
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True)
-    source_files = models.TextField(blank=True, null=True)  # This field type is a guess.
+    source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -134,7 +134,7 @@ class NIRCamBadPixelStats(models.Model):
     x_coord = ArrayField(models.IntegerField())
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True)
-    source_files = models.TextField(blank=True, null=True)  # This field type is a guess.
+    source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -172,7 +172,7 @@ class NIRISSBadPixelStats(models.Model):
     x_coord = ArrayField(models.IntegerField())
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True)
-    source_files = models.TextField(blank=True, null=True)  # This field type is a guess.
+    source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -210,7 +210,7 @@ class NIRSpecBadPixelStats(models.Model):
     x_coord = ArrayField(models.IntegerField())
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True)
-    source_files = models.TextField(blank=True, null=True)  # This field type is a guess.
+    source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
diff --git a/jwql/website/apps/jwql/monitor_models/dark_current.py b/jwql/website/apps/jwql/monitor_models/dark_current.py
index 0783d66d8..b467d062b 100644
--- a/jwql/website/apps/jwql/monitor_models/dark_current.py
+++ b/jwql/website/apps/jwql/monitor_models/dark_current.py
@@ -49,7 +49,7 @@ class FGSDarkDarkCurrent(models.Model):
     mean = models.FloatField(default=0., blank=True, null=True)
     stdev = models.FloatField(default=0., blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -81,7 +81,7 @@ class FGSDarkPixelStats(models.Model):
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -118,7 +118,7 @@ class MIRIDarkDarkCurrent(models.Model):
     mean = models.FloatField(default=0., blank=True, null=True)
     stdev = models.FloatField(default=0., blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -150,7 +150,7 @@ class MIRIDarkPixelStats(models.Model):
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -187,7 +187,7 @@ class NIRCamDarkDarkCurrent(models.Model):
     mean = models.FloatField(default=0., blank=True, null=True)
     stdev = models.FloatField(default=0., blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -219,7 +219,7 @@ class NIRCamDarkPixelStats(models.Model):
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -256,7 +256,7 @@ class NIRISSDarkDarkCurrent(models.Model):
     mean = models.FloatField(default=0., blank=True, null=True)
     stdev = models.FloatField(default=0., blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -288,7 +288,7 @@ class NIRISSDarkPixelStats(models.Model):
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -325,7 +325,7 @@ class NIRSpecDarkDarkCurrent(models.Model):
     mean = models.FloatField(default=0., blank=True, null=True)
     stdev = models.FloatField(default=0., blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
@@ -357,7 +357,7 @@ class NIRSpecDarkPixelStats(models.Model):
     y_coord = ArrayField(models.IntegerField())
     type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True)
     source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names",
-                                               default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True))
+                                               default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True)
     obs_start_time = models.DateTimeField(blank=True, null=True)
     obs_mid_time = models.DateTimeField(blank=True, null=True)
     obs_end_time = models.DateTimeField(blank=True, null=True)
diff --git a/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py b/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py
index ee2c902f0..8830a6b3d 100755
--- a/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py
+++ b/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py
@@ -32,20 +32,23 @@
 import numpy as np
 from sqlalchemy import and_, func
 
-from jwql.database.database_interface import get_unique_values_per_column, session
-from jwql.database.database_interface import NIRCamBadPixelQueryHistory, NIRCamBadPixelStats
-from jwql.database.database_interface import NIRISSBadPixelQueryHistory, NIRISSBadPixelStats
-from jwql.database.database_interface import MIRIBadPixelQueryHistory, MIRIBadPixelStats
-from jwql.database.database_interface import NIRSpecBadPixelQueryHistory, NIRSpecBadPixelStats
-from jwql.database.database_interface import FGSBadPixelQueryHistory, FGSBadPixelStats
 from jwql.utils.constants import BAD_PIXEL_MONITOR_MAX_POINTS_TO_PLOT, BAD_PIXEL_TYPES, DARKS_BAD_PIXEL_TYPES
 from jwql.utils.constants import DETECTOR_PER_INSTRUMENT, FLATS_BAD_PIXEL_TYPES, JWST_INSTRUMENT_NAMES_MIXEDCASE
+from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS
 from jwql.utils.permissions import set_permissions
 from jwql.utils.utils import filesystem_path, get_config, read_png, save_png
+from jwql.website.apps.jwql.models import get_unique_values_per_column
 
 SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
 OUTPUT_DIR = get_config()['outputs']
 
+if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS:
+    # Need to set up django apps before we can access the models
+    import django  # noqa: E402 (module level import not at top of file)
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings")
+    django.setup()
+    from jwql.website.apps.jwql.monitor_models.bad_pixel import *
+
 
 class BadPixelPlots():
     """Class for creating the bad pixel monitor plots and figures to be displayed
@@ -273,40 +276,33 @@ def __init__(self, pixel_table, instrument, detector):
             self.get_trending_data(badtype)
 
     def get_most_recent_entry(self):
-        """Get all nedded data from the database tables.
+ """For the given detector, get the latest entry for each bad pixel type """ - # For the given detector, get the latest entry for each bad pixel type - subq = (session - .query(self.pixel_table.type, func.max(self.pixel_table.entry_date).label("max_created")) - .filter(self.pixel_table.detector == self.detector) - .group_by(self.pixel_table.type) - .subquery() - ) - - query = (session.query(self.pixel_table) - .join(subq, self.pixel_table.entry_date == subq.c.max_created) - ) - - latest_entries_by_type = query.all() - session.close() - - # Organize the results - for row in latest_entries_by_type: - self.new_bad_pix[row.type] = (row.x_coord, row.y_coord) - self.background_file[row.type] = row.source_files[0] - self.obs_start_time[row.type] = row.obs_start_time - self.obs_end_time[row.type] = row.obs_end_time - self.num_files[row.type] = len(row.source_files) - self.baseline_file[row.type] = row.baseline_file - - # If no data is retrieved from the database at all, add a dummy generic entry - if len(self.new_bad_pix.keys()) == 0: - self.new_bad_pix[self.badtypes[0]] = ([], []) - self.background_file[self.badtypes[0]] = '' - self.obs_start_time[self.badtypes[0]] = datetime.datetime.today() - self.obs_end_time[self.badtypes[0]] = datetime.datetime.today() - self.num_files[self.badtypes[0]] = 0 - self.baseline_file[self.badtypes[0]] = '' + + bad_pixel_types = self.pixel_table.objects.values('type').distinct() + + for bad_type in bad_pixel_types: + bad_filters = {'detector__iexact': self.detector, + 'type': bad_type} + + record = (self.pixel_table.objects + .filter(**bad_filters) + .order_by("-obs_end_time").first()) + + if record is None: + self.new_bad_pix[bad_type] = ([], []) + self.background_file[bad_type] = '' + self.obs_start_time[bad_type] = datetime.datetime.today() + self.obs_end_time[bad_type] = datetime.datetime.today() + self.num_files[bad_type] = 0 + self.baseline_file[bad_type] = '' + else: + self.new_bad_pix[bad_type] = (record.x_coord, record.y_coord) + self.background_file[bad_type] = record.source_file + self.obs_start_time[bad_type] = record.obs_start_time + self.obs_end_time[bad_type] = record.obs_end_time + self.num_files[bad_type] = len(record.source_files) + self.baseline_file[bad_type] = record.baseline_file def get_trending_data(self, badpix_type): """Retrieve and organize the data needed to produce the trending plot. @@ -316,21 +312,20 @@ def get_trending_data(self, badpix_type): badpix_type : str The type of bad pixel to query for, e.g. 
'dead' """ - # Query database for all data in the table with a matching detector and bad pixel type - all_entries_by_type = session.query(self.pixel_table.type, self.pixel_table.detector, func.array_length(self.pixel_table.x_coord, 1), - self.pixel_table.obs_mid_time) \ - .filter(and_(self.pixel_table.detector == self.detector, self.pixel_table.type == badpix_type)) \ - .all() + filters = {"type": badpix_type, + "detector": self.detector} + + all_entries_by_type = self.pixel_table.objects.filter(**filters).all() - # Organize the results num_pix = [] times = [] + for i, row in enumerate(all_entries_by_type): if i == 0: - badtype = row[0] - detector = row[1] - num_pix.append(row[2]) - times.append(row[3]) + badtype = row.type + detector = row.detector + num_pix.append(len(row.x_coord)) + times.append(row.obs_mid_time) # If there was no data in the database, create an empty entry if len(num_pix) == 0: @@ -339,9 +334,7 @@ def get_trending_data(self, badpix_type): num_pix = [0] times = [datetime.datetime.today()] - # Add results to self.trending_data - self.trending_data[badpix_type] = (detector, num_pix, times) - session.close() + self.trending_data[badtype] = (detector, num_pix, times) class NewBadPixPlot(): diff --git a/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py b/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py index 77d799a22..9fe7fcd49 100644 --- a/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py +++ b/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py @@ -20,30 +20,33 @@ monitor_template.input_parameters = ('NIRCam', 'NRCA1_FULL') """ -from datetime import datetime, timedelta +from datetime import datetime import os -from astropy.stats import sigma_clip - from bokeh.embed import components, file_html from bokeh.layouts import layout -from bokeh.models import ColorBar, ColumnDataSource, DatetimeTickFormatter, HoverTool, Legend, LinearAxis +from bokeh.models import ColumnDataSource, DatetimeTickFormatter, HoverTool from bokeh.models.layouts import Tabs, TabPanel from bokeh.plotting import figure, output_file, save from bokeh.resources import CDN -from datetime import datetime, timedelta import numpy as np import pandas as pd -from PIL import Image -from sqlalchemy import func -from jwql.bokeh_templating import BokehTemplate -from jwql.database.database_interface import get_unique_values_per_column, NIRCamBiasStats, NIRISSBiasStats, NIRSpecBiasStats, session -from jwql.utils.constants import FULL_FRAME_APERTURES, JWST_INSTRUMENT_NAMES_MIXEDCASE +from jwql.utils.constants import FULL_FRAME_APERTURES, JWST_INSTRUMENT_NAMES_MIXEDCASE, ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils.permissions import set_permissions from jwql.utils.utils import read_png from jwql.website.apps.jwql.bokeh_utils import PlaceholderPlot +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + + # Import * is okay here because this module specifically only contains database models + # for this monitor + from jwql.website.apps.jwql.monitor_models.bias import * # noqa: E402 (module level import not at top of file) + SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) TEMPLATE_DIR = os.path.join(SCRIPT_DIR, '../templates') @@ -67,8 +70,8 @@ class BiasMonitorData(): Latest bias data for a particular aperture, from the stats_table - stats_table : 
-        Bias stats sqlalchemy table
+    stats_table : jwql.website.apps.jwql.monitor_models.bias.NIRCamBiasStats
+        Bias stats Django model
 
     trending_data : pandas.DataFrame
         Data from the stats table to be used for the trending plot
@@ -93,35 +96,24 @@ def retrieve_trending_data(self, aperture):
         """
         # Query database for all data in bias stats with a matching aperture,
         # and sort the data by exposure start time.
-        tmp_trending_data = session.query(self.stats_table.amp1_even_med,
-                                          self.stats_table.amp1_odd_med,
-                                          self.stats_table.amp2_even_med,
-                                          self.stats_table.amp2_odd_med,
-                                          self.stats_table.amp3_even_med,
-                                          self.stats_table.amp3_odd_med,
-                                          self.stats_table.amp4_even_med,
-                                          self.stats_table.amp4_odd_med,
-                                          self.stats_table.expstart,
-                                          self.stats_table.uncal_filename) \
-            .filter(self.stats_table.aperture == aperture) \
-            .order_by(self.stats_table.expstart) \
-            .all()
-
-        session.close()
+        columns = ['amp1_even_med', 'amp1_odd_med', 'amp2_even_med', 'amp2_odd_med',
+                   'amp3_even_med', 'amp3_odd_med', 'amp4_even_med', 'amp4_odd_med',
+                   'expstart', 'uncal_filename']
+        tmp_trending_data = self.stats_table.objects.filter(aperture__iexact=aperture).order_by('expstart').all().values(*columns)
 
         # Convert the query results to a pandas dataframe
-        self.trending_data = pd.DataFrame(tmp_trending_data, columns=['amp1_even_med', 'amp1_odd_med',
-                                                                      'amp2_even_med', 'amp2_odd_med',
-                                                                      'amp3_even_med', 'amp3_odd_med',
-                                                                      'amp4_even_med', 'amp4_odd_med',
-                                                                      'expstart_str', 'uncal_filename'])
-        uncal_basename = [os.path.basename(e) for e in self.trending_data['uncal_filename']]
-        self.trending_data['uncal_filename'] = uncal_basename
-
-        # Add a column of expstart values that are datetime objects
-        format_data = "%Y-%m-%dT%H:%M:%S.%f"
-        datetimes = [datetime.strptime(entry, format_data) for entry in self.trending_data['expstart_str']]
-        self.trending_data['expstart'] = datetimes
+        if len(tmp_trending_data) != 0:
+            self.trending_data = pd.DataFrame.from_records(tmp_trending_data)
+            uncal_basename = [os.path.basename(e) for e in self.trending_data['uncal_filename']]
+            self.trending_data['uncal_filename'] = uncal_basename
+
+            # Add a column of expstart values that are datetime objects
+            format_data = "%Y-%m-%dT%H:%M:%S.%f"
+            datetimes = [datetime.strptime(entry, format_data) for entry in self.trending_data['expstart']]
+            self.trending_data['expstart_str'] = self.trending_data['expstart']
+            self.trending_data['expstart'] = datetimes
+        else:
+            self.trending_data = pd.DataFrame(None, columns=columns + ['expstart_str'])
 
     def retrieve_latest_data(self, aperture):
         """Query the database table to get the data needed for the non-trending
@@ -132,40 +124,23 @@ def retrieve_latest_data(self, aperture):
         aperture : str
             Aperture name (e.g. NRCA1_FULL)
         """
-        subq = (session.query(self.stats_table.aperture, func.max(self.stats_table.expstart).label("max_created"))
-                .group_by(self.stats_table.aperture)
-                .subquery()
-                )
-
-        query = (session.query(self.stats_table.aperture,
-                               self.stats_table.uncal_filename,
-                               self.stats_table.cal_filename,
-                               self.stats_table.cal_image,
-                               self.stats_table.expstart,
-                               self.stats_table.collapsed_rows,
-                               self.stats_table.collapsed_columns,
-                               self.stats_table.counts,
-                               self.stats_table.bin_centers,
-                               self.stats_table.entry_date)
-                 .filter(self.stats_table.aperture == aperture)
-                 .order_by(self.stats_table.entry_date) \
-                 .join(subq, self.stats_table.expstart == subq.c.max_created)
-                 )
-
-        latest_data = query.all()
-        session.close()
-
-        # Put the returned data in a dataframe. Include only the most recent entry.
-        # The query has already filtered to include only entries using the latest
-        # expstart value.
-        self.latest_data = pd.DataFrame(latest_data[-1:], columns=['aperture', 'uncal_filename', 'cal_filename',
-                                                                   'cal_image', 'expstart_str', 'collapsed_rows',
-                                                                   'collapsed_columns', 'counts', 'bin_centers',
-                                                                   'entry_date'])
-        # Add a column of expstart values that are datetime objects
-        format_data = "%Y-%m-%dT%H:%M:%S.%f"
-        datetimes = [datetime.strptime(entry, format_data) for entry in self.latest_data['expstart_str']]
-        self.latest_data['expstart'] = datetimes
+        # Query database for the most recent bias stats entry with a matching aperture
+        columns = ['aperture', 'uncal_filename', 'cal_filename', 'cal_image', 'expstart',
+                   'collapsed_rows', 'collapsed_columns', 'counts', 'bin_centers', 'entry_date']
+        tmp_data = self.stats_table.objects.filter(aperture__iexact=aperture).order_by('-expstart').all().values(*columns).first()
+
+        # Put the returned data in a dataframe
+        if tmp_data is not None:
+            # Orient and transpose needed due to list column entries e.g. counts
+            self.latest_data = pd.DataFrame.from_dict(tmp_data, orient='index').transpose()
+
+            # Add a column of expstart values that are datetime objects
+            format_data = "%Y-%m-%dT%H:%M:%S.%f"
+            datetimes = [datetime.strptime(entry, format_data) for entry in self.latest_data['expstart']]
+            self.latest_data['expstart_str'] = self.latest_data['expstart']
+            self.latest_data['expstart'] = datetimes
+        else:
+            self.latest_data = pd.DataFrame(None, columns=columns + ['expstart_str'])
 
 
 class BiasMonitorPlots():
@@ -232,7 +207,7 @@ def __init__(self, instrument):
         self.db = BiasMonitorData(self.instrument)
 
         # Now we need to loop over the available apertures and create plots for each
-        self.available_apertures = get_unique_values_per_column(self.db.stats_table, 'aperture')
+        self.available_apertures = sorted(self.db.stats_table.objects.values_list('aperture', flat=True).distinct())
 
         # Make sure all full frame apertures are present. If there are no data for a
         # particular full frame entry, then produce an empty plot, in order to
@@ -298,12 +273,12 @@ def ensure_all_full_frame_apertures(self):
             self.available_apertures.append(ap)
 
     def modify_bokeh_saved_html(self):
-        """Given an html string produced by Bokeh when saving bad pixel monitor plots,
+        """Given an html string produced by Bokeh when saving bias monitor plots,
         make tweaks such that the page follows the general JWQL page formatting.
         """
         # Insert into our html template and save
         temp_vars = {'inst': self.instrument, 'plot_script': self.script, 'plot_div': self.div}
-        html_lines = file_html(self.tabs, CDN, title=f'{self.instrument} bias monitor',
+        html_lines = file_html(self.tabs, CDN, title=f'{self.instrument} bias monitor',
                                template=self.html_file, template_variables=temp_vars)
 
         lines = html_lines.split('\n')