diff --git a/CHANGELOG.md b/CHANGELOG.md index 5e35c53..b87ebae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,20 @@ +# toasty 0.15.0 (2022-01-14) + +- Start adding metadata about data min/max values and suggested pixel range cuts + for tiled FITS data sets, for either TOAST or HiPS-based data processing (#71, + @imbasimba). This will allow the renderer to provide nice default settings + when opening up FITS datasets. +- Add support for 32-bit integer FITS (#72, @imbasimba) +- Allow Astropy's WCS code to fix non-standard FITS headers, which increases our + compatibility with more FITS datasets in the wild (#73, @imbasimba) +- Add the `--fits-wcs` argument to `tile-study`, to apply coordinates to an RGB + image based on the data contained in a separate FITS file (#74, @pkgw). This + is especially useful if you have an image that Astrometry.Net can solve, since + that service produces small downloadable FITS files with its solution + information. +- Reorganize the API docs a bit (#74, @pkgw) + + # toasty 0.14.0 (2021-12-13) - Expand the all-in-one FITS API, [`toasty.tile_fits`], to invoke the diff --git a/ci/azure-build-and-test.yml b/ci/azure-build-and-test.yml index 24c6f56..3e9163d 100644 --- a/ci/azure-build-and-test.yml +++ b/ci/azure-build-and-test.yml @@ -5,36 +5,36 @@ parameters: type: object default: - - name: linux_37 - vmImage: ubuntu-20.04 - vars: - PYTHON_SERIES: "3.7" - - name: linux_38 vmImage: ubuntu-20.04 vars: PYTHON_SERIES: "3.8" - - name: macos_37 - vmImage: macos-10.15 + - name: linux_39 + vmImage: ubuntu-20.04 vars: - PYTHON_SERIES: "3.7" + PYTHON_SERIES: "3.9" - name: macos_38 vmImage: macos-10.15 vars: PYTHON_SERIES: "3.8" - - name: windows_37 - vmImage: windows-2019 + - name: macos_39 + vmImage: macos-10.15 vars: - PYTHON_SERIES: "3.7" + PYTHON_SERIES: "3.9" - name: windows_38 vmImage: windows-2019 vars: PYTHON_SERIES: "3.8" + - name: windows_39 + vmImage: windows-2019 + vars: + PYTHON_SERIES: "3.9" + jobs: - ${{ each build in parameters.builds }}: - job: ${{ format('build_{0}', build.name) }} diff --git a/docs/api.rst b/docs/api.rst deleted file mode 100644 index 12b35cc..0000000 --- a/docs/api.rst +++ /dev/null @@ -1,83 +0,0 @@ -==================== -Python API Reference -==================== - -.. automodapi:: toasty - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.builder - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.cli - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.collection - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.fits_tiler - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.image - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.merge - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.multi_tan - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.openexr - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.par_util - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.pipeline - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.pipeline.astropix - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.pipeline.azure_io - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.pipeline.cli - :no-inheritance-diagram: - :no-inherited-members: - -.. 
automodapi:: toasty.pipeline.djangoplicity - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.pipeline.local_io - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.pyramid - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.samplers - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.study - :no-inheritance-diagram: - :no-inherited-members: - -.. automodapi:: toasty.toast - :no-inheritance-diagram: - :no-inherited-members: diff --git a/docs/api/toasty.builder.rst b/docs/api/toasty.builder.rst new file mode 100644 index 0000000..7d7f275 --- /dev/null +++ b/docs/api/toasty.builder.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.builder + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.cli.rst b/docs/api/toasty.cli.rst new file mode 100644 index 0000000..b3e8949 --- /dev/null +++ b/docs/api/toasty.cli.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.cli + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.collection.rst b/docs/api/toasty.collection.rst new file mode 100644 index 0000000..6df7e1b --- /dev/null +++ b/docs/api/toasty.collection.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.collection + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.fits_tiler.rst b/docs/api/toasty.fits_tiler.rst new file mode 100644 index 0000000..3c547c4 --- /dev/null +++ b/docs/api/toasty.fits_tiler.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.fits_tiler + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.image.rst b/docs/api/toasty.image.rst new file mode 100644 index 0000000..1692c2d --- /dev/null +++ b/docs/api/toasty.image.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.image + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.merge.rst b/docs/api/toasty.merge.rst new file mode 100644 index 0000000..ee9b0c7 --- /dev/null +++ b/docs/api/toasty.merge.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.merge + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.multi_tan.rst b/docs/api/toasty.multi_tan.rst new file mode 100644 index 0000000..6367250 --- /dev/null +++ b/docs/api/toasty.multi_tan.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.multi_tan + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.openexr.rst b/docs/api/toasty.openexr.rst new file mode 100644 index 0000000..0043e85 --- /dev/null +++ b/docs/api/toasty.openexr.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.openexr + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.par_util.rst b/docs/api/toasty.par_util.rst new file mode 100644 index 0000000..46f7290 --- /dev/null +++ b/docs/api/toasty.par_util.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.par_util + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.pipeline.astropix.rst b/docs/api/toasty.pipeline.astropix.rst new file mode 100644 index 0000000..f0e358b --- /dev/null +++ b/docs/api/toasty.pipeline.astropix.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.pipeline.astropix + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.pipeline.azure_io.rst b/docs/api/toasty.pipeline.azure_io.rst new file mode 100644 index 0000000..7d4fb55 --- /dev/null +++ b/docs/api/toasty.pipeline.azure_io.rst @@ -0,0 +1,3 @@ +.. 
automodapi:: toasty.pipeline.azure_io + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.pipeline.cli.rst b/docs/api/toasty.pipeline.cli.rst new file mode 100644 index 0000000..6d4e609 --- /dev/null +++ b/docs/api/toasty.pipeline.cli.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.pipeline.cli + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.pipeline.djangoplicity.rst b/docs/api/toasty.pipeline.djangoplicity.rst new file mode 100644 index 0000000..dd7daea --- /dev/null +++ b/docs/api/toasty.pipeline.djangoplicity.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.pipeline.djangoplicity + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.pipeline.local_io.rst b/docs/api/toasty.pipeline.local_io.rst new file mode 100644 index 0000000..1fe501d --- /dev/null +++ b/docs/api/toasty.pipeline.local_io.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.pipeline.local_io + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.pipeline.rst b/docs/api/toasty.pipeline.rst new file mode 100644 index 0000000..0ca2dcf --- /dev/null +++ b/docs/api/toasty.pipeline.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.pipeline + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.pyramid.rst b/docs/api/toasty.pyramid.rst new file mode 100644 index 0000000..681f93e --- /dev/null +++ b/docs/api/toasty.pyramid.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.pyramid + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.rst b/docs/api/toasty.rst new file mode 100644 index 0000000..5b50f05 --- /dev/null +++ b/docs/api/toasty.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.samplers.rst b/docs/api/toasty.samplers.rst new file mode 100644 index 0000000..d387a77 --- /dev/null +++ b/docs/api/toasty.samplers.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.samplers + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.study.rst b/docs/api/toasty.study.rst new file mode 100644 index 0000000..8142be2 --- /dev/null +++ b/docs/api/toasty.study.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.study + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/api/toasty.toast.rst b/docs/api/toasty.toast.rst new file mode 100644 index 0000000..7739b76 --- /dev/null +++ b/docs/api/toasty.toast.rst @@ -0,0 +1,3 @@ +.. automodapi:: toasty.toast + :no-inheritance-diagram: + :no-inherited-members: diff --git a/docs/cli/tile-study.rst b/docs/cli/tile-study.rst index 468b696..986a0c4 100644 --- a/docs/cli/tile-study.rst +++ b/docs/cli/tile-study.rst @@ -18,6 +18,7 @@ Usage [--outdir DIR] [--name NAME] [--avm] + [--fits-wcs PATH] IMAGE-PATH See the :ref:`cli-std-image-options` section for documentation on those options. @@ -48,6 +49,14 @@ requires the `pyavm`_ package to be installed on your system. .. _pyavm: https://astrofrog.github.io/pyavm/ +If the ``--fits-wcs`` argument is given, Toasty will attempt to load +world-coordinate information from the headers of the named `FITS`_ file. It is +up to you to ensure that information contained in that file corresponds to the +main input image. This argument can be useful with the output from +`Astrometry.Net`_, which generates WCS FITS files for the images it solves. + +.. _FITS: https://en.wikipedia.org/wiki/FITS +.. 
_Astrometry.Net: https://astrometry.net/ Notes ===== diff --git a/docs/index.rst b/docs/index.rst index fbc117a..67dd9b3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,7 +16,34 @@ maps can be viewed in software such as the AAS_ `WorldWide Telescope`_. manual cli - api + + +Python API Reference +==================== + +.. toctree:: + :maxdepth: 1 + + api/toasty + api/toasty.builder + api/toasty.cli + api/toasty.collection + api/toasty.fits_tiler + api/toasty.image + api/toasty.merge + api/toasty.multi_tan + api/toasty.openexr + api/toasty.par_util + api/toasty.pipeline + api/toasty.pipeline.astropix + api/toasty.pipeline.azure_io + api/toasty.pipeline.cli + api/toasty.pipeline.djangoplicity + api/toasty.pipeline.local_io + api/toasty.pyramid + api/toasty.samplers + api/toasty.study + api/toasty.toast Getting help diff --git a/setup.py b/setup.py index 7866616..3b07cad 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ def get_long_desc(): setup_args = dict( name="toasty", # cranko project-name - version="0.14.0", # cranko project-version + version="0.15.0", # cranko project-version description="Generate TOAST image tile pyramids from existing image data", long_description=get_long_desc(), long_description_content_type="text/markdown", @@ -78,7 +78,7 @@ def get_long_desc(): "reproject", "shapely", "tqdm>=4.0", - "wwt_data_formats>=0.10.2", + "wwt_data_formats>=0.12", ], extras_require={ "test": [ diff --git a/toasty/builder.py b/toasty/builder.py index e6371e1..b2e8e49 100644 --- a/toasty/builder.py +++ b/toasty/builder.py @@ -15,9 +15,9 @@ """ from __future__ import absolute_import, division, print_function -__all__ = ''' +__all__ = """ Builder -'''.split() +""".split() from wwt_data_formats.enums import DataSetType, ProjectionType from wwt_data_formats.imageset import ImageSet @@ -49,14 +49,13 @@ def __init__(self, pio): self.pio = pio self.imgset = ImageSet() - self.imgset.name = 'Toasty' - self.imgset.file_type = '.' + pio.get_default_format() + self.imgset.name = "Toasty" + self.imgset.file_type = "." + pio.get_default_format() self.imgset.url = pio.get_path_scheme() + self.imgset.file_type self.place = Place() self.place.foreground_image_set = self.imgset - self.place.name = 'Toasty' - + self.place.name = "Toasty" def _check_no_wcs_yet(self): """ @@ -68,8 +67,9 @@ def _check_no_wcs_yet(self): backwards. """ if self.imgset.center_x != 0 or self.imgset.center_y != 0: - raise Exception('order-of-operations error: you must apply WCS after applying tiling settings') - + raise Exception( + "order-of-operations error: you must apply WCS after applying tiling settings" + ) def prepare_study_tiling(self, image): """ @@ -99,7 +99,6 @@ def prepare_study_tiling(self, image): tiling.apply_to_imageset(self.imgset) return tiling - def execute_study_tiling(self, image, tiling, **kwargs): """ Tile the specified image as a WWT "study". 
@@ -122,7 +121,6 @@ def execute_study_tiling(self, image, tiling, **kwargs): tiling.tile_image(image, self.pio, **kwargs) return self - def tile_base_as_study(self, image, **kwargs): """ Tile an image assuming that it is in the appropriate format for WWT's @@ -147,7 +145,6 @@ def tile_base_as_study(self, image, **kwargs): return self - def default_tiled_study_astrometry(self): self._check_no_wcs_yet() self.imgset.data_set_type = DataSetType.SKY @@ -156,7 +153,6 @@ def default_tiled_study_astrometry(self): self.place.zoom_level = 1.0 return self - def load_from_wwtl(self, cli_settings, wwtl_path, cli_progress=False): from contextlib import closing from io import BytesIO @@ -164,15 +160,17 @@ def load_from_wwtl(self, cli_settings, wwtl_path, cli_progress=False): # Load WWTL and see if it matches expectations with closing(LayerContainerReader.from_file(wwtl_path)) as lc: if len(lc.layers) != 1: - raise Exception('WWTL file must contain exactly one layer') + raise Exception("WWTL file must contain exactly one layer") layer = lc.layers[0] if not isinstance(layer, ImageSetLayer): - raise Exception('WWTL file must contain an imageset layer') + raise Exception("WWTL file must contain an imageset layer") imgset = layer.image_set if imgset.projection != ProjectionType.SKY_IMAGE: - raise Exception('WWTL imageset layer must have "SkyImage" projection type') + raise Exception( + 'WWTL imageset layer must have "SkyImage" projection type' + ) # Looks OK. Read and parse the image. loader = ImageLoader.create_from_args(cli_settings) @@ -184,7 +182,7 @@ def load_from_wwtl(self, cli_settings, wwtl_path, cli_progress=False): self.imgset = imgset self.place.foreground_image_set = self.imgset - self.imgset.file_type = '.' + self.pio.get_default_format() + self.imgset.file_type = "." + self.pio.get_default_format() self.imgset.url = self.pio.get_path_scheme() + self.imgset.file_type self.place.name = self.imgset.name @@ -192,14 +190,17 @@ def load_from_wwtl(self, cli_settings, wwtl_path, cli_progress=False): # existing imageset as much as possible, but update the parameters that # change in the tiling process. 
- wcs_keywords = self.imgset.wcs_headers_from_position() - self.imgset.center_x = self.imgset.center_y = 0 # hack to satisfy _check_no_wcs_yet() + wcs_keywords = self.imgset.wcs_headers_from_position(height=img.height) + self.imgset.center_x = ( + self.imgset.center_y + ) = 0 # hack to satisfy _check_no_wcs_yet() self.tile_base_as_study(img, cli_progress=cli_progress) - self.imgset.set_position_from_wcs(wcs_keywords, img.width, img.height, place=self.place) + self.imgset.set_position_from_wcs( + wcs_keywords, img.width, img.height, place=self.place + ) return img - def toast_base(self, sampler, depth, is_planet=False, is_pano=False, **kwargs): from .toast import sample_layer @@ -220,22 +221,37 @@ def toast_base(self, sampler, depth, is_planet=False, is_pano=False, **kwargs): return self - def cascade(self, **kwargs): from .merge import averaging_merger, cascade_images + cascade_images(self.pio, self.imgset.tile_levels, averaging_merger, **kwargs) - return self + if "fits" in self.imgset.file_type: + from .pyramid import Pos + from astropy.io import fits + import numpy as np + + with fits.open( + self.pio.tile_path( + pos=Pos(n=0, x=0, y=0), format="fits", makedirs=False + ) + ) as top_tile: + self.imgset.data_min = top_tile[0].header["DATAMIN"] + self.imgset.data_max = top_tile[0].header["DATAMAX"] + ( + self.imgset.pixel_cut_low, + self.imgset.pixel_cut_high, + ) = np.nanpercentile(top_tile[0].data, [0.5, 99.5]) + return self def make_thumbnail_from_other(self, thumbnail_image): thumb = thumbnail_image.make_thumbnail_bitmap() - with self.pio.open_metadata_for_write('thumb.jpg') as f: - thumb.save(f, format='JPEG') - self.imgset.thumbnail_url = 'thumb.jpg' + with self.pio.open_metadata_for_write("thumb.jpg") as f: + thumb.save(f, format="JPEG") + self.imgset.thumbnail_url = "thumb.jpg" return self - def make_placeholder_thumbnail(self): import numpy as np from .image import Image @@ -243,23 +259,22 @@ def make_placeholder_thumbnail(self): arr = np.zeros((45, 96, 3), dtype=np.uint8) img = Image.from_array(arr) - with self.pio.open_metadata_for_write('thumb.jpg') as f: - img.aspil().save(f, format='JPEG') + with self.pio.open_metadata_for_write("thumb.jpg") as f: + img.aspil().save(f, format="JPEG") - self.imgset.thumbnail_url = 'thumb.jpg' + self.imgset.thumbnail_url = "thumb.jpg" return self - def apply_wcs_info(self, wcs, width, height): self.imgset.set_position_from_wcs( wcs.to_header(), - width, height, - place = self.place, + width, + height, + place=self.place, ) return self - def apply_avm_info(self, avm, width, height): # So. 
The AVM standard discusses how parity should be expressed and how # it should be translated into WCS data, but in practice things are a @@ -274,6 +289,7 @@ def apply_avm_info(self, avm, width, height): wcs = avm.to_wcs(target_shape=(width, height)) from .image import _flip_wcs_parity + wcs = _flip_wcs_parity(wcs, height) self.apply_wcs_info(wcs, width, height) @@ -292,13 +308,11 @@ def apply_avm_info(self, avm, width, height): return self - def set_name(self, name): self.imgset.name = name self.place.name = name return self - def create_wtml_folder(self): """ Create a one-item :class:`wwt_data_formats.folder.Folder` object @@ -325,13 +339,12 @@ def create_wtml_folder(self): return folder - def write_index_rel_wtml(self): from wwt_data_formats import write_xml_doc folder = self.create_wtml_folder() - with self.pio.open_metadata_for_write('index_rel.wtml') as f: + with self.pio.open_metadata_for_write("index_rel.wtml") as f: write_xml_doc(folder.to_xml(), dest_stream=f, dest_wants_bytes=True) return self diff --git a/toasty/cli.py b/toasty/cli.py index be57a61..a3cd636 100644 --- a/toasty/cli.py +++ b/toasty/cli.py @@ -178,11 +178,6 @@ def make_thumbnail_impl(settings): thumb.save(f, format="JPEG") -# "pipeline" subcommands - -from .pipeline.cli import pipeline_getparser, pipeline_impl - - # "tile_allsky" subcommand @@ -427,6 +422,11 @@ def tile_study_getparser(parser): action="store_true", help="Expect the input image to have AVM positioning tags", ) + parser.add_argument( + "--fits-wcs", + metavar="PATH", + help="Get WCS information from this FITS file", + ) parser.add_argument( "--placeholder-thumbnail", action="store_true", @@ -481,6 +481,43 @@ def tile_study_impl(settings): raise builder.apply_avm_info(avm, img.width, img.height) + elif settings.fits_wcs: + try: + from astropy.io import fits + from astropy.wcs import WCS + except ImportError: + die("cannot use FITS WCS: you must install the `astropy` package") + + try: + with fits.open(settings.fits_wcs) as hdul: + # TODO: more flexibility in HDU choice: + hdu = hdul[0] + wcs = WCS(hdu, fobj=hdul) + except Exception as e: + die(f"cannot read WCS from FITS file `{settings.fits_wcs}`: {e}") + + wcs_width = hdu.header.get("IMAGEW") # Astrometry.net + wcs_height = hdu.header.get("IMAGEH") + + if wcs_width is None and len(hdu.shape) > 1: + wcs_width = hdu.shape[-1] + wcs_height = hdu.shape[-2] + + if wcs_width is None: + warn( + f"cannot infer image dimensions from WCS FITS file `{settings.fits_wcs}`; " + f"unable to check consistency with input image" + ) + elif img.shape[:2] != (wcs_height, wcs_width): + warn( + f"image `{settings.imgpath}` has shape {img.shape}, but " + f"WCS reference file `{settings.fits_wcs}` has shape ({wcs_height}, {wcs_width}); " + f"astrometry may not transfer correctly" + ) + + img._wcs = wcs # <= hack alert, but I think this is OK + img.ensure_negative_parity() + builder.apply_wcs_info(img.wcs, img.width, img.height) elif img.wcs is not None: # Study images must have negative parity. img.ensure_negative_parity() @@ -750,13 +787,19 @@ def entrypoint(args=None): parameter. """ - # Set up the subcommands from globals() + # Set up the subcommands. We use locals() and globals() in a fairly gross + # way to avoid circular import issues in Sphinx. 
+ + from .pipeline.cli import pipeline_getparser, pipeline_impl + + names = dict(locals()) + names.update(globals()) parser = argparse.ArgumentParser() subparsers = parser.add_subparsers(dest="subcommand") commands = set() - for py_name, value in globals().items(): + for py_name, value in names.items(): if py_name.endswith("_getparser"): cmd_name = py_name[:-10].replace("_", "-") subparser = subparsers.add_parser(cmd_name) @@ -776,7 +819,7 @@ def entrypoint(args=None): py_name = settings.subcommand.replace("-", "_") - impl = globals().get(py_name + "_impl") + impl = names.get(py_name + "_impl") if impl is None: die('no such subcommand "{}"'.format(settings.subcommand)) diff --git a/toasty/collection.py b/toasty/collection.py index 2dd4ad4..ffdd37e 100644 --- a/toasty/collection.py +++ b/toasty/collection.py @@ -184,7 +184,7 @@ def _load(self, actually_load_data): # End hack(s). - wcs = WCS(hdu.header, fix=False) + wcs = WCS(hdu.header) shape = hdu.shape # We need to make sure the data are 2D celestial, since that's diff --git a/toasty/fits_tiler.py b/toasty/fits_tiler.py index ccfeb8f..b0b63f3 100644 --- a/toasty/fits_tiler.py +++ b/toasty/fits_tiler.py @@ -345,4 +345,11 @@ def _copy_hips_properties_to_builder(self): self.builder.imgset.base_degrees_per_tile = float( hips_properties["hips_initial_fov"] ) + pixel_cut = hips_properties["hips_pixel_cut"].split(" ") + self.builder.imgset.pixel_cut_low = float(pixel_cut[0]) + self.builder.imgset.pixel_cut_high = float(pixel_cut[1]) + data_range = hips_properties["hips_data_range"].split(" ") + self.builder.imgset.data_min = float(data_range[0]) + self.builder.imgset.data_max = float(data_range[1]) + self.builder.imgset.url = "Norder{0}/Dir{1}/Npix{2}" diff --git a/toasty/image.py b/toasty/image.py index fe82b5b..fb21258 100644 --- a/toasty/image.py +++ b/toasty/image.py @@ -12,14 +12,14 @@ """ from __future__ import absolute_import, division, print_function -__all__ = ''' +__all__ = """ get_format_vertical_parity_sign Image ImageDescription ImageLoader ImageMode SUPPORTED_FORMATS -'''.split() +""".split() from enum import Enum from PIL import Image as pil_image @@ -30,23 +30,26 @@ try: from astropy.io import fits + ASTROPY_INSTALLED = True except ImportError: ASTROPY_INSTALLED = False -PIL_RGB_FORMATS = {'jpg': 'JPEG'} -PIL_RGBA_FORMATS = {'png': 'PNG'} +PIL_RGB_FORMATS = {"jpg": "JPEG"} +PIL_RGBA_FORMATS = {"png": "PNG"} PIL_FORMATS = PIL_RGB_FORMATS.copy() PIL_FORMATS.update(PIL_RGBA_FORMATS) -SUPPORTED_FORMATS = list(PIL_RGB_FORMATS) + list(PIL_RGBA_FORMATS) + ['npy'] +SUPPORTED_FORMATS = list(PIL_RGB_FORMATS) + list(PIL_RGBA_FORMATS) + ["npy"] if ASTROPY_INSTALLED: - SUPPORTED_FORMATS += ['fits'] + SUPPORTED_FORMATS += ["fits"] def _validate_format(name, fmt): if fmt is not None and fmt not in SUPPORTED_FORMATS: - raise ValueError('{0} should be one of {1}'.format(name, '/'.join(sorted(SUPPORTED_FORMATS)))) + raise ValueError( + "{0} should be one of {1}".format(name, "/".join(sorted(SUPPORTED_FORMATS))) + ) def get_format_vertical_parity_sign(format): @@ -77,28 +80,35 @@ def get_format_vertical_parity_sign(format): While proper WCS coordinates can convey *either* parity in a FITS file, WWT's renderer assumes bottoms-up. 
""" - if format == 'fits': + if format == "fits": return +1 return -1 + def _array_to_mode(array): if array.ndim == 2: - if array.dtype.kind == 'f' and array.dtype.itemsize == 4: + if array.dtype.kind == "f" and array.dtype.itemsize == 4: return ImageMode.F32 - elif array.dtype.kind == 'f' and array.dtype.itemsize == 8: + elif array.dtype.kind == "f" and array.dtype.itemsize == 8: return ImageMode.F64 - elif array.dtype.kind == 'u' and array.dtype.itemsize == 1: + elif array.dtype.kind == "u" and array.dtype.itemsize == 1: return ImageMode.U8 + elif array.dtype.kind == "i" and array.dtype.itemsize == 4: + return ImageMode.I32 elif array.ndim == 3: if array.shape[2] == 3: - if array.dtype.kind == 'f' and array.itemsize == 2: + if array.dtype.kind == "f" and array.itemsize == 2: return ImageMode.F16x3 - elif array.dtype.kind == 'u' and array.dtype.itemsize == 1: + elif array.dtype.kind == "u" and array.dtype.itemsize == 1: return ImageMode.RGB elif array.shape[2] == 4: - if array.dtype.kind == 'u' and array.dtype.itemsize == 1: + if array.dtype.kind == "u" and array.dtype.itemsize == 1: return ImageMode.RGBA - raise ValueError('Could not determine mode for array with dtype {0} and shape {1}'.format(array.dtype, array.shape)) + raise ValueError( + "Could not determine mode for array with dtype {0} and shape {1}".format( + array.dtype, array.shape + ) + ) class ImageMode(Enum): @@ -110,19 +120,20 @@ class ImageMode(Enum): obscure PIL modes that we do not support. """ - RGB = 'RGB' + + RGB = "RGB" "24-bit color with three uint8 channels for red, green, and blue." - RGBA = 'RGBA' + RGBA = "RGBA" "32-bit color with four uint8 channels for red, green, blue, and alpha (transparency)." - F32 = 'F' + F32 = "F" "32-bit floating-point scalar data." - F64 = 'D' + F64 = "D" "64-bit floating-point scalar data." - F16x3 = 'F16x3' + F16x3 = "F16x3" """ 48-bit color with three 16-bit floating-point channels for red, green, and blue. @@ -131,29 +142,40 @@ class ImageMode(Enum): stored in the OpenEXR file format. """ - U8 = 'U8' + U8 = "U8" + + I32 = "I32" + "32-bit integer data." 
@classmethod def from_array_info(cls, shape, dtype): + # Make sure we have an actual dtype instance to work with: + dtype = np.dtype(dtype) + if len(shape) == 2: - if dtype.kind == 'f' and dtype.itemsize == 4: + if dtype.kind == "f" and dtype.itemsize == 4: return cls.F32 - elif dtype.kind == 'f' and dtype.itemsize == 8: + elif dtype.kind == "f" and dtype.itemsize == 8: return cls.F64 - elif dtype.kind == 'u' and dtype.itemsize == 1: + elif dtype.kind == "u" and dtype.itemsize == 1: return cls.U8 + elif dtype.kind == "i" and dtype.itemsize == 4: + return cls.I32 elif len(shape) == 3: if shape[2] == 3: - if dtype.kind == 'f' and dtype.itemsize == 2: + if dtype.kind == "f" and dtype.itemsize == 2: return cls.F16x3 - elif dtype.kind == 'u' and dtype.itemsize == 1: + elif dtype.kind == "u" and dtype.itemsize == 1: return cls.RGB elif shape[2] == 4: - if dtype.kind == 'u' and dtype.itemsize == 1: + if dtype.kind == "u" and dtype.itemsize == 1: return cls.RGBA - raise ValueError('Could not determine mode for array with dtype {0} and shape {1}'.format(dtype, shape)) - + raise ValueError( + "Could not determine mode for array with dtype {0} and shape {1}".format( + dtype, shape + ) + ) def make_maskable_buffer(self, buf_height, buf_width): """ @@ -190,8 +212,10 @@ def make_maskable_buffer(self, buf_height, buf_width): arr = np.empty((buf_height, buf_width, 3), dtype=np.float16) elif self == ImageMode.U8: arr = np.empty((buf_height, buf_width), dtype=np.uint8) + elif self == ImageMode.I32: + arr = np.empty((buf_height, buf_width), dtype=np.int32) else: - raise Exception('unhandled mode in make_maskable_buffer()') + raise Exception("unhandled mode in make_maskable_buffer()") return Image.from_array(arr) @@ -211,10 +235,10 @@ def try_as_pil(self): def _wcs_to_parity_sign(wcs): h = wcs.to_header() - cd1_1 = h['CDELT1'] * h.get('PC1_1', 1.0) - cd1_2 = h['CDELT1'] * h.get('PC1_2', 0.0) - cd2_1 = h['CDELT2'] * h.get('PC2_1', 0.0) - cd2_2 = h['CDELT2'] * h.get('PC2_2', 1.0) + cd1_1 = h["CDELT1"] * h.get("PC1_1", 1.0) + cd1_2 = h["CDELT1"] * h.get("PC1_2", 0.0) + cd2_1 = h["CDELT2"] * h.get("PC2_1", 0.0) + cd2_2 = h["CDELT2"] * h.get("PC2_2", 1.0) det = cd1_1 * cd2_2 - cd1_2 * cd2_1 @@ -227,19 +251,21 @@ def _flip_wcs_parity(wcs, image_height): from astropy.wcs import WCS h = wcs.to_header() - h['CD1_1'] = h['CDELT1'] * h.setdefault('PC1_1', 1.0) - h['CD1_2'] = h['CDELT1'] * h.setdefault('PC1_2', 0.0) - h['CD2_1'] = h['CDELT2'] * h.setdefault('PC2_1', 0.0) - h['CD2_2'] = h['CDELT2'] * h.setdefault('PC2_2', 1.0) + h["CD1_1"] = h["CDELT1"] * h.setdefault("PC1_1", 1.0) + h["CD1_2"] = h["CDELT1"] * h.setdefault("PC1_2", 0.0) + h["CD2_1"] = h["CDELT2"] * h.setdefault("PC2_1", 0.0) + h["CD2_2"] = h["CDELT2"] * h.setdefault("PC2_2", 1.0) - for hn in 'CDELT1 CDELT2 PC1_1 PC1_2 PC2_1 PC2_2'.split(): + for hn in "CDELT1 CDELT2 PC1_1 PC1_2 PC2_1 PC2_2".split(): del h[hn] # Here's what we need to flip: - h['CD1_2'] *= -1 - h['CD2_2'] *= -1 - h['CRPIX2'] = image_height + 1 - h['CRPIX2'] # this is FITS, so pixel indices are 1-based + h["CD1_2"] *= -1 + h["CD2_2"] *= -1 + h["CRPIX2"] = ( + image_height + 1 - h["CRPIX2"] + ) # this is FITS, so pixel indices are 1-based return WCS(h) @@ -261,7 +287,6 @@ class ImageDescription(object): wcs = None "The WCS information associated with the image, if available." 
- def __init__(self, mode=None, shape=None, wcs=None): self.mode = mode self.shape = shape @@ -282,7 +307,6 @@ def width(self): def height(self): return self.shape[0] - def get_parity_sign(self): """ Get this ImageDescription's parity, based on its WCS. @@ -296,11 +320,12 @@ def get_parity_sign(self): See :meth:`Image.get_parity_sign` for detailed discussion. """ if self.wcs is None: - raise ValueError('cannot determine parity of an ImageDescription without WCS') + raise ValueError( + "cannot determine parity of an ImageDescription without WCS" + ) return _wcs_to_parity_sign(self.wcs) - def flip_parity(self): """ Invert the parity of this ImageDescription's WCS. @@ -317,12 +342,13 @@ def flip_parity(self): """ if self.wcs is None: - raise ValueError('cannot flip the parity of an ImageDescription without WCS') + raise ValueError( + "cannot flip the parity of an ImageDescription without WCS" + ) self.wcs = _flip_wcs_parity(self.wcs, self.height) return self - def ensure_negative_parity(self): """ Ensure that this ImageDescription has negative parity. @@ -349,8 +375,9 @@ class ImageLoader(object): This is implemented as its own class since there can be some options involved, and we want to provide a centralized place for handling them all. """ + black_to_transparent = False - colorspace_processing = 'srgb' + colorspace_processing = "srgb" crop = None psd_single_layer = None @@ -376,27 +403,27 @@ def add_arguments(cls, parser): """ parser.add_argument( - '--black-to-transparent', - action = 'store_true', - help = 'Convert full black colors to be transparent', + "--black-to-transparent", + action="store_true", + help="Convert full black colors to be transparent", ) parser.add_argument( - '--colorspace-processing', - metavar = 'MODE', - default = 'srgb', - help = 'What kind of RGB colorspace processing to perform (default: %(default)s; choices: %(choices)s)', - choices = ['srgb', 'none'], + "--colorspace-processing", + metavar="MODE", + default="srgb", + help="What kind of RGB colorspace processing to perform (default: %(default)s; choices: %(choices)s)", + choices=["srgb", "none"], ) parser.add_argument( - '--crop', - metavar = 'TOP,RIGHT,BOTTOM,LEFT', - help = 'Crop the input image by discarding pixels from each edge (default: 0,0,0,0)', + "--crop", + metavar="TOP,RIGHT,BOTTOM,LEFT", + help="Crop the input image by discarding pixels from each edge (default: 0,0,0,0)", ) parser.add_argument( - '--psd-single-layer', - type = int, - metavar = 'NUMBER', - help = 'If loading a Photoshop image, the (0-based) layer number to load -- saves memory', + "--psd-single-layer", + type=int, + metavar="NUMBER", + help="If loading a Photoshop image, the (0-based) layer number to load -- saves memory", ) return cls @@ -421,11 +448,13 @@ def create_from_args(cls, settings): if settings.crop is not None: try: - crop = list(map(int, settings.crop.split(','))) + crop = list(map(int, settings.crop.split(","))) assert all(c >= 0 for c in crop) assert len(crop) in (1, 2, 4) except Exception: - raise Exception('cannot parse `--crop` setting `{settings.crop!r}`: should be a comma-separated list of 1, 2, or 4 non-negative integers') + raise Exception( + "cannot parse `--crop` setting `{settings.crop!r}`: should be a comma-separated list of 1, 2, or 4 non-negative integers" + ) if len(crop) == 1: c = crop[0] @@ -480,13 +509,16 @@ def load_pil(self, pil_img): try: ImageMode(pil_img.mode) except ValueError: - print('warning: trying to convert image file to RGB from unexpected bitmode "%s"' % pil_img.mode) + print( + 
'warning: trying to convert image file to RGB from unexpected bitmode "%s"' + % pil_img.mode + ) if self.black_to_transparent: # Avoid double-converting in the next filter. - pil_img = pil_img.convert('RGBA') + pil_img = pil_img.convert("RGBA") else: - pil_img = pil_img.convert('RGB') + pil_img = pil_img.convert("RGB") # Convert pure black to transparent -- make sure to do this before any # colorspace processing. @@ -498,16 +530,16 @@ def load_pil(self, pil_img): # one will involve holding two buffers at once. if self.black_to_transparent: - if pil_img.mode != 'RGBA': - pil_img = pil_img.convert('RGBA') + if pil_img.mode != "RGBA": + pil_img = pil_img.convert("RGBA") a = np.asarray(pil_img) a = a.copy() # read-only buffer => writeable for i in range(a.shape[0]): - nonblack = (a[i,...,0] > 0) - np.logical_or(nonblack, a[i,...,1] > 0, out=nonblack) - np.logical_or(nonblack, a[i,...,2] > 0, out=nonblack) - a[i,...,3] *= nonblack + nonblack = a[i, ..., 0] > 0 + np.logical_or(nonblack, a[i, ..., 1] > 0, out=nonblack) + np.logical_or(nonblack, a[i, ..., 2] > 0, out=nonblack) + a[i, ..., 3] *= nonblack # This is my attempt to preserve the image metadata and other # attributes, swapping out the pixel data only. There is probably @@ -520,26 +552,32 @@ def load_pil(self, pil_img): # EPO images have funky colorspaces and we need to convert to sRGB to get # the tiled versions to appear correctly. - if self.colorspace_processing != 'none' and 'icc_profile' in pil_img.info: - assert self.colorspace_processing == 'srgb' # more modes, one day? + if self.colorspace_processing != "none" and "icc_profile" in pil_img.info: + assert self.colorspace_processing == "srgb" # more modes, one day? try: from PIL import ImageCms + # ImageCms doesn't raise import error if the implementation is unavailable # "for doc purposes". To see if it's available we need to actually try to # do something: - out_prof = ImageCms.createProfile('sRGB') + out_prof = ImageCms.createProfile("sRGB") except ImportError: - print('''warning: colorspace processing requested, but no `ImageCms` module found in PIL. + print( + """warning: colorspace processing requested, but no `ImageCms` module found in PIL. Your installation of PIL probably does not have colorspace support. Colors will not be transformed to sRGB and therefore may not appear as intended. Compare toasty's output to your source image and decide if this is acceptable to you. - Consider a different setting of the `--colorspace-processing` argument to avoid this warning.''', - file=sys.stderr) + Consider a different setting of the `--colorspace-processing` argument to avoid this warning.""", + file=sys.stderr, + ) else: from io import BytesIO - in_prof = ImageCms.getOpenProfile(BytesIO(pil_img.info['icc_profile'])) - xform = ImageCms.buildTransform(in_prof, out_prof, pil_img.mode, pil_img.mode) + + in_prof = ImageCms.getOpenProfile(BytesIO(pil_img.info["icc_profile"])) + xform = ImageCms.buildTransform( + in_prof, out_prof, pil_img.mode, pil_img.mode + ) ImageCms.applyTransform(pil_img, xform, inPlace=True) return Image.from_pil(pil_img) @@ -590,11 +628,11 @@ def load_path(self, path): # Special handling for Numpy arrays. TODO: it would be better to sniff # filetypes instead of just looking at extensions. But, lazy. 
- if path.endswith('.npy'): + if path.endswith(".npy"): arr = np.load(path) - return Image.from_array(arr, default_format='npy') + return Image.from_array(arr, default_format="npy") - if path.lower().endswith(('.fits', '.fts', '.fits.gz', '.fts.gz')): + if path.lower().endswith((".fits", ".fts", ".fits.gz", ".fts.gz")): # TODO: implement a better way to recognize FITS files # TODO: decide how to handle multiple HDUs @@ -604,11 +642,23 @@ def load_path(self, path): arr = hdul[0].data if ASTROPY_INSTALLED: from astropy.wcs import WCS + wcs = WCS(hdul[0].header) else: wcs = None - - img = Image.from_array(arr, wcs=wcs, default_format='fits') + max_value = self._get_header_value_or_none( + header=hdul[0].header, keyword="DATAMAX" + ) + min_value = self._get_header_value_or_none( + header=hdul[0].header, keyword="DATAMIN" + ) + img = Image.from_array( + arr, + wcs=wcs, + default_format="fits", + min_value=min_value, + max_value=max_value, + ) return img # Special handling for Photoshop files, used for some very large mosaics @@ -616,7 +666,7 @@ def load_path(self, path): # TODO: check for AVM in following formats and set WCS using this if needed. - if path.endswith('.psd') or path.endswith('.psb'): + if path.endswith(".psd") or path.endswith(".psb"): try: from psd_tools import PSDImage except ImportError: @@ -637,19 +687,28 @@ def load_path(self, path): # Special handling for OpenEXR files, used for large images with high # dynamic range. - if path.endswith('.exr'): + if path.endswith(".exr"): from .openexr import load_openexr + img = load_openexr(path) if img.dtype != np.float16: - raise Exception('only half-precision OpenEXR images are currently supported') + raise Exception( + "only half-precision OpenEXR images are currently supported" + ) return Image.from_array(img) # (One day, maybe we'll do more kinds of sniffing.) No special handling # came into play; just open the file and auto-detect. - with open(path, 'rb') as f: + with open(path, "rb") as f: return self.load_stream(f) + def _get_header_value_or_none(self, header, keyword): + value = None + if keyword in header: + value = header[keyword] + return value + class Image(object): """ @@ -662,8 +721,10 @@ class Image(object): _pil = None _array = None _mode = None - _default_format = 'png' + _default_format = "png" _wcs = None + _data_min = None + _data_max = None @classmethod def from_pil(cls, pil_img, wcs=None, default_format=None): @@ -685,7 +746,7 @@ def from_pil(cls, pil_img, wcs=None, default_format=None): A new :class:`Image` wrapping the PIL image. """ - _validate_format('default_format', default_format) + _validate_format("default_format", default_format) # Make sure that the image data are actually loaded from disk. Pillow # lazy-loads such that sometimes `np.asarray(img)` ends up failing @@ -701,12 +762,14 @@ def from_pil(cls, pil_img, wcs=None, default_format=None): try: inst._mode = ImageMode(pil_img.mode) except ValueError: - raise Exception('image mode {} is not supported'.format(pil_img.mode)) + raise Exception("image mode {} is not supported".format(pil_img.mode)) return inst @classmethod - def from_array(cls, array, wcs=None, default_format=None): + def from_array( + cls, array, wcs=None, default_format=None, min_value=None, max_value=None + ): """Create a new Image from an array-like data variable. Parameters @@ -721,6 +784,12 @@ def from_array(cls, array, wcs=None, default_format=None): The default format to use when writing the image if none is specified explicitly. 
If not specified, this is automatically chosen at write time based on the array type. + min_value : number or ``None`` (the default) + An optional number only used for FITS images. + The value represents the lowest data value in this image and its children. + max_value : number or ``None`` (the default) + An optional number only used for FITS images. + The value represents the highest data value in this image and its children. Returns ------- @@ -732,13 +801,13 @@ """ - _validate_format('default_format', default_format) + _validate_format("default_format", default_format) # Windows systems ('nt') cannot close a file while there are any variables pointing # to data within the opened file. Therefore we have to copy the entire array from # the opened file. In other, more permissive operating systems, pointing to the # file data is ok. - if os.name == 'nt': + if os.name == "nt": array = np.copy(array) array = np.atleast_2d(array) @@ -748,6 +817,11 @@ inst._default_format = default_format or cls._default_format inst._array = array inst._wcs = wcs + if "fits" in inst._default_format: + if min_value is not None: + inst._data_min = min_value + if max_value is not None: + inst._data_max = max_value return inst def asarray(self): @@ -786,7 +860,9 @@ def aspil(self): if self._pil is not None: return self._pil if self.mode.try_as_pil() is None: - raise Exception(f'Toasty image with mode {self.mode} cannot be converted to PIL') + raise Exception( + f"Toasty image with mode {self.mode} cannot be converted to PIL" + ) return pil_image.fromarray(self._array) @property @@ -821,9 +897,15 @@ def height(self): def default_format(self): if self._default_format is None: if self.mode in (ImageMode.RGB, ImageMode.RGBA): - return 'png' - elif self.mode in (ImageMode.F32, ImageMode.F64, ImageMode.F16x3, ImageMode.U8): - return 'npy' + return "png" + elif self.mode in ( + ImageMode.F32, + ImageMode.F64, + ImageMode.F16x3, + ImageMode.U8, + ImageMode.I32, + ): + return "npy" else: return self._default_format @@ -832,8 +914,15 @@ def default_format(self, value): if value in SUPPORTED_FORMATS: self._default_format = value else: - raise ValueError('Unrecognized format: {0}'.format(value)) + raise ValueError("Unrecognized format: {0}".format(value)) + + @property + def data_min(self): + return self._data_min + @property + def data_max(self): + return self._data_max def has_wcs(self): """ @@ -845,7 +934,6 @@ """ return self._wcs is not None - def get_parity_sign(self): """ Get this image's parity, based on its WCS. @@ -884,11 +972,10 @@ """ if self._wcs is None: - raise ValueError('cannot determine parity of an image without WCS') + raise ValueError("cannot determine parity of an image without WCS") return _wcs_to_parity_sign(self._wcs) - def flip_parity(self): """ Invert the parity of this image without changing its appearance. @@ -907,13 +994,12 @@ """ if self._wcs is None: - raise ValueError('cannot flip the parity of an image without WCS') + raise ValueError("cannot flip the parity of an image without WCS") self._wcs = _flip_wcs_parity(self._wcs, self.height) self._array = self.asarray()[::-1] return self - def ensure_negative_parity(self): """ Ensure that this image has negative parity.
@@ -936,7 +1022,6 @@ def ensure_negative_parity(self): self.flip_parity() return self - def fill_into_maskable_buffer(self, buffer, iy_idx, ix_idx, by_idx, bx_idx): """ Fill a maskable buffer with a rectangle of data from this image. @@ -972,16 +1057,16 @@ def fill_into_maskable_buffer(self, buffer, iy_idx, ix_idx, by_idx, bx_idx): if self.mode == ImageMode.RGB: b.fill(0) - b[by_idx,bx_idx,:3] = i[iy_idx,ix_idx] - b[by_idx,bx_idx,3] = 255 - elif self.mode in (ImageMode.RGBA, ImageMode.U8): + b[by_idx, bx_idx, :3] = i[iy_idx, ix_idx] + b[by_idx, bx_idx, 3] = 255 + elif self.mode in (ImageMode.RGBA, ImageMode.U8, ImageMode.I32): b.fill(0) - b[by_idx,bx_idx] = i[iy_idx,ix_idx] + b[by_idx, bx_idx] = i[iy_idx, ix_idx] elif self.mode in (ImageMode.F32, ImageMode.F64, ImageMode.F16x3): b.fill(np.nan) - b[by_idx,bx_idx] = i[iy_idx,ix_idx] + b[by_idx, bx_idx] = i[iy_idx, ix_idx] else: - raise Exception('unhandled mode in fill_into_maskable_buffer') + raise Exception("unhandled mode in fill_into_maskable_buffer") def update_into_maskable_buffer(self, buffer, iy_idx, ix_idx, by_idx, bx_idx): """ @@ -1014,24 +1099,24 @@ def update_into_maskable_buffer(self, buffer, iy_idx, ix_idx, by_idx, bx_idx): # since it will be out-of-date. buffer._pil = None - sub_b = b[by_idx,bx_idx] - sub_i = i[iy_idx,ix_idx] + sub_b = b[by_idx, bx_idx] + sub_i = i[iy_idx, ix_idx] if self.mode == ImageMode.RGB: - sub_b[...,:3] = sub_i - sub_b[...,3] = 255 + sub_b[..., :3] = sub_i + sub_b[..., 3] = 255 elif self.mode == ImageMode.RGBA: - valid = (sub_i[...,3] != 0) - valid = np.broadcast_to(valid[...,None], sub_i.shape) + valid = sub_i[..., 3] != 0 + valid = np.broadcast_to(valid[..., None], sub_i.shape) np.putmask(sub_b, valid, sub_i) elif self.mode in (ImageMode.F32, ImageMode.F64): valid = ~np.isnan(sub_i) np.putmask(sub_b, valid, sub_i) elif self.mode == ImageMode.F16x3: valid = ~np.any(np.isnan(sub_i), axis=2) - valid = np.broadcast_to(valid[...,None], sub_i.shape) + valid = np.broadcast_to(valid[..., None], sub_i.shape) np.putmask(sub_b, valid, sub_i) - elif self.mode == ImageMode.U8: + elif self.mode in (ImageMode.U8, ImageMode.I32): # zero is our maskval, so here's a convenient way to get pretty good # update semantics. It will behave unusually if two buffers overlap # and disagree on their non-zero pixel values: instead of the second @@ -1039,9 +1124,13 @@ def update_into_maskable_buffer(self, buffer, iy_idx, ix_idx, by_idx, bx_idx): # buffer "wins", biased towards the brighter values. np.maximum(sub_b, sub_i, out=sub_b) else: - raise Exception(f'unhandled mode `{self.mode}` in update_into_maskable_buffer') + raise Exception( + f"unhandled mode `{self.mode}` in update_into_maskable_buffer" + ) - def save(self, path_or_stream, format=None, mode=None): + def save( + self, path_or_stream, format=None, mode=None, min_value=None, max_value=None + ): """ Save this image to a filesystem path or stream @@ -1050,9 +1139,21 @@ def save(self, path_or_stream, format=None, mode=None): path_or_stream : path-like object or file-like object The destination into which the data should be written. If file-like, the stream should accept bytes. + format : :class:`str` or ``None`` (the default) + The format name; one of ``SUPPORTED_FORMATS`` + mode : :class:`toasty.image.ImageMode` or ``None`` (the default) + The image data mode to use if ``format`` is a ``PIL_FORMATS`` + min_value : number or ``None`` (the default) + An optional number only used for FITS images. 
+ The value represents the lowest data value in this image and its children. + If not set, the minimum value will be extracted from this image. + max_value : number or ``None`` (the default) + An optional number only used for FITS images. + The value represents the highest data value in this image and its children. + If not set, the maximum value will be extracted from this image. """ - _validate_format('format', format) + _validate_format("format", format) format = format or self._default_format @@ -1063,24 +1164,30 @@ if mode is not None: pil_image = pil_image.convert(mode.try_as_pil()) pil_image.save(path_or_stream, format=PIL_FORMATS[format]) - elif format == 'npy': + elif format == "npy": np.save(path_or_stream, self.asarray()) - elif format == 'fits': + elif format == "fits": header = fits.Header() if self._wcs is None else self._wcs.to_header() arr = self.asarray() # Avoid annoying RuntimeWarnings on all-NaN data with warnings.catch_warnings(): - warnings.simplefilter('ignore') - - m = np.nanmin(arr) - if np.isfinite(m): # Astropy will raise an error if we don't NaN-guard - header['DATAMIN'] = m - - m = np.nanmax(arr) - if np.isfinite(m): - header['DATAMAX'] = m + warnings.simplefilter("ignore") + if min_value is not None: + header["DATAMIN"] = min_value + else: + m = np.nanmin(arr) + if np.isfinite( + m + ): # Astropy will raise an error if we don't NaN-guard + header["DATAMIN"] = m + if max_value is not None: + header["DATAMAX"] = max_value + else: + m = np.nanmax(arr) + if np.isfinite(m): + header["DATAMAX"] = m fits.writeto( path_or_stream, @@ -1099,8 +1206,8 @@ def make_thumbnail_bitmap(self): be saved in JPEG format. """ - if self.mode in (ImageMode.F32, ImageMode.F64, ImageMode.F16x3): - raise Exception('cannot thumbnail-ify non-RGB Image') + if self.mode in (ImageMode.I32, ImageMode.F32, ImageMode.F64, ImageMode.F16x3): + raise Exception("cannot thumbnail-ify non-RGB Image") THUMB_SHAPE = (96, 45) THUMB_ASPECT = THUMB_SHAPE[0] / THUMB_SHAPE[1] @@ -1131,7 +1238,7 @@ # Depending on the source image, the mode might be RGBA, which can't # be JPEG-ified. - thumb = thumb.convert('RGB') + thumb = thumb.convert("RGB") return thumb @@ -1148,9 +1255,9 @@ def clear(self): with NaNs. """ - if self._mode in (ImageMode.RGB, ImageMode.RGBA, ImageMode.U8): + if self._mode in (ImageMode.RGB, ImageMode.RGBA, ImageMode.U8, ImageMode.I32): self.asarray().fill(0) elif self._mode in (ImageMode.F32, ImageMode.F64, ImageMode.F16x3): self.asarray().fill(np.nan) else: - raise Exception('unhandled mode in clear()') + raise Exception("unhandled mode in clear()") diff --git a/toasty/merge.py b/toasty/merge.py index e1ba1d9..26b2c61 100644 --- a/toasty/merge.py +++ b/toasty/merge.py @@ -22,10 +22,10 @@ """ from __future__ import absolute_import, division, print_function -__all__ = ''' +__all__ = """ averaging_merger cascade_images -'''.split() +""".split() import numpy as np import os @@ -70,7 +70,7 @@ def averaging_merger(data): # nanmean will raise a RuntimeWarning if there are all-NaN quartets. This # gets annoying, so we silence them.
with warnings.catch_warnings(): - warnings.simplefilter('ignore') + warnings.simplefilter("ignore") return np.nanmean(data.reshape(s), axis=(1, 3)).astype(data.dtype) @@ -101,6 +101,7 @@ def cascade_images(pio, start, merger, parallel=None, cli_progress=False): """ from .par_util import resolve_parallelism + parallel = resolve_parallelism(parallel) if start < 1: @@ -127,7 +128,9 @@ def _cascade_images_serial(pio, start, merger, cli_progress): else: slices = SLICES_MATCHING_PARITY - with tqdm(total=pyramid.depth2tiles(start - 1), disable=not cli_progress) as progress: + with tqdm( + total=pyramid.depth2tiles(start - 1), disable=not cli_progress + ) as progress: for pos in pyramid.generate_pos(start): if pos.n == start: continue # start layer is already there; we're cascading up @@ -136,10 +139,10 @@ def _cascade_images_serial(pio, start, merger, cli_progress): # processed. children = pyramid.pos_children(pos) - img0 = pio.read_image(children[0], default='none') - img1 = pio.read_image(children[1], default='none') - img2 = pio.read_image(children[2], default='none') - img3 = pio.read_image(children[3], default='none') + img0 = pio.read_image(children[0], default="none") + img1 = pio.read_image(children[1], default="none") + img2 = pio.read_image(children[2], default="none") + img3 = pio.read_image(children[3], default="none") if img0 is None and img1 is None and img2 is None and img3 is None: progress.update(1) @@ -156,18 +159,52 @@ def _cascade_images_serial(pio, start, merger, cli_progress): subimg.update_into_maskable_buffer( buf, - slice(None), slice(None), # subimage indexer: nothing + slice(None), + slice(None), # subimage indexer: nothing *slidx, # buffer indexer: appropriate sub-quadrant ) merged = Image.from_array(merger(buf.asarray())) - pio.write_image(pos, merged) + min_value, max_value = _get_min_max_of_children( + pio, [img0, img1, img2, img3] + ) + + pio.write_image(pos, merged, min_value=min_value, max_value=max_value) progress.update(1) if cli_progress: print() +def _get_min_max_of_children(pio, children): + min_value = None + max_value = None + if "fits" in pio.get_default_format(): + min_values = _get_existing_min_values(children) + if min_values: # Check there are any valid min values + min_value = min(min_values) + max_values = _get_existing_max_values(children) + if max_values: # Check there are any valid max values + max_value = max(max_values) + return min_value, max_value + + +def _get_existing_min_values(images): + values = [] + for image in images: + if image is not None and image.data_min is not None: + values.append(image.data_min) + return values + + +def _get_existing_max_values(images): + values = [] + for image in images: + if image is not None and image.data_max is not None: + values.append(image.data_max) + return values + + def _cascade_images_parallel(pio, start, merger, cli_progress, parallel): """Parallelized cascade operation @@ -188,7 +225,7 @@ def _cascade_images_parallel(pio, start, merger, cli_progress, parallel): first_level_to_do = start - 1 n_todo = pyramid.depth2tiles(first_level_to_do) ready_queue = mp.Queue() - done_queue = mp.Queue(maxsize = 2 * parallel) + done_queue = mp.Queue(maxsize=2 * parallel) done_event = mp.Event() # Seed the queue of ready tiles. 
We use generate_pos to try to seed the @@ -239,7 +276,7 @@ def _cascade_images_parallel(pio, start, merger, cli_progress, parallel): ppos, x_index, y_index = pos_parent(pos) bit_num = 2 * y_index + x_index flags = readiness.get(ppos, 0) - flags |= (1 << bit_num) + flags |= 1 << bit_num # If this tile was the last of its siblings to be finished, # the parent is now ready for processing. @@ -288,10 +325,10 @@ def _mp_cascade_worker(done_queue, ready_queue, done_event, pio, merger): # processed. children = pyramid.pos_children(pos) - img0 = pio.read_image(children[0], default='none') - img1 = pio.read_image(children[1], default='none') - img2 = pio.read_image(children[2], default='none') - img3 = pio.read_image(children[3], default='none') + img0 = pio.read_image(children[0], default="none") + img1 = pio.read_image(children[1], default="none") + img2 = pio.read_image(children[2], default="none") + img3 = pio.read_image(children[3], default="none") if img0 is None and img1 is None and img2 is None and img3 is None: pass # No data here; ignore @@ -307,11 +344,15 @@ def _mp_cascade_worker(done_queue, ready_queue, done_event, pio, merger): subimg.update_into_maskable_buffer( buf, - slice(None), slice(None), # subimage indexer: nothing + slice(None), + slice(None), # subimage indexer: nothing *slidx, # buffer indexer: appropriate sub-quadrant ) merged = Image.from_array(merger(buf.asarray())) - pio.write_image(pos, merged) + min_value, max_value = _get_min_max_of_children( + pio, [img0, img1, img2, img3] + ) + pio.write_image(pos, merged, min_value=min_value, max_value=max_value) done_queue.put(pos) diff --git a/toasty/pipeline/cli.py b/toasty/pipeline/cli.py index e7e35c7..1a772f9 100644 --- a/toasty/pipeline/cli.py +++ b/toasty/pipeline/cli.py @@ -1,24 +1,23 @@ # -*- mode: python; coding: utf-8 -*- -# Copyright 2019-2020 the AAS WorldWide Telescope project. +# Copyright 2019-2022 the AAS WorldWide Telescope project. # Licensed under the MIT License. """ Entrypoints for the "toasty pipeline" command-line tools. """ -__all__ = ''' +__all__ = """ pipeline_getparser pipeline_impl -'''.split() +""".split() -import argparse from fnmatch import fnmatch import glob import os.path import sys from wwt_data_formats.cli import EnsureGlobsExpandedAction -from ..cli import die, warn +from ..cli import die from . 
import NotActionableError @@ -52,19 +51,20 @@ def evaluate_imageid_args(searchdir, args): # The "approve" subcommand + def approve_setup_parser(parser): parser.add_argument( - '--workdir', - metavar = 'PATH', - default = '.', - help = 'The working directory for this processing session' + "--workdir", + metavar="PATH", + default=".", + help="The working directory for this processing session", ) parser.add_argument( - 'cand_ids', - nargs = '+', - action = EnsureGlobsExpandedAction, - metavar = 'IMAGE-ID', - help = 'Name(s) of image(s) to approve for publication (globs accepted)' + "cand_ids", + nargs="+", + action=EnsureGlobsExpandedAction, + metavar="IMAGE-ID", + help="Name(s) of image(s) to approve for publication (globs accepted)", ) @@ -75,29 +75,32 @@ def approve_impl(settings): mgr = PipelineManager(settings.workdir) mgr.ensure_config() - pub_url_prefix = mgr._config.get('publish_url_prefix') + pub_url_prefix = mgr._config.get("publish_url_prefix") if pub_url_prefix: - if pub_url_prefix[-1] != '/': - pub_url_prefix += '/' + if pub_url_prefix[-1] != "/": + pub_url_prefix += "/" - proc_dir = mgr._ensure_dir('processed') - app_dir = mgr._ensure_dir('approved') + proc_dir = mgr._ensure_dir("processed") + app_dir = mgr._ensure_dir("approved") for cid in evaluate_imageid_args(proc_dir, settings.cand_ids): if not os.path.isdir(os.path.join(proc_dir, cid)): - die(f'no such processed candidate ID {cid!r}') + die(f"no such processed candidate ID {cid!r}") - index_path = os.path.join(proc_dir, cid, 'index.wtml') - prefix = pub_url_prefix + cid + '/' + index_path = os.path.join(proc_dir, cid, "index.wtml") + prefix = pub_url_prefix + cid + "/" try: - f = Folder.from_file(os.path.join(proc_dir, cid, 'index_rel.wtml')) + f = Folder.from_file(os.path.join(proc_dir, cid, "index_rel.wtml")) f.mutate_urls(make_absolutizing_url_mutator(prefix)) - with open(index_path, 'wt', encoding='utf8') as f_out: + with open(index_path, "wt", encoding="utf8") as f_out: f.write_xml(f_out) except Exception as e: - print('error: failed to create index.wtml from index_rel.wtml', file=sys.stderr) + print( + "error: failed to create index.wtml from index_rel.wtml", + file=sys.stderr, + ) try: os.remove(index_path) @@ -111,19 +114,20 @@ def approve_impl(settings): # The "fetch" subcommand + def fetch_setup_parser(parser): parser.add_argument( - '--workdir', - metavar = 'PATH', - default = '.', - help = 'The working directory for this processing session' + "--workdir", + metavar="PATH", + default=".", + help="The working directory for this processing session", ) parser.add_argument( - 'cand_ids', - nargs = '+', - action = EnsureGlobsExpandedAction, - metavar = 'CAND-ID', - help = 'Name(s) of candidate(s) to fetch and prepare for processing (globs accepted)' + "cand_ids", + nargs="+", + action=EnsureGlobsExpandedAction, + metavar="CAND-ID", + help="Name(s) of candidate(s) to fetch and prepare for processing (globs accepted)", ) @@ -131,8 +135,8 @@ def fetch_impl(settings): from . import PipelineManager mgr = PipelineManager(settings.workdir) - cand_dir = mgr._ensure_dir('candidates') - rej_dir = mgr._ensure_dir('rejects') + cand_dir = mgr._ensure_dir("candidates") + rej_dir = mgr._ensure_dir("rejects") src = mgr.get_image_source() for cid in evaluate_imageid_args(cand_dir, settings.cand_ids): @@ -140,54 +144,53 @@ def fetch_impl(settings): # a NotActionable happens, so that we can move the directory on Windows. 
try: try: - cdata = open(os.path.join(cand_dir, cid), 'rb') + cdata = open(os.path.join(cand_dir, cid), "rb") except FileNotFoundError: - die(f'no such candidate ID {cid!r}') + die(f"no such candidate ID {cid!r}") try: - print(f'fetching {cid} ... ', end='') + print(f"fetching {cid} ... ", end="") sys.stdout.flush() - cachedir = mgr._ensure_dir('cache_todo', cid) + cachedir = mgr._ensure_dir("cache_todo", cid) src.fetch_candidate(cid, cdata, cachedir) - print('done') + print("done") finally: cdata.close() except NotActionableError as e: - print('not usable:', e) + print("not usable:", e) os.rename(os.path.join(cand_dir, cid), os.path.join(rej_dir, cid)) os.rmdir(cachedir) # The "init" subcommand + def init_setup_parser(parser): parser.add_argument( - '--azure-conn-env', - metavar = 'ENV-VAR-NAME', - help = 'The name of an environment variable contain an Azure Storage ' - 'connection string' + "--azure-conn-env", + metavar="ENV-VAR-NAME", + help="The name of an environment variable contain an Azure Storage " + "connection string", ) parser.add_argument( - '--azure-container', - metavar = 'CONTAINER-NAME', - help = 'The name of a blob container in the Azure storage account' + "--azure-container", + metavar="CONTAINER-NAME", + help="The name of a blob container in the Azure storage account", ) parser.add_argument( - '--azure-path-prefix', - metavar = 'PATH-PREFIX', - help = 'A slash-separated path prefix for blob I/O within the container' + "--azure-path-prefix", + metavar="PATH-PREFIX", + help="A slash-separated path prefix for blob I/O within the container", ) parser.add_argument( - '--local', - metavar = 'PATH', - help = 'Use the local-disk I/O backend' + "--local", metavar="PATH", help="Use the local-disk I/O backend" ) parser.add_argument( - 'workdir', - nargs = '?', - metavar = 'PATH', - default = '.', - help = 'The working directory for this processing session' + "workdir", + nargs="?", + metavar="PATH", + default=".", + help="The working directory for this processing session", ) @@ -200,31 +203,31 @@ def _pipeline_io_from_settings(settings): if settings.azure_conn_env: conn_str = os.environ.get(settings.azure_conn_env) if not conn_str: - die('--azure-conn-env=%s provided, but that environment variable is unset' - % settings.azure_conn_env) + die( + "--azure-conn-env=%s provided, but that environment variable is unset" + % settings.azure_conn_env + ) if not settings.azure_container: - die('--azure-container-name must be provided if --azure-conn-env is') + die("--azure-container-name must be provided if --azure-conn-env is") path_prefix = settings.azure_path_prefix if not path_prefix: - path_prefix = '' + path_prefix = "" azure_io.assert_enabled() return azure_io.AzureBlobPipelineIo( - conn_str, - settings.azure_container, - path_prefix + conn_str, settings.azure_container, path_prefix ) - die('An I/O backend must be specified with the arguments --local or --azure-*') + die("An I/O backend must be specified with the arguments --local or --azure-*") def init_impl(settings): pipeio = _pipeline_io_from_settings(settings) os.makedirs(settings.workdir, exist_ok=True) - pipeio.save_config(os.path.join(settings.workdir, 'toasty-store-config.yaml')) + pipeio.save_config(os.path.join(settings.workdir, "toasty-store-config.yaml")) # The "refresh" subcommand @@ -232,13 +235,14 @@ def init_impl(settings): # TODO: for large feeds, we should potentially add features to make it so that # we don't re-check every single candidate that's ever been posted. 
+ def refresh_setup_parser(parser): parser.add_argument( - '--workdir', - nargs = '?', - metavar = 'PATH', - default = '.', - help = 'The working directory for this processing session' + "--workdir", + nargs="?", + metavar="PATH", + default=".", + help="The working directory for this processing session", ) @@ -246,8 +250,8 @@ def refresh_impl(settings): from . import PipelineManager mgr = PipelineManager(settings.workdir) - cand_dir = mgr._ensure_dir('candidates') - rej_dir = mgr._ensure_dir('rejects') + cand_dir = mgr._ensure_dir("candidates") + rej_dir = mgr._ensure_dir("rejects") src = mgr.get_image_source() n_cand = 0 n_saved = 0 @@ -259,60 +263,61 @@ def refresh_impl(settings): n_cand += 1 uniq_id = cand.get_unique_id() - if mgr._pipeio.check_exists(uniq_id, 'index.wtml'): + if mgr._pipeio.check_exists(uniq_id, "index.wtml"): n_done += 1 continue # skip already-done inputs - if mgr._pipeio.check_exists(uniq_id, 'skip.flag'): + if mgr._pipeio.check_exists(uniq_id, "skip.flag"): n_skipped += 1 continue # skip inputs that are explicitly flagged cand_path = os.path.join(cand_dir, uniq_id) try: - with open(cand_path, 'wb') as f: + with open(cand_path, "wb") as f: cand.save(f) n_saved += 1 except NotActionableError as e: os.remove(cand_path) - with open(os.path.join(rej_dir, uniq_id, 'wb')) as f: + with open(os.path.join(rej_dir, uniq_id, "wb")) as f: pass # for now, just touch the file n_rejected += 1 - print(f'analyzed {n_cand} candidates from the image source') - print(f' - {n_saved} processing candidates saved') - print(f' - {n_rejected} rejected as definitely unusable') - print(f' - {n_done} were already done') - print(f' - {n_skipped} were already marked to be ignored') + print(f"analyzed {n_cand} candidates from the image source") + print(f" - {n_saved} processing candidates saved") + print(f" - {n_rejected} rejected as definitely unusable") + print(f" - {n_done} were already done") + print(f" - {n_skipped} were already marked to be ignored") print() - print('See the `candidates` directory for candidate image IDs.') + print("See the `candidates` directory for candidate image IDs.") # Other subcommands not yet split out. 
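For orientation, the sketch below strings these subcommands, together with the dispatch table wired up just below, into one processing session driven through `toasty.cli.entrypoint`, the same way the test suite drives other commands. It is only a sketch: the working directory, feed-store path, and image ID are placeholders, and it assumes the `pipeline` command group defined in this module is registered with the top-level CLI.

```python
# A minimal sketch of a pipeline session, assuming the "pipeline" command
# group defined in this module is reachable through toasty.cli.entrypoint.
# The paths and the image ID below are placeholders, not real data.
from toasty import cli

workdir = "pipeline-work"  # hypothetical working directory

# Configure a local-disk I/O backend and create the working directory.
cli.entrypoint(["pipeline", "init", "--local", "/data/feed-store", workdir])

# Scan the image source and save new processing candidates.
cli.entrypoint(["pipeline", "refresh", "--workdir", workdir])

# Fetch one candidate into the cache so it can be processed.
cli.entrypoint(["pipeline", "fetch", "--workdir", workdir, "some-image-id"])

# Process everything in the to-do list, then approve and publish.
cli.entrypoint(["pipeline", "process-todos", "--workdir", workdir])
cli.entrypoint(["pipeline", "approve", "--workdir", workdir, "some-image-id"])
cli.entrypoint(["pipeline", "publish", "--workdir", workdir])
```

The directory flow implied by the code above is candidates → cache_todo → processed → approved, which is why refresh, fetch, process-todos, approve, and publish are shown in that order.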
+ def pipeline_getparser(parser): - subparsers = parser.add_subparsers(dest='pipeline_command') + subparsers = parser.add_subparsers(dest="pipeline_command") def add_manager_command(name): subp = subparsers.add_parser(name) subp.add_argument( - '--workdir', - nargs = '?', - metavar = 'WORKDIR', - default = '.', - help = 'The local working directory', + "--workdir", + nargs="?", + metavar="WORKDIR", + default=".", + help="The local working directory", ) return subp - approve_setup_parser(subparsers.add_parser('approve')) - fetch_setup_parser(subparsers.add_parser('fetch')) - add_manager_command('ignore-rejects') - init_setup_parser(subparsers.add_parser('init')) - add_manager_command('process-todos') - add_manager_command('publish') - refresh_setup_parser(subparsers.add_parser('refresh')) + approve_setup_parser(subparsers.add_parser("approve")) + fetch_setup_parser(subparsers.add_parser("fetch")) + add_manager_command("ignore-rejects") + init_setup_parser(subparsers.add_parser("init")) + add_manager_command("process-todos") + add_manager_command("publish") + refresh_setup_parser(subparsers.add_parser("refresh")) def pipeline_impl(settings): @@ -322,22 +327,22 @@ def pipeline_impl(settings): print('Run the "pipeline" command with `--help` for help on its subcommands') return - if settings.pipeline_command == 'approve': + if settings.pipeline_command == "approve": approve_impl(settings) - elif settings.pipeline_command == 'fetch': + elif settings.pipeline_command == "fetch": fetch_impl(settings) - elif settings.pipeline_command == 'ignore-rejects': + elif settings.pipeline_command == "ignore-rejects": mgr = PipelineManager(settings.workdir) mgr.ignore_rejects() - elif settings.pipeline_command == 'init': + elif settings.pipeline_command == "init": init_impl(settings) - elif settings.pipeline_command == 'process-todos': + elif settings.pipeline_command == "process-todos": mgr = PipelineManager(settings.workdir) mgr.process_todos() - elif settings.pipeline_command == 'publish': + elif settings.pipeline_command == "publish": mgr = PipelineManager(settings.workdir) mgr.publish() - elif settings.pipeline_command == 'refresh': + elif settings.pipeline_command == "refresh": refresh_impl(settings) else: die('unrecognized "pipeline" subcommand ' + settings.pipeline_command) diff --git a/toasty/pyramid.py b/toasty/pyramid.py index b910319..6a5e585 100644 --- a/toasty/pyramid.py +++ b/toasty/pyramid.py @@ -13,7 +13,7 @@ """ from __future__ import absolute_import, division, print_function -__all__ = ''' +__all__ = """ depth2tiles generate_pos is_subtile @@ -23,7 +23,7 @@ pos_parent PyramidIO tiles_at_depth -'''.split() +""".split() import glob from collections import namedtuple @@ -33,7 +33,7 @@ from .image import ImageLoader, SUPPORTED_FORMATS, get_format_vertical_parity_sign -Pos = namedtuple('Pos', 'n x y') +Pos = namedtuple("Pos", "n x y") def next_highest_power_of_2(n): @@ -59,7 +59,7 @@ def tiles_at_depth(depth): """ Return the number of tiles in the WWT tile pyramid layer at depth *depth*. 
""" - return 4**depth + return 4 ** depth def is_subtile(deeper_pos, shallower_pos): @@ -78,7 +78,7 @@ def is_subtile(deeper_pos, shallower_pos): """ if deeper_pos.n < shallower_pos.n: - raise ValueError('deeper_pos has a lower depth than shallower_pos') + raise ValueError("deeper_pos has a lower depth than shallower_pos") if deeper_pos.n == shallower_pos.n: return deeper_pos.x == shallower_pos.x and deeper_pos.y == shallower_pos.y @@ -105,13 +105,9 @@ def pos_parent(pos): """ if pos.n < 1: - raise ValueError('cannot take the parent of a tile position with depth < 1') + raise ValueError("cannot take the parent of a tile position with depth < 1") - parent = Pos( - n = pos.n - 1, - x = pos.x // 2, - y = pos.y // 2 - ) + parent = Pos(n=pos.n - 1, x=pos.x // 2, y=pos.y // 2) return parent, pos.x % 2, pos.y % 2 @@ -136,9 +132,9 @@ def pos_children(pos): y *= 2 return [ - Pos(n=n, x=x, y=y ), - Pos(n=n, x=x + 1, y=y ), - Pos(n=n, x=x, y=y + 1), + Pos(n=n, x=x, y=y), + Pos(n=n, x=x + 1, y=y), + Pos(n=n, x=x, y=y + 1), Pos(n=n, x=x + 1, y=y + 1), ] @@ -193,23 +189,23 @@ class PyramidIO(object): defaults to 'png'. """ - def __init__(self, base_dir, scheme='L/Y/YX', default_format=None): + def __init__(self, base_dir, scheme="L/Y/YX", default_format=None): self._base_dir = base_dir - if scheme == 'L/Y/YX': + if scheme == "L/Y/YX": self._tile_path = self._tile_path_LsYsYX - self._scheme = '{1}/{3}/{3}_{2}' + self._scheme = "{1}/{3}/{3}_{2}" tile_pattern = "*/*/*_*.*" - elif scheme == 'LXY': + elif scheme == "LXY": self._tile_path = self._tile_path_LXY - self._scheme = 'L{1}X{2}Y{3}' + self._scheme = "L{1}X{2}Y{3}" tile_pattern = "L*X*Y*.*" else: raise ValueError(f'unsupported "scheme" option for PyramidIO: {scheme}') if default_format is None: - default_format = 'png' + default_format = "png" if os.path.exists(base_dir) and os.path.isdir(base_dir): for filename in glob.iglob(os.path.join(base_dir, tile_pattern)): @@ -250,7 +246,9 @@ def _tile_path_LsYsYX(self, level, ix, iy, format=None, makedirs=True): d = os.path.join(self._base_dir, level, iy) if makedirs: os.makedirs(d, exist_ok=True) - return os.path.join(d, '{}_{}.{}'.format(iy, ix, format or self._default_format)) + return os.path.join( + d, "{}_{}.{}".format(iy, ix, format or self._default_format) + ) def _tile_path_LXY(self, level, ix, iy, format=None, makedirs=True): if makedirs: @@ -258,7 +256,7 @@ def _tile_path_LXY(self, level, ix, iy, format=None, makedirs=True): return os.path.join( self._base_dir, - 'L{}X{}Y{}.{}'.format(level, ix, iy, format or self._default_format) + "L{}X{}Y{}.{}".format(level, ix, iy, format or self._default_format), ) def get_path_scheme(self): @@ -297,10 +295,10 @@ def get_default_vertical_parity_sign(self): """ if self._default_format is None: - raise Exception('cannot get default parity sign without a default format') + raise Exception("cannot get default parity sign without a default format") return get_format_vertical_parity_sign(self._default_format) - def read_image(self, pos, default='none', masked_mode=None, format=None): + def read_image(self, pos, default="none", masked_mode=None, format=None): """ Read an Image for the specified tile position. 
@@ -327,9 +325,9 @@ def read_image(self, pos, default='none', masked_mode=None, format=None): if e.errno != 2: raise # not EEXIST - if default == 'none': + if default == "none": return None - elif default == 'masked': + elif default == "masked": if masked_mode is None: raise ValueError('masked_mode should be set if default="masked"') buf = masked_mode.make_maskable_buffer(256, 256) @@ -340,7 +338,9 @@ def read_image(self, pos, default='none', masked_mode=None, format=None): return img - def write_image(self, pos, image, format=None, mode=None): + def write_image( + self, pos, image, format=None, mode=None, min_value=None, max_value=None + ): """Write an Image for the specified tile position. Parameters @@ -349,13 +349,31 @@ def write_image(self, pos, image, format=None, mode=None): The tile position to write. image : :class:`toasty.image.Image` The image to write. + format : :class:`str` or ``None`` (the default) + The format name; one of ``SUPPORTED_FORMATS``. + mode : :class:`toasty.image.ImageMode` or ``None`` (the default) + The image data mode to use if ``format`` is one of the ``PIL_FORMATS``. + min_value : number or ``None`` (the default) + An optional number only used for FITS images. + The value represents the lowest data value in this image and its children. + If not set, the minimum value will be extracted from this image. + max_value : number or ``None`` (the default) + An optional number only used for FITS images. + The value represents the highest data value in this image and its children. + If not set, the maximum value will be extracted from this image. """ p = self.tile_path(pos, format=format or self._default_format) - image.save(p, format=format or self._default_format, mode=mode) + image.save( + p, + format=format or self._default_format, + mode=mode, + min_value=min_value, + max_value=max_value, + ) @contextmanager - def update_image(self, pos, default='none', masked_mode=None, format=None): + def update_image(self, pos, default="none", masked_mode=None, format=None): # Plain FileLock doesn't work in HPC contexts, where we might be running # multiple processes on different hosts simultaneously. But it might be # more efficient in the non-HPC context? Should maybe choose the right @@ -364,12 +382,12 @@ def update_image(self, pos, default='none', masked_mode=None, format=None): p = self.tile_path(pos) - with SoftFileLock(p + '.lock'): + with SoftFileLock(p + ".lock"): img = self.read_image( pos, default=default, masked_mode=masked_mode, - format=format or self._default_format + format=format or self._default_format, ) yield img @@ -391,10 +409,10 @@ def clean_lockfiles(self, level): work. The "cascade" stage doesn't need locking, so in general only the deepest level of the pyramid will need to be cleaned. """ - for x in range(0, 2**level): - for y in range(0, 2**level): + for x in range(0, 2 ** level): + for y in range(0, 2 ** level): pos = Pos(level, x, y) - p = self.tile_path(pos, makedirs=False) + '.lock' + p = self.tile_path(pos, makedirs=False) + ".lock" try: os.unlink(p) @@ -415,7 +433,7 @@ def open_metadata_for_read(self, basename): A readable and closeable file-like object returning bytes.
""" - return open(os.path.join(self._base_dir, basename), 'rb') + return open(os.path.join(self._base_dir, basename), "rb") def open_metadata_for_write(self, basename): """ @@ -437,4 +455,4 @@ def open_metadata_for_write(self, basename): except OSError as e: if e.errno != 17: raise # not EEXIST - return open(os.path.join(self._base_dir, basename), 'wb') + return open(os.path.join(self._base_dir, basename), "wb") diff --git a/toasty/tests/geminiann11015a_wcs.fits b/toasty/tests/geminiann11015a_wcs.fits new file mode 100644 index 0000000..8e673d6 --- /dev/null +++ b/toasty/tests/geminiann11015a_wcs.fits @@ -0,0 +1 @@ +SIMPLE = T / conforms to FITS standard BITPIX = 8 / array data type NAXIS = 0 / number of array dimensions WCSAXES = 2 / Number of coordinate axes CRPIX1 = 640.0 / Pixel coordinate of reference point CRPIX2 = 437.85397337636 / Pixel coordinate of reference point PC1_1 = -0.75470958022277 / Coordinate transformation matrix element PC1_2 = 0.65605902899051 / Coordinate transformation matrix element PC2_1 = -0.65605902899051 / Coordinate transformation matrix element PC2_2 = -0.75470958022277 / Coordinate transformation matrix element CDELT1 = -3.7727088905939E-05 / [deg] Coordinate increment at reference point CDELT2 = 3.7727088905939E-05 / [deg] Coordinate increment at reference point CUNIT1 = 'deg' / Units of coordinate increment and value CUNIT2 = 'deg' / Units of coordinate increment and value CTYPE1 = 'RA---TAN' / Right ascension, gnomonic projection CTYPE2 = 'DEC--TAN' / Declination, gnomonic projection CRVAL1 = 349.049780614 / [deg] Coordinate value at reference point CRVAL2 = -42.5874939584 / [deg] Coordinate value at reference point LONPOLE = 180.0 / [deg] Native longitude of celestial pole LATPOLE = -42.5874939584 / [deg] Native latitude of celestial pole MJDREF = 0.0 / [d] MJD of fiducial time RADESYS = 'ICRS' / Equatorial coordinate system END \ No newline at end of file diff --git a/toasty/tests/test_modes.py b/toasty/tests/test_modes.py new file mode 100644 index 0000000..f8d6f45 --- /dev/null +++ b/toasty/tests/test_modes.py @@ -0,0 +1,62 @@ +# -*- mode: python; coding: utf-8 -*- +# Copyright 2022 the AAS WorldWide Telescope project +# Licensed under the MIT License. 
+ +import numpy as np +import pytest + +from ..image import Image, ImageMode + + +class ModeInfo(object): + def __init__(self, mode, dtype, demo_shape, default_format): + self.mode = mode + self.dtype = dtype + self.demo_shape = demo_shape + self.default_format = default_format + + +MODE_INFO = [ + ModeInfo(ImageMode.RGB, np.uint8, (10, 10, 3), "png"), + ModeInfo(ImageMode.RGBA, np.uint8, (10, 10, 4), "png"), + ModeInfo(ImageMode.F32, np.float32, (10, 10), "npy"), + ModeInfo(ImageMode.F64, np.float64, (10, 10), "npy"), + ModeInfo(ImageMode.F16x3, np.float16, (10, 10, 3), "npy"), + ModeInfo(ImageMode.U8, np.uint8, (10, 10), "npy"), + ModeInfo(ImageMode.I32, np.int32, (10, 10), "npy"), +] + + +def test_from_array_info(): + for mi in MODE_INFO: + assert ImageMode.from_array_info(mi.demo_shape, mi.dtype) == mi.mode + + with pytest.raises(ValueError): + s = (10, 10) + ImageMode.from_array_info(s, np.complex64) + + s = (10, 10, 3) + ImageMode.from_array_info(s, np.float32) + + s = (10, 10, 4) + ImageMode.from_array_info(s, np.int8) + + +def test_image_interface(): + for mi in MODE_INFO: + arr = np.zeros(mi.demo_shape, dtype=mi.dtype) + img = Image.from_array(arr) + img._default_format = None + assert img.default_format == mi.default_format + mb = mi.mode.make_maskable_buffer(8, 8) + img.fill_into_maskable_buffer( + mb, slice(1, 5), slice(1, 5), slice(4, 8), slice(4, 8) + ) + + mb.asarray().fill(1) + img.update_into_maskable_buffer( + mb, slice(1, 5), slice(1, 5), slice(4, 8), slice(4, 8) + ) + assert mb.asarray().flat[0] == 1 + + img.clear() diff --git a/toasty/tests/test_multi_tan.py b/toasty/tests/test_multi_tan.py index 9a02954..e6650f7 100644 --- a/toasty/tests/test_multi_tan.py +++ b/toasty/tests/test_multi_tan.py @@ -20,6 +20,7 @@ try: from astropy.io import fits + HAS_ASTRO = True except ImportError: HAS_ASTRO = False @@ -27,6 +28,7 @@ try: import reproject + HAS_REPROJECT = True except ImportError: HAS_REPROJECT = False @@ -34,54 +36,77 @@ class TestMultiTan(object): WTML = """ - - - - - thumb.jpg - - - + + + + + thumb.jpg + + + """ # Gross workaround for platform differences in the XML output. - if sys.platform == 'darwin': + if sys.platform == "darwin": WTML = WTML.replace('Dec="0.7438249862258411"', 'Dec="0.743824986225841"') # Back to the non-gross stuff. def setup_method(self, method): from tempfile import mkdtemp + self.work_dir = mkdtemp() def teardown_method(self, method): from shutil import rmtree + rmtree(self.work_dir) def work_path(self, *pieces): return os.path.join(self.work_dir, *pieces) - def test_basic(self): - coll = collection.SimpleFitsCollection([test_path('wcs512.fits.gz')]) + coll = collection.SimpleFitsCollection([test_path("wcs512.fits.gz")]) proc = multi_tan.MultiTanProcessor(coll) from ..pyramid import PyramidIO - pio = PyramidIO(self.work_path('basic'), default_format='fits') + + pio = PyramidIO(self.work_path("basic"), default_format="fits") builder = Builder(pio) @@ -124,7 +149,7 @@ def maybe_test_barycenter(self, path, bary_expected): (63.44949378800272, 64.40535387506924), (63.24744175084746, 63.67473452789256), (65.22950207855361, 63.35629429568745), - (62.027396724898814, 62.815937534782144) + (62.027396724898814, 62.815937534782144), ] def test_basic_cli(self): @@ -134,33 +159,39 @@ def test_basic_cli(self): module directly. 
""" expected = etree.fromstring( - self.WTML - .replace('Thumbnail="thumb.jpg"', '') - .replace('thumb.jpg', '') + self.WTML.replace('Thumbnail="thumb.jpg"', "").replace( + "thumb.jpg", "" + ) ) args = [ - 'tile-multi-tan', - '--hdu-index', '0', - '--outdir', self.work_path('basic_cli'), - test_path('wcs512.fits.gz') + "tile-multi-tan", + "--hdu-index", + "0", + "--outdir", + self.work_path("basic_cli"), + test_path("wcs512.fits.gz"), ] cli.entrypoint(args) - with open(self.work_path('basic_cli', 'index_rel.wtml'), 'rt', encoding='utf8') as f: + with open( + self.work_path("basic_cli", "index_rel.wtml"), "rt", encoding="utf8" + ) as f: observed = etree.fromstring(f.read()) assert_xml_elements_equal(observed, expected) args = [ - 'cascade', - '--start', '1', - self.work_path('basic_cli'), + "cascade", + "--start", + "1", + self.work_path("basic_cli"), ] cli.entrypoint(args) - self.maybe_test_barycenter(self.work_path('basic_cli', '0', '0', '0_0.fits'), self.WCS512_BARYDATA) - + self.maybe_test_barycenter( + self.work_path("basic_cli", "0", "0", "0_0.fits"), self.WCS512_BARYDATA + ) def test_study_cli(self): """ @@ -170,29 +201,34 @@ def test_study_cli(self): expected = etree.fromstring(self.WTML) args = [ - 'tile-study', - '--placeholder-thumbnail', - '--outdir', self.work_path('study_cli'), - test_path('wcs512.fits.gz') + "tile-study", + "--placeholder-thumbnail", + "--outdir", + self.work_path("study_cli"), + test_path("wcs512.fits.gz"), ] cli.entrypoint(args) - with open(self.work_path('study_cli', 'index_rel.wtml'), 'rt', encoding='utf8') as f: + with open( + self.work_path("study_cli", "index_rel.wtml"), "rt", encoding="utf8" + ) as f: observed = etree.fromstring(f.read()) assert_xml_elements_equal(observed, expected) args = [ - 'cascade', - '--start', '1', - self.work_path('study_cli'), + "cascade", + "--start", + "1", + self.work_path("study_cli"), ] cli.entrypoint(args) - self.maybe_test_barycenter(self.work_path('study_cli', '0', '0', '0_0.fits'), self.WCS512_BARYDATA) - + self.maybe_test_barycenter( + self.work_path("study_cli", "0", "0", "0_0.fits"), self.WCS512_BARYDATA + ) - @pytest.mark.skipif('not HAS_REPROJECT') + @pytest.mark.skipif("not HAS_REPROJECT") def test_as_multi_wcs(self): """ Once again, this doesn't super belong here, but this is where we have @@ -202,21 +238,26 @@ def test_as_multi_wcs(self): from .. 
import builder, collection, multi_wcs, pyramid reproject_function = reproject.reproject_interp - outdir = self.work_path('as_multi_wcs') + outdir = self.work_path("as_multi_wcs") - pio = pyramid.PyramidIO(outdir, default_format='fits') + pio = pyramid.PyramidIO(outdir, default_format="fits") bld = builder.Builder(pio) - coll = collection.SimpleFitsCollection([test_path('wcs512.fits.gz')], hdu_index=0) + coll = collection.SimpleFitsCollection( + [test_path("wcs512.fits.gz")], hdu_index=0 + ) proc = multi_wcs.MultiWcsProcessor(coll) proc.compute_global_pixelization(bld) proc.tile(pio, reproject_function, cli_progress=False, parallel=1) bld.write_index_rel_wtml() args = [ - 'cascade', - '--start', '1', - self.work_path('as_multi_wcs'), + "cascade", + "--start", + "1", + self.work_path("as_multi_wcs"), ] cli.entrypoint(args) - self.maybe_test_barycenter(self.work_path('as_multi_wcs', '0', '0', '0_0.fits'), self.WCS512_BARYDATA) + self.maybe_test_barycenter( + self.work_path("as_multi_wcs", "0", "0", "0_0.fits"), self.WCS512_BARYDATA + ) diff --git a/toasty/tests/test_study.py b/toasty/tests/test_study.py index f0990ab..d002ac9 100644 --- a/toasty/tests/test_study.py +++ b/toasty/tests/test_study.py @@ -1,5 +1,5 @@ # -*- mode: python; coding: utf-8 -*- -# Copyright 2020 the AAS WorldWide Telescope project +# Copyright 2020-2022 the AAS WorldWide Telescope project # Licensed under the MIT License. from __future__ import absolute_import, division, print_function @@ -16,23 +16,61 @@ class TestStudy(object): WTML = """ - - - - - thumb.jpg - - - + + + + + thumb.jpg + + + """ def setup_method(self, method): from tempfile import mkdtemp + self.work_dir = mkdtemp() def teardown_method(self, method): from shutil import rmtree + rmtree(self.work_dir) def work_path(self, *pieces): @@ -48,7 +86,6 @@ def test_basic(self): assert tiling._img_gx0 == 0 assert tiling._img_gy0 == 0 - def test_preconditions(self): with pytest.raises(ValueError): study.StudyTiling(0, 1) @@ -59,7 +96,6 @@ def test_preconditions(self): with pytest.raises(ValueError): study.StudyTiling(1, np.nan) - def test_image_to_tile(self): tiling = study.StudyTiling(514, 514) assert tiling._p2n == 1024 @@ -68,43 +104,60 @@ def test_image_to_tile(self): assert tiling.image_to_tile(513, 0) == (3, 0, 0, 255) assert tiling.image_to_tile(513, 513) == (3, 3, 0, 0) - def test_sample_cli(self): from xml.etree import ElementTree as etree + expected = etree.fromstring(self.WTML) - for variants in ([], ['--placeholder-thumbnail']): - args = ['tile-study'] + for variants in ([], ["--placeholder-thumbnail"]): + args = ["tile-study"] args += variants - args += [ - '--outdir', self.work_path(), - test_path('NGC253ALMA.jpg') - ] + args += ["--outdir", self.work_path(), test_path("NGC253ALMA.jpg")] cli.entrypoint(args) - with open(self.work_path('index_rel.wtml'), 'rt', encoding='utf8') as f: + with open(self.work_path("index_rel.wtml"), "rt", encoding="utf8") as f: observed = etree.fromstring(f.read()) assert_xml_elements_equal(observed, expected) - AVM_WTML = """ - - + + - + International Gemini Observatory/NOIRLab/NSF/AURA/B. Reynolds (Sutherland Shire Christian School)/T. Rector (University of Alaska, Anchorage)/Australian Gemini Office. https://noirlab.edu/public/images/geminiann11015a/ Gemini GMOS image of the barred spiral galaxy NGC 7552. Benjamin Reynolds, a 10th grade student at Sutherland Shire Christian School, suggested this target for Australia’s 2011 Gemini School Astronomy Contest and won. 
The picture consists of separate images taken with different filters: H-alpha (red), g (blue), r (green), and i (yellow). @@ -114,7 +167,7 @@ def test_sample_cli(self): """ - @pytest.mark.skipif('not HAS_AVM') + @pytest.mark.skipif("not HAS_AVM") def test_avm(self): from xml.etree import ElementTree as etree @@ -122,19 +175,96 @@ def test_avm(self): wtml = self.AVM_WTML - if sys.platform == 'darwin': + if sys.platform == "darwin": wtml = wtml.replace('Dec="-42.58752472831171"', 'Dec="-42.587524728311706"') expected = etree.fromstring(wtml) - cli.entrypoint([ - 'tile-study', - '--avm', - '--outdir', self.work_path(), - test_path('geminiann11015a.jpg'), - ]) + cli.entrypoint( + [ + "tile-study", + "--avm", + "--outdir", + self.work_path(), + test_path("geminiann11015a.jpg"), + ] + ) + + with open(self.work_path("index_rel.wtml"), "rt", encoding="utf8") as f: + observed = etree.fromstring(f.read()) + + assert_xml_elements_equal(observed, expected) + + # Blah: this is the same as AVM_WTML, minus the metadata fields that aren't + # preserved in the FITS export. + FITS_WCS_WTML = """ + + + + + thumb.jpg + + + +""" + + def test_fits_wcs(self): + from xml.etree import ElementTree as etree + + # Platform hack: XML textualization is ever-so-slightly different. + + wtml = self.FITS_WCS_WTML + + if sys.platform in ("darwin", "win32"): + wtml = wtml.replace('Dec="-42.58752472831171"', 'Dec="-42.587524728311706"') + + expected = etree.fromstring(wtml) + + cli.entrypoint( + [ + "tile-study", + "--fits-wcs", + test_path("geminiann11015a_wcs.fits"), + "--outdir", + self.work_path(), + test_path("geminiann11015a.jpg"), + ] + ) - with open(self.work_path('index_rel.wtml'), 'rt', encoding='utf8') as f: + with open(self.work_path("index_rel.wtml"), "rt", encoding="utf8") as f: observed = etree.fromstring(f.read()) assert_xml_elements_equal(observed, expected)
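The `test_fits_wcs` case above exercises the new `--fits-wcs` mode of `tile-study`: the RGB image is tiled as usual, but its sky placement is read from the header of a separate FITS file such as `geminiann11015a_wcs.fits`. A minimal sketch of the same invocation outside the test suite, with placeholder file names:

```python
# Tile an RGB image and take its coordinates from a sidecar FITS header,
# mirroring the arguments used in test_fits_wcs. The file names here are
# placeholders; the WCS file only needs a valid celestial header.
from toasty import cli

cli.entrypoint(
    [
        "tile-study",
        "--fits-wcs",
        "my_solution.fits",   # FITS file whose header carries the WCS
        "--outdir",
        "tiled_output",
        "my_image.jpg",       # the RGB image to tile
    ]
)

# As in the test, tiled_output/index_rel.wtml then describes the tiled
# study with positions derived from the FITS header.
```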