From 3cbc7024ffd2f4d1734fd411c421e66360af1af1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 14:03:49 +0000 Subject: [PATCH 01/16] Add reader for OSI SAF L3 products on EASE and polar stereographic projections. --- satpy/etc/readers/osisaf_nc.yaml | 107 +++++++++++++++++++++ satpy/readers/osisaf_l3_nc.py | 154 +++++++++++++++++++++++++++++++ 2 files changed, 261 insertions(+) create mode 100644 satpy/etc/readers/osisaf_nc.yaml create mode 100644 satpy/readers/osisaf_l3_nc.py diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml new file mode 100644 index 0000000000..214345da3a --- /dev/null +++ b/satpy/etc/readers/osisaf_nc.yaml @@ -0,0 +1,107 @@ +reader: + name: osisaf_nc + short_name: OSI-SAF netCDF + long_name: OSI-SAF data in netCDF4 format + description: > + A reader for OSI-SAF data in netCDF4 format. + References: + + - Dataset descriptions: https://osi-saf.eumetsat.int/documentation/products-documentation + + status: Beta + supports_fsspec: true + sensors: [osisaf] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + osi_sea_ice_conc: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_edge: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_emis: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_type: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + +datasets: + # Shared between various file types + status_flag: + name: status_flag + file_type: [osi_sea_ice_conc, osi_sea_ice_edge, osi_sea_ice_type] + + orbit_num_amsr: + name: orbit_num_amsr + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + orbit_num_ascat: + name: orbit_num_ascat + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + orbit_num_ssmis: + name: orbit_num_ssmis + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + param_used: + name: param_used + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + uncertainty: + name: uncertainty + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + + # Sea ice concentration datasets + algorithm_uncertainty: + name: algorithm_uncertainty + file_type: osi_sea_ice_conc + confidence_level: + name: confidence_level + file_type: osi_sea_ice_conc + ice_conc: + name: ice_conc + file_type: osi_sea_ice_conc + ice_conc_unfiltered: + name: ice_conc_unfiltered + file_type: osi_sea_ice_conc + masks: + name: masks + file_type: osi_sea_ice_conc + smearing_uncertainty: + name: smearing_uncertainty + file_type: osi_sea_ice_conc + total_uncertainty: + name: total_uncertainty + file_type: osi_sea_ice_conc + + # Ice edge product + ice_edge: + name: ice_edge + file_type: osi_sea_ice_edge + + # Ice type product + ice_type: + name: ice_type + file_type: osi_sea_ice_type + + # Ice emis product + e: + name: e + file_type: osi_sea_ice_emis + ev: + name: ev + file_type: osi_sea_ice_emis + flag: + name: flag + file_type: osi_sea_ice_emis + R: + name: R + file_type: osi_sea_ice_emis + S: + name: S + file_type: osi_sea_ice_emis + teff: + name: teff + file_type: 
osi_sea_ice_emis + u: + name: u + file_type: osi_sea_ice_emis diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py new file mode 100644 index 0000000000..794119a300 --- /dev/null +++ b/satpy/readers/osisaf_l3_nc.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +# type: ignore +"""A reader for OSI-SAF level 3 products in netCDF format.""" + +import logging +from datetime import datetime + +import numpy as np + +from satpy.readers.netcdf_utils import NetCDF4FileHandler + +logger = logging.getLogger(__name__) + + +class OSISAFL3NCFileHandler(NetCDF4FileHandler): + """Reader for the OSISAF l3 netCDF format.""" + + + @staticmethod + def _parse_datetime(datestr): + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + + def _get_ease_grid(self): + """Set up the EASE grid.""" + from pyresample import create_area_def + + proj4str = self["Lambert_Azimuthal_Grid/attr/proj4_string"] + x_size = self["/dimension/xc"] + y_size = self["/dimension/yc"] + p_lowerleft_lat = self["lat"].values[y_size - 1, 0] + p_lowerleft_lon = self["lon"].values[y_size - 1, 0] + p_upperright_lat = self["lat"].values[0, x_size - 1] + p_upperright_lon = self["lon"].values[0, x_size - 1] + area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] + area_def = create_area_def(area_id="osisaf_lambert_azimuthal_equal_area", + description="osisaf_lambert_azimuthal_equal_area", + proj_id="osisaf_lambert_azimuthal_equal_area", + projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + + def _get_polar_stereographic_grid(self): + """Set up the polar stereographic grid.""" + from pyresample import create_area_def + + proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] + x_size = self["/dimension/xc"] + y_size = self["/dimension/yc"] + p_lowerleft_lat = self["lat"].values[y_size - 1, 0] + p_lowerleft_lon = self["lon"].values[y_size - 1, 0] + p_upperright_lat = self["lat"].values[0, x_size - 1] + p_upperright_lon = self["lon"].values[0, x_size - 1] + area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] + area_def = create_area_def(area_id="osisaf_polar_stereographic", + description="osisaf_polar_stereographic", + proj_id="osisaf_polar_stereographic", + projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + + + def get_area_def(self, area_id): + """Override abstract baseclass method""" + + if self.filename_info["grid"] == "ease": + return self._get_ease_grid() + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + return self._get_polar_stereographic_grid() + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + + def _get_ds_attr(self, a_name): + """Get a dataset attribute and check it's valid.""" + try: 
+ return self[a_name] + except KeyError: + return None + + def get_dataset(self, dataset_id, ds_info): + """Load a dataset.""" + logger.debug(f"Reading {dataset_id['name']} from {self.filename}") + var_path = ds_info.get("file_key", f"{dataset_id['name']}") + + shape = self[var_path + "/shape"] + if shape[0] == 1: + # Remove the time dimension from dataset + data = self[var_path][0] + else: + data = self[var_path] + + file_units = ds_info.get("file_units") + if file_units is None: + file_units = self._get_ds_attr(var_path + "/attr/units") + if file_units is None: + file_units = 1 + + # Try to get the valid limits for the data. + # Not all datasets have these, so fall back on assuming no limits. + valid_min = self._get_ds_attr(var_path + "/attr/valid_min") + valid_max = self._get_ds_attr(var_path + "/attr/valid_max") + if valid_min is not None and valid_max is not None: + data = data.where(data >= valid_min, np.nan) + data = data.where(data <= valid_max, np.nan) + + + # Try to get the scale and offset for the data. + # As above, not all datasets have these, so fall back on assuming no limits. + scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") + scale_offset = self._get_ds_attr(var_path + "/attr/add_offset") + if scale_offset is not None and scale_factor is not None: + data = (data * scale_factor + scale_offset) + + # Try to get the fill value for the data. + # If there isn"t one, assume all remaining pixels are valid. + fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + if fill_value is not None: + data = data.where(data != fill_value, np.nan) + + # Set proper dimension names + data = data.rename({"xc": "x", "yc": "y"}) + + ds_info.update({ + "units": ds_info.get("units", file_units), + "platform_name": self["/attr/platform_name"], + "sensor": self["/attr/instrument_type"] + }) + ds_info.update(dataset_id.to_dict()) + data.attrs.update(ds_info) + return data + + @property + def start_time(self): + return self._parse_datetime(self["/attr/start_date"]) + # return self._parse_datetime(self["/attr/start_date"]) + + @property + def end_time(self): + return self._parse_datetime(self["/attr/stop_date"]) From 300a9a1cf09d62c6e2b64d2131486f089e95761e Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 14:06:50 +0000 Subject: [PATCH 02/16] Fix typos and tidy osi saf reader. --- satpy/readers/osisaf_l3_nc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 794119a300..e61e752299 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -118,7 +118,6 @@ def get_dataset(self, dataset_id, ds_info): data = data.where(data >= valid_min, np.nan) data = data.where(data <= valid_max, np.nan) - # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") @@ -127,7 +126,7 @@ def get_dataset(self, dataset_id, ds_info): data = (data * scale_factor + scale_offset) # Try to get the fill value for the data. - # If there isn"t one, assume all remaining pixels are valid. + # If there isn't one, assume all remaining pixels are valid. 
fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") if fill_value is not None: data = data.where(data != fill_value, np.nan) From 13fffa673909606376014941f644785b09cfd1f4 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:26:50 +0000 Subject: [PATCH 03/16] Add tests for the OSI SAF L3 reader. --- satpy/readers/osisaf_l3_nc.py | 8 +- satpy/tests/reader_tests/test_osisaf_l3.py | 233 +++++++++++++++++++++ 2 files changed, 237 insertions(+), 4 deletions(-) create mode 100644 satpy/tests/reader_tests/test_osisaf_l3.py diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index e61e752299..00f1176b6f 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -31,7 +31,6 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): """Reader for the OSISAF l3 netCDF format.""" - @staticmethod def _parse_datetime(datestr): return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") @@ -77,11 +76,12 @@ def _get_polar_stereographic_grid(self): def get_area_def(self, area_id): """Override abstract baseclass method""" - if self.filename_info["grid"] == "ease": - return self._get_ease_grid() + self.area_def = self._get_ease_grid() + return self.area_def elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - return self._get_polar_stereographic_grid() + self.area_def = self._get_polar_stereographic_grid() + return self.area_def else: raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py new file mode 100644 index 0000000000..40cf4539e1 --- /dev/null +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Module for testing the satpy.readers.osisaf_l3 module.""" + +import os +from datetime import datetime + +import numpy as np +import pytest +import xarray as xr + +from satpy import DataQuery +from satpy.readers.osisaf_l3_nc import OSISAFL3NCFileHandler + +stere_ds = xr.DataArray( + -999, + attrs={"grid_mapping_name": "polar_stereographic", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", +}) + +ease_ds = xr.DataArray( + -999, + attrs={"grid_mapping_name": "lambert_azimuthal_equal_area", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6371228.0, + "longitude_of_projection_origin": 0.0, + "latitude_of_projection_origin": -90.0, + "proj4_string": "+proj=laea +a=6371228.0 +lat_0=-90 +lon_0=0", + }) + + +class TestOSISAFL3Reader: + """Test OSI-SAF level 3 netCDF reader.""" + + def setup_method(self, proj_type): + """Create a fake dataset.""" + self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + self.base_data = np.expand_dims(self.base_data, axis=0) + self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) + self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) + self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) + self.time_data = np.array([1.]) + + self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) + self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) + self.xc = xr.DataArray( + self.xc_data, + dims=("yc", "xc"), + attrs={"standard_name": "projection_x_coordinate", "units": "km"} + ) + self.yc = xr.DataArray( + self.yc_data, + dims=("yc", "xc"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"} + ) + self.time = xr.DataArray( + self.time_data, + dims=("time"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"} + ) + self.lat = xr.DataArray( + self.lat_data, + dims=("yc", "xc"), + attrs={"standard_name": "latitude", "units": "degrees_north"} + ) + self.lon = xr.DataArray( + self.lon_data, + dims=("yc", "xc"), + attrs={"standard_name": "longitude", "units": "degrees_east"} + ) + self.conc = xr.DataArray( + self.base_data, + dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", + "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"} + ) + self.uncert = xr.DataArray( + self.unc_data, + dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, + "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + ) + + data_vars = { + "ice_conc": self.conc, + "total_uncertainty": self.uncert, + "xc": self.xc, + "yc": self.yc, + "time": self.time, + "lat": self.lat, + "lon": self.lon, + "Lambert_Azimuthal_Grid": ease_ds, + "Polar_Stereographic_Grid": stere_ds} + self.fake_dataset = xr.Dataset( + data_vars=data_vars, + attrs={ + "start_date": "2022-12-15 00:00:00", + "stop_date": "2022-12-16 00:00:00", + "platform_name": "Multi-sensor analysis", + "instrument_type": "Multi-sensor analysis"}, + ) + + def test_instantiate_single_netcdf_file(self, tmp_path): + """Test initialization of file handlers - given a single netCDF file.""" + filename_info = {} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + 
self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + + def test_get_dataset(self, tmp_path): + """Test retrieval of datasets.""" + filename_info = {} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + res = test.get_dataset(DataQuery(name="ice_conc"), {"standard_name": "sea_ice_area_fraction"}) + # Check we remove singleton dimension + assert res.shape[0] == 2 + assert res.shape[1] == 5 + + # Test values are correct + test_ds = self.fake_dataset["ice_conc"][0].values + test_ds = np.where(test_ds == -999, np.nan, test_ds) + test_ds = np.where(test_ds > 10000, np.nan, test_ds) + np.testing.assert_allclose(res.values, test_ds / 100) + + res = test.get_dataset(DataQuery(name="total_uncertainty"), {"standard_name": "sea_ice_area_fraction"}) + assert res.shape[0] == 2 + assert res.shape[1] == 5 + + with pytest.raises(KeyError): + test.get_dataset(DataQuery(name="erroneous dataset"), {"standard_name": "erroneous dataset"}) + + def test_get_start_and_end_times(self, tmp_path): + """Test retrieval of the sensor name from the netCDF file.""" + good_start_time = datetime(2022, 12, 15, 0, 0, 0) + good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + + filename_info = {} + filetype_info = {} + + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + assert test.start_time == good_start_time + assert test.end_time == good_stop_time + + + def test_get_area_def_ease(self, tmp_path): + """Test getting the area definition for the EASE grid.""" + filename_info = {"grid": "ease"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_lambert_azimuthal_equal_area" + assert area_def.proj_dict["R"] == 6371228 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "laea" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) + + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = {"grid": "stere"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) + + def test_get_area_def_bad(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = 
{"grid": "turnips"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + with pytest.raises(ValueError, match="Unknown grid type: turnips"): + test.get_area_def(None) From ac08013b725f2d096e91250a71c2e1ae34eeb219 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 8 Nov 2023 10:35:20 -0600 Subject: [PATCH 04/16] Remove typing ignore from satpy/readers/osisaf_l3_nc.py --- satpy/readers/osisaf_l3_nc.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 00f1176b6f..293584ffa8 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -15,7 +15,6 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -# type: ignore """A reader for OSI-SAF level 3 products in netCDF format.""" import logging From 23db54b18abf2f33450eaf5823231a8c35152cca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 21:05:26 +0000 Subject: [PATCH 05/16] Add support for flux and sst products. --- satpy/etc/readers/osisaf_nc.yaml | 89 +++++++++++++++++++++++--- satpy/readers/osisaf_l3_nc.py | 105 ++++++++++++++++++++++++++----- 2 files changed, 170 insertions(+), 24 deletions(-) diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml index 214345da3a..479b5a38db 100644 --- a/satpy/etc/readers/osisaf_nc.yaml +++ b/satpy/etc/readers/osisaf_nc.yaml @@ -16,17 +16,26 @@ reader: file_types: osi_sea_ice_conc: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_edge: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_emis: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_type: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sst: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['{start_time:%Y%m%d%H%M%S}-{processing_center}-L3C_GHRSST-SSTskin-{sensor}_{platform_name}-v{version}.nc'] + osi_radflux_stere: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: 
['osisaf_radiative_flux_24h_hl_{grid}-050_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_radflux_grid: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['{start_time:%Y%m%d%H%M%S}-OSISAF-RADFLX-{time_period}-{platform_name}.nc'] datasets: # Shared between various file types @@ -105,3 +114,67 @@ datasets: u: name: u file_type: osi_sea_ice_emis + + # SST product + ist_dtime: + name: ist_dtime + file_type: osi_sst + ist_quality_level: + name: ist_quality_level + file_type: osi_sst + l2p_flags: + name: l2p_flags + file_type: osi_sst + landmask: + name: landmask + file_type: osi_sst + or_number_of_pixels: + name: or_number_of_pixels + file_type: osi_sst + or_number_of_pixels_ist: + name: or_number_of_pixels_ist + file_type: osi_sst + probability_of_ice: + name: probability_of_ice + file_type: osi_sst + probability_of_water: + name: probability_of_water + file_type: osi_sst + quality_level: + name: quality_level + file_type: osi_sst + sea_ice_fraction: + name: sea_ice_fraction + file_type: osi_sst + sea_surface_temperature: + name: sea_surface_temperature + file_type: osi_sst + sses_bias: + name: sses_bias + file_type: osi_sst + sses_standard_deviation: + name: sses_standard_deviation + file_type: osi_sst + sst_dtime: + name: sst_dtime + file_type: osi_sst + surface_temperature: + name: surface_temperature + file_type: osi_sst + tempflag: + name: tempflag + file_type: osi_sst + + # Radiative flux product + dli: + name: dli + file_type: [osi_radflux_stere, osi_radflux_grid] + dli_confidence_level: + name: dli_confidence_level + file_type: [osi_radflux_stere, osi_radflux_grid] + ssi: + name: ssi + file_type: [osi_radflux_stere, osi_radflux_grid] + ssi_confidence_level: + name: ssi_confidence_level + file_type: [osi_radflux_stere, osi_radflux_grid] diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 293584ffa8..b2e6ec6812 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -32,7 +32,14 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): @staticmethod def _parse_datetime(datestr): - return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + try: + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + except ValueError: + try: + return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + except ValueError: + return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") + def _get_ease_grid(self): """Set up the EASE grid.""" @@ -53,11 +60,37 @@ def _get_ease_grid(self): units="deg") return area_def + def _get_geographic_grid(self): + """Set up the EASE grid.""" + from pyresample import create_area_def + + x_size = self["/dimension/lon"] + y_size = self["/dimension/lat"] + lat_0 = self["lat"].min() + lon_0 = self["lon"].min() + lat_1 = self["lat"].max() + lon_1 = self["lon"].max() + area_extent = [lon_0, lat_1, lon_1, lat_0] + area_def = create_area_def(area_id="osisaf_geographic_area", + description="osisaf_geographic_area", + proj_id="osisaf_geographic_area", + projection="+proj=lonlat", width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + def _get_polar_stereographic_grid(self): """Set up the polar stereographic grid.""" from pyresample import create_area_def - - proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] + try: + proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] + except KeyError: + # Some products don't have the proj str, so we construct it ourselves + sma = self["Polar_Stereographic_Grid/attr/semi_major_axis"] + smb = 
self["Polar_Stereographic_Grid/attr/semi_minor_axis"] + lon_0 = self["Polar_Stereographic_Grid/attr/straight_vertical_longitude_from_pole"] + lat_0 = self["Polar_Stereographic_Grid/attr/latitude_of_projection_origin"] + lat_ts = self["Polar_Stereographic_Grid/attr/standard_parallel"] + proj4str = f"+a={sma} +b={smb} +lat_ts={lat_ts} +lon_0={lon_0} +proj=stere +lat_0={lat_0}" x_size = self["/dimension/xc"] y_size = self["/dimension/yc"] p_lowerleft_lat = self["lat"].values[y_size - 1, 0] @@ -75,7 +108,13 @@ def _get_polar_stereographic_grid(self): def get_area_def(self, area_id): """Override abstract baseclass method""" - if self.filename_info["grid"] == "ease": + if self.filetype_info["file_type"] == "osi_radflux_grid": + self.area_def = self._get_geographic_grid() + return self.area_def + elif self.filetype_info["file_type"] == "osi_sst": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + elif self.filename_info["grid"] == "ease": self.area_def = self._get_ease_grid() return self.area_def elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": @@ -117,6 +156,12 @@ def get_dataset(self, dataset_id, ds_info): data = data.where(data >= valid_min, np.nan) data = data.where(data <= valid_max, np.nan) + # Try to get the fill value for the data. + # If there isn't one, assume all remaining pixels are valid. + fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + if fill_value is not None: + data = data.where(data != fill_value, np.nan) + # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") @@ -124,29 +169,57 @@ def get_dataset(self, dataset_id, ds_info): if scale_offset is not None and scale_factor is not None: data = (data * scale_factor + scale_offset) - # Try to get the fill value for the data. - # If there isn't one, assume all remaining pixels are valid. 
- fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") - if fill_value is not None: - data = data.where(data != fill_value, np.nan) - # Set proper dimension names - data = data.rename({"xc": "x", "yc": "y"}) + if self.filetype_info["file_type"] == "osi_radflux_grid": + data = data.rename({"lon": "x", "lat": "y"}) + else: + data = data.rename({"xc": "x", "yc": "y"}) ds_info.update({ "units": ds_info.get("units", file_units), - "platform_name": self["/attr/platform_name"], - "sensor": self["/attr/instrument_type"] + "platform_name": self._get_platname(), + "sensor": self._get_instname() }) ds_info.update(dataset_id.to_dict()) data.attrs.update(ds_info) return data + def _get_instname(self): + """Get instrument name.""" + try: + return self["/attr/instrument_name"] + except KeyError: + try: + return self["/attr/sensor"] + except KeyError: + return "unknown_sensor" + + def _get_platname(self): + """Get platform name.""" + try: + return self["/attr/platform_name"] + except KeyError: + return self["/attr/platform"] + + @property def start_time(self): - return self._parse_datetime(self["/attr/start_date"]) - # return self._parse_datetime(self["/attr/start_date"]) + start_t = self._get_ds_attr("/attr/start_date") + if start_t is None: + start_t = self._get_ds_attr("/attr/start_time") + if start_t is None: + start_t = self._get_ds_attr("/attr/time_coverage_start") + if start_t is None: + raise ValueError("Unknown start time attribute.") + return self._parse_datetime(start_t) @property def end_time(self): - return self._parse_datetime(self["/attr/stop_date"]) + end_t = self._get_ds_attr("/attr/stop_date") + if end_t is None: + end_t = self._get_ds_attr("/attr/stop_time") + if end_t is None: + end_t = self._get_ds_attr("/attr/time_coverage_end") + if end_t is None: + raise ValueError("Unknown stop time attribute.") + return self._parse_datetime(end_t) From aef8d90e610f1c8f62bd6c321f666b9fec038968 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 22:11:00 +0000 Subject: [PATCH 06/16] (wip) Update OSI SAF tests. 
--- satpy/tests/reader_tests/test_osisaf_l3.py | 193 ++++++++++++++------- 1 file changed, 131 insertions(+), 62 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 40cf4539e1..65a9efc9a1 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -30,15 +30,27 @@ stere_ds = xr.DataArray( -999, attrs={"grid_mapping_name": "polar_stereographic", - "false_easting": 0.0, - "false_northing": 0.0, - "semi_major_axis": 6378273.0, - "semi_minor_axis": 6356889.44891, - "straight_vertical_longitude_from_pole": 0.0, - "latitude_of_projection_origin": -90.0, - "standard_parallel": -70.0, - "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", -}) + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", + }) + +stere_ds_noproj = xr.DataArray( + -999, + attrs={"grid_mapping_name": "polar_stereographic", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + }) ease_ds = xr.DataArray( -999, @@ -51,14 +63,34 @@ "proj4_string": "+proj=laea +a=6371228.0 +lat_0=-90 +lon_0=0", }) +attrs_ice = { + "start_date": "2022-12-15 00:00:00", + "stop_date": "2022-12-16 00:00:00", + "platform_name": "Multi-sensor analysis", + "instrument_type": "Multi-sensor analysis"} + +attrs_flux = { + "time_coverage_start": "2023-10-10T00:00:00Z", + "time_coverage_end": "2023-10-10T23:59:59Z", + "platform": "NOAA-19, NOAA-20, Metop-B, Metop-C, SNPP", + "sensor": "AVHRR, VIIRS, AVHRR, AVHRR, VIIRS"} + +attrs_geo = { + "start_time": "20221228T183000Z", + "stop_time": "20221228T193000Z", + "platform": "MSG4"} -class TestOSISAFL3Reader: - """Test OSI-SAF level 3 netCDF reader.""" - def setup_method(self, proj_type): +class OSISAFL3ReaderTests: + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self, tester="ice"): """Create a fake dataset.""" self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) + self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) self.base_data = np.expand_dims(self.base_data, axis=0) + self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) @@ -78,7 +110,7 @@ def setup_method(self, proj_type): ) self.time = xr.DataArray( self.time_data, - dims=("time"), + dims="time", attrs={"standard_name": "projection_y_coordinate", "units": "km"} ) self.lat = xr.DataArray( @@ -103,84 +135,93 @@ def setup_method(self, proj_type): attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} ) + self.ssi_geo = xr.DataArray( + self.base_data_ssi_geo, + dims=("lat", "lon"), + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, 
"units": "W m-2", + "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} + ) + self.ssi = xr.DataArray( + self.base_data_ssi, + dims=("time", "yc", "xc"), + attrs={"_FillValue": -999.99, "units": "W m-2", + "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} + ) + self.uncert = xr.DataArray( + self.unc_data, + dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, + "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + ) data_vars = { - "ice_conc": self.conc, - "total_uncertainty": self.uncert, - "xc": self.xc, - "yc": self.yc, - "time": self.time, - "lat": self.lat, - "lon": self.lon, - "Lambert_Azimuthal_Grid": ease_ds, - "Polar_Stereographic_Grid": stere_ds} - self.fake_dataset = xr.Dataset( - data_vars=data_vars, - attrs={ - "start_date": "2022-12-15 00:00:00", - "stop_date": "2022-12-16 00:00:00", - "platform_name": "Multi-sensor analysis", - "instrument_type": "Multi-sensor analysis"}, - ) + "xc": self.xc, + "yc": self.yc, + "time": self.time, + "lat": self.lat, + "lon": self.lon, } + if tester == "ice": + data_vars["Lambert_Azimuthal_Grid"] = ease_ds + data_vars["Polar_Stereographic_Grid"] = stere_ds + data_vars["ice_conc"] = self.conc + data_vars["total_uncertainty"] = self.uncert + elif tester == "flux_stere": + data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj + data_vars["ssi"] = self.ssi + elif tester == "flux_geo": + data_vars["ssi"] = self.ssi_geo + + if tester == "ice": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) + elif tester == "flux_stere": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) + elif tester == "flux_geo": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) - + OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) - res = test.get_dataset(DataQuery(name="ice_conc"), {"standard_name": "sea_ice_area_fraction"}) + res = test.get_dataset(DataQuery(name=self.varname), {"standard_name": self.stdname}) # Check we remove singleton dimension assert res.shape[0] == 2 assert res.shape[1] == 5 # Test values are correct - test_ds = self.fake_dataset["ice_conc"][0].values - test_ds = np.where(test_ds == -999, np.nan, test_ds) - test_ds = np.where(test_ds > 10000, np.nan, test_ds) - np.testing.assert_allclose(res.values, test_ds / 100) - - res = test.get_dataset(DataQuery(name="total_uncertainty"), {"standard_name": "sea_ice_area_fraction"}) - assert res.shape[0] == 2 - assert res.shape[1] == 5 + test_ds = self.fake_dataset[self.varname][0].values + test_ds = np.where(test_ds == self.fillv, np.nan, test_ds) + test_ds = np.where(test_ds > self.maxv, np.nan, test_ds) + test_ds = test_ds / self.scl + 
np.testing.assert_allclose(res.values, test_ds) with pytest.raises(KeyError): test.get_dataset(DataQuery(name="erroneous dataset"), {"standard_name": "erroneous dataset"}) def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - good_start_time = datetime(2022, 12, 15, 0, 0, 0) - good_stop_time = datetime(2022, 12, 16, 0, 0, 0) - - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) - - assert test.start_time == good_start_time - assert test.end_time == good_stop_time + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + assert test.start_time == self.good_start_time + assert test.end_time == self.good_stop_time def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" filename_info = {"grid": "ease"} - filetype_info = {} + filetype_info = {"file_type": "osi_sea_ice_conc"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) @@ -198,15 +239,12 @@ def test_get_area_def_ease(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) - def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" - filename_info = {"grid": "stere"} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" @@ -224,10 +262,41 @@ def test_get_area_def_stere(self, tmp_path): def test_get_area_def_bad(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" filename_info = {"grid": "turnips"} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) with pytest.raises(ValueError, match="Unknown grid type: turnips"): test.get_area_def(None) + + +class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self): + super().setup_method(tester="ice") + self.filename_info = {"grid": "ease"} + self.filetype_info = {"file_type": "osi_sea_ice_conc"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ice_conc" + self.stdname = "sea_ice_area_fraction" + self.fillv = -999 + self.maxv = 10000 + self.scl = 100 + + +class TestOSISAFL3ReaderFlux(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux files.""" + + def setup_method(self): + super().setup_method(tester="flux_stere") + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_stere"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -999.99 + self.maxv = 
1000 + self.scl = 1 From 89d6b64e2f0bd2050957b6c32440767004345ed5 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 09:56:05 +0000 Subject: [PATCH 07/16] Update OSI-SAF reader and finish the tests. --- satpy/readers/osisaf_l3_nc.py | 28 ++-- satpy/tests/reader_tests/test_osisaf_l3.py | 186 ++++++++++++++++----- 2 files changed, 157 insertions(+), 57 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index b2e6ec6812..8cdd35020c 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -108,20 +108,22 @@ def _get_polar_stereographic_grid(self): def get_area_def(self, area_id): """Override abstract baseclass method""" - if self.filetype_info["file_type"] == "osi_radflux_grid": - self.area_def = self._get_geographic_grid() - return self.area_def - elif self.filetype_info["file_type"] == "osi_sst": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def - elif self.filename_info["grid"] == "ease": - self.area_def = self._get_ease_grid() - return self.area_def - elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def + if "grid" in self.filename_info: + if self.filename_info["grid"] == "ease": + self.area_def = self._get_ease_grid() + return self.area_def + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") else: - raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + if self.filetype_info["file_type"] == "osi_radflux_grid": + self.area_def = self._get_geographic_grid() + return self.area_def + elif self.filetype_info["file_type"] == "osi_sst": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def def _get_ds_attr(self, a_name): """Get a dataset attribute and check it's valid.""" diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 65a9efc9a1..fd035ccbac 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -88,13 +88,17 @@ def setup_method(self, tester="ice"): """Create a fake dataset.""" self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) + self.base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) self.base_data = np.expand_dims(self.base_data, axis=0) self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) + self.base_data_sst = np.expand_dims(self.base_data_sst, axis=0) self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) self.time_data = np.array([1.]) + self.scl = 1. + self.add = 0. 
self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) @@ -138,7 +142,7 @@ def setup_method(self, tester="ice"): self.ssi_geo = xr.DataArray( self.base_data_ssi_geo, dims=("lat", "lon"), - attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "units": "W m-2", + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} ) self.ssi = xr.DataArray( @@ -147,13 +151,12 @@ def setup_method(self, tester="ice"): attrs={"_FillValue": -999.99, "units": "W m-2", "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} ) - self.uncert = xr.DataArray( - self.unc_data, - dims=("yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, - "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + self.sst = xr.DataArray( + self.base_data_sst, + dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", + "valid_min": -8000., "valid_max": 5000., "standard_name": "sea_ice_surface_temperature"} ) - data_vars = { "xc": self.xc, "yc": self.yc, @@ -165,13 +168,15 @@ def setup_method(self, tester="ice"): data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["ice_conc"] = self.conc data_vars["total_uncertainty"] = self.uncert + elif tester == "sst": + data_vars["Polar_Stereographic_Grid"] = stere_ds + data_vars["surface_temperature"] = self.sst elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj data_vars["ssi"] = self.ssi elif tester == "flux_geo": data_vars["ssi"] = self.ssi_geo - - if tester == "ice": + if tester == "ice" or tester == "sst": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) @@ -198,10 +203,10 @@ def test_get_dataset(self, tmp_path): assert res.shape[1] == 5 # Test values are correct - test_ds = self.fake_dataset[self.varname][0].values + test_ds = self.fake_dataset[self.varname].values.squeeze() test_ds = np.where(test_ds == self.fillv, np.nan, test_ds) test_ds = np.where(test_ds > self.maxv, np.nan, test_ds) - test_ds = test_ds / self.scl + test_ds = test_ds / self.scl + self.add np.testing.assert_allclose(res.values, test_ds) with pytest.raises(KeyError): @@ -218,14 +223,60 @@ def test_get_start_and_end_times(self, tmp_path): assert test.start_time == self.good_start_time assert test.end_time == self.good_stop_time + def test_get_area_def_bad(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = {"grid": "turnips"} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) + with pytest.raises(ValueError, match="Unknown grid type: turnips"): + test.get_area_def(None) + + +class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self): + super().setup_method(tester="ice") + self.filename_info = {"grid": "ease"} + self.filetype_info = {"file_type": "osi_sea_ice_conc"} + self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.varname = "ice_conc" + self.stdname = 
"sea_ice_area_fraction" + self.fillv = -999 + self.maxv = 10000 + self.scl = 100 + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + self.filename_info = {"grid": "stere"} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) + + def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" - filename_info = {"grid": "ease"} - filetype_info = {"file_type": "osi_sea_ice_conc"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), {"grid": "ease"}, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" @@ -239,6 +290,22 @@ def test_get_area_def_ease(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) + +class TestOSISAFL3ReaderFluxStere(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux files on stereographic grid.""" + + def setup_method(self): + super().setup_method(tester="flux_stere") + self.filename_info = {"grid": "polstere"} + self.filetype_info = {"file_type": "osi_radflux_stere"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -999.99 + self.maxv = 1000 + self.scl = 1 + def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" @@ -259,44 +326,75 @@ def test_get_area_def_stere(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) - def test_get_area_def_bad(self, tmp_path): - """Test getting the area definition for the polar stereographic grid.""" - filename_info = {"grid": "turnips"} + +class TestOSISAFL3ReaderFluxGeo(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux files on lat/lon grid (GEO sensors).""" + + def setup_method(self): + super().setup_method(tester="flux_geo") + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_grid"} + self.good_start_time = datetime(2022, 12, 28, 18, 30, 0) + self.good_stop_time = datetime(2022, 12, 28, 19, 30, 0) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -32768 + self.maxv = 1000 + self.scl = 10 + + + def test_get_area_def_grid(self, tmp_path): + """Test getting the area definition for the lat/lon grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_grid"} 
self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) - with pytest.raises(ValueError, match="Unknown grid type: turnips"): - test.get_area_def(None) - + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) -class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): - """Test OSI-SAF level 3 netCDF reader ice files.""" + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_geographic_area" + assert area_def.proj_dict["datum"] == "WGS84" + assert area_def.proj_dict["proj"] == "longlat" - def setup_method(self): - super().setup_method(tester="ice") - self.filename_info = {"grid": "ease"} - self.filetype_info = {"file_type": "osi_sea_ice_conc"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) - self.varname = "ice_conc" - self.stdname = "sea_ice_area_fraction" - self.fillv = -999 - self.maxv = 10000 - self.scl = 100 + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-65, -68, -60, -72)) -class TestOSISAFL3ReaderFlux(OSISAFL3ReaderTests): - """Test OSI-SAF level 3 netCDF reader flux files.""" +class TestOSISAFL3ReaderSST(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader surface temperature files.""" def setup_method(self): - super().setup_method(tester="flux_stere") + super().setup_method(tester="sst") self.filename_info = {} - self.filetype_info = {"file_type": "osi_radflux_stere"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) - self.varname = "ssi" - self.stdname = "surface_downwelling_shortwave_flux_in_air" - self.fillv = -999.99 + self.filetype_info = {"file_type": "osi_sst"} + self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.varname = "surface_temperature" + self.stdname = "sea_ice_surface_temperature" + self.fillv = -32768 self.maxv = 1000 - self.scl = 1 + self.scl = 100 + self.add = 273.15 + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) From 35196b684f60b6f950b91113472cd0019db3f228 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 10:47:14 +0000 Subject: [PATCH 08/16] Simplify OSI SAF code. 
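[Editor's illustrative note, not part of the original patch.] The refactor below splits the area lookup into two helpers: products whose file pattern carries a {grid} field (the ice_conc/edge/type/emis and stereographic flux files) keep dispatching on the filename, while the SST and lat/lon flux products, whose patterns have no {grid} field, dispatch on their file type. A minimal Python sketch of the resulting behaviour, mirroring the diff below:

    def get_area_def(self, area_id):
        """Sketch of the dispatch introduced in this commit (names as in the diff)."""
        if "grid" in self.filename_info:
            # "ease" -> EASE grid, "polstere"/"stere" -> polar stereographic
            return self._get_finfo_grid()
        # "osi_radflux_grid" -> lat/lon grid, "osi_sst" -> polar stereographic
        return self._get_ftype_grid()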
--- satpy/readers/osisaf_l3_nc.py | 58 ++++++++++++++-------- satpy/tests/reader_tests/test_osisaf_l3.py | 8 ++- 2 files changed, 39 insertions(+), 27 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 8cdd35020c..1574b6ba74 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -106,24 +106,33 @@ def _get_polar_stereographic_grid(self): return area_def + def _get_finfo_grid(self): + """Get grid in case of filename info being used.""" + if self.filename_info["grid"] == "ease": + self.area_def = self._get_ease_grid() + return self.area_def + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + + def _get_ftype_grid(self): + """Get grid in case of filetype info being used.""" + if self.filetype_info["file_type"] == "osi_radflux_grid": + self.area_def = self._get_geographic_grid() + return self.area_def + elif self.filetype_info["file_type"] == "osi_sst": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + def get_area_def(self, area_id): """Override abstract baseclass method""" if "grid" in self.filename_info: - if self.filename_info["grid"] == "ease": - self.area_def = self._get_ease_grid() - return self.area_def - elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def - else: - raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + return self._get_finfo_grid() else: - if self.filetype_info["file_type"] == "osi_radflux_grid": - self.area_def = self._get_geographic_grid() - return self.area_def - elif self.filetype_info["file_type"] == "osi_sst": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def + return self._get_ftype_grid() + def _get_ds_attr(self, a_name): """Get a dataset attribute and check it's valid.""" @@ -132,23 +141,28 @@ def _get_ds_attr(self, a_name): except KeyError: return None + def _get_ds_units(self, ds_info, var_path): + """Find the units of the datasets.""" + + file_units = ds_info.get("file_units") + if file_units is None: + file_units = self._get_ds_attr(var_path + "/attr/units") + if file_units is None: + file_units = 1 + return file_units + def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" logger.debug(f"Reading {dataset_id['name']} from {self.filename}") var_path = ds_info.get("file_key", f"{dataset_id['name']}") shape = self[var_path + "/shape"] + data = self[var_path] if shape[0] == 1: # Remove the time dimension from dataset - data = self[var_path][0] - else: - data = self[var_path] + data = data[0] - file_units = ds_info.get("file_units") - if file_units is None: - file_units = self._get_ds_attr(var_path + "/attr/units") - if file_units is None: - file_units = 1 + file_units = self._get_ds_units(ds_info, var_path) # Try to get the valid limits for the data. # Not all datasets have these, so fall back on assuming no limits. 
diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index fd035ccbac..59550225b0 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -168,19 +168,17 @@ def setup_method(self, tester="ice"): data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["ice_conc"] = self.conc data_vars["total_uncertainty"] = self.uncert + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "sst": data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["surface_temperature"] = self.sst + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj data_vars["ssi"] = self.ssi - elif tester == "flux_geo": - data_vars["ssi"] = self.ssi_geo - if tester == "ice" or tester == "sst": - self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) - elif tester == "flux_stere": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) elif tester == "flux_geo": + data_vars["ssi"] = self.ssi_geo self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): From fd0ce951ada0a33d0d8313161faa3501d58bb1e9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 11:01:01 +0000 Subject: [PATCH 09/16] Tidy up OSI SAF tests. --- satpy/tests/reader_tests/test_osisaf_l3.py | 128 ++++++++------------- 1 file changed, 47 insertions(+), 81 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 59550225b0..e037884c04 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -86,99 +86,67 @@ class OSISAFL3ReaderTests: def setup_method(self, tester="ice"): """Create a fake dataset.""" - self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) - self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) - self.base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) - self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) - self.base_data = np.expand_dims(self.base_data, axis=0) - self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) - self.base_data_sst = np.expand_dims(self.base_data_sst, axis=0) - self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) - self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) - self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) - self.time_data = np.array([1.]) + base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) + base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) + base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) + base_data = np.expand_dims(base_data, axis=0) + base_data_ssi = np.expand_dims(base_data_ssi, axis=0) + base_data_sst = np.expand_dims(base_data_sst, axis=0) + unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) + yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) + xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) + time_data = np.array([1.]) self.scl = 1. self.add = 0. 
- self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) - self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) - self.xc = xr.DataArray( - self.xc_data, - dims=("yc", "xc"), - attrs={"standard_name": "projection_x_coordinate", "units": "km"} - ) - self.yc = xr.DataArray( - self.yc_data, - dims=("yc", "xc"), - attrs={"standard_name": "projection_y_coordinate", "units": "km"} - ) - self.time = xr.DataArray( - self.time_data, - dims="time", - attrs={"standard_name": "projection_y_coordinate", "units": "km"} - ) - self.lat = xr.DataArray( - self.lat_data, - dims=("yc", "xc"), - attrs={"standard_name": "latitude", "units": "degrees_north"} - ) - self.lon = xr.DataArray( - self.lon_data, - dims=("yc", "xc"), - attrs={"standard_name": "longitude", "units": "degrees_east"} - ) - self.conc = xr.DataArray( - self.base_data, - dims=("time", "yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", - "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"} - ) - self.uncert = xr.DataArray( - self.unc_data, - dims=("yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, - "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} - ) - self.ssi_geo = xr.DataArray( - self.base_data_ssi_geo, - dims=("lat", "lon"), - attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, - "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} - ) - self.ssi = xr.DataArray( - self.base_data_ssi, - dims=("time", "yc", "xc"), - attrs={"_FillValue": -999.99, "units": "W m-2", - "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} - ) - self.sst = xr.DataArray( - self.base_data_sst, - dims=("time", "yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", - "valid_min": -8000., "valid_max": 5000., "standard_name": "sea_ice_surface_temperature"} - ) - data_vars = { - "xc": self.xc, - "yc": self.yc, - "time": self.time, - "lat": self.lat, - "lon": self.lon, } + lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) + lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) + + xc = xr.DataArray(xc_data, dims=("yc", "xc"), + attrs={"standard_name": "projection_x_coordinate", "units": "km"}) + yc = xr.DataArray(yc_data, dims=("yc", "xc"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"}) + time = xr.DataArray(time_data, dims="time", + attrs={"standard_name": "projection_y_coordinate", "units": "km"}) + lat = xr.DataArray(lat_data, dims=("yc", "xc"), + attrs={"standard_name": "latitude", "units": "degrees_north"}) + lon = xr.DataArray(lon_data, dims=("yc", "xc"), + attrs={"standard_name": "longitude", "units": "degrees_east"}) + conc = xr.DataArray(base_data, dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", + "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"}) + uncert = xr.DataArray(unc_data, dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "valid_min": 0, + "valid_max": 10000, "standard_name": "total_uncertainty"}) + ssi_geo = xr.DataArray(base_data_ssi_geo, dims=("lat", "lon"), + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "valid_min": 0., + "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"}) + 
ssi = xr.DataArray(base_data_ssi, dims=("time", "yc", "xc"), + attrs={"_FillValue": -999.99, "units": "W m-2", "valid_min": 0., "valid_max": 1000., + "standard_name": "surface_downwelling_shortwave_flux_in_air"}) + sst = xr.DataArray(base_data_sst, dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", + "valid_min": -8000., "valid_max": 5000., + "standard_name": "sea_ice_surface_temperature"}) + data_vars = {"xc": xc, "yc": yc, "time": time, "lat": lat, "lon": lon} + if tester == "ice": data_vars["Lambert_Azimuthal_Grid"] = ease_ds data_vars["Polar_Stereographic_Grid"] = stere_ds - data_vars["ice_conc"] = self.conc - data_vars["total_uncertainty"] = self.uncert + data_vars["ice_conc"] = conc + data_vars["total_uncertainty"] = uncert self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "sst": data_vars["Polar_Stereographic_Grid"] = stere_ds - data_vars["surface_temperature"] = self.sst + data_vars["surface_temperature"] = sst self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj - data_vars["ssi"] = self.ssi + data_vars["ssi"] = ssi self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) elif tester == "flux_geo": - data_vars["ssi"] = self.ssi_geo + data_vars["ssi"] = ssi_geo self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): @@ -268,7 +236,6 @@ def test_get_area_def_stere(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) - def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" @@ -340,7 +307,6 @@ def setup_method(self): self.maxv = 1000 self.scl = 10 - def test_get_area_def_grid(self, tmp_path): """Test getting the area definition for the lat/lon grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" From bff527e3bb666f2f30683c678bb0e46051c437fb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:49:42 +0000 Subject: [PATCH 10/16] Update the OSI SAF L3 reader with some suggestions from review. --- satpy/readers/osisaf_l3_nc.py | 31 +++++++++------------- satpy/tests/reader_tests/test_osisaf_l3.py | 2 -- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 1574b6ba74..fd0a5cfc20 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. 
@@ -20,8 +18,6 @@ import logging from datetime import datetime -import numpy as np - from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) @@ -29,18 +25,6 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): """Reader for the OSISAF l3 netCDF format.""" - - @staticmethod - def _parse_datetime(datestr): - try: - return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") - except ValueError: - try: - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") - except ValueError: - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") - - def _get_ease_grid(self): """Set up the EASE grid.""" from pyresample import create_area_def @@ -169,14 +153,14 @@ def get_dataset(self, dataset_id, ds_info): valid_min = self._get_ds_attr(var_path + "/attr/valid_min") valid_max = self._get_ds_attr(var_path + "/attr/valid_max") if valid_min is not None and valid_max is not None: - data = data.where(data >= valid_min, np.nan) - data = data.where(data <= valid_max, np.nan) + data = data.where(data >= valid_min) + data = data.where(data <= valid_max) # Try to get the fill value for the data. # If there isn't one, assume all remaining pixels are valid. fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") if fill_value is not None: - data = data.where(data != fill_value, np.nan) + data = data.where(data != fill_value) # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. @@ -217,6 +201,15 @@ def _get_platname(self): except KeyError: return self["/attr/platform"] + @staticmethod + def _parse_datetime(datestr): + try: + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + except ValueError: + try: + return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + except ValueError: + return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") @property def start_time(self): diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index e037884c04..a9a595202b 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. From 7f817731cb6ff125f4e7b3e60afeaa2680f0abe6 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:52:48 +0000 Subject: [PATCH 11/16] Remove unneeded function in OSI-SAF L3 --- satpy/readers/osisaf_l3_nc.py | 31 ++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index fd0a5cfc20..e5e185bb51 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -118,19 +118,12 @@ def get_area_def(self, area_id): return self._get_ftype_grid() - def _get_ds_attr(self, a_name): - """Get a dataset attribute and check it's valid.""" - try: - return self[a_name] - except KeyError: - return None - def _get_ds_units(self, ds_info, var_path): """Find the units of the datasets.""" file_units = ds_info.get("file_units") if file_units is None: - file_units = self._get_ds_attr(var_path + "/attr/units") + file_units = self.get(var_path + "/attr/units") if file_units is None: file_units = 1 return file_units @@ -150,22 +143,22 @@ def get_dataset(self, dataset_id, ds_info): # Try to get the valid limits for the data. # Not all datasets have these, so fall back on assuming no limits. 
- valid_min = self._get_ds_attr(var_path + "/attr/valid_min") - valid_max = self._get_ds_attr(var_path + "/attr/valid_max") + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") if valid_min is not None and valid_max is not None: data = data.where(data >= valid_min) data = data.where(data <= valid_max) # Try to get the fill value for the data. # If there isn't one, assume all remaining pixels are valid. - fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + fill_value = self.get(var_path + "/attr/_FillValue") if fill_value is not None: data = data.where(data != fill_value) # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. - scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") - scale_offset = self._get_ds_attr(var_path + "/attr/add_offset") + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") if scale_offset is not None and scale_factor is not None: data = (data * scale_factor + scale_offset) @@ -213,22 +206,22 @@ def _parse_datetime(datestr): @property def start_time(self): - start_t = self._get_ds_attr("/attr/start_date") + start_t = self.get("/attr/start_date") if start_t is None: - start_t = self._get_ds_attr("/attr/start_time") + start_t = self.get("/attr/start_time") if start_t is None: - start_t = self._get_ds_attr("/attr/time_coverage_start") + start_t = self.get("/attr/time_coverage_start") if start_t is None: raise ValueError("Unknown start time attribute.") return self._parse_datetime(start_t) @property def end_time(self): - end_t = self._get_ds_attr("/attr/stop_date") + end_t = self.get("/attr/stop_date") if end_t is None: - end_t = self._get_ds_attr("/attr/stop_time") + end_t = self.get("/attr/stop_time") if end_t is None: - end_t = self._get_ds_attr("/attr/time_coverage_end") + end_t = self.get("/attr/time_coverage_end") if end_t is None: raise ValueError("Unknown stop time attribute.") return self._parse_datetime(end_t) From c450ad502f0a024984ed1d1e326e6df1d3087055 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:55:11 +0000 Subject: [PATCH 12/16] Update OSI SAF area def docstring. --- satpy/readers/osisaf_l3_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index e5e185bb51..fa93424518 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -111,7 +111,7 @@ def _get_ftype_grid(self): return self.area_def def get_area_def(self, area_id): - """Override abstract baseclass method""" + """Get the area definition, which varies depending on file type and structure.""" if "grid" in self.filename_info: return self._get_finfo_grid() else: From cd864e300701e98b539de0b02471d2e32a0b25cc Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 18:00:02 +0000 Subject: [PATCH 13/16] Add support for NRT ice concentration files to OSI SAF L3 reader. 
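The extra file pattern added below is matched by trollsift, the same mechanism used for the existing patterns. A rough sketch of how a hypothetical NRT ice-concentration filename would be parsed into filename_info under that pattern (the filename is illustrative, not a guaranteed real product name):

from trollsift import Parser

p = Parser("S-OSI_-{product_centre}_-{sensor}-GL_{hemisphere:2s}_CONCn__-{start_time:%Y%m%d%H%M}Z.nc")
info = p.parse("S-OSI_-DMI_-MULTI-GL_NH_CONCn__-202311131200Z.nc")
# info holds roughly: product_centre "DMI", sensor "MULTI", hemisphere "NH",
# start_time datetime(2023, 11, 13, 12, 0). There is no "grid" field in this
# pattern, so get_area_def falls back to the file-type branch for these files.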
--- satpy/etc/readers/osisaf_nc.yaml | 4 ++-- satpy/readers/osisaf_l3_nc.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml index 479b5a38db..d789ae414c 100644 --- a/satpy/etc/readers/osisaf_nc.yaml +++ b/satpy/etc/readers/osisaf_nc.yaml @@ -17,7 +17,8 @@ reader: file_types: osi_sea_ice_conc: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc', + 'S-OSI_-{product_centre}_-{sensor}-GL_{hemisphere:2s}_CONCn__-{start_time:%Y%m%d%H%M}Z.nc'] osi_sea_ice_edge: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] @@ -42,7 +43,6 @@ datasets: status_flag: name: status_flag file_type: [osi_sea_ice_conc, osi_sea_ice_edge, osi_sea_ice_type] - orbit_num_amsr: name: orbit_num_amsr file_type: [osi_sea_ice_edge, osi_sea_ice_type] diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index fa93424518..0cc5e672b3 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -106,7 +106,7 @@ def _get_ftype_grid(self): if self.filetype_info["file_type"] == "osi_radflux_grid": self.area_def = self._get_geographic_grid() return self.area_def - elif self.filetype_info["file_type"] == "osi_sst": + elif self.filetype_info["file_type"] in ["osi_sst", "osi_sea_ice_conc"]: self.area_def = self._get_polar_stereographic_grid() return self.area_def From fd704fe6db0de046a2462efba497dd562904a734 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 20:32:10 +0000 Subject: [PATCH 14/16] Simplify date parsing in OSI SAF reader. --- satpy/readers/osisaf_l3_nc.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 0cc5e672b3..1affb3a883 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -196,13 +196,12 @@ def _get_platname(self): @staticmethod def _parse_datetime(datestr): - try: - return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") - except ValueError: + for dt_format in ("%Y-%m-%d %H:%M:%S","%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"): try: - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + return datetime.strptime(datestr, dt_format) except ValueError: - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") + continue + raise ValueError(f"Unsupported date format: {datestr}") @property def start_time(self): From 00df29f826f2990e9071b14216a9afb7d647a63c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 28 Nov 2023 13:51:43 +0000 Subject: [PATCH 15/16] Refactor date attribute getter for OSI SAF reader. 
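The diff below replaces the chained attribute lookups with a loop over candidate attribute names; combined with the multi-format _parse_datetime from the previous patch, the start and end times degrade gracefully across file variants. A small self-contained illustration of that lookup-then-parse fallback, using made-up attribute values:

from datetime import datetime

attrs = {"time_coverage_start": "2023-11-13T12:00:00Z"}  # pretend global attributes

def first_attr(attrs, names):
    # Return the first candidate attribute that is present, else None.
    for name in names:
        if name in attrs:
            return attrs[name]
    return None

def parse_datetime(datestr):
    # Try each supported timestamp layout in turn.
    for dt_format in ("%Y-%m-%d %H:%M:%S", "%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"):
        try:
            return datetime.strptime(datestr, dt_format)
        except ValueError:
            continue
    raise ValueError(f"Unsupported date format: {datestr}")

start = parse_datetime(first_attr(attrs, ["start_date", "start_time", "time_coverage_start"]))
# -> datetime(2023, 11, 13, 12, 0)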
--- satpy/readers/osisaf_l3_nc.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 1affb3a883..2953cc6dfc 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -205,22 +205,22 @@ def _parse_datetime(datestr): @property def start_time(self): - start_t = self.get("/attr/start_date") - if start_t is None: - start_t = self.get("/attr/start_time") - if start_t is None: - start_t = self.get("/attr/time_coverage_start") + poss_names = ["/attr/start_date", "/attr/start_time", "/attr/time_coverage_start"] + for name in poss_names: + start_t = self.get(name) + if start_t is not None: + break if start_t is None: raise ValueError("Unknown start time attribute.") return self._parse_datetime(start_t) @property def end_time(self): - end_t = self.get("/attr/stop_date") - if end_t is None: - end_t = self.get("/attr/stop_time") - if end_t is None: - end_t = self.get("/attr/time_coverage_end") + poss_names = ["/attr/stop_date", "/attr/stop_time", "/attr/time_coverage_end"] + for name in poss_names: + end_t = self.get(name) + if end_t is not None: + break if end_t is None: raise ValueError("Unknown stop time attribute.") return self._parse_datetime(end_t) From 98061ec7e6207554c10366533226b66ecefc4fb5 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:46:05 +0100 Subject: [PATCH 16/16] Fix style --- satpy/modifiers/spectral.py | 1 - satpy/readers/osisaf_l3_nc.py | 3 ++- satpy/tests/reader_tests/test_osisaf_l3.py | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 2953cc6dfc..56d4773a43 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -120,7 +120,6 @@ def get_area_def(self, area_id): def _get_ds_units(self, ds_info, var_path): """Find the units of the datasets.""" - file_units = ds_info.get("file_units") if file_units is None: file_units = self.get(var_path + "/attr/units") @@ -205,6 +204,7 @@ def _parse_datetime(datestr): @property def start_time(self): + """Get the start time.""" poss_names = ["/attr/start_date", "/attr/start_time", "/attr/time_coverage_start"] for name in poss_names: start_t = self.get(name) @@ -216,6 +216,7 @@ def start_time(self): @property def end_time(self): + """Get the end time.""" poss_names = ["/attr/stop_date", "/attr/stop_time", "/attr/time_coverage_end"] for name in poss_names: end_t = self.get(name) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index a9a595202b..3fa9e5bb35 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -178,7 +178,6 @@ def test_get_dataset(self, tmp_path): def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - tmp_filepath = tmp_path / "fake_dataset.nc" 
self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) @@ -202,6 +201,7 @@ class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader ice files.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="ice") self.filename_info = {"grid": "ease"} self.filetype_info = {"file_type": "osi_sea_ice_conc"} @@ -258,6 +258,7 @@ class TestOSISAFL3ReaderFluxStere(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on stereographic grid.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="flux_stere") self.filename_info = {"grid": "polstere"} self.filetype_info = {"file_type": "osi_radflux_stere"} @@ -294,6 +295,7 @@ class TestOSISAFL3ReaderFluxGeo(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on lat/lon grid (GEO sensors).""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="flux_geo") self.filename_info = {} self.filetype_info = {"file_type": "osi_radflux_grid"} @@ -329,6 +331,7 @@ class TestOSISAFL3ReaderSST(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader surface temperature files.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="sst") self.filename_info = {} self.filetype_info = {"file_type": "osi_sst"}
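Once the series is applied, these products load through the normal Scene interface. A hedged usage sketch: the filename here is hypothetical (it only needs to match the ice_conc file pattern), and it assumes the reader is registered under the osisaf_nc name from the YAML configuration:

from satpy import Scene

filenames = ["ice_conc_nh_polstere-100_multi_202310101200.nc"]  # hypothetical file
scn = Scene(filenames=filenames, reader="osisaf_nc")
scn.load(["ice_conc"])
ice = scn["ice_conc"]
print(ice.attrs["area"])  # polar-stereographic AreaDefinition built by get_area_def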