From 6d29442b4ba1d2a12a886a017d0d622edfac3ffa Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 28 Aug 2024 13:19:45 +0200 Subject: [PATCH] Revert "Fix quotes" This reverts commit bc9f8fccb2209b7d9787ae44d95bb827e7879560. --- doc/source/conf.py | 34 +-- pygac/__init__.py | 2 +- pygac/calibration/noaa.py | 52 ++-- pygac/configuration.py | 8 +- pygac/correct_tsm_issue.py | 2 +- pygac/gac_io.py | 184 +++++++------- pygac/gac_klm.py | 8 +- pygac/gac_reader.py | 6 +- pygac/klm_reader.py | 110 ++++----- pygac/lac_klm.py | 8 +- pygac/lac_reader.py | 4 +- pygac/patmosx_coeff_reader.py | 100 ++++---- pygac/pod_reader.py | 84 +++---- pygac/reader.py | 148 ++++++------ pygac/runner.py | 16 +- pygac/tests/test_io.py | 42 ++-- pygac/tests/test_klm.py | 20 +- .../test_noaa_calibration_coefficients.py | 14 +- pygac/tests/test_pod.py | 56 ++--- pygac/tests/test_reader.py | 224 +++++++++--------- pygac/tests/test_utils.py | 18 +- pygac/utils.py | 46 ++-- pyproject.toml | 2 +- 23 files changed, 594 insertions(+), 594 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index e0690849..388725e1 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -25,33 +25,33 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.todo", - "sphinx.ext.inheritance_diagram"] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', + 'sphinx.ext.inheritance_diagram'] # Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] +templates_path = ['_templates'] # The suffix of source filenames. -source_suffix = ".rst" +source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = "index" +master_doc = 'index' # General information about the project. -project = u"pygac" -copyright = u"2014, Abhay Devasthale, Martin Raspaud and Adam Dybbroe" +project = u'pygac' +copyright = u'2014, Abhay Devasthale, Martin Raspaud and Adam Dybbroe' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = "0.1" +version = '0.1' # The full version, including alpha/beta/rc tags. -release = "0.1" +release = '0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -82,7 +82,7 @@ #show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" +pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] @@ -92,7 +92,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "default" +html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -121,7 +121,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ["_static"] +html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. @@ -165,7 +165,7 @@ #html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = "pygacdoc" +htmlhelp_basename = 'pygacdoc' # -- Options for LaTeX output -------------------------------------------------- @@ -179,8 +179,8 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ("index", "pygac.tex", u"pygac Documentation", - u"Abhay Devasthale, Martin Raspaud and Adam Dybbroe", "manual"), + ('index', 'pygac.tex', u'pygac Documentation', + u'Abhay Devasthale, Martin Raspaud and Adam Dybbroe', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -212,6 +212,6 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ("index", "pygac", u"pygac Documentation", - [u"Abhay Devasthale, Martin Raspaud and Adam Dybbroe"], 1) + ('index', 'pygac', u'pygac Documentation', + [u'Abhay Devasthale, Martin Raspaud and Adam Dybbroe'], 1) ] diff --git a/pygac/__init__.py b/pygac/__init__.py index 8d073303..8c97fc85 100644 --- a/pygac/__init__.py +++ b/pygac/__init__.py @@ -36,4 +36,4 @@ # add a NullHandler to prevent messages in sys.stderr if the using application does # not use logging, but pygac makes logging calls of severity WARNING and greater. # See https://docs.python.org/3/howto/logging.html (Configuring Logging for a Library) -logging.getLogger("pygac").addHandler(logging.NullHandler()) +logging.getLogger('pygac').addHandler(logging.NullHandler()) diff --git a/pygac/calibration/noaa.py b/pygac/calibration/noaa.py index 32960024..f038816b 100644 --- a/pygac/calibration/noaa.py +++ b/pygac/calibration/noaa.py @@ -68,7 +68,7 @@ def calibrate(ds, custom_coeffs=None, coeffs_file=None): delta = (start_time.astype("datetime64[D]") - start_time.astype("datetime64[Y]")).astype(int) jday = delta.astype(int) + 1 - corr = ds.attrs["sun_earth_distance_correction_factor"] + corr = ds.attrs['sun_earth_distance_correction_factor'] # how many reflective channels are there ? 
tot_ref = channels.shape[2] - 3 @@ -108,9 +108,9 @@ def calibrate(ds, custom_coeffs=None, coeffs_file=None): class CoeffStatus(Enum): """Indicates the status of calibration coefficients.""" - NOMINAL = "nominal" - PROVISIONAL = "provisional" - EXPERIMENTAL = "experimental" + NOMINAL = 'nominal' + PROVISIONAL = 'provisional' + EXPERIMENTAL = 'experimental' class Calibrator: @@ -122,17 +122,17 @@ class Calibrator: default_coeffs: dictonary containing default values for all spacecrafts """ version_hashs = { - "963af9b66268475ed500ad7b37da33c5": { - "name": "PATMOS-x, v2017r1", - "status": CoeffStatus.NOMINAL + '963af9b66268475ed500ad7b37da33c5': { + 'name': 'PATMOS-x, v2017r1', + 'status': CoeffStatus.NOMINAL }, - "689386c822de18a07194ac7fd71652ea": { - "name": "PATMOS-x, v2017r1, with provisional coefficients for MetOp-C", - "status": CoeffStatus.PROVISIONAL + '689386c822de18a07194ac7fd71652ea': { + 'name': 'PATMOS-x, v2017r1, with provisional coefficients for MetOp-C', + 'status': CoeffStatus.PROVISIONAL }, - "e8735ec394ecdb87b7edcd261e72d2eb": { - "name": "PATMOS-x, v2023", - "status": CoeffStatus.PROVISIONAL + 'e8735ec394ecdb87b7edcd261e72d2eb': { + 'name': 'PATMOS-x, v2023', + 'status': CoeffStatus.PROVISIONAL }, } fields = [ @@ -141,7 +141,7 @@ class Calibrator: "to_eff_blackbody_slope", "date_of_launch", "d", "spacecraft", "version" ] - Calibrator = namedtuple("Calibrator", fields) + Calibrator = namedtuple('Calibrator', fields) default_coeffs = None default_file = None default_version = None @@ -178,21 +178,21 @@ def __new__(cls, spacecraft, custom_coeffs=None, coeffs_file=None): for key in ("dark_count", "gain_switch", "s0", "s1", "s2"): arraycoeffs[key] = np.array([ coeffs[channel][key] - for channel in ("channel_1", "channel_2", "channel_3a") + for channel in ('channel_1', 'channel_2', 'channel_3a') ], dtype=float) # thermal channels for key in ("centroid_wavenumber", "space_radiance", "to_eff_blackbody_intercept", "to_eff_blackbody_slope"): arraycoeffs[key] = np.array([ coeffs[channel][key] - for channel in ("channel_3b", "channel_4", "channel_5") + for channel in ('channel_3b', 'channel_4', 'channel_5') ], dtype=float) arraycoeffs["b"] = np.array([ [ coeffs[channel][key] for key in ("b0", "b1", "b2") ] - for channel in ("channel_3b", "channel_4", "channel_5") + for channel in ('channel_3b', 'channel_4', 'channel_5') ], dtype=float) # thermometers # Note, that "thermometer_0" does not exists, and is filled with zeros to @@ -205,7 +205,7 @@ def __new__(cls, spacecraft, custom_coeffs=None, coeffs_file=None): for d in range(5) ], dtype=float) # parse date of launch - date_of_launch_str = coeffs["date_of_launch"].replace("Z", "+00:00") + date_of_launch_str = coeffs["date_of_launch"].replace('Z', '+00:00') if sys.version_info < (3, 7): # Note that here any time information is lost import dateutil.parser @@ -240,7 +240,7 @@ def read_coeffs(cls, coeffs_file): else: LOG.debug("Read PyGAC internal calibration coefficients.") coeffs_file = files("pygac") / "data/calibration.json" - with open(coeffs_file, mode="rb") as json_file: + with open(coeffs_file, mode='rb') as json_file: content = json_file.read() coeffs = json.loads(content) version = cls._get_coeffs_version(content) @@ -253,10 +253,10 @@ def _get_coeffs_version(cls, coeff_file_content): digest = md5_hash.hexdigest() version_dict = cls.version_hashs.get( digest, - {"name": None, "status": None} + {'name': None, 'status': None} ) - version = version_dict["name"] - status = version_dict["status"] + version = version_dict['name'] + 
status = version_dict['status'] if version is None: warning = "Unknown calibration coefficients version!" warnings.warn(warning, RuntimeWarning) @@ -265,7 +265,7 @@ def _get_coeffs_version(cls, coeff_file_content): LOG.info('Identified calibration coefficients version "%s".', version) if status != CoeffStatus.NOMINAL: - warning = "Using {} calibration coefficients".format(status) + warning = 'Using {} calibration coefficients'.format(status) warnings.warn(warning, RuntimeWarning) LOG.warning(warning) return version @@ -536,9 +536,9 @@ def calibrate_thermal(counts, prt, ict, space, line_numbers, channel, cal): wlength = 3 weighting_function = np.ones(wlength, dtype=float) / wlength - tprt_convolved = np.convolve(tprt, weighting_function, "same") - ict_convolved = np.convolve(ict, weighting_function, "same") - space_convolved = np.convolve(space, weighting_function, "same") + tprt_convolved = np.convolve(tprt, weighting_function, 'same') + ict_convolved = np.convolve(ict, weighting_function, 'same') + space_convolved = np.convolve(space, weighting_function, 'same') # take care of the beginning and end tprt_convolved[0:(wlength - 1) // 2] = tprt_convolved[(wlength - 1) // 2] diff --git a/pygac/configuration.py b/pygac/configuration.py index 4ceb7eed..9ca301e2 100644 --- a/pygac/configuration.py +++ b/pygac/configuration.py @@ -42,7 +42,7 @@ class FileNotFoundError(OSError): class Configuration(configparser.ConfigParser, object): """Configuration container for pygac.""" - config_file = "" + config_file = '' def read(self, config_file): """Read and parse the configuration file @@ -69,8 +69,8 @@ def read(self, config_file): def get(self, *args, **kwargs): """python 2 compatibility for fallback attribute""" if sys.version_info.major < 3: - if "fallback" in kwargs: - fallback = kwargs.pop("fallback") + if 'fallback' in kwargs: + fallback = kwargs.pop('fallback') else: fallback = None try: @@ -99,7 +99,7 @@ def get_config(initialized=True): try: config_file = os.environ["PYGAC_CONFIG_FILE"] except KeyError: - LOG.error("Environment variable PYGAC_CONFIG_FILE not set!") + LOG.error('Environment variable PYGAC_CONFIG_FILE not set!') raise _config.read(config_file) return _config diff --git a/pygac/correct_tsm_issue.py b/pygac/correct_tsm_issue.py index 12ea6027..f940125f 100644 --- a/pygac/correct_tsm_issue.py +++ b/pygac/correct_tsm_issue.py @@ -409,7 +409,7 @@ def std_filter(data, box_size): # need to surround the data with NaNs to calculate values at the boundary padded_data = np.pad( data, (border, border), - mode="constant", + mode='constant', constant_values=np.nan ) windows = _rolling_window(padded_data, size) diff --git a/pygac/gac_io.py b/pygac/gac_io.py index 5c2311d6..8b08b2d5 100644 --- a/pygac/gac_io.py +++ b/pygac/gac_io.py @@ -52,20 +52,20 @@ def save_gac(satellite_name, gac_file, meta_data, output_file_prefix, avhrr_dir, qual_dir, sunsatangles_dir): - midnight_scanline = meta_data["midnight_scanline"] - miss_lines = meta_data["missing_scanlines"] - corr = meta_data["sun_earth_distance_correction_factor"] + midnight_scanline = meta_data['midnight_scanline'] + miss_lines = meta_data['missing_scanlines'] + corr = meta_data['sun_earth_distance_correction_factor'] last_scan_line_number = qual_flags[-1, 0] # Strip invalid coordinates first_valid_lat, last_valid_lat = strip_invalid_lat(lats) if first_valid_lat > start_line: - LOG.info("New start_line chosen (due to invalid lat/lon " - "info) = " + str(first_valid_lat)) + LOG.info('New start_line chosen (due to invalid lat/lon ' + 'info) = ' 
+ str(first_valid_lat)) if end_line > last_valid_lat: - LOG.info("New end_line chosen (due to invalid lat/lon " - "info) = " + str(last_valid_lat)) + LOG.info('New end_line chosen (due to invalid lat/lon ' + 'info) = ' + str(last_valid_lat)) # Check user-defined scanlines start_line, end_line = check_user_scanlines( @@ -209,35 +209,35 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, t_obj = time.strptime(enddate + endtime[0:6], "%Y%m%d%H%M%S") endtime_sec1970 = calendar.timegm(t_obj) - LOG.info("Output file prefix = " + str(output_file_prefix)) - LOG.info("AVHRR data will be written to " + str(avhrr_dir)) - ofn = os.path.join(avhrr_dir, (output_file_prefix + "_avhrr_" + - satellite_name + "_99999_" + - startdate + "T" + starttime + "Z_" + - enddate + "T" + endtime + "Z.h5")) + LOG.info('Output file prefix = ' + str(output_file_prefix)) + LOG.info('AVHRR data will be written to ' + str(avhrr_dir)) + ofn = os.path.join(avhrr_dir, (output_file_prefix + '_avhrr_' + + satellite_name + '_99999_' + + startdate + 'T' + starttime + 'Z_' + + enddate + 'T' + endtime + 'Z.h5')) - LOG.info("Filename: " + str(os.path.basename(ofn))) + LOG.info('Filename: ' + str(os.path.basename(ofn))) fout = h5py.File(ofn, "w") - dset1 = fout.create_dataset("/image1/data", dtype="int16", data=ref1) - dset2 = fout.create_dataset("/image2/data", dtype="int16", data=ref2) - dset3 = fout.create_dataset("/image3/data", dtype="int16", data=bt3) - dset4 = fout.create_dataset("/image4/data", dtype="int16", data=bt4) - dset5 = fout.create_dataset("/image5/data", dtype="int16", data=bt5) - dset6 = fout.create_dataset("/image6/data", dtype="int16", data=ref3) - dset7 = fout.create_dataset("/where/lat/data", dtype="int32", + dset1 = fout.create_dataset("/image1/data", dtype='int16', data=ref1) + dset2 = fout.create_dataset("/image2/data", dtype='int16', data=ref2) + dset3 = fout.create_dataset("/image3/data", dtype='int16', data=bt3) + dset4 = fout.create_dataset("/image4/data", dtype='int16', data=bt4) + dset5 = fout.create_dataset("/image5/data", dtype='int16', data=bt5) + dset6 = fout.create_dataset("/image6/data", dtype='int16', data=ref3) + dset7 = fout.create_dataset("/where/lat/data", dtype='int32', data=arrLat_full) - dset8 = fout.create_dataset("/where/lon/data", dtype="int32", + dset8 = fout.create_dataset("/where/lon/data", dtype='int32', data=arrLon_full) del dset8 channellist = [] - channellist.append("channel1".encode("utf8")) - channellist.append("channel2".encode("utf8")) - channellist.append("channel3b".encode("utf8")) - channellist.append("channel4".encode("utf8")) - channellist.append("channel5".encode("utf8")) - channellist.append("channel3a".encode("utf8")) + channellist.append("channel1".encode('utf8')) + channellist.append("channel2".encode('utf8')) + channellist.append("channel3b".encode('utf8')) + channellist.append("channel4".encode('utf8')) + channellist.append("channel5".encode('utf8')) + channellist.append("channel3a".encode('utf8')) dset10 = fout.create_dataset("/how/channel_list", data=channellist) del dset10 @@ -282,8 +282,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g1.attrs["product"] = np.string_("SATCH") g1.attrs["quantity"] = np.string_("REFL") - g1.attrs["dataset_name"] = np.string_("Channel 1 reflectance") - g1.attrs["units"] = np.string_("%") + g1.attrs["dataset_name"] = np.string_('Channel 1 reflectance') + g1.attrs["units"] = np.string_('%') g1.attrs["gain"] = np.float32(0.01) g1.attrs["offset"] = np.float32(0.0) 
g1.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -295,8 +295,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g2.attrs["product"] = np.string_("SATCH") g2.attrs["quantity"] = np.string_("REFL") - g2.attrs["dataset_name"] = np.string_("Channel 2 reflectance") - g2.attrs["units"] = np.string_("%") + g2.attrs["dataset_name"] = np.string_('Channel 2 reflectance') + g2.attrs["units"] = np.string_('%') g2.attrs["gain"] = np.float32(0.01) g2.attrs["offset"] = np.float32(0.0) g2.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -308,8 +308,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g6.attrs["product"] = np.string_("SATCH") g6.attrs["quantity"] = np.string_("REFL") - g6.attrs["dataset_name"] = np.string_("Channel 3a reflectance") - g6.attrs["units"] = np.string_("%") + g6.attrs["dataset_name"] = np.string_('Channel 3a reflectance') + g6.attrs["units"] = np.string_('%') g6.attrs["gain"] = np.float32(0.01) g6.attrs["offset"] = np.float32(0.0) g6.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -321,8 +321,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g3.attrs["product"] = np.string_("SATCH") g3.attrs["quantity"] = np.string_("TB") - g3.attrs["dataset_name"] = np.string_("Channel 3b brightness temperature") - g3.attrs["units"] = np.string_("K") + g3.attrs["dataset_name"] = np.string_('Channel 3b brightness temperature') + g3.attrs["units"] = np.string_('K') g3.attrs["gain"] = np.float32(0.01) g3.attrs["offset"] = np.float32(273.15) g3.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -334,8 +334,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g4.attrs["product"] = np.string_("SATCH") g4.attrs["quantity"] = np.string_("TB") - g4.attrs["dataset_name"] = np.string_("Channel 4 brightness temperature") - g4.attrs["units"] = np.string_("K") + g4.attrs["dataset_name"] = np.string_('Channel 4 brightness temperature') + g4.attrs["units"] = np.string_('K') g4.attrs["gain"] = np.float32(0.01) g4.attrs["offset"] = np.float32(273.15) g4.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -347,8 +347,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g5.attrs["product"] = np.string_("SATCH") g5.attrs["quantity"] = np.string_("TB") - g5.attrs["dataset_name"] = np.string_("Channel 5 brightness temperature") - g5.attrs["units"] = np.string_("K") + g5.attrs["dataset_name"] = np.string_('Channel 5 brightness temperature') + g5.attrs["units"] = np.string_('K') g5.attrs["gain"] = np.float32(0.01) g5.attrs["offset"] = np.float32(273.15) g5.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -358,8 +358,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g5.attrs["startdate"] = np.string_(startdate) g5.attrs["enddate"] = np.string_(enddate) - g7.attrs["dataset_name"] = np.string_("Latitude") - g7.attrs["units"] = np.string_("Deg") + g7.attrs["dataset_name"] = np.string_('Latitude') + g7.attrs["units"] = np.string_('Deg') g7.attrs["gain"] = np.float32(0.0010) g7.attrs["offset"] = np.float32(0.0) g7.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON) @@ -369,8 +369,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g7.attrs["startdate"] = np.string_(startdate) g7.attrs["enddate"] = np.string_(enddate) - g8.attrs["dataset_name"] = np.string_("Longitude") - g8.attrs["units"] = np.string_("Deg") + g8.attrs["dataset_name"] = np.string_('Longitude') + g8.attrs["units"] = np.string_('Deg') 
g8.attrs["gain"] = np.float32(0.0010) g8.attrs["offset"] = np.float32(0.0) g8.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON) @@ -419,25 +419,25 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, fout.close() - LOG.info("Sun and Satellite viewing angles will be " + - "written to " + str(sunsatangles_dir)) + LOG.info('Sun and Satellite viewing angles will be ' + + 'written to ' + str(sunsatangles_dir)) ofn = os.path.join(sunsatangles_dir, - (output_file_prefix + "_sunsatangles_" + - satellite_name + "_99999_" + startdate + - "T" + starttime + "Z_" + - enddate + "T" + endtime + "Z.h5")) + (output_file_prefix + '_sunsatangles_' + + satellite_name + '_99999_' + startdate + + 'T' + starttime + 'Z_' + + enddate + 'T' + endtime + 'Z.h5')) - LOG.info("Filename: " + str(os.path.basename(ofn))) + LOG.info('Filename: ' + str(os.path.basename(ofn))) fout = h5py.File(ofn, "w") - dset1 = fout.create_dataset("/image1/data", dtype="int16", data=arrSZA) - dset2 = fout.create_dataset("/image2/data", dtype="int16", data=arrSTZ) - dset3 = fout.create_dataset("/image3/data", dtype="int16", data=arrRAA) - dset4 = fout.create_dataset("/image4/data", dtype="int16", data=arrSAA) - dset5 = fout.create_dataset("/image5/data", dtype="int16", data=arrSTA) - dset6 = fout.create_dataset("/where/lat/data", dtype="int32", + dset1 = fout.create_dataset("/image1/data", dtype='int16', data=arrSZA) + dset2 = fout.create_dataset("/image2/data", dtype='int16', data=arrSTZ) + dset3 = fout.create_dataset("/image3/data", dtype='int16', data=arrRAA) + dset4 = fout.create_dataset("/image4/data", dtype='int16', data=arrSAA) + dset5 = fout.create_dataset("/image5/data", dtype='int16', data=arrSTA) + dset6 = fout.create_dataset("/where/lat/data", dtype='int32', data=arrLat_full) - dset7 = fout.create_dataset("/where/lon/data", dtype="int32", + dset7 = fout.create_dataset("/where/lon/data", dtype='int32', data=arrLon_full) del dset4, dset5, dset6, dset7 @@ -450,12 +450,12 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g5 = fout.require_group("/image5") g6 = fout.require_group("/where") - g1.attrs["description"] = np.string_("Solar zenith angle") - g2.attrs["description"] = np.string_("Satellite zenith angle") + g1.attrs["description"] = np.string_('Solar zenith angle') + g2.attrs["description"] = np.string_('Satellite zenith angle') g3.attrs["description"] = np.string_( - "Relative satellite-sun azimuth angle") - g4.attrs["description"] = np.string_("Solar azimuth angle") - g5.attrs["description"] = np.string_("Satellite azimuth angle") + 'Relative satellite-sun azimuth angle') + g4.attrs["description"] = np.string_('Solar azimuth angle') + g5.attrs["description"] = np.string_('Satellite azimuth angle') g6.attrs["num_of_pixels"] = np.int32(arrSZA.shape[1]) g6.attrs["num_of_lines"] = np.int32(arrSZA.shape[0]) g6.attrs["xscale"] = np.float32(0.0) @@ -476,8 +476,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g1.attrs["product"] = np.string_("SUNZ") g1.attrs["quantity"] = np.string_("DEG") - g1.attrs["dataset_name"] = np.string_("Solar zenith angle") - g1.attrs["units"] = np.string_("Deg") + g1.attrs["dataset_name"] = np.string_('Solar zenith angle') + g1.attrs["units"] = np.string_('Deg') g1.attrs["gain"] = np.float32(0.01) g1.attrs["offset"] = np.float32(0.0) g1.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -489,8 +489,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g2.attrs["product"] = 
np.string_("SATZ") g2.attrs["quantity"] = np.string_("DEG") - g2.attrs["dataset_name"] = np.string_("Satellite zenith angle") - g2.attrs["units"] = np.string_("Deg") + g2.attrs["dataset_name"] = np.string_('Satellite zenith angle') + g2.attrs["units"] = np.string_('Deg') g2.attrs["gain"] = np.float32(0.01) g2.attrs["offset"] = np.float32(0.0) g2.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -503,8 +503,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g3.attrs["product"] = np.string_("SSAZD") g3.attrs["quantity"] = np.string_("DEG") g3.attrs["dataset_name"] = np.string_( - "Relative satellite-sun azimuth angle") - g3.attrs["units"] = np.string_("Deg") + 'Relative satellite-sun azimuth angle') + g3.attrs["units"] = np.string_('Deg') g3.attrs["gain"] = np.float32(0.01) g3.attrs["offset"] = np.float32(0.0) g3.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -516,8 +516,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g4.attrs["product"] = np.string_("SUNA") g4.attrs["quantity"] = np.string_("DEG") - g4.attrs["dataset_name"] = np.string_("Solar azimuth angle") - g4.attrs["units"] = np.string_("Deg") + g4.attrs["dataset_name"] = np.string_('Solar azimuth angle') + g4.attrs["units"] = np.string_('Deg') g4.attrs["gain"] = np.float32(0.01) g4.attrs["offset"] = np.float32(0.0) g4.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -529,8 +529,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g5.attrs["product"] = np.string_("SATA") g5.attrs["quantity"] = np.string_("DEG") - g5.attrs["dataset_name"] = np.string_("Satellite azimuth angle") - g5.attrs["units"] = np.string_("Deg") + g5.attrs["dataset_name"] = np.string_('Satellite azimuth angle') + g5.attrs["units"] = np.string_('Deg') g5.attrs["gain"] = np.float32(0.01) g5.attrs["offset"] = np.float32(0.0) g5.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -540,8 +540,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g5.attrs["startdate"] = np.string_(startdate) g5.attrs["enddate"] = np.string_(enddate) - g6.attrs["dataset_name"] = np.string_("Latitude") - g6.attrs["units"] = np.string_("Deg") + g6.attrs["dataset_name"] = np.string_('Latitude') + g6.attrs["units"] = np.string_('Deg') g6.attrs["gain"] = np.float32(0.0010) g6.attrs["offset"] = np.float32(0.0) g6.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON) @@ -551,8 +551,8 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g6.attrs["startdate"] = np.string_(startdate) g6.attrs["enddate"] = np.string_(enddate) - g7.attrs["dataset_name"] = np.string_("Longitude") - g7.attrs["units"] = np.string_("Deg") + g7.attrs["dataset_name"] = np.string_('Longitude') + g7.attrs["units"] = np.string_('Deg') g7.attrs["gain"] = np.float32(0.0010) g7.attrs["offset"] = np.float32(0.0) g7.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON) @@ -583,25 +583,25 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, fout.close() - LOG.info("Quality flags will be " + - "written to " + str(qual_dir)) + LOG.info('Quality flags will be ' + + 'written to ' + str(qual_dir)) ofn = os.path.join(qual_dir, - (output_file_prefix + "_qualflags_" + - satellite_name + "_99999_" + startdate + - "T" + starttime + "Z_" + - enddate + "T" + endtime + "Z.h5")) + (output_file_prefix + '_qualflags_' + + satellite_name + '_99999_' + startdate + + 'T' + starttime + 'Z_' + + enddate + 'T' + endtime + 'Z.h5')) - LOG.info("Filename: " + 
str(os.path.basename(ofn))) + LOG.info('Filename: ' + str(os.path.basename(ofn))) fout = h5py.File(ofn, "w") g1 = fout.require_group("/qual_flags") - dset1 = g1.create_dataset("data", dtype="int16", data=qual_flags) + dset1 = g1.create_dataset("data", dtype='int16', data=qual_flags) del dset1 g1.attrs["product"] = np.string_("QFLAG") g1.attrs["quantity"] = np.string_("INT") - g1.attrs["dataset_name"] = np.string_("Scanline quality flags") - g1.attrs["units"] = np.string_("None") + g1.attrs["dataset_name"] = np.string_('Scanline quality flags') + g1.attrs["units"] = np.string_('None') g1.attrs["gain"] = np.int32(1) g1.attrs["offset"] = np.int32(0) g1.attrs["missingdata"] = np.int32(MISSING_DATA) @@ -615,13 +615,13 @@ def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime, g1.attrs["last_scan_line_number"] = last_scan_line_number g2 = fout.require_group("/ancillary") - dset2 = g2.create_dataset("missing_scanlines", dtype="int16", + dset2 = g2.create_dataset("missing_scanlines", dtype='int16', data=miss_lines) del dset2 - dset3 = g2.create_dataset("scanline_timestamps", dtype="int64", - data=xutcs.astype("int64")) - dset3.attrs["units"] = "Milliseconds since 1970-01-01 00:00:00 UTC" - dset3.attrs["calendar"] = "standard" + dset3 = g2.create_dataset("scanline_timestamps", dtype='int64', + data=xutcs.astype('int64')) + dset3.attrs['units'] = 'Milliseconds since 1970-01-01 00:00:00 UTC' + dset3.attrs['calendar'] = 'standard' g2.attrs["midnight_scanline"] = np.string_(midnight_scanline) fout.close() diff --git a/pygac/gac_klm.py b/pygac/gac_klm.py index 7d086974..3613ab01 100644 --- a/pygac/gac_klm.py +++ b/pygac/gac_klm.py @@ -134,10 +134,10 @@ ("frame_sync", ">u2", (6, )), ("id", ">u2", (2, )), ("time_code", ">u2", (4, )), - ("telemetry", [("ramp_calibration", ">u2", (5, )), - ("PRT", ">u2", (3, )), - ("ch3_patch_temp", ">u2"), - ("spare", ">u2"), ]), + ('telemetry', [("ramp_calibration", '>u2', (5, )), + ("PRT", '>u2', (3, )), + ("ch3_patch_temp", '>u2'), + ("spare", '>u2'), ]), ("back_scan", ">u2", (30, )), ("space_data", ">u2", (50, )), ("sync_delta", ">u2"), diff --git a/pygac/gac_reader.py b/pygac/gac_reader.py index ecf3efeb..daa82e12 100644 --- a/pygac/gac_reader.py +++ b/pygac/gac_reader.py @@ -58,9 +58,9 @@ def _validate_header(cls, header): # call super to enter the Method Resolution Order (MRO) super(GACReader, cls)._validate_header(header) LOG.debug("validate header") - data_set_name = header["data_set_name"].decode() + data_set_name = header['data_set_name'].decode() # split header into parts creation_site, transfer_mode, platform_id = ( - data_set_name.split(".")[:3]) - if transfer_mode != "GHRR": + data_set_name.split('.')[:3]) + if transfer_mode != 'GHRR': raise ReaderError('Improper transfer mode "%s"!' 
% transfer_mode) diff --git a/pygac/klm_reader.py b/pygac/klm_reader.py index db840a82..acc572ef 100644 --- a/pygac/klm_reader.py +++ b/pygac/klm_reader.py @@ -579,60 +579,60 @@ class KLM_QualityIndicator(IntFlag): ("zero_fill9", ">i2")]) -ars_header = np.dtype([("COST_number", "S6"), - ("SAA_number", "S8"), - ("order_creation_year", "S4"), - ("order_creation_day_of_year", "S3"), - ("processing_site_code", "S1"), - ("processing_software", "S8"), +ars_header = np.dtype([('COST_number', 'S6'), + ('SAA_number', 'S8'), + ('order_creation_year', 'S4'), + ('order_creation_day_of_year', 'S3'), + ('processing_site_code', 'S1'), + ('processing_software', 'S8'), # data selection criteria - ("data_set_name", "S42"), - ("ascii_blank_", "S2"), - ("select_flag", "S1"), - ("beginning_latitude", "S3"), - ("ending_latitude", "S3"), - ("beginning_longitude", "S4"), - ("ending_longitude", "S4"), - ("start_hour", "S2"), - ("start_minute", "S2"), - ("number_of_minutes", "S3"), - ("appended_data_flag", "S1"), - ("channel_select_flag", "S1", (20, )), + ('data_set_name', 'S42'), + ('ascii_blank_', 'S2'), + ('select_flag', 'S1'), + ('beginning_latitude', 'S3'), + ('ending_latitude', 'S3'), + ('beginning_longitude', 'S4'), + ('ending_longitude', 'S4'), + ('start_hour', 'S2'), + ('start_minute', 'S2'), + ('number_of_minutes', 'S3'), + ('appended_data_flag', 'S1'), + ('channel_select_flag', 'S1', (20, )), # dataset summary - ("ascii_blank__", "S29"), - ("ascend_descend_flag", "S1"), - ("first_latitude", "S3"), - ("last_latitude", "S3"), - ("first_longitude", "S4"), - ("last_longitude", "S4"), - ("data_format", "S20"), - ("size_of_record", "S6"), - ("number_of_records", "S6"), + ('ascii_blank__', 'S29'), + ('ascend_descend_flag', 'S1'), + ('first_latitude', 'S3'), + ('last_latitude', 'S3'), + ('first_longitude', 'S4'), + ('last_longitude', 'S4'), + ('data_format', 'S20'), + ('size_of_record', 'S6'), + ('number_of_records', 'S6'), # filler - ("ascii_blank", "S319") + ('ascii_blank', 'S319') ]) class KLMReader(Reader): """Reader for KLM data.""" - spacecraft_names = {4: "noaa15", - 2: "noaa16", - 6: "noaa17", - 7: "noaa18", - 8: "noaa19", - 12: "metopa", - 11: "metopb", - 13: "metopc", + spacecraft_names = {4: 'noaa15', + 2: 'noaa16', + 6: 'noaa17', + 7: 'noaa18', + 8: 'noaa19', + 12: 'metopa', + 11: 'metopb', + 13: 'metopc', } - spacecrafts_orbital = {4: "noaa 15", - 2: "noaa 16", - 6: "noaa 17", - 7: "noaa 18", - 8: "noaa 19", - 12: "metop 02", - 11: "metop 01", - 13: "metop 03", + spacecrafts_orbital = {4: 'noaa 15', + 2: 'noaa 16', + 6: 'noaa 17', + 7: 'noaa 18', + 8: 'noaa 19', + 12: 'metop 02', + 11: 'metop 01', + 13: 'metop 03', } tsm_affected_intervals = TSM_AFFECTED_INTERVALS_KLM @@ -659,7 +659,7 @@ def read(self, filename, fileobj=None): # file objects to (io.FileIO, io.BufferedReader, io.BufferedWriter) # see: numpy.compat.py3k.isfileobj self.filename = filename - LOG.info("Reading %s", self.filename) + LOG.info('Reading %s', self.filename) with file_opener(fileobj or filename) as fd_: self.ars_head, self.head = self.read_header( filename, fileobj=fd_) @@ -678,7 +678,7 @@ def read(self, filename, fileobj=None): fd_.read(analog_telemetry_v2.itemsize), dtype=analog_telemetry_v2, count=1) # LAC: 1, GAC: 2, ... 
- self.data_type = self.head["data_type_code"] + self.data_type = self.head['data_type_code'] # read until end of file fd_.seek(self.offset + ars_offset, 0) buffer = fd_.read() @@ -705,7 +705,7 @@ def read_header(cls, filename, fileobj=None): _ars_head, = np.frombuffer( fd_.read(ars_header.itemsize), dtype=ars_header, count=1) - if _ars_head["data_format"].startswith(b"NOAA Level 1b"): + if _ars_head['data_format'].startswith(b'NOAA Level 1b'): ars_head = _ars_head.copy() else: fd_.seek(0) @@ -724,11 +724,11 @@ def _validate_header(cls, header): # call super to enter the Method Resolution Order (MRO) super(KLMReader, cls)._validate_header(header) LOG.debug("validate header") - data_set_name = header["data_set_name"].decode() + data_set_name = header['data_set_name'].decode() # split header into parts creation_site, transfer_mode, platform_id = ( - data_set_name.split(".")[:3]) - allowed_ids = ["NK", "NL", "NM", "NN", "NP", "M1", "M2", "M3"] + data_set_name.split('.')[:3]) + allowed_ids = ['NK', 'NL', 'NM', 'NN', 'NP', 'M1', 'M2', 'M3'] if platform_id not in allowed_ids: raise ReaderError('Improper platform id "%s"!' % platform_id) @@ -741,7 +741,7 @@ def get_telemetry(self): space_counts: np.array """ - prt_counts = np.mean(self.scans["telemetry"]["PRT"], axis=1) + prt_counts = np.mean(self.scans["telemetry"]['PRT'], axis=1) # getting ICT counts @@ -775,14 +775,14 @@ def get_header_timestamp(self): A ValueError if the timestamp is corrupt. """ - year = self.head["start_of_data_set_year"] - jday = self.head["start_of_data_set_day_of_year"] - msec = self.head["start_of_data_set_utc_time_of_day"] + year = self.head['start_of_data_set_year'] + jday = self.head['start_of_data_set_day_of_year'] + msec = self.head['start_of_data_set_utc_time_of_day'] try: return self.to_datetime(self.to_datetime64(year=year, jday=jday, msec=msec)) except ValueError as err: - raise ValueError("Corrupt header timestamp: {0}".format(err)) + raise ValueError('Corrupt header timestamp: {0}'.format(err)) def _get_times(self): """Get the times of the scanlines.""" diff --git a/pygac/lac_klm.py b/pygac/lac_klm.py index d202f6d2..ccefc3a4 100644 --- a/pygac/lac_klm.py +++ b/pygac/lac_klm.py @@ -133,10 +133,10 @@ ("frame_sync", ">u2", (6, )), ("id", ">u2", (2, )), ("time_code", ">u2", (4, )), - ("telemetry", [("ramp_calibration", ">u2", (5, )), - ("PRT", ">u2", (3, )), - ("ch3_patch_temp", ">u2"), - ("spare", ">u2"), ]), + ('telemetry', [("ramp_calibration", '>u2', (5, )), + ("PRT", '>u2', (3, )), + ("ch3_patch_temp", '>u2'), + ("spare", '>u2'), ]), ("back_scan", ">u2", (30, )), ("space_data", ">u2", (50, )), ("sync_delta", ">u2"), diff --git a/pygac/lac_reader.py b/pygac/lac_reader.py index dd182937..5229d30b 100644 --- a/pygac/lac_reader.py +++ b/pygac/lac_reader.py @@ -56,9 +56,9 @@ def _validate_header(cls, header): # call super to enter the Method Resolution Order (MRO) super(LACReader, cls)._validate_header(header) LOG.debug("validate header") - data_set_name = header["data_set_name"].decode() + data_set_name = header['data_set_name'].decode() # split header into parts creation_site, transfer_mode, platform_id = ( - data_set_name.split(".")[:3]) + data_set_name.split('.')[:3]) if transfer_mode not in ["LHRR", "HRPT", "FRAC"]: raise ReaderError('Improper transfer mode "%s"!' 
% transfer_mode) diff --git a/pygac/patmosx_coeff_reader.py b/pygac/patmosx_coeff_reader.py index b2cadfe3..33c2258b 100644 --- a/pygac/patmosx_coeff_reader.py +++ b/pygac/patmosx_coeff_reader.py @@ -37,35 +37,35 @@ class PatmosxReader: """Read PATMOS-x coefficient files tarballs.""" # regular expression with named capturing groups to read an entire patmosx file regex = re.compile( - r"\s*(?P\w+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" + r'\s*(?P\w+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' r'\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+),?\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"(?:[a-z]+[^\n]*\n)?" + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'(?:[a-z]+[^\n]*\n)?' r'\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" - r"\s*(?P[eE0-9\.-]+)[^\n]*\n" + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' + r'\s*(?P[eE0-9\.-]+)[^\n]*\n' r'\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)\,*\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa @@ -73,8 +73,8 @@ class PatmosxReader: r'\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)\s*(?P[eE0-9\.-]+)[^\n]*\n' # noqa r'(?:\s*(?P[eE0-9\.-]+),\s*(?P[eE0-9\.-]+),\s*(?P[eE0-9\.-]+),\s*(?P[eE0-9\.-]+)[^\n]*\n)?' # noqa r'(?:\s*(?P[eE0-9\.-]+),\s*(?P[eE0-9\.-]+),\s*(?P[eE0-9\.-]+),\s*(?P[eE0-9\.-]+)[^\n]*\n)?' # noqa - r"(?:\![^v][^\n]*\n)*" - r"(?:\!(?Pv\w+))?" + r'(?:\![^v][^\n]*\n)*' + r'(?:\!(?Pv\w+))?' 
) def __init__(self, tarball): @@ -115,9 +115,9 @@ class Translator: sat_names.update({"n{0:02d}".format(i): "noaa{0}".format(i) for i in range(6,20)}) description = { "visible": { - "channels": ["1", "2", "3a"], + "channels": ['1', '2', '3a'], "coefficients": { - "dark_count": "instrument counts under dark conditions []", + 'dark_count': "instrument counts under dark conditions []", "gain_switch": "dual-gain switch count, set to 'null' for single-gain instruments []", "s0": "single-gain calibration slope at launch date [%]", "s1": "linear single-gain calibration slope parameter [% years^{-1}]", @@ -127,9 +127,9 @@ class Translator: "method": 'Heidinger, A.K., W.C. Straka III, C.C. Molling, J.T. Sullivan, and X. Wu, 2010: Deriving an inter-sensor consistent calibration for the AVHRR solar reflectance data record. International Journal of Remote Sensing, 31:6493-6517' # noqa }, "thermal": { - "channels": ["3b", "4", "5"], + "channels": ['3b', '4', '5'], "coefficients": { - "centroid_wavenumber": "centroid wavenumber [cm^{-1}]", + 'centroid_wavenumber': "centroid wavenumber [cm^{-1}]", "b0": "constant non-linear radiance correction coefficient [mW m^{-2} sr cm^{-1}]", "b1": "linear non-linear radiance correction coefficient []", "b2": "quadratic non-linear radiance correction coefficient [(mW^{-1} m^2 sr^{-1} cm)]", @@ -158,34 +158,34 @@ def convert(cls, patmosx_sat_coeffs): pygac_sat_coeffs = {} # visible calibration for ch in ("1", "2", "3a"): - s0l = patmosx_sat_coeffs["ch{0}_low_gain_S0".format(ch)] - s0h = patmosx_sat_coeffs["ch{0}_high_gain_S0".format(ch)] + s0l = patmosx_sat_coeffs['ch{0}_low_gain_S0'.format(ch)] + s0h = patmosx_sat_coeffs['ch{0}_high_gain_S0'.format(ch)] if s0l == s0h: gain_switch = None s0 = s0l else: - gain_switch = patmosx_sat_coeffs["ch{0}_gain_switches_count".format(ch)] + gain_switch = patmosx_sat_coeffs['ch{0}_gain_switches_count'.format(ch)] s0 = cls.find_s0(s0l, s0h, ch) pygac_sat_coeffs["channel_{0}".format(ch)] = { - "dark_count": float(patmosx_sat_coeffs["ch{0}_dark_count".format(ch)]), + "dark_count": float(patmosx_sat_coeffs['ch{0}_dark_count'.format(ch)]), "gain_switch": gain_switch, "s0": s0, - "s1": patmosx_sat_coeffs["ch{0}_high_gain_S1".format(ch)], - "s2": patmosx_sat_coeffs["ch{0}_high_gain_S2".format(ch)] + "s1": patmosx_sat_coeffs['ch{0}_high_gain_S1'.format(ch)], + "s2": patmosx_sat_coeffs['ch{0}_high_gain_S2'.format(ch)] } - date_of_launch = cls.float2date(patmosx_sat_coeffs["date_of_launch"]) - pygac_sat_coeffs["date_of_launch"] = date_of_launch.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + date_of_launch = cls.float2date(patmosx_sat_coeffs['date_of_launch']) + pygac_sat_coeffs['date_of_launch'] = date_of_launch.strftime("%Y-%m-%dT%H:%M:%S.%fZ") # thermal channels for ch in ("3b", "4", "5"): pygac_sat_coeffs["channel_{0}".format(ch)] = { - "b0": patmosx_sat_coeffs["ch{0}_b0".format(ch)], - "b1": patmosx_sat_coeffs["ch{0}_b1".format(ch)], - "b2": patmosx_sat_coeffs["ch{0}_b2".format(ch)], - "centroid_wavenumber": patmosx_sat_coeffs["nu_{0}".format(ch)], - "space_radiance": patmosx_sat_coeffs["ch{0}_Ns".format(ch)], - "to_eff_blackbody_intercept": (-patmosx_sat_coeffs["a1_{0}".format(ch)] - / patmosx_sat_coeffs["a2_{0}".format(ch)]), - "to_eff_blackbody_slope": 1/patmosx_sat_coeffs["a2_{0}".format(ch)] + "b0": patmosx_sat_coeffs['ch{0}_b0'.format(ch)], + "b1": patmosx_sat_coeffs['ch{0}_b1'.format(ch)], + "b2": patmosx_sat_coeffs['ch{0}_b2'.format(ch)], + "centroid_wavenumber": patmosx_sat_coeffs['nu_{0}'.format(ch)], + "space_radiance": 
patmosx_sat_coeffs['ch{0}_Ns'.format(ch)], + "to_eff_blackbody_intercept": (-patmosx_sat_coeffs['a1_{0}'.format(ch)] + / patmosx_sat_coeffs['a2_{0}'.format(ch)]), + "to_eff_blackbody_slope": 1/patmosx_sat_coeffs['a2_{0}'.format(ch)] } for t in range(1, 5): pygac_sat_coeffs["thermometer_{0}".format(t)] = { @@ -209,7 +209,7 @@ def find_s0(s0_low, s0_high, ch): if s0_low == s0_high: # single gain case return s0_low - if ch == "3a": + if ch == '3a': g_low, g_high = 0.25, 1.75 else: g_low, g_high = 0.5, 1.5 @@ -249,32 +249,32 @@ def float2date(date_float): def save(self, filepath): """Save coefficients as PyGAC json file.""" - with open(filepath, mode="w") as json_file: + with open(filepath, mode='w') as json_file: json.dump(self.coeffs, json_file, indent=4, sort_keys=True) def main(): """The main function.""" parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("tarball", type=str, help="path to PATMOS-x coefficients tarball") - parser.add_argument("-o", "--output", type=str, metavar="JSON", + parser.add_argument('tarball', type=str, help='path to PATMOS-x coefficients tarball') + parser.add_argument('-o', '--output', type=str, metavar="JSON", help='path to PyGAC json file, defaults to tarball path with suffix ".json"') - parser.add_argument("-v", "--verbose", action="store_true", help="explain what is being done") + parser.add_argument('-v', '--verbose', action='store_true', help='explain what is being done') args = parser.parse_args() if args.verbose: loglevel = logging.INFO else: loglevel = logging.WARNING - logging.basicConfig(level=loglevel, format="[%(asctime)s] %(message)s") + logging.basicConfig(level=loglevel, format='[%(asctime)s] %(message)s') tarball = pathlib.Path(args.tarball) logging.info('Read PATMOS-x tarball "%s".', tarball) patmosx_coeffs = PatmosxReader(tarball) - logging.info("Translate PATMOS-x coefficients to PyGAC format.") + logging.info('Translate PATMOS-x coefficients to PyGAC format.') pygac_coeffs = Translator(patmosx_coeffs) output = args.output or tarball.with_suffix(".json") logging.info('Write PyGAC calibration json file "%s".', output) pygac_coeffs.save(output) - logging.info("Done!") + logging.info('Done!') if __name__ == "__main__": main() diff --git a/pygac/pod_reader.py b/pygac/pod_reader.py index e8c90783..c0e310c2 100644 --- a/pygac/pod_reader.py +++ b/pygac/pod_reader.py @@ -198,44 +198,44 @@ class POD_QualityIndicator(IntFlag): ("pitch_fixed_error_correction", ">i2")]) # archive header -tbm_header = np.dtype([("fill", "S30"), - ("data_set_name", "S44"), - ("select_flag", "S1"), - ("beginning_latitude", "S3"), - ("ending_latitude", "S3"), - ("beginning_longitude", "S4"), - ("ending_longitude", "S4"), - ("start_hour", "S2"), - ("start_minute", "S2"), - ("number_of_minutes", "S3"), - ("appended_data_flag", "S1"), - ("channel_select_flag", "S1", (20, )), - ("sensor_data_word_size", "S2"), - ("fill2", "S3")]) +tbm_header = np.dtype([('fill', 'S30'), + ('data_set_name', 'S44'), + ('select_flag', 'S1'), + ('beginning_latitude', 'S3'), + ('ending_latitude', 'S3'), + ('beginning_longitude', 'S4'), + ('ending_longitude', 'S4'), + ('start_hour', 'S2'), + ('start_minute', 'S2'), + ('number_of_minutes', 'S3'), + ('appended_data_flag', 'S1'), + ('channel_select_flag', 'S1', (20, )), + ('sensor_data_word_size', 'S2'), + ('fill2', 'S3')]) class PODReader(Reader): """The POD reader.""" - spacecrafts_orbital = {25: "tiros n", - 2: "noaa 6", - 4: "noaa 7", - 6: "noaa 8", - 7: "noaa 9", - 8: "noaa 10", - 1: "noaa 11", - 5: "noaa 12", - 3: "noaa 14", 
+ spacecrafts_orbital = {25: 'tiros n', + 2: 'noaa 6', + 4: 'noaa 7', + 6: 'noaa 8', + 7: 'noaa 9', + 8: 'noaa 10', + 1: 'noaa 11', + 5: 'noaa 12', + 3: 'noaa 14', } - spacecraft_names = {25: "tirosn", - 2: "noaa6", - 4: "noaa7", - 6: "noaa8", - 7: "noaa9", - 8: "noaa10", - 1: "noaa11", - 5: "noaa12", - 3: "noaa14", + spacecraft_names = {25: 'tirosn', + 2: 'noaa6', + 4: 'noaa7', + 6: 'noaa8', + 7: 'noaa9', + 8: 'noaa10', + 1: 'noaa11', + 5: 'noaa12', + 3: 'noaa14', } tsm_affected_intervals = TSM_AFFECTED_INTERVALS_POD @@ -275,7 +275,7 @@ def read(self, filename, fileobj=None): """ self.filename = filename - LOG.info("Reading %s", self.filename) + LOG.info('Reading %s', self.filename) # choose the right header depending on the date with file_opener(fileobj or filename) as fd_: self.tbm_head, self.head = self.read_header( @@ -382,12 +382,12 @@ def _validate_header(cls, header): # call super to enter the Method Resolution Order (MRO) super(PODReader, cls)._validate_header(header) LOG.debug("validate header") - data_set_name = header["data_set_name"].decode() + data_set_name = header['data_set_name'].decode() # split header into parts creation_site, transfer_mode, platform_id = ( - data_set_name.split(".")[:3]) - allowed_ids = ["TN", "NA", "NB", "NC", "ND", "NE", "NF", "NG", - "NH", "NI", "NJ"] + data_set_name.split('.')[:3]) + allowed_ids = ['TN', 'NA', 'NB', 'NC', 'ND', 'NE', 'NF', 'NG', + 'NH', 'NI', 'NJ'] if platform_id not in allowed_ids: raise ReaderError('Improper platform id "%s"!' % platform_id) @@ -406,7 +406,7 @@ def get_header_timestamp(self): return self.to_datetime(self.to_datetime64(year=year, jday=jday, msec=msec)) except ValueError as err: - raise ValueError("Corrupt header timestamp: {0}".format(err)) + raise ValueError('Corrupt header timestamp: {0}'.format(err)) @staticmethod def decode_timestamps(encoded): @@ -430,7 +430,7 @@ def decode_timestamps(encoded): enc1 = encoded[:, 1] enc2 = encoded[:, 2] else: - raise ValueError("Invalid timestamp dimension") + raise ValueError('Invalid timestamp dimension') year = enc0 >> 9 year = np.where(year > 75, year + 1900, year + 2000) @@ -478,7 +478,7 @@ def _adjust_clock_drift(self): self.spacecraft_name) return - error_utcs = np.array(error_utcs, dtype="datetime64[ms]") + error_utcs = np.array(error_utcs, dtype='datetime64[ms]') # interpolate to get the clock offsets at the scan line utcs # the clock_error is given in seconds, so offsets are in seconds, too. offsets = np.interp(self.utcs.astype(np.uint64), @@ -511,7 +511,7 @@ def _adjust_clock_drift(self): try: missed_lons, missed_lats = self._compute_missing_lonlat(missed_utcs) except NoTLEData as err: - LOG.warning("Cannot perform clock drift correction: %s", str(err)) + LOG.warning('Cannot perform clock drift correction: %s', str(err)) return # create arrays of lons and lats for interpolation. 
The locations @@ -539,7 +539,7 @@ def _adjust_clock_drift(self): # set corrected values self.lons = slerp_res[:, :, 0] self.lats = slerp_res[:, :, 1] - self.utcs -= (offsets * 1000).astype("timedelta64[ms]") + self.utcs -= (offsets * 1000).astype('timedelta64[ms]') toc = datetime.datetime.now() LOG.debug("clock drift adjustment took %s", str(toc - tic)) diff --git a/pygac/reader.py b/pygac/reader.py index 381305ca..6aa4b3aa 100644 --- a/pygac/reader.py +++ b/pygac/reader.py @@ -48,29 +48,29 @@ # rpy values from # here:http://yyy.rsmas.miami.edu/groups/rrsl/pathfinder/Processing/proc_app_a.html rpy_coeffs = { - "noaa7": {"roll": 0.000, - "pitch": 0.000, - "yaw": 0.000, + 'noaa7': {'roll': 0.000, + 'pitch': 0.000, + 'yaw': 0.000, }, - "noaa9": {"roll": 0.000, - "pitch": 0.0025, - "yaw": 0.000, + 'noaa9': {'roll': 0.000, + 'pitch': 0.0025, + 'yaw': 0.000, }, - "noaa10": {"roll": 0.000, - "pitch": 0.000, - "yaw": 0.000, + 'noaa10': {'roll': 0.000, + 'pitch': 0.000, + 'yaw': 0.000, }, - "noaa11": {"roll": -0.0019, - "pitch": -0.0037, - "yaw": 0.000, + 'noaa11': {'roll': -0.0019, + 'pitch': -0.0037, + 'yaw': 0.000, }, - "noaa12": {"roll": 0.000, - "pitch": 0.000, - "yaw": 0.000, + 'noaa12': {'roll': 0.000, + 'pitch': 0.000, + 'yaw': 0.000, }, - "noaa14": {"roll": 0.000, - "pitch": 0.000, - "yaw": 0.000, + 'noaa14': {'roll': 0.000, + 'pitch': 0.000, + 'yaw': 0.000, }} @@ -93,7 +93,7 @@ class Reader(ABC): # data set header format, see _validate_header for more details data_set_pattern = re.compile( - r"\w{3}\.\w{4}\.\w{2}.D\d{5}\.S\d{4}\.E\d{4}\.B\d{7}\.\w{2}") + r'\w{3}\.\w{4}\.\w{2}.D\d{5}\.S\d{4}\.E\d{4}\.B\d{7}\.\w{2}') def __init__(self, interpolate_coords=True, adjust_clock_drift=True, tle_dir=None, tle_name=None, tle_thresh=7, creation_site=None, @@ -123,7 +123,7 @@ def __init__(self, interpolate_coords=True, adjust_clock_drift=True, self.tle_dir = tle_dir self.tle_name = tle_name self.tle_thresh = tle_thresh - self.creation_site = (creation_site or "NSS").encode("utf-8") + self.creation_site = (creation_site or 'NSS').encode('utf-8') self.header_date = header_date self.head = None self.scans = None @@ -211,7 +211,7 @@ def _correct_data_set_name(cls, header, filename): filename (str): path to file """ filename = str(filename) - data_set_name = header["data_set_name"] + data_set_name = header['data_set_name'] try: header["data_set_name"] = cls._decode_data_set_name(data_set_name) except DecodingError: @@ -221,10 +221,10 @@ def _correct_data_set_name(cls, header, filename): if match: data_set_name = match.group() LOG.debug(f"Set data_set_name, to filename {data_set_name}") - header["data_set_name"] = data_set_name.encode() + header['data_set_name'] = data_set_name.encode() else: LOG.debug(f"header['data_set_name']={header['data_set_name']}; filename='{filename}'") - raise ReaderError("Cannot determine data_set_name!") + raise ReaderError('Cannot determine data_set_name!') return header @classmethod @@ -243,8 +243,8 @@ def _decode_data_set_name(cls, data_set_name): def _decode_data_set_name_for_encoding(cls, data_set_name, encoding): data_set_name = data_set_name.decode(encoding, errors="ignore") if not cls.data_set_pattern.match(data_set_name): - raise DecodingError(f"The data_set_name in header {data_set_name} " - f"does not seem correct using encoding {encoding}.") + raise DecodingError(f'The data_set_name in header {data_set_name} ' + f'does not seem correct using encoding {encoding}.') else: data_set_name = data_set_name.encode() return data_set_name @@ -282,10 +282,10 @@ def 
_validate_header(cls, header):
         # second use case "diamond diagrams".
         # Check if the data set name matches the pattern
         LOG.debug("validate header")
-        data_set_name = header["data_set_name"].decode(errors="ignore")
+        data_set_name = header['data_set_name'].decode(errors='ignore')
         if not cls.data_set_pattern.match(data_set_name):
-            raise ReaderError("Data set name %s does not match!"
-                              % header["data_set_name"])
+            raise ReaderError('Data set name %s does not match!'
+                              % header['data_set_name'])
 
     def _read_scanlines(self, buffer, count):
         """Read the scanlines from the given buffer.
@@ -476,9 +476,9 @@ def to_datetime64(year, jday, msec):
             numpy.datetime64: Converted timestamps
 
         """
-        return (year.astype(str).astype("datetime64[Y]")
-                + (jday - 1).astype("timedelta64[D]")
-                + msec.astype("timedelta64[ms]"))
+        return (year.astype(str).astype('datetime64[Y]')
+                + (jday - 1).astype('timedelta64[D]')
+                + msec.astype('timedelta64[ms]'))
 
     @staticmethod
     def to_datetime(datetime64):
@@ -518,17 +518,17 @@ def update_meta_data(self):
         """Add some meta data to the meta_data dicitonary."""
         meta_data = self.meta_data
         self._update_meta_data_object(meta_data)
-        if "gac_header" not in meta_data:
-            meta_data["gac_header"] = self.head.copy()
+        if 'gac_header' not in meta_data:
+            meta_data['gac_header'] = self.head.copy()
 
     def _update_meta_data_object(self, meta_data):
-        if "sun_earth_distance_correction_factor" not in meta_data:
-            meta_data["sun_earth_distance_correction_factor"] = (
+        if 'sun_earth_distance_correction_factor' not in meta_data:
+            meta_data['sun_earth_distance_correction_factor'] = (
                 self.get_sun_earth_distance_correction())
-        if "midnight_scanline" not in meta_data:
-            meta_data["midnight_scanline"] = self.get_midnight_scanline()
-        if "missing_scanlines" not in meta_data:
-            meta_data["missing_scanlines"] = self.get_miss_lines()
+        if 'midnight_scanline' not in meta_data:
+            meta_data['midnight_scanline'] = self.get_midnight_scanline()
+        if 'missing_scanlines' not in meta_data:
+            meta_data['missing_scanlines'] = self.get_miss_lines()
 
     def read_as_dataset(self, file_to_read):
         self.read(file_to_read)
@@ -629,7 +629,7 @@ def get_calibrated_dataset(self):
 
         # Mask pixels affected by scan motor issue
         if self.is_tsm_affected():
-            LOG.info("Correcting for temporary scan motor issue")
+            LOG.info('Correcting for temporary scan motor issue')
             self.mask_tsm_pixels(calibrated_ds)
 
         return calibrated_ds
@@ -739,13 +739,13 @@ def tle2datetime64(times):
         times = np.where(times > 50000, times + 1900000, times + 2000000)
 
         # Convert float to datetime64
-        doys = (times % 1000).astype("int") - 1
-        years = (times // 1000).astype("int")
+        doys = (times % 1000).astype('int') - 1
+        years = (times // 1000).astype('int')
         msecs = np.rint(24 * 3600 * 1000 * (times % 1))
         times64 = (
-            years - 1970).astype("datetime64[Y]").astype("datetime64[ms]")
-        times64 += doys.astype("timedelta64[D]")
-        times64 += msecs.astype("timedelta64[ms]")
+            years - 1970).astype('datetime64[Y]').astype('datetime64[ms]')
+        times64 += doys.astype('timedelta64[D]')
+        times64 += msecs.astype('timedelta64[ms]')
 
         return times64
 
@@ -758,13 +758,13 @@ def get_tle_file(self):
             raise RuntimeError("TLE name not specified!")
         values = {"satname": self.spacecraft_name, }
         tle_filename = os.path.join(tle_dir, tle_name % values)
-        LOG.info("TLE filename = " + str(tle_filename))
+        LOG.info('TLE filename = ' + str(tle_filename))
 
         return tle_filename
 
     def read_tle_file(self, tle_filename):
         """Read TLE file."""
-        with open(tle_filename, "r") as fp_:
+        with open(tle_filename, 'r') as fp_:
             return fp_.readlines()
 
     def get_tle_lines(self):
@@ -797,7 +797,7 @@ def get_tle_lines(self):
             iindex -= 1
 
         # Make sure the TLE we found is within the threshold
-        delta_days = abs(sdate - dates[iindex]) / np.timedelta64(1, "D")
+        delta_days = abs(sdate - dates[iindex]) / np.timedelta64(1, 'D')
         if delta_days > self.tle_thresh:
             raise NoTLEData(
                 "Can't find tle data for %s within +/- %d days around %s" %
@@ -826,8 +826,8 @@ def get_sat_angles(self):
             return self._get_sat_angles_with_tle()
         except NoTLEData:
             LOG.warning(
-                "No TLE data available. Falling back to approximate "
-                "calculation of satellite angles."
+                'No TLE data available. Falling back to approximate '
+                'calculation of satellite angles.'
             )
             return self._get_sat_angles_without_tle()
 
@@ -842,7 +842,7 @@ def _get_sat_angles_with_tle(self):
     def _get_sat_angles_without_tle(self):
         """Get satellite angles using lat/lon from data to approximate satellite postition instead of TLE."""
         from pyorbital.orbital import get_observer_look as get_observer_look_no_tle
-        LOG.warning("Approximating satellite height to 850km (TIROS-N OSCAR)!")
+        LOG.warning('Approximating satellite height to 850km (TIROS-N OSCAR)!')
         sat_alt = 850.0  # km TIROS-N OSCAR
         mid_column = int(0.5*self.lons.shape[1])
         sat_azi, sat_elev = get_observer_look_no_tle(
@@ -853,7 +853,7 @@ def _get_sat_angles_without_tle(self):
             self.lons, self.lats, 0)
         # Sometimes (pyorbital <= 1.6.1) the get_observer_look_not_tle returns nodata instead of 90.
         # Problem solved with https://github.com/pytroll/pyorbital/pull/77
-        if Version(pyorbital.__version__) <= Version("1.6.1"):
+        if Version(pyorbital.__version__) <= Version('1.6.1'):
             sat_elev[:, mid_column] = 90
         return sat_azi, sat_elev
 
@@ -979,8 +979,8 @@ def correct_scan_line_numbers(self):
 
         """
         along_track = np.arange(1, len(self.scans["scan_line_number"])+1)
-        results = {"along_track": along_track,
-                   "n_orig": self.scans["scan_line_number"].copy()}
+        results = {'along_track': along_track,
+                   'n_orig': self.scans['scan_line_number'].copy()}
 
         # Remove scanlines whose scanline number is outside the valid range
         within_range = np.logical_and(self.scans["scan_line_number"] < self.max_scanlines,
@@ -1023,14 +1023,14 @@ def correct_scan_line_numbers(self):
             thresh = max(500, med_nz_diffs + 3*mad_nz_diffs)
             self.scans = self.scans[diffs <= thresh]
 
-        LOG.debug("Removed %s scanline(s) with corrupt scanline numbers",
+        LOG.debug('Removed %s scanline(s) with corrupt scanline numbers',
                   str(len(along_track) - len(self.scans)))
 
-        results.update({"n_corr": self.scans["scan_line_number"],
-                        "within_range": within_range,
-                        "diffs": diffs,
-                        "thresh": thresh,
-                        "nz_diffs": nz_diffs})
+        results.update({'n_corr': self.scans['scan_line_number'],
+                        'within_range': within_range,
+                        'diffs': diffs,
+                        'thresh': thresh,
+                        'nz_diffs': nz_diffs})
         return results
 
     def correct_times_thresh(self, max_diff_from_t0_head=6*60*1000,
@@ -1081,11 +1081,11 @@ def correct_times_thresh(self, max_diff_from_t0_head=6*60*1000,
 
         # Check whether scanline number increases monotonically
         nums = self.scans["scan_line_number"]
-        results.update({"t": self.utcs.copy(), "n": nums})
+        results.update({'t': self.utcs.copy(), 'n': nums})
         if np.any(np.diff(nums) < 0):
             LOG.error("Cannot perform timestamp correction. Scanline number "
                       "does not increase monotonically.")
-            results["fail_reason"] = "Scanline number jumps backwards"
+            results['fail_reason'] = "Scanline number jumps backwards"
             return results
 
         # Convert time to milliseconds since 1970-01-01
@@ -1116,14 +1116,14 @@ def correct_times_thresh(self, max_diff_from_t0_head=6*60*1000,
         # we do not have reliable information and cannot proceed.
         near_t0_head = np.where(
             np.fabs(offsets - t0_head) <= max_diff_from_t0_head)[0]
-        results.update({"offsets": offsets,
-                        "t0_head": t0_head,
-                        "max_diff_from_t0_head": max_diff_from_t0_head})
+        results.update({'offsets': offsets,
+                        't0_head': t0_head,
+                        'max_diff_from_t0_head': max_diff_from_t0_head})
         if near_t0_head.size / float(nums.size) >= min_frac_near_t0_head:
             t0 = np.median(offsets[near_t0_head])
         else:
             LOG.error("Timestamp mismatch. Cannot perform correction.")
-            results["fail_reason"] = "Timestamp mismatch"
+            results['fail_reason'] = "Timestamp mismatch"
             return results
 
         # Add estimated offset to the ideal timestamps
@@ -1135,7 +1135,7 @@ def correct_times_thresh(self, max_diff_from_t0_head=6*60*1000,
         self.utcs[corrupt_lines] = tn[corrupt_lines].astype(dt64_msec)
         LOG.debug("Corrected %s timestamp(s)", str(len(corrupt_lines[0])))
 
-        results.update({"tn": tn, "tcorr": self.utcs, "t0": t0})
+        results.update({'tn': tn, 'tcorr': self.utcs, 't0': t0})
         return results
 
     @property
@@ -1181,13 +1181,13 @@ def get_midnight_scanline(self):
 
         """
         self.get_times()
-        d0 = np.datetime64(datetime.date(1970, 1, 1), "D")
-        days = (self.utcs.astype("datetime64[D]") - d0).astype(int)
+        d0 = np.datetime64(datetime.date(1970, 1, 1), 'D')
+        days = (self.utcs.astype('datetime64[D]') - d0).astype(int)
         incr = np.where(np.diff(days) == 1)[0]
         if len(incr) != 1:
             if len(incr) > 1:
-                LOG.warning("Unable to determine midnight scanline: "
-                            "UTC date increases more than once. ")
+                LOG.warning('Unable to determine midnight scanline: '
+                            'UTC date increases more than once. ')
             return None
         else:
             return incr[0]
@@ -1203,8 +1203,8 @@ def get_miss_lines(self):
 
         """
         # Compare scanline number against the ideal case (1, 2, 3, ...) and
        # find the missing line numbers.
-        ideal = set(range(1, self.scans["scan_line_number"][-1] + 1))
-        missing = sorted(ideal.difference(set(self.scans["scan_line_number"])))
+        ideal = set(range(1, self.scans['scan_line_number'][-1] + 1))
+        missing = sorted(ideal.difference(set(self.scans['scan_line_number'])))
         return np.array(missing, dtype=int)
 
     def mask_tsm_pixels(self, ds):
@@ -1254,7 +1254,7 @@ def inherit_doc(cls):
         if isinstance(func, types.FunctionType) and not func.__doc__:
             for parent in cls.__bases__:
                 parfunc = getattr(parent, name, None)
-                if parfunc and getattr(parfunc, "__doc__", None):
+                if parfunc and getattr(parfunc, '__doc__', None):
                     func.__doc__ = parfunc.__doc__
                     break
     return cls
diff --git a/pygac/runner.py b/pygac/runner.py
index 78e1d4d0..7fd72140 100644
--- a/pygac/runner.py
+++ b/pygac/runner.py
@@ -86,15 +86,15 @@ def process_file(filename, start_line, end_line, fileobj=None):
 
     # reader specific values
     config = get_config()
-    tle_dir = config.get("tle", "tledir", raw=True)
-    tle_name = config.get("tle", "tlename", raw=True)
-    coeffs_file = config.get("calibration", "coeffs_file", fallback="")
+    tle_dir = config.get('tle', 'tledir', raw=True)
+    tle_name = config.get('tle', 'tlename', raw=True)
+    coeffs_file = config.get("calibration", "coeffs_file", fallback='')
     # output specific values
-    output_dir = config.get("output", "output_dir", raw=True)
-    output_file_prefix = config.get("output", "output_file_prefix", raw=True)
-    avhrr_dir = os.environ.get("SM_AVHRR_DIR")
-    qual_dir = os.environ.get("SM_AVHRR_DIR")
-    sunsatangles_dir = os.environ.get("SM_SUNSATANGLES_DIR")
+    output_dir = config.get('output', 'output_dir', raw=True)
+    output_file_prefix = config.get('output', 'output_file_prefix', raw=True)
+    avhrr_dir = os.environ.get('SM_AVHRR_DIR')
+    qual_dir = os.environ.get('SM_AVHRR_DIR')
+    sunsatangles_dir = os.environ.get('SM_SUNSATANGLES_DIR')
 
     # Keep the file open while searching for the reader class and later
     # creation of the instance.
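Note on the runner.py hunk above: `process_file` pulls all of its settings from `get_config()`, a standard library `configparser` setup. `raw=True` disables INI interpolation so that a pattern such as `%(satname)s` in `tlename` survives to be filled in later via `tle_name % values`, and `fallback=''` makes the calibration entry optional. For reference, a minimal configuration covering exactly the sections and keys read in that hunk could look like the sketch below; the section and key names come from the hunk itself, while the concrete paths and filename patterns are illustrative placeholders, not values required by pygac:

    [tle]
    tledir = /path/to/tle/files
    tlename = TLE_%(satname)s.txt

    [calibration]
    coeffs_file = /path/to/custom_coeffs.json

    [output]
    output_dir = /path/to/output
    output_file_prefix = ECC_GAC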
diff --git a/pygac/tests/test_io.py b/pygac/tests/test_io.py
index c859c74b..df3a4cb7 100644
--- a/pygac/tests/test_io.py
+++ b/pygac/tests/test_io.py
@@ -44,14 +44,14 @@ def test_strip_invalid_lat(self):
 
     def test_update_scanline(self):
         """Test updating the scanlines."""
-        test_data = [{"new_start_line": 100, "new_end_line": 200,
-                      "scanline": 110, "scanline_exp": 10},
-                     {"new_start_line": 100, "new_end_line": 200,
-                      "scanline": 90, "scanline_exp": None},
-                     {"new_start_line": 100, "new_end_line": 200,
-                      "scanline": 210, "scanline_exp": None}]
+        test_data = [{'new_start_line': 100, 'new_end_line': 200,
+                      'scanline': 110, 'scanline_exp': 10},
+                     {'new_start_line': 100, 'new_end_line': 200,
+                      'scanline': 90, 'scanline_exp': None},
+                     {'new_start_line': 100, 'new_end_line': 200,
+                      'scanline': 210, 'scanline_exp': None}]
         for t in test_data:
-            scanline_exp = t.pop("scanline_exp")
+            scanline_exp = t.pop('scanline_exp')
             scanline = utils._update_scanline(**t)
             self.assertEqual(scanline, scanline_exp)
 
@@ -59,12 +59,12 @@ def test_update_missing_scanlines(self):
         """Test updating the missing scanlines."""
         qual_flags = np.array([[1, 2, 4, 5, 6, 8, 9, 11, 12]]).transpose()
         miss_lines = np.array([3, 7, 10])
-        test_data = [{"start_line": 0, "end_line": 8,
-                      "miss_lines_exp": [3, 7, 10]},
-                     {"start_line": 3, "end_line": 6,
-                      "miss_lines_exp": [1, 2, 3, 4, 7, 10, 11, 12]}]
+        test_data = [{'start_line': 0, 'end_line': 8,
+                      'miss_lines_exp': [3, 7, 10]},
+                     {'start_line': 3, 'end_line': 6,
+                      'miss_lines_exp': [1, 2, 3, 4, 7, 10, 11, 12]}]
         for t in test_data:
-            miss_lines_exp = t.pop("miss_lines_exp")
+            miss_lines_exp = t.pop('miss_lines_exp')
             miss_lines = utils._update_missing_scanlines(
                 miss_lines=miss_lines, qual_flags=qual_flags, **t)
             numpy.testing.assert_array_equal(miss_lines, miss_lines_exp)
@@ -185,10 +185,10 @@ def test_check_user_scanlines(self):
         self.assertRaises(ValueError, gac_io.check_user_scanlines,
                           110, 120, None, None, 100)
 
-    @mock.patch("pygac.gac_io.strip_invalid_lat")
-    @mock.patch("pygac.gac_io.avhrrGAC_io")
-    @mock.patch("pygac.gac_io.slice_channel")
-    @mock.patch("pygac.gac_io.check_user_scanlines")
+    @mock.patch('pygac.gac_io.strip_invalid_lat')
+    @mock.patch('pygac.gac_io.avhrrGAC_io')
+    @mock.patch('pygac.gac_io.slice_channel')
+    @mock.patch('pygac.gac_io.check_user_scanlines')
     def test_save_gac(self, check_user_scanlines, slice_channel, avhrr_gac_io,
                       strip_invalid_lat):
         """Test saving."""
@@ -218,13 +218,13 @@ def test_save_gac(self, check_user_scanlines, slice_channel, avhrr_gac_io,
             qual_dir=mm,
             sunsatangles_dir=mm
         )
-        slice_channel.return_value = mm, "miss", "midnight"
+        slice_channel.return_value = mm, 'miss', 'midnight'
         strip_invalid_lat.return_value = 0, 0
-        check_user_scanlines.return_value = "start", "end"
+        check_user_scanlines.return_value = 'start', 'end'
         gac_io.save_gac(start_line=0, end_line=0, **kwargs)
 
         slice_channel.assert_called_with(mock.ANY,
-                                         start_line="start", end_line="end",
+                                         start_line='start', end_line='end',
                                          first_valid_lat=mock.ANY,
                                          last_valid_lat=mock.ANY
                                          )
@@ -255,8 +255,8 @@ def test_save_gac(self, check_user_scanlines, slice_channel, avhrr_gac_io,
             mock.ANY,
             mock.ANY,
             mock.ANY,
-            "midnight",
-            "miss",
+            'midnight',
+            'miss',
             mock.ANY,
             mock.ANY,
             mock.ANY,
diff --git a/pygac/tests/test_klm.py b/pygac/tests/test_klm.py
index cc771f04..0b476808 100644
--- a/pygac/tests/test_klm.py
+++ b/pygac/tests/test_klm.py
@@ -60,24 +60,24 @@ def test_get_lonlat(self):
     def test_get_header_timestamp(self):
         """Test readout of header timestamp."""
         self.reader.head = {
-            "start_of_data_set_year": np.array([2019]),
-            "start_of_data_set_day_of_year": np.array([123]),
-            "start_of_data_set_utc_time_of_day": np.array([123456])
+            'start_of_data_set_year': np.array([2019]),
+            'start_of_data_set_day_of_year': np.array([123]),
+            'start_of_data_set_utc_time_of_day': np.array([123456])
         }
         time = self.reader.get_header_timestamp()
         assert time == dt.datetime(2019, 5, 3, 0, 2, 3, 456000)
 
     def test_get_times(self):
         """Test readout of scanline timestamps."""
-        self.reader.scans = {"scan_line_year": 1,
-                             "scan_line_day_of_year": 2,
-                             "scan_line_utc_time_of_day": 3}
+        self.reader.scans = {'scan_line_year': 1,
+                             'scan_line_day_of_year': 2,
+                             'scan_line_utc_time_of_day': 3}
         assert self.reader._get_times() == (1, 2, 3)
 
     def test_get_ch3_switch(self):
         """Test channel 3 identification."""
         self.reader.scans = {
-            "scan_line_bit_field": np.array([1, 2, 3, 4, 5, 6])}
+            'scan_line_bit_field': np.array([1, 2, 3, 4, 5, 6])}
         switch_exp = np.array([1, 2, 3, 0, 1, 2])
         numpy.testing.assert_array_equal(
             self.reader.get_ch3_switch(), switch_exp)
@@ -85,7 +85,7 @@ def test_postproc(self):
     def test_postproc(self):
         """Test KLM specific postprocessing."""
         self.reader.scans = {
-            "scan_line_bit_field": np.array([0, 1, 2])}
+            'scan_line_bit_field': np.array([0, 1, 2])}
         channels = np.array([[[1., 2., 3., 4.],
                               [1., 2., 3., 4.]],
                              [[1., 2., 3., 4.],
@@ -136,7 +136,7 @@ def setup_method(self):
         """Set up the tests."""
         self.reader = GACKLMReader()
 
-    @mock.patch("pygac.klm_reader.get_tsm_idx")
+    @mock.patch('pygac.klm_reader.get_tsm_idx')
     def test_get_tsm_pixels(self, get_tsm_idx):
         """Test channel set used for TSM correction."""
         ones = np.ones((409, 100))
@@ -174,7 +174,7 @@ def setup_method(self):
     def test_get_ch3_switch(self):
         """Test channel 3 identification."""
         self.reader.scans = {
-            "scan_line_bit_field": np.array([1, 2, 3, 4, 5, 6])}
+            'scan_line_bit_field': np.array([1, 2, 3, 4, 5, 6])}
         switch_exp = np.array([1, 2, 3, 0, 1, 2])
         numpy.testing.assert_array_equal(
             self.reader.get_ch3_switch(), switch_exp)
diff --git a/pygac/tests/test_noaa_calibration_coefficients.py b/pygac/tests/test_noaa_calibration_coefficients.py
index 8a0c0a9b..af1ef030 100644
--- a/pygac/tests/test_noaa_calibration_coefficients.py
+++ b/pygac/tests/test_noaa_calibration_coefficients.py
@@ -119,11 +119,11 @@ class TestCalibrationCoefficientsHandling(unittest.TestCase):
     @mock.patch("pygac.calibration.noaa.open", mock.mock_open(read_data=user_json_file))
     def test_user_coefficients_file(self):
         if sys.version_info.major < 3:
-            cal = Calibrator("noaa19", coeffs_file="/path/to/unknow/defaults.json")
+            cal = Calibrator('noaa19', coeffs_file="/path/to/unknow/defaults.json")
         else:
             with self.assertWarnsRegex(RuntimeWarning,
                                        "Unknown calibration coefficients version!"):
-                cal = Calibrator("noaa19", coeffs_file="/path/to/unknow/defaults.json")
+                cal = Calibrator('noaa19', coeffs_file="/path/to/unknow/defaults.json")
         self.assertEqual(cal.dark_count[0], 0)
         self.assertEqual(cal.gain_switch[0], 1000)
@@ -183,13 +183,13 @@ def test_read_coeffs_warnings(self):
         """Test warnings issued by Calibrator.read_coeffs."""
         version_dicts = [
             # Non-nominal coefficients
-            {"name": "v123",
-             "status": CoeffStatus.PROVISIONAL},
+            {'name': 'v123',
+             'status': CoeffStatus.PROVISIONAL},
             # Unknown coefficients
-            {"name": None,
-             "status": None}
+            {'name': None,
+             'status': None}
         ]
-        with mock.patch.object(Calibrator, "version_hashs") as version_hashs:
+        with mock.patch.object(Calibrator, 'version_hashs') as version_hashs:
             for version_dict in version_dicts:
                 version_hashs.get.return_value = version_dict
                 with self.assertWarns(RuntimeWarning):
diff --git a/pygac/tests/test_pod.py b/pygac/tests/test_pod.py
index f9d89a46..ace0f7ae 100644
--- a/pygac/tests/test_pod.py
+++ b/pygac/tests/test_pod.py
@@ -51,31 +51,31 @@ def setUp(self):
 
     def test__validate_header(self):
         """Test the header validation"""
-        filename = b"NSS.GHRR.TN.D80001.S0332.E0526.B0627173.WI"
-        head = {"data_set_name": filename}
+        filename = b'NSS.GHRR.TN.D80001.S0332.E0526.B0627173.WI'
+        head = {'data_set_name': filename}
         GACPODReader._validate_header(head)
         # wrong name pattern
         with self.assertRaisesRegex(ReaderError,
-                                    "Data set name .* does not match!"):
-            head = {"data_set_name": b"abc.txt"}
+                                    'Data set name .* does not match!'):
+            head = {'data_set_name': b'abc.txt'}
             GACPODReader._validate_header(head)
         # wrong platform
-        name = b"NSS.GHRR.NL.D02187.S1904.E2058.B0921517.GC"
+        name = b'NSS.GHRR.NL.D02187.S1904.E2058.B0921517.GC'
         with self.assertRaisesRegex(ReaderError,
                                     'Improper platform id "NL"!'):
-            head = {"data_set_name": name}
+            head = {'data_set_name': name}
             GACPODReader._validate_header(head)
         # wrong transfer mode
-        name = filename.replace(b"GHRR", b"LHRR")
+        name = filename.replace(b'GHRR', b'LHRR')
         with self.assertRaisesRegex(ReaderError,
                                     'Improper transfer mode "LHRR"!'):
-            head = {"data_set_name": name}
+            head = {'data_set_name': name}
             GACPODReader._validate_header(head)
         # other change reader
-        head = {"data_set_name": name}
+        head = {'data_set_name': name}
         LACPODReader._validate_header(head)
 
-    @mock.patch("pygac.reader.Reader.get_calibrated_channels")
+    @mock.patch('pygac.reader.Reader.get_calibrated_channels')
     def test__get_calibrated_channels_uniform_shape(self, get_channels):
         """Test the uniform shape as required by gac_io.save_gac."""
         channels = np.arange(2*2*5, dtype=float).reshape((2, 2, 5))
@@ -97,24 +97,24 @@ def test_decode_timestamps(self):
         # Test whether PODReader decodes them correctly
         self.assertEqual(GACPODReader.decode_timestamps(t2000_enc), t2000_ref,
-                         msg="Timestamp after 2000 was decoded incorrectly")
+                         msg='Timestamp after 2000 was decoded incorrectly')
         self.assertEqual(GACPODReader.decode_timestamps(t1900_enc), t1900_ref,
-                         msg="Timestamp before 2000 was decoded incorrectly")
+                         msg='Timestamp before 2000 was decoded incorrectly')
 
-    @mock.patch("pygac.gac_pod.GACPODReader.decode_timestamps")
+    @mock.patch('pygac.gac_pod.GACPODReader.decode_timestamps')
     def test_get_header_timestamp(self, decode_timestamps):
         """Test readout of header timestamp."""
-        self.reader.head = {"start_time": 123}
+        self.reader.head = {'start_time': 123}
         decode_timestamps.return_value = np.array(
             [2019]), np.array([123]), np.array([123456])
         time = self.reader.get_header_timestamp()
         decode_timestamps.assert_called_with(123)
         self.assertEqual(time, dt.datetime(2019, 5, 3, 0, 2, 3, 456000))
 
-    @mock.patch("pygac.gac_pod.GACPODReader.decode_timestamps")
+    @mock.patch('pygac.gac_pod.GACPODReader.decode_timestamps')
     def test_get_times(self, decode_timestamps):
         """Test getting times."""
-        self.reader.scans = {"time_code": 123}
+        self.reader.scans = {'time_code': 123}
         self.reader._get_times()
         decode_timestamps.assert_called_with(123)
 
@@ -132,7 +132,7 @@ def test_get_lonlat(self):
         numpy.testing.assert_array_equal(lons, lons_exp)
         numpy.testing.assert_array_equal(lats, lats_exp)
 
-    @mock.patch("pygac.pod_reader.get_tsm_idx")
+    @mock.patch('pygac.pod_reader.get_tsm_idx')
     def test_get_tsm_pixels(self, get_tsm_idx):
         """Test channel set used for TSM correction."""
         ones = np.ones((409, 100))
@@ -157,7 +157,7 @@ def test_quality_indicators(self):
             QFlag.FATAL_FLAG,  # 100...00
             QFlag.CALIBRATION | QFlag.NO_EARTH_LOCATION,
             QFlag.TIME_ERROR | QFlag.DATA_GAP,
-        ], dtype=">u4")
+        ], dtype='>u4')
         # check if the bits look as expected
         bits = np.unpackbits(quality_indicators.view(np.uint8)).reshape((-1, 32))
         # For a big endian integer, the number 1 fills only the last of the 32 bits
@@ -188,10 +188,10 @@ def test_quality_indicators(self):
             expected_mask
         )
 
-    @mock.patch("pygac.pod_reader.get_lonlatalt")
-    @mock.patch("pygac.pod_reader.compute_pixels")
-    @mock.patch("pygac.reader.Reader.get_tle_lines")
-    @mock.patch("pygac.pod_reader.avhrr_gac")
+    @mock.patch('pygac.pod_reader.get_lonlatalt')
+    @mock.patch('pygac.pod_reader.compute_pixels')
+    @mock.patch('pygac.reader.Reader.get_tle_lines')
+    @mock.patch('pygac.pod_reader.avhrr_gac')
     def test__adjust_clock_drift(self, avhrr_gac, get_tle_lines,
                                  compute_pixels, get_lonlatalt):
         """Test the clock drift adjustment."""
@@ -209,7 +209,7 @@ def test__adjust_clock_drift(self, avhrr_gac, get_tle_lines,
         #  '1980-01-01T00:00:03.500', '1980-01-01T00:00:04.000',
         #  '1980-01-01T00:00:04.500', '1980-01-01T00:00:05.000']
         scan_utcs = (
-            (1000 * scan_rate * (scan_lines - scan_lines[0])).astype("timedelta64[ms]")
+            (1000 * scan_rate * (scan_lines - scan_lines[0])).astype('timedelta64[ms]')
             + np.datetime64("1980", "ms")
         )
         # For the geolocations, we assume an artificial swath of two pixels width
@@ -229,7 +229,7 @@ def test__adjust_clock_drift(self, avhrr_gac, get_tle_lines,
         #  '1980-01-01T00:00:00.750', '1980-01-01T00:00:01.250']
         offset = 3.75
         scan_offsets = offset*np.ones_like(scan_lines, dtype=float)  # seconds
-        expected_utcs = scan_utcs - (1000*scan_offsets).astype("timedelta64[ms]")
+        expected_utcs = scan_utcs - (1000*scan_offsets).astype('timedelta64[ms]')
 
         # the adjustment of geolocations should keep the lons unchanged,
         # but should shift the lats by scan_angel * offset / scan_rate
@@ -283,13 +283,13 @@ def test__adjust_clock_drift(self, avhrr_gac, get_tle_lines,
         # undo changes to clock_offsets_txt
         clock_offsets_txt.pop(sat_name)
 
-    @mock.patch("pygac.pod_reader.get_offsets")
-    @mock.patch("pygac.reader.Reader.get_tle_lines")
+    @mock.patch('pygac.pod_reader.get_offsets')
+    @mock.patch('pygac.reader.Reader.get_tle_lines')
     def test__adjust_clock_drift_without_tle(self, get_tle_lines, get_offsets):
         """Test that clockdrift adjustment can handle missing TLE data."""
         reader = self.reader
-        reader.utcs = np.zeros(10, dtype="datetime64[ms]")
+        reader.utcs = np.zeros(10, dtype='datetime64[ms]')
         reader.scans = {"scan_line_number": np.arange(10)}
         get_offsets.return_value = np.zeros(10), np.zeros(10)
-        get_tle_lines.side_effect = NoTLEData("No TLE data available")
+        get_tle_lines.side_effect = NoTLEData('No TLE data available')
         reader._adjust_clock_drift()  # should pass without errors
diff --git a/pygac/tests/test_reader.py b/pygac/tests/test_reader.py
index 3acb0881..cfa6b03d 100644
--- a/pygac/tests/test_reader.py
+++ b/pygac/tests/test_reader.py
@@ -70,9 +70,9 @@ def __init__(self, *args, **kwargs):
         scans["scan_line_number"] = np.arange(self.along_track)
         scans["sensor_data"] = 128
         self.scans = scans
-        self.head = {"foo": "bar"}
-        self.head = np.rec.fromrecords([("bar", ),],names="foo")
-        self.spacecraft_name = "noaa6"
+        self.head = {'foo': 'bar'}
+        self.head = np.rec.fromrecords([("bar", ),],names='foo')
+        self.spacecraft_name = 'noaa6'
 
     def _get_times(self):
         year = np.full(self.along_track, 1970, dtype=int)
@@ -124,9 +124,9 @@ class TestGacReader(unittest.TestCase):
longMessage = True
 
-    @mock.patch.multiple("pygac.gac_reader.GACReader",
+    @mock.patch.multiple('pygac.gac_reader.GACReader',
                          __abstractmethods__=set())
-    @mock.patch("pygac.gac_reader.gtp.gac_lat_lon_interpolator")
+    @mock.patch('pygac.gac_reader.gtp.gac_lat_lon_interpolator')
     def setUp(self, interpolator, *mocks):
         """Set up the tests."""
         self.interpolator = interpolator
 
@@ -135,8 +135,8 @@ def setUp(self, interpolator, *mocks):
     def test_filename(self):
         """Test the setter of the filename property."""
         # test path with .gz extension
-        filename = "NSS.GHRR.TN.D80001.S0332.E0526.B0627173.WI"
-        filepath = "/path/to/" + filename + ".gz"
+        filename = 'NSS.GHRR.TN.D80001.S0332.E0526.B0627173.WI'
+        filepath = '/path/to/' + filename + '.gz'
         self.reader.filename = filepath
         self.assertEqual(self.reader.filename, filename)
         self.reader.filename = None
@@ -148,94 +148,94 @@ def test__read_scanlines(self):
         """Test the scanline extraction."""
         self.reader.scanline_type = np.dtype([
-            ("a", "S2"), ("b", "
= num_valid_lines:
         end_line = num_valid_lines - 1
-        LOG.warning("Given end line exceeds scanline range, resetting "
-                    "to {}".format(end_line))
+        LOG.warning('Given end line exceeds scanline range, resetting '
+                    'to {}'.format(end_line))
     if start_line > num_valid_lines:
-        raise ValueError("Given start line {} exceeds scanline range {}"
+        raise ValueError('Given start line {} exceeds scanline range {}'
                          .format(start_line, num_valid_lines))
     return start_line, end_line
 
@@ -239,13 +239,13 @@ def plot_correct_times_thresh(res, filename=None):
     """Visualize results of GACReader.correct_times_thresh."""
     import matplotlib.pyplot as plt
 
-    t = res["t"]
-    tcorr = res.get("tcorr")
-    n = res["n"]
-    offsets = res.get("offsets")
-    t0_head = res.get("t0_head")
-    max_diff_from_t0_head = res.get("max_diff_from_t0_head")
-    fail_reason = res.get("fail_reason", "Failed for unknown reason")
+    t = res['t']
+    tcorr = res.get('tcorr')
+    n = res['n']
+    offsets = res.get('offsets')
+    t0_head = res.get('t0_head')
+    max_diff_from_t0_head = res.get('max_diff_from_t0_head')
+    fail_reason = res.get('fail_reason', 'Failed for unknown reason')
 
     # Setup figure
     along_track = np.arange(t.size)
@@ -295,13 +295,13 @@ def plot_correct_scanline_numbers(res, filename=None):
     """Visualize results of GACReader.correct_scanline_numbers."""
     import matplotlib.pyplot as plt
 
-    along_track = res["along_track"]
-    n_orig = res["n_orig"]
-    n_corr = res["n_corr"]
-    within_range = res["within_range"]
-    thresh = res["thresh"]
-    diffs = res["diffs"]
-    nz_diffs = res["nz_diffs"]
+    along_track = res['along_track']
+    n_orig = res['n_orig']
+    n_corr = res['n_corr']
+    within_range = res['within_range']
+    thresh = res['thresh']
+    diffs = res['diffs']
+    nz_diffs = res['nz_diffs']
 
     # Setup figure
     _, (ax0, ax1) = plt.subplots(nrows=2)
@@ -327,6 +327,6 @@ def plot_correct_scanline_numbers(res, filename=None):
     plt.tight_layout()
 
     if filename:
-        plt.savefig(filename, bbox_inches="tight")
+        plt.savefig(filename, bbox_inches='tight')
     else:
         plt.show()
diff --git a/pyproject.toml b/pyproject.toml
index 28cbf745..8137cfe3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -67,4 +67,4 @@ version-file = "pygac/version.py"
 line-length = 120
 
 [tool.ruff.lint]
-select = ["E", "W", "F", "I", "Q"]
+select = ["E", "W", "F", "I"]
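Note on the closing pyproject.toml hunk: removing "Q" from ruff's `select` list is what makes the revert stick, since it turns off the flake8-quotes ("Q") rules and the linter stops flagging the single-quoted strings this commit restores. A project that wanted to enforce the single-quote style instead could keep "Q" selected and declare single quotes as the preferred style. The following sketch is based on ruff's documented flake8-quotes settings and is not part of this patch:

    [tool.ruff.lint]
    select = ["E", "W", "F", "I", "Q"]

    [tool.ruff.lint.flake8-quotes]
    inline-quotes = "single"

With `inline-quotes = "single"`, rule Q000 then flags double-quoted inline strings rather than single-quoted ones.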