From 627536d4b2c6970da71b8c1c9e8320477578cb4f Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 30 Jun 2020 16:30:01 +0200 Subject: [PATCH 01/14] Switch vertical sections to use xarray DataArrays This will make it easier to support native MPAS grids in future plots. --- .../ocean/meridional_heat_transport.py | 54 +- mpas_analysis/ocean/plot_hovmoller_subtask.py | 21 +- mpas_analysis/ocean/plot_transect_subtask.py | 187 +++--- mpas_analysis/ocean/streamfunction_moc.py | 55 +- mpas_analysis/shared/plot/vertical_section.py | 534 +++++++----------- 5 files changed, 353 insertions(+), 498 deletions(-) diff --git a/mpas_analysis/ocean/meridional_heat_transport.py b/mpas_analysis/ocean/meridional_heat_transport.py index 958227543..1f7880f69 100644 --- a/mpas_analysis/ocean/meridional_heat_transport.py +++ b/mpas_analysis/ocean/meridional_heat_transport.py @@ -181,9 +181,9 @@ def run_task(self): # {{{ if os.path.exists(outFileName): self.logger.info(' Reading results from previous analysis run...') annualClimatology = xr.open_dataset(outFileName) - refZMid = annualClimatology.refZMid.values + refZMid = annualClimatology.refZMid binBoundaryMerHeatTrans = \ - annualClimatology.binBoundaryMerHeatTrans.values + annualClimatology.binBoundaryMerHeatTrans else: # Read in depth and MHT latitude points @@ -195,15 +195,18 @@ def run_task(self): # {{{ 'one for MHT calcuation') with xr.open_dataset(restartFileName) as dsRestart: - refBottomDepth = dsRestart.refBottomDepth.values + refBottomDepth = dsRestart.refBottomDepth - nVertLevels = len(refBottomDepth) + nVertLevels = refBottomDepth.sizes['nVertLevels'] refLayerThickness = np.zeros(nVertLevels) refLayerThickness[0] = refBottomDepth[0] refLayerThickness[1:nVertLevels] = \ refBottomDepth[1:nVertLevels] - \ refBottomDepth[0:nVertLevels - 1] + refLayerThickness = xr.DataArray(dims='nVertLevels', + data=refLayerThickness) + refZMid = -refBottomDepth + 0.5 * refLayerThickness binBoundaryMerHeatTrans = None @@ -220,7 +223,7 @@ def run_task(self): # {{{ with xr.open_dataset(inputFile) as ds: if 'binBoundaryMerHeatTrans' in ds.data_vars: binBoundaryMerHeatTrans = \ - ds.binBoundaryMerHeatTrans.values + ds.binBoundaryMerHeatTrans break if binBoundaryMerHeatTrans is None: @@ -251,12 +254,22 @@ def run_task(self): # {{{ annualClimatology = xr.open_dataset(climatologyFileName) annualClimatology = annualClimatology[variableList] + annualClimatology = annualClimatology.rename( + {'timeMonthly_avg_meridionalHeatTransportLat': + 'meridionalHeatTransportLat', + 'timeMonthly_avg_meridionalHeatTransportLatZ': + 'meridionalHeatTransportLatZ'}) if 'Time' in annualClimatology.dims: annualClimatology = annualClimatology.isel(Time=0) - annualClimatology.coords['refZMid'] = (('nVertLevels',), refZMid) + annualClimatology.coords['refZMid'] = refZMid annualClimatology.coords['binBoundaryMerHeatTrans'] = \ - (('nMerHeatTransBinsP1',), binBoundaryMerHeatTrans) + binBoundaryMerHeatTrans + + if config.getboolean(self.sectionName, 'plotVerticalSection'): + # normalize 2D MHT by layer thickness + annualClimatology['meridionalHeatTransportLatZ'] /= \ + refLayerThickness write_netcdf(annualClimatology, outFileName) @@ -265,9 +278,10 @@ def run_task(self): # {{{ self.logger.info(' Plot global MHT...') # Plot 1D MHT (zonally averaged, depth integrated) x = binBoundaryMerHeatTrans - y = annualClimatology.timeMonthly_avg_meridionalHeatTransportLat - xLabel = 'latitude [deg]' - yLabel = 'meridional heat transport [PW]' + y = annualClimatology.meridionalHeatTransportLat + xLabel = 
'latitude (deg)' + yLabel = 'meridional heat transport (PW)' + title = 'Global MHT (ANN, years {:04d}-{:04d})\n {}'.format( self.startYear, self.endYear, mainRunName) filePrefix = self.filePrefixes['mht'] @@ -314,8 +328,7 @@ def run_task(self): # {{{ lineWidths.append(1.2) legendText.append(controlRunName) xArrays.append(dsControl.binBoundaryMerHeatTrans) - fieldArrays.append( - dsControl.timeMonthly_avg_meridionalHeatTransportLat) + fieldArrays.append(dsControl.meridionalHeatTransportLat) errArrays.append(None) if len(legendText) == 1: @@ -333,26 +346,19 @@ def run_task(self): # {{{ if config.getboolean(self.sectionName, 'plotVerticalSection'): # Plot 2D MHT (zonally integrated) - # normalize 2D MHT by layer thickness - MHTLatZVar = \ - annualClimatology.timeMonthly_avg_meridionalHeatTransportLatZ - MHTLatZ = MHTLatZVar.values.T[:, :] - for k in range(nVertLevels): - MHTLatZ[k, :] = MHTLatZ[k, :] / refLayerThickness[k] - x = binBoundaryMerHeatTrans y = refZMid - z = MHTLatZ - xLabel = 'latitude [deg]' - yLabel = 'depth [m]' + z = annualClimatology.meridionalHeatTransportLatZ + xLabel = 'latitude (deg)' + yLabel = 'depth (m)' title = 'Global MHT (ANN, years {:04d}-{:04d})\n {}'.format( self.startYear, self.endYear, mainRunName) filePrefix = self.filePrefixes['mhtZ'] outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) - colorbarLabel = '[PW/m]' + colorbarLabel = '(PW/m)' plot_vertical_section(config, x, y, z, self.sectionName, suffix='', colorbarLabel=colorbarLabel, - title=title, xlabel=xLabel, ylabel=yLabel, + title=title, xlabels=xLabel, ylabel=yLabel, xLim=xLimGlobal, yLim=depthLimGlobal, invertYAxis=False, movingAveragePoints=movingAveragePoints, diff --git a/mpas_analysis/ocean/plot_hovmoller_subtask.py b/mpas_analysis/ocean/plot_hovmoller_subtask.py index a59456c0a..089dc6f31 100644 --- a/mpas_analysis/ocean/plot_hovmoller_subtask.py +++ b/mpas_analysis/ocean/plot_hovmoller_subtask.py @@ -288,9 +288,15 @@ def run_task(self): # {{{ z = np.zeros(depths.shape) z[0] = -0.5 * depths[0] z[1:] = -0.5 * (depths[0:-1] + depths[1:]) + z = xr.DataArray(dims='nVertLevels', data=z) - Time = ds.Time.values - field = ds[self.mpasFieldName].values.transpose() + Time = ds.Time + field = ds[self.mpasFieldName] + + # drop any NaN values, because this causes issues with rolling averages + mask = field.notnull().all(dim='Time') + field = field.where(mask, drop=True) + z = z.where(mask, drop=True) xLabel = 'Time (years)' yLabel = 'Depth (m)' @@ -342,7 +348,12 @@ def run_task(self): # {{{ regionDim = 'nOceanRegionsTmp' dsRef = dsRef.isel(**{regionDim: regionIndex}) - refField = dsRef[self.mpasFieldName].values.transpose() + refField = dsRef[self.mpasFieldName] + # drop any NaN values, because this causes issues with rolling + # averages + refMask = refField.notnull().all(dim='Time') + assert(np.all(refMask.values == mask.values)) + refField = refField.where(mask, drop=True) assert(field.shape == refField.shape) diff = field - refField refTitle = self.controlConfig.get('runs', 'mainRunName') @@ -366,8 +377,8 @@ def run_task(self): # {{{ fig, _, suptitle = plot_vertical_section_comparison( config, Time, z, field, refField, diff, self.sectionName, colorbarLabel=self.unitsLabel, title=title, modelTitle=mainRunName, - refTitle=refTitle, diffTitle=diffTitle, xlabel=xLabel, - ylabel=yLabel, lineWidth=1, xArrayIsTime=True, + refTitle=refTitle, diffTitle=diffTitle, xlabels=xLabel, + ylabel=yLabel, lineWidth=1, xCoordIsTime=True, movingAveragePoints=movingAverageMonths, calendar=self.calendar, 
firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks, yLim=yLim, invertYAxis=False, titleFontSize=titleFontSize, diff --git a/mpas_analysis/ocean/plot_transect_subtask.py b/mpas_analysis/ocean/plot_transect_subtask.py index c77a3c838..2807bda56 100644 --- a/mpas_analysis/ocean/plot_transect_subtask.py +++ b/mpas_analysis/ocean/plot_transect_subtask.py @@ -36,8 +36,6 @@ from mpas_analysis.shared.constants import constants -from mpas_analysis.ocean.utility import nans_to_numpy_mask - class PlotTransectSubtask(AnalysisTask): # {{{ """ @@ -377,23 +375,13 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): mainRunName = config.get('runs', 'mainRunName') - # broadcast x and z to have the same dimensions - x, z = xr.broadcast(remappedModelClimatology.x, - remappedModelClimatology.z) + x = remappedModelClimatology.x + z = remappedModelClimatology.z # set lat and lon in case we want to plot versus these quantities lat = remappedModelClimatology.lat lon = remappedModelClimatology.lon - # convert x, z, lat, and lon to numpy arrays; make a copy because - # they are sometimes read-only (not sure why) - x = x.values.copy().transpose() - z = z.values.copy().transpose() - lat = lat.values.copy().transpose() - lon = lon.values.copy().transpose() - self.lat = lat - self.lon = lon - # This will do strange things at the antemeridian but there's little # we can do about that. lon_pm180 = numpy.mod(lon + 180., 360.) - 180. @@ -402,11 +390,11 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): mask = numpy.logical_and( remappedModelClimatology.x.values >= self.horizontalBounds[0], remappedModelClimatology.x.values <= self.horizontalBounds[1]) - inset_lon = lon_pm180[mask] - inset_lat = lat[mask] + inset_lon = lon_pm180.values[mask] + inset_lat = lat.values[mask] else: - inset_lon = lon_pm180 - inset_lat = lat + inset_lon = lon_pm180.values + inset_lat = lat.values fc = FeatureCollection() fc.add_feature( {"type": "Feature", @@ -421,23 +409,15 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): # z is masked out with NaNs in some locations (where there is land) but # this makes pcolormesh unhappy so we'll zero out those locations - z[numpy.isnan(z)] = 0. + z = z.where(z.notnull(), 0.) 
- modelOutput = nans_to_numpy_mask( - remappedModelClimatology[self.mpasFieldName].values) - modelOutput = modelOutput.transpose() + modelOutput = remappedModelClimatology[self.mpasFieldName] if remappedRefClimatology is None: refOutput = None bias = None else: refOutput = remappedRefClimatology[self.refFieldName] - dims = refOutput.dims - refOutput = nans_to_numpy_mask(refOutput.values) - if dims[1] != 'nPoints': - assert(dims[0] == 'nPoints') - refOutput = refOutput.transpose() - bias = modelOutput - refOutput filePrefix = self.filePrefix @@ -446,8 +426,9 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): self.fieldNameInTitle, season, self.startYear, self.endYear) - xLabel = 'Distance [km]' - yLabel = 'Depth [m]' + xs = [x] + xLabels = ['Distance (km)'] + yLabel = 'Depth (m)' # define the axis labels and the data to use for the upper # x axis or axes, if such additional axes have been requested @@ -458,45 +439,50 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): 'transects', 'upperXAxisTickLabelPrecision') - self._set_third_x_axis_to_none() - - if upperXAxes == 'neither': - self._set_second_x_axis_to_none() - elif upperXAxes == 'lat': - self._set_second_x_axis_to_latitude() - elif upperXAxes == 'lon': - self._set_second_x_axis_to_longitude() - elif upperXAxes == 'both': - self._set_second_x_axis_to_longitude() - self._set_third_x_axis_to_latitude() - elif upperXAxes == 'greatestExtent': - if self._greatest_extent(lat, lon): - self._set_second_x_axis_to_latitude() + add_lat = False + add_lon = False + + if upperXAxes in ['lat', 'both']: + add_lat = True + if upperXAxes in ['lon', 'both']: + add_lon = True + if upperXAxes == 'greatestExtent': + if self._lat_greater_extent(lat, lon): + add_lat = True else: - self._set_second_x_axis_to_longitude() + add_lon = True elif upperXAxes == 'strictlyMonotonic': - if self._strictly_monotonic(lat, lon): - self._set_second_x_axis_to_latitude() + if self._strictly_monotonic(lat): + add_lat = True + elif self._strictly_monotonic(lon): + add_lon = True else: - self._set_second_x_axis_to_longitude() + raise ValueError('Neither lat nor lon is strictly monotonic.') elif upperXAxes == 'mostMonotonic': - if self._most_monotonic(lat, lon): - self._set_second_x_axis_to_latitude() + if self._lat_most_monotonic(lat, lon): + add_lat = True else: - self._set_second_x_axis_to_longitude() + add_lon = True elif upperXAxes == 'mostStepsInSameDirection': - if self._most_steps_in_same_direction(lat, lon): - self._set_second_x_axis_to_latitude() + if self._lat_most_steps_in_same_direction(lat, lon): + add_lat = True else: - self._set_second_x_axis_to_longitude() + add_lon = True elif upperXAxes == 'fewestDirectionChanges': - if self._fewest_direction_changes(lat, lon): - self._set_second_x_axis_to_latitude() + if self._lat_fewest_direction_changes(lat, lon): + add_lat = True else: - self._set_second_x_axis_to_longitude() + add_lon = True else: raise ValueError('invalid option for upperXAxes') + if add_lat: + xs.append(lat) + xLabels.append('Latitude') + if add_lon: + xs.append(lon) + xLabels.append('Longitude') + # get the parameters determining what type of plot to use, # what line styles and line colors to use, and whether and how # to label contours @@ -542,23 +528,19 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): fig, axes, suptitle = plot_vertical_section_comparison( config, - x, + xs, z, modelOutput, refOutput, bias, configSectionName, colorbarLabel=self.unitsLabel, - 
xlabel=xLabel, + xlabels=xLabels, ylabel=yLabel, title=title, modelTitle='{}'.format(mainRunName), refTitle=self.refTitleLabel, diffTitle=self.diffTitleLabel, - secondXAxisData=self.secondXAxisData, - secondXAxisLabel=self.secondXAxisLabel, - thirdXAxisData=self.thirdXAxisData, - thirdXAxisLabel=self.thirdXAxisLabel, numUpperTicks=numUpperTicks, upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, invertYAxis=False, @@ -607,31 +589,8 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): # }}} - def _set_second_x_axis_to_latitude(self): - self.secondXAxisData = self.lat - self.secondXAxisLabel = 'Latitude' - - def _set_second_x_axis_to_longitude(self): - self.secondXAxisData = self.lon - self.secondXAxisLabel = 'Longitude' - - def _set_second_x_axis_to_none(self): - self.secondXAxisData = None - self.secondXAxisLabel = None - - def _set_third_x_axis_to_latitude(self): - self.thirdXAxisData = self.lat - self.thirdXAxisLabel = 'Latitude' - - def _set_third_x_axis_to_longitude(self): - self.thirdXAxisData = self.lon - self.thirdXAxisLabel = 'Longitude' - def _set_third_x_axis_to_none(self): - self.thirdXAxisData = None - self.thirdXAxisLabel = None - - def _greatest_extent(self, lat, lon): + def _lat_greater_extent(self, lat, lon): # {{{ """ Returns True if lat has a greater extent (in degrees) than lon (or the @@ -639,7 +598,7 @@ def _greatest_extent(self, lat, lon): Authors ------- - Greg Streletz + Greg Streletz, Xylar Asay-Davis """ lat_extent = numpy.max(lat) - numpy.min(lat) @@ -709,41 +668,31 @@ def _greatest_extent(self, lat, lon): return False # }}} - def _strictly_monotonic(self, lat, lon): + def _strictly_monotonic(self, coord): # {{{ - """ - Returns True if lat is strictly monotonic; returns false if lon is - strictly monotonic and lat is not strictly monotonic; throws an error - if neither are strictly monotonic. + """Whether the coordinate is strictly monotonic""" + # Authors + # ------- + # Greg Streletz, Xylar Asay-Davis - Authors - ------- - Greg Streletz - """ - lat_diff = numpy.diff(lat) - if numpy.all(lat_diff > 0) or numpy.all(lat_diff < 0): - return True - lon_diff = numpy.diff(lon) - lon_diff = numpy.where(lon_diff > 180, lon_diff - 360, lon_diff) - lon_diff = numpy.where(lon_diff < -180, lon_diff + 360, lon_diff) - if numpy.all(lon_diff > 0) or numpy.all(lon_diff < 0): - return False - else: - raise ValueError('neither input array is strictly monotonic') + coord_diff = numpy.diff(coord.values) + coord_diff = numpy.where(coord_diff > 180, coord_diff - 360, coord_diff) + coord_diff = numpy.where(coord_diff < -180, coord_diff + 360, + coord_diff) + return numpy.all(coord_diff > 0) or numpy.all(coord_diff < 0) # }}} - def _most_monotonic(self, lat, lon): + def _lat_most_monotonic(self, lat, lon): # {{{ """ Returns True if lat is "more monotonic" than lon in terms of the difference between the total number of degrees traversed and the net number of degrees traversed (or if both are equally as monotonic in this sense), and False otherwise. 
- - Authors - ------- - Greg Streletz """ + # Authors + # ------- + # Greg Streletz, Xylar Asay-Davis lat_diff = numpy.diff(lat) lat_score = numpy.sum(numpy.fabs(lat_diff)) - abs(numpy.sum(lat_diff)) @@ -758,17 +707,17 @@ def _most_monotonic(self, lat, lon): return False # }}} - def _most_steps_in_same_direction(self, lat, lon): + def _lat_most_steps_in_same_direction(self, lat, lon): # {{{ """ Returns True if lat is has more steps in the same direction (either steps that increase the latitude or steps that decrease the latitude) than lon (or the same number as lon), and False otherwise. - - Authors - ------- - Greg Streletz """ + # Authors + # ------- + # Greg Streletz, Xylar Asay-Davis + lat_changes = numpy.diff(lat) lat_changes = lat_changes[lat_changes != 0.0] # ignore flat regions lat_changedirs = lat_changes / numpy.fabs(lat_changes) @@ -793,7 +742,7 @@ def _most_steps_in_same_direction(self, lat, lon): return False # }}} - def _fewest_direction_changes(self, lat, lon): + def _lat_fewest_direction_changes(self, lat, lon): # {{{ """ Returns True if lat is has fewer changes in direction (from increasing @@ -802,7 +751,7 @@ def _fewest_direction_changes(self, lat, lon): Authors ------- - Greg Streletz + Greg Streletz, Xylar Asay-Davis """ lat_changes = numpy.diff(lat) lat_changes = lat_changes[lat_changes != 0.0] # ignore flat regions diff --git a/mpas_analysis/ocean/streamfunction_moc.py b/mpas_analysis/ocean/streamfunction_moc.py index 7617df95c..b152e6181 100644 --- a/mpas_analysis/ocean/streamfunction_moc.py +++ b/mpas_analysis/ocean/streamfunction_moc.py @@ -755,26 +755,16 @@ def run_task(self): # {{{ maxLat = config.getExpression('streamfunctionMOC{}'.format(region), 'latBinMax') indLat = np.logical_and(x >= minLat, x <= maxLat) - x = x[indLat] - regionMOC = regionMOC[:, indLat] + x = x.where(indLat, drop=True) + regionMOC = regionMOC.where(indLat, drop=True) if self.controlConfig is None: refRegionMOC = None diff = None else: # the coords of the ref MOC won't necessarily match this MOC # so we need to interpolate - nz, nx = regionMOC.shape - refNz, refNx = refMOC[region].shape - temp = np.zeros((refNz, nx)) - for zIndex in range(refNz): - temp[zIndex, :] = np.interp( - x, refLat[region], refMOC[region][zIndex, :], - left=np.nan, right=np.nan) - refRegionMOC = np.zeros((nz, nx)) - for xIndex in range(nx): - refRegionMOC[:, xIndex] = np.interp( - depth, refDepth, temp[:, xIndex], - left=np.nan, right=np.nan) + refRegionMOC = _interp_moc(x, z, regionMOC, refLat[region], + refDepth, refMOC[region]) diff = regionMOC - refRegionMOC @@ -786,7 +776,7 @@ def run_task(self): # {{{ modelTitle=mainRunName, refTitle=refTitle, diffTitle=diffTitle, - xlabel=xLabel, + xlabels=xLabel, ylabel=yLabel, movingAveragePoints=movingAveragePointsClimatological, maxTitleLength=70) @@ -820,15 +810,13 @@ def _load_moc(self, config): # {{{ endYear) # Read from file - ncFile = netCDF4.Dataset(inputFileName, mode='r') - depth = ncFile.variables['depth'][:] + ds = xr.open_dataset(inputFileName) + depth = ds['depth'] lat = {} moc = {} for region in self.regionNames: - lat[region] = ncFile.variables['lat{}'.format(region)][:] - moc[region] = \ - ncFile.variables['moc{}'.format(region)][:, :] - ncFile.close() + lat[region] = ds['lat{}'.format(region)] + moc[region] = ds['moc{}'.format(region)] return depth, lat, moc # }}} # }}} @@ -1606,4 +1594,29 @@ def _compute_moc(latBins, nz, latCell, regionCellMask, transportZ, mocTop = mocTop.T return mocTop # }}} + +def _interp_moc(x, z, regionMOC, refX, refZ, refMOC): + x 
= x.values + z = z.values + dims = regionMOC.dims + regionMOC = regionMOC.values + refX = refX.values + refZ = refZ.values + refMOC = refMOC.values + + nz, nx = regionMOC.shape + refNz, refNx = refMOC.shape + temp = np.zeros((refNz, nx)) + for zIndex in range(refNz): + temp[zIndex, :] = np.interp( + x, refX, refMOC[zIndex, :], + left=np.nan, right=np.nan) + refRegionMOC = np.zeros((nz, nx)) + for xIndex in range(nx): + refRegionMOC[:, xIndex] = np.interp( + z, refZ, temp[:, xIndex], + left=np.nan, right=np.nan) + + return xr.DataArray(dims=dims, data=refRegionMOC) + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index d6831d36f..d6f3f0ab3 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -22,7 +22,6 @@ import matplotlib import matplotlib.pyplot as plt import xarray as xr -import pandas as pd import numpy as np from mpas_analysis.shared.timekeeping.utility import date_to_days @@ -34,14 +33,14 @@ def plot_vertical_section_comparison( config, - xArray, - depthArray, + xCoords, + zCoord, modelArray, refArray, diffArray, colorMapSectionName, colorbarLabel=None, - xlabel=None, + xlabels=None, ylabel=None, title=None, modelTitle='Model', @@ -59,14 +58,10 @@ def plot_vertical_section_comparison( backgroundColor='grey', xLim=None, yLim=None, - secondXAxisData=None, - secondXAxisLabel=None, - thirdXAxisData=None, - thirdXAxisLabel=None, numUpperTicks=None, upperXAxisTickLabelPrecision=None, invertYAxis=True, - xArrayIsTime=False, + xCoordIsTime=False, movingAveragePoints=None, firstYearXTicks=None, yearStrideXTicks=None, @@ -96,14 +91,18 @@ def plot_vertical_section_comparison( the configuration, containing a [plot] section with options that control plotting - xArray : float array - x array (latitude, longitude, spherical distance, or distance along - a transect; or, time for Hovmoller plots) + xCoords : xarray.DataArray or list of xarray.DataArray + The x coordinate(s) in ``field``. Optional second + and third entries will be used for a second and third x axis above the + plot. The typical use for the second and third axis is for transects, + for which the primary x axis represents distance along a transect, and + the second and third x axes are used to display the corresponding + latitudes and longitudes. - depthArray : float array - depth array [m] + zCoord : xarray.DataArray + The z coordinates in ``field`` - modelArray, refArray : float arrays + modelArray, refArray : xarray.DataArray model and observational or control run data sets diffArray : float array @@ -121,8 +120,11 @@ def plot_vertical_section_comparison( parenthetically appended to the legend entries of the contour comparison plot. - xlabel, ylabel : str, optional - label of x- and y-axis + xlabels : str or list of str, optional + labels of x-axes. Labels correspond to entries in ``xCoords``. + + ylabel : str, optional + label of y-axis title : str, optional the subtitle of the plot @@ -182,28 +184,6 @@ def plot_vertical_section_comparison( yLim : float array, optional y range of plot - secondXAxisData : the data to use to display a second x axis (which will be - placed above the plot). This array must have the same number of values - as xArray, and it is assumed that the values in this array define - locations along the x axis that are the same as those defined by the - corresponding values in xArray, but in some different unit system. 
- - secondXAxisLabel : the label for the second x axis, if requested - - thirdXAxisData : the data to use to display a third x axis (which will be - placed above the plot and above the second x axis, which must be - specified if a third x axis is to be specified). This array must have - the same number of values as xArray, and it is assumed that the values - in this array define locations along the x axis that are the same as - those defined by the corresponding values in xArray, but in some - different unit system (which is presumably also different from the unit - system used for the values in the secondXAxisData array). The typical - use for this third axis is for transects, for which the primary x axis - represents distance along a transect, and the second and third x axes - are used to display the corresponding latitudes and longitudes. - - thirdXAxisLabel : the label for the third x axis, if requested - numUpperTicks : the approximate number of ticks to use on the upper x axis or axes (these are the second and third x axes, which are placed above the plot if they have been requested by specifying the secondXAxisData @@ -217,13 +197,13 @@ def plot_vertical_section_comparison( invertYAxis : logical, optional if True, invert Y axis - xArrayIsTime : logical, optional + xCoordIsTime : logical, optional if True, format the x axis for time (this applies only to the primary x axis, not to the optional second or third x axes) movingAveragePoints : int, optional the number of points over which to perform a moving average - NOTE: this option is mostly intended for use when xArrayIsTime is True, + NOTE: this option is mostly intended for use when xCoordIsTime is True, although it will work with other data as well. Also, the moving average calculation is based on number of points, not actual x axis values, so for best results, the values in the xArray should be equally @@ -240,11 +220,11 @@ def plot_vertical_section_comparison( maxXTicks : int, optional the maximum number of tick marks that will be allowed along the primary x axis. This may need to be adjusted depending on the figure size and - aspect ratio. NOTE: maxXTicks is only used if xArrayIsTime is True + aspect ratio. 
NOTE: maxXTicks is only used if xCoordIsTime is True calendar : str, optional the calendar to use for formatting the time axis - NOTE: calendar is only used if xArrayIsTime is True + NOTE: calendar is only used if xCoordIsTime is True compareAsContours : bool, optional if compareAsContours is True, instead of creating a three panel plot @@ -298,6 +278,11 @@ def plot_vertical_section_comparison( if defaultFontSize is None: defaultFontSize = config.getint('plot', 'defaultFontSize') matplotlib.rc('font', size=defaultFontSize) + if not isinstance(xCoords, list): + xCoords = [xCoords] + + if not isinstance(xlabels, list): + xlabels = [xlabels] if refArray is None or compareAsContours: singlePanel = True @@ -312,13 +297,13 @@ def plot_vertical_section_comparison( # depending on how many x axes are to be displayed on the plots if singlePanel: if compareAsContours and refArray is not None: - if thirdXAxisData is not None: + if len(xCoords) == 3: figsize = (8, 8) else: figsize = (8, 7) else: figsize = (8, 5) - elif thirdXAxisData is not None: + elif len(xCoords) == 3: figsize = (8, 17) else: figsize = (8, 13) @@ -339,12 +324,12 @@ def plot_vertical_section_comparison( if plotTitleFontSize is None: plotTitleFontSize = config.get('plot', 'threePanelPlotTitleFontSize') - if thirdXAxisData is not None: + if len(xCoords) == 3: if singlePanel: titleY = 1.64 else: titleY = 1.34 - elif secondXAxisData is not None: + elif len(xCoords) >= 2: titleY = 1.20 else: titleY = 1.06 @@ -357,12 +342,12 @@ def plot_vertical_section_comparison( if not compareAsContours or refArray is None: title = modelTitle - contourComparisonFieldArray = None + contourComparisonField = None comparisonFieldName = None originalFieldName = None else: title = None - contourComparisonFieldArray = refArray + contourComparisonField = refArray comparisonFieldName = refTitle originalFieldName = modelTitle @@ -370,14 +355,14 @@ def plot_vertical_section_comparison( _, ax = plot_vertical_section( config, - xArray, - depthArray, + xCoords, + zCoord, modelArray, colorMapSectionName, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=title, - xlabel=xlabel, + xlabels=xlabels, ylabel=ylabel, figsize=None, titleFontSize=plotTitleFontSize, @@ -389,21 +374,17 @@ def plot_vertical_section_comparison( lineWidth=lineWidth, lineStyle=lineStyle, lineColor=lineColor, - secondXAxisData=secondXAxisData, - secondXAxisLabel=secondXAxisLabel, - thirdXAxisData=thirdXAxisData, - thirdXAxisLabel=thirdXAxisLabel, numUpperTicks=numUpperTicks, upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, invertYAxis=invertYAxis, - xArrayIsTime=xArrayIsTime, + xCoordIsTime=xCoordIsTime, movingAveragePoints=movingAveragePoints, firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks, maxXTicks=maxXTicks, calendar=calendar, backgroundColor=backgroundColor, plotAsContours=compareAsContours, - contourComparisonFieldArray=contourComparisonFieldArray, + contourComparisonField=contourComparisonField, comparisonFieldName=comparisonFieldName, originalFieldName=originalFieldName, comparisonContourLineStyle=comparisonContourLineStyle, @@ -418,14 +399,14 @@ def plot_vertical_section_comparison( plt.subplot(3, 1, 2) _, ax = plot_vertical_section( config, - xArray, - depthArray, + xCoords, + zCoord, refArray, colorMapSectionName, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=refTitle, - xlabel=xlabel, + xlabels=xlabels, ylabel=ylabel, figsize=None, titleFontSize=plotTitleFontSize, @@ -437,14 +418,10 @@ def plot_vertical_section_comparison( lineWidth=lineWidth, 
lineStyle=lineStyle, lineColor=lineColor, - secondXAxisData=secondXAxisData, - secondXAxisLabel=secondXAxisLabel, - thirdXAxisData=thirdXAxisData, - thirdXAxisLabel=thirdXAxisLabel, upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, numUpperTicks=numUpperTicks, invertYAxis=invertYAxis, - xArrayIsTime=xArrayIsTime, + xCoordIsTime=xCoordIsTime, movingAveragePoints=movingAveragePoints, firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks, @@ -460,14 +437,14 @@ def plot_vertical_section_comparison( plt.subplot(3, 1, 3) _, ax = plot_vertical_section( config, - xArray, - depthArray, + xCoords, + zCoord, diffArray, colorMapSectionName, suffix=diffSuffix, colorbarLabel=colorbarLabel, title=diffTitle, - xlabel=xlabel, + xlabels=xlabels, ylabel=ylabel, figsize=None, titleFontSize=plotTitleFontSize, @@ -479,14 +456,10 @@ def plot_vertical_section_comparison( lineWidth=lineWidth, lineStyle=lineStyle, lineColor=lineColor, - secondXAxisData=secondXAxisData, - secondXAxisLabel=secondXAxisLabel, - thirdXAxisData=thirdXAxisData, - thirdXAxisLabel=thirdXAxisLabel, upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, numUpperTicks=numUpperTicks, invertYAxis=invertYAxis, - xArrayIsTime=xArrayIsTime, + xCoordIsTime=xCoordIsTime, movingAveragePoints=movingAveragePoints, firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks, @@ -500,7 +473,7 @@ def plot_vertical_section_comparison( axes.append(ax) if singlePanel: - if thirdXAxisData is not None and refArray is None: + if len(xCoords) == 3 and refArray is None: plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.0, 0.0, 1.0, 0.98]) else: plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.0, 0.0, 1.0, 0.95]) @@ -512,14 +485,14 @@ def plot_vertical_section_comparison( def plot_vertical_section( config, - xArray, - depthArray, - fieldArray, + xCoords, + zCoord, + field, colorMapSectionName, suffix='', colorbarLabel=None, title=None, - xlabel=None, + xlabels=None, ylabel=None, figsize=(10, 4), dpi=None, @@ -533,21 +506,17 @@ def plot_vertical_section( lineStyle='solid', lineColor='black', backgroundColor='grey', - secondXAxisData=None, - secondXAxisLabel=None, - thirdXAxisData=None, - thirdXAxisLabel=None, numUpperTicks=None, upperXAxisTickLabelPrecision=None, invertYAxis=True, - xArrayIsTime=False, + xCoordIsTime=False, movingAveragePoints=None, firstYearXTicks=None, yearStrideXTicks=None, maxXTicks=20, calendar='gregorian', plotAsContours=False, - contourComparisonFieldArray=None, + contourComparisonField=None, comparisonFieldName=None, originalFieldName=None, comparisonContourLineStyle=None, @@ -559,12 +528,12 @@ def plot_vertical_section( Plots a data set as a x distance (latitude, longitude, or spherical distance) vs depth map (vertical section). - Or, if xArrayIsTime is True, plots data set on a vertical + Or, if xCoordIsTime is True, plots data set on a vertical Hovmoller plot (depth vs. time). - Typically, the fieldArray data are plotted using a heatmap, but if - contourComparisonFieldArray is not None, then contours of both - fieldArray and contourComparisonFieldArray are plotted instead. + Typically, the ``field`` data are plotted using a heatmap, but if + ``contourComparisonField`` is not None, then contours of both + ``field`` and ``contourComparisonField`` are plotted instead. 
Parameters ---------- @@ -572,15 +541,25 @@ def plot_vertical_section( the configuration, containing a [plot] section with options that control plotting - xArray : float array - x array (latitude, longitude, or spherical distance; or, time for a - Hovmoller plot) - - depthArray : float array - depth array [m] - - fieldArray : float array - field array to plot + xCoords : xarray.DataArray or list of xarray.DataArray + The x coordinate(s) in ``field``. Optional second + and third entries will be used for a second and third x axis above the + plot. The typical use for the second and third axis is for transects, + for which the primary x axis represents distance along a transect, and + the second and third x axes are used to display the corresponding + latitudes and longitudes. + + zCoord : xarray.DataArray + The z coordinates in ``field`` + + field : xarray.DataArray + field array to plot. For contour plots, ``xCoords`` and ``zCoords`` + should broadcast to the same shape as ``field``. For heatmap plots, + ``xCoords`` and ``zCoords`` are the corners of the plot. If they + broadcast to the same shape as ``field``, ``field`` will be bilinearly + interpolated to center values for each plot cell. If the coordinates + have one extra element in each direction than ``field``, ``field`` is + assumed to contain cell values and no interpolation is performed. colorMapSectionName : str section name in ``config`` where color map info can be found. @@ -592,16 +571,19 @@ def plot_vertical_section( the label for the colorbar. If plotAsContours and labelContours are both True, colorbarLabel is used as follows (typically in order to indicate the units that are associated with the contour labels): - if contourComparisonFieldArray is None, the colorbarLabel string is + if ``contourComparisonField`` is None, the ``colorbarLabel`` string is parenthetically appended to the plot title; if - contourComparisonFieldArray is not None, it is parenthetically appended + ``contourComparisonField`` is not None, it is parenthetically appended to the legend entries of the contour comparison plot. title : str, optional title of plot - xlabel, ylabel : str, optional - label of x- and y-axis + xlabels : str or list of str, optional + labels of x-axes. Labels correspond to entries in ``xCoords``. + + ylabel : str, optional + label of y-axis figsize : tuple of float, optional size of the figure in inches, or None if the current figure should @@ -635,45 +617,25 @@ def plot_vertical_section( lineStyle : str, optional the line style of contour lines (if specified); this applies to the style of contour lines of fieldArray (the style of the contour lines - of contourComparisonFieldArray is set using + of contourComparisonField is set using contourComparisonLineStyle). lineColor : str, optional the color of contour lines (if specified); this applies to the contour lines of fieldArray (the color of the contour lines of - contourComparisonFieldArray is set using contourComparisonLineColor + contourComparisonField is set using contourComparisonLineColor backgroundColor : str, optional the background color for the plot (NaNs will be shown in this color) - secondXAxisData : the data to use to display a second x axis (which will be - placed above the plot). This array must have the same number of values - as xArray, and it is assumed that the values in this array define - locations along the x axis that are the same as those defined by the - corresponding values in xArray, but in some different unit system. 
- - secondXAxisLabel : the label for the second x axis, if requested - - thirdXAxisData : the data to use to display a third x axis (which will be - placed above the plot and above the second x axis, which must be - specified if a third x axis is to be specified). This array must have - the same number of values as xArray, and it is assumed that the values - in this array define locations along the x axis that are the same as - those defined by the corresponding values in xArray, but in some - different unit system (which is presumably also different from the unit - system used for the values in the secondXAxisData array). The typical - use for this third axis is for transects, for which the primary x axis - represents distance along a transect, and the second and third x axes - are used to display the corresponding latitudes and longitudes. - - thirdXAxisLabel : the label for the third x axis, if requested - - numUpperTicks : the approximate number of ticks to use on the upper x axis + numUpperTicks : int, optional + the approximate number of ticks to use on the upper x axis or axes (these are the second and third x axes, which are placed above the plot if they have been requested by specifying the secondXAxisData or thirdXAxisData arrays above) - upperXAxisTickLabelPrecision : the number of decimal places (to the right + upperXAxisTickLabelPrecision : int, optional + the number of decimal places (to the right of the decimal point) to use for values at upper axis ticks. This value can be adjusted (in concert with numUpperTicks) to avoid problems with overlapping numbers along the upper axis. @@ -681,17 +643,17 @@ def plot_vertical_section( invertYAxis : logical, optional if True, invert Y axis - xArrayIsTime : logical, optional + xCoordIsTime : logical, optional if True, format the x axis for time (this applies only to the primary x axis, not to the optional second or third x axes) movingAveragePoints : int, optional the number of points over which to perform a moving average - NOTE: this option is mostly intended for use when xArrayIsTime is True, + NOTE: this option is mostly intended for use when ``xCoordIsTime`` is True, although it will work with other data as well. Also, the moving average calculation is based on number of points, not actual x axis - values, so for best results, the values in the xArray should be equally - spaced. + values, so for best results, the values in the first entry in + ``xCoords`` should be equally spaced. firstYearXTicks : int, optional The year of the first tick on the x axis. By default, the first time @@ -704,38 +666,36 @@ def plot_vertical_section( maxXTicks : int, optional the maximum number of tick marks that will be allowed along the primary x axis. This may need to be adjusted depending on the figure size and - aspect ratio. NOTE: maxXTicks is only used if xArrayIsTime is True + aspect ratio. NOTE: maxXTicks is only used if xCoordIsTime is True calendar : str, optional the calendar to use for formatting the time axis - NOTE: calendar is only used if xArrayIsTime is True + NOTE: calendar is only used if xCoordIsTime is True plotAsContours : bool, optional - if plotAsContours is True, instead of plotting fieldArray as a - heatmap, the function will plot only the contours of fieldArray. In - addition, if contourComparisonFieldArray is not None, the contours + if plotAsContours is True, instead of plotting ``field`` as a + heatmap, the function will plot only the contours of ``field``. 
In + addition, if contourComparisonField is not None, the contours of this field will be plotted on the same plot. The selection of contour levels is still determined as for the contours on the heatmap - plots, via the 'contours' entry in colorMapSectionName. - - contourComparisonFieldArray : float array, optional - a comparison field array (typically observational data or results from - another simulation run), assumed to be of the same shape as fieldArray, - and related to xArray and depthArray in the same way fieldArray is. - If contourComparisonFieldArray is None, then fieldArray will be plotted - as a heatmap. However, if countourComparisonFieldArray is not None, - then contours of both fieldArray and contourComparisonFieldArray will - be plotted in order to enable a comparison of the two fields on the - same plot. If plotAsContours is False, this parameter is ignored. + plots, via the 'contours' entry in ``colorMapSectionName``. + + contourComparisonField : float array, optional + a comparison ``field`` array (typically observational data or results + from another simulation run), assumed to be of the same shape as + ``field``. If ``plotAsContours`` is ``True`` and + ``countourComparisonFieldArray`` is not ``None``, then contours of both + ``field`` and ``contourComparisonField`` will be plotted in order to + enable a comparison of the two fields on the same plot. comparisonFieldName : str, optional - the name for the comparison field. If contourComparisonFieldArray is - None, this parameter is ignored. + the name for the comparison field. If contourComparisonField is + None, this parameter is ignored. originalFieldName : str, optional - the name for the fieldArray field (for the purposes of labeling the - contours on a contour comparison plot). If contourComparisonFieldArray - is None, this parameter is ignored. + the name for the ``field`` field (for the purposes of labeling the + contours on a contour comparison plot). If contourComparisonField + is None, this parameter is ignored. 
comparisonContourLineStyle : str, optional the line style of contour lines of the comparisonFieldName field on @@ -771,143 +731,44 @@ def plot_vertical_section( if defaultFontSize is None: defaultFontSize = config.getint('plot', 'defaultFontSize') matplotlib.rc('font', size=defaultFontSize) + if not isinstance(xCoords, list): + xCoords = [xCoords] + + if not isinstance(xlabels, list): + xlabels = [xlabels] + + if len(xCoords) != len(xlabels): + raise ValueError('Expected the same number of xCoords and xlabels') + + x, y = xr.broadcast(xCoords[0], zCoord) + dims_in_field = all([dim in field.dims for dim in x.dims]) + + if dims_in_field: + x = x.transpose(*field.dims) + y = y.transpose(*field.dims) + else: + xsize = list(x.sizes.values()) + fieldsize = list(field.sizes.values()) + if xsize[0] == fieldsize[0] + 1 and xsize[1] == fieldsize[1] + 1: + pass + elif xsize[0] == fieldsize[1] + 1 and xsize[1] == fieldsize[0] + 1: + x = x.transpose(x.dims[1], x.dims[0]) + y = y.transpose(y.dims[1], y.dims[0]) + else: + raise ValueError('Sizes of coords {}x{} and field {}x{} not ' + 'compatible.'.format(xsize[0], xsize[1], + fieldsize[0], fieldsize[1])) # compute moving averages with respect to the x dimension if movingAveragePoints is not None and movingAveragePoints != 1: - N = movingAveragePoints - movingAverageDepthSlices = [] - for nVertLevel in range(len(depthArray)): - depthSlice = fieldArray[[nVertLevel]][0] - # in case it's not an xarray already - depthSlice = xr.DataArray(depthSlice) - mean = pd.Series.rolling(depthSlice.to_series(), N, - center=True).mean() - mean = xr.DataArray.from_series(mean) - mean = mean[int(N / 2.0):-int(round(N / 2.0) - 1)] - movingAverageDepthSlices.append(mean) - xArray = xArray[int(N / 2.0):-int(round(N / 2.0) - 1)] - fieldArray = xr.DataArray(movingAverageDepthSlices) - - dimX = xArray.shape - dimZ = depthArray.shape - dimF = fieldArray.shape - if contourComparisonFieldArray is not None: - dimC = contourComparisonFieldArray.shape - - if len(dimX) != 1 and len(dimX) != 2: - raise ValueError('xArray must have either one or two dimensions ' - '(has %d)' % dimX) - - if len(dimZ) != 1 and len(dimZ) != 2: - raise ValueError('depthArray must have either one or two dimensions ' - '(has %d)' % dimZ) - - if len(dimF) != 2: - raise ValueError('fieldArray must have two dimensions (has %d)' % dimF) - - if contourComparisonFieldArray is not None: - if len(dimC) != 2: - raise ValueError('contourComparisonFieldArray must have two ' - 'dimensions (has %d)' % dimC) - elif (fieldArray.shape[0] != contourComparisonFieldArray.shape[0]) or \ - (fieldArray.shape[1] != contourComparisonFieldArray.shape[1]): - raise ValueError('size mismatch between fieldArray (%d x %d) and ' - 'contourComparisonFieldArray (%d x %d)' % - (fieldArray.shape[0], fieldArray.shape[1], - contourComparisonFieldArray.shape[0], - contourComparisonFieldArray.shape[1])) - - # verify that the dimensions of fieldArray are consistent with those of - # xArray and depthArray - if len(dimX) == 1 and len(dimZ) == 1: - num_x = dimX[0] - num_z = dimZ[0] - if num_x != fieldArray.shape[1] or num_z != fieldArray.shape[0]: - raise ValueError('size mismatch between xArray (%d), ' - 'depthArray (%d), and fieldArray (%d x %d)' % - (num_x, num_z, fieldArray.shape[0], - fieldArray.shape[1])) - elif len(dimX) == 1: - num_x = dimX[0] - num_x_Z = dimZ[1] - num_z = dimZ[0] - if num_x != fieldArray.shape[1] or num_z != fieldArray.shape[0] or \ - num_x != num_x_Z: - raise ValueError('size mismatch between xArray (%d), ' - 'depthArray (%d 
x %d), and fieldArray (%d x %d)' % - (num_x, num_z, num_x_Z, - fieldArray.shape[0], - fieldArray.shape[1])) - elif len(dimZ) == 1: - num_x = dimX[1] - num_z_X = dimX[0] - num_z = dimZ[0] - if num_x != fieldArray.shape[1] or num_z != fieldArray.shape[0] or \ - num_z != num_z_X: - raise ValueError('size mismatch between xArray (%d x %d), ' - 'depthArray (%d), and fieldArray (%d x %d)' % - (num_z_X, num_x, num_z, - fieldArray.shape[0], - fieldArray.shape[1])) - else: - num_x = dimX[1] - num_z_X = dimX[0] - num_x_Z = dimZ[1] - num_z = dimZ[0] - if num_x != fieldArray.shape[1] or num_z != fieldArray.shape[0] \ - or num_x != num_x_Z or num_z != num_z_X: - raise ValueError('size mismatch between xArray (%d x %d), ' - 'depthArray (%d x %d), and fieldArray (%d x %d)' % - (num_z_X, num_x, num_z, num_x_Z, - fieldArray.shape[0], - fieldArray.shape[1])) - - # Verify that the upper x-axis parameters are consistent with each other - # and with xArray - if secondXAxisData is None and thirdXAxisData is not None: - raise ValueError('secondXAxisData cannot be None if thirdXAxisData ' - 'is not None') - if secondXAxisData is not None: - arrayShape = secondXAxisData.shape - if len(arrayShape) == 1 and arrayShape[0] != num_x: - raise ValueError('secondXAxisData has %d x values, ' - 'but should have num_x = %d x values' % - (arrayShape[0], num_x)) - elif len(arrayShape) == 2 and arrayShape[1] != num_x: - raise ValueError('secondXAxisData has %d x values, ' - 'but should have num_x = %d x values' % - (arrayShape[1], num_x)) - elif len(arrayShape) > 2: - raise ValueError('secondXAxisData must be a 1D or 2D array, ' - 'but is of dimension %d' % - (len(arrayShape))) - if thirdXAxisData is not None: - arrayShape = thirdXAxisData.shape - if len(arrayShape) == 1 and arrayShape[0] != num_x: - raise ValueError('thirdXAxisData has %d x values, ' - 'but should have num_x = %d x values' % - (arrayShape[0], num_x)) - elif len(arrayShape) == 2 and arrayShape[1] != num_x: - raise ValueError('thirdXAxisData has %d x values, ' - 'but should have num_x = %d x values' % - (arrayShape[1], num_x)) - elif len(arrayShape) > 2: - raise ValueError('thirdXAxisData must be a 1D or 2D array, ' - 'but is of dimension %d' % - (len(arrayShape))) - - # define x and y as the appropriate 2D arrays for plotting - if len(dimX) == 1 and len(dimZ) == 1: - x, y = np.meshgrid(xArray, depthArray) - elif len(dimX) == 1: - x, y = np.meshgrid(xArray, np.zeros(num_z)) - y = depthArray - elif len(dimZ) == 1: - x, y = np.meshgrid(np.zeros(num_x), depthArray) - x = xArray - else: - x = xArray - y = depthArray + + dim = field.dims[0] + field = field.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim) + x = x.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim) + y = y.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim) # set up figure if dpi is None: @@ -919,21 +780,28 @@ def plot_vertical_section( colormapDict = setup_colormap(config, colorMapSectionName, suffix=suffix) - if not plotAsContours: # display a heatmap of fieldArray + if not plotAsContours: + # display a heatmap of fieldArray if colormapDict['levels'] is None: # interpFieldArray contains the values at centers of grid cells, # for pcolormesh plots (using bilinear interpolation) - interpFieldArray = \ - 0.5 * (0.5 * (fieldArray[1:, 1:] + fieldArray[0:-1, 1:]) + - 0.5 * (fieldArray[1:, 0:-1] + fieldArray[0:-1, 0:-1])) - plotHandle = plt.pcolormesh(x, y, interpFieldArray, + interpField = field.values + if field.sizes == x.sizes: + # 
interpFieldArray contains the values at centers of grid cells, + # for pcolormesh plots (using bilinear interpolation) + interpField = 0.25 * (interpField[1:, 1:] + + interpField[0:-1, 1:] + + interpField[1:, 0:-1] + + interpField[0:-1, 0:-1]) + + plotHandle = plt.pcolormesh(x.values, y.values, interpField, cmap=colormapDict['colormap'], norm=colormapDict['norm'], rasterized=True) else: - plotHandle = plt.contourf(x, y, fieldArray, + plotHandle = plt.contourf(x.values, y.values, field.values, cmap=colormapDict['colormap'], norm=colormapDict['norm'], levels=colormapDict['levels'], @@ -949,9 +817,10 @@ def plot_vertical_section( if colorbarLabel is not None: cbar.set_label(colorbarLabel) - else: # display a white heatmap to get a white background for non-land - zeroArray = np.ma.where(fieldArray != np.nan, 0.0, fieldArray) - plt.contourf(x, y, zeroArray, colors='white') + else: + # display a white heatmap to get a white background for non-land + zeroArray = xr.zeros_like(field).where(field.nonnull()) + plt.contourf(x.values, y.values, zeroArray.values, colors='white') # set the color for NaN or masked regions, and draw a black # outline around them; technically, the contour level used should @@ -959,18 +828,20 @@ def plot_vertical_section( # is used instead ax = plt.gca() ax.set_facecolor(backgroundColor) - landArray = np.ma.where(fieldArray != np.nan, 1.0, fieldArray) - landArray = np.ma.masked_where(landArray == np.nan, landArray, copy=True) - landArray = landArray.filled(0.0) - plt.contour(x, y, landArray, levels=[0.999], colors='black', linewidths=1) + landMask = np.isnan(field.values) + plt.contour(x, y, landMask, levels=[0.0001], colors='black', linewidths=1) # plot contours, if they were requested contourLevels = colormapDict['contours'] + fmt_string = None + cs1 = None + cs2 = None + if contourLevels is not None: if len(contourLevels) == 0: # automatic calculation of contour levels contourLevels = None - cs1 = plt.contour(x, y, fieldArray, + cs1 = plt.contour(x, y, field, levels=contourLevels, colors=lineColor, linestyles=lineStyle, @@ -978,8 +849,8 @@ def plot_vertical_section( if labelContours: fmt_string = "%%1.%df" % int(contourLabelPrecision) plt.clabel(cs1, fmt=fmt_string) - if plotAsContours and contourComparisonFieldArray is not None: - cs2 = plt.contour(x, y, contourComparisonFieldArray, + if plotAsContours and contourComparisonField is not None: + cs2 = plt.contour(x, y, contourComparisonField, levels=contourLevels, colors=comparisonContourLineColor, linestyles=comparisonContourLineStyle, @@ -987,7 +858,7 @@ def plot_vertical_section( if labelContours: plt.clabel(cs2, fmt=fmt_string) - if plotAsContours and contourComparisonFieldArray is not None: + if plotAsContours and contourComparisonField is not None: h1, _ = cs1.legend_elements() h2, _ = cs2.legend_elements() if labelContours: @@ -999,7 +870,7 @@ def plot_vertical_section( if title is not None: if plotAsContours and labelContours \ - and contourComparisonFieldArray is None: + and contourComparisonField is None: title = limit_title(title, maxTitleLength-(3+len(colorbarLabel))) title = title + " (" + colorbarLabel + ")" else: @@ -1014,13 +885,12 @@ def plot_vertical_section( else: plt.title(title, **title_font) - if (xlabel is not None) or (ylabel is not None): - if axisFontSize is None: - axisFontSize = config.get('plot', 'axisFontSize') - axis_font = {'size': axisFontSize} + if axisFontSize is None: + axisFontSize = config.get('plot', 'axisFontSize') + axis_font = {'size': axisFontSize} - if xlabel is not None: - 
plt.xlabel(xlabel, **axis_font) + if xlabels is not None: + plt.xlabel(xlabels[0], **axis_font) if ylabel is not None: plt.ylabel(ylabel, **axis_font) @@ -1032,42 +902,48 @@ def plot_vertical_section( if yLim: ax.set_ylim(yLim) - if xArrayIsTime: + if xCoordIsTime: if firstYearXTicks is None: - minDays = [xArray[0]] + minDays = xCoords[0][0].values else: minDays = date_to_days(year=firstYearXTicks, calendar=calendar) - maxDays = [xArray[-1]] + maxDays = xCoords[0][-1].values plot_xtick_format(calendar, minDays, maxDays, maxXTicks, yearStride=yearStrideXTicks) # add a second x-axis scale, if it was requested - if secondXAxisData is not None: + if len(xCoords) >= 2: ax2 = ax.twiny() ax2.set_facecolor(backgroundColor) - ax2.set_xlabel(secondXAxisLabel, **axis_font) + if xlabels[1] is not None: + ax2.set_xlabel(xlabels[1], **axis_font) xlimits = ax.get_xlim() ax2.set_xlim(xlimits) - xticks = np.linspace(xlimits[0], xlimits[1], numUpperTicks) - tickValues = np.interp(xticks, x.flatten()[:num_x], secondXAxisData) - ax2.set_xticks(xticks) - formatString = "{{0:.{:d}f}}{}".format( - upperXAxisTickLabelPrecision, r'$\degree$') - ax2.set_xticklabels([formatString.format(member) - for member in tickValues]) + formatString = None + xticks = None + if numUpperTicks is not None: + xticks = np.linspace(xlimits[0], xlimits[1], numUpperTicks) + tickValues = np.interp(xticks, xCoords[0].values, xCoords[1].values) + ax2.set_xticks(xticks) + formatString = "{{0:.{:d}f}}{}".format( + upperXAxisTickLabelPrecision, r'$\degree$') + ax2.set_xticklabels([formatString.format(member) + for member in tickValues]) # add a third x-axis scale, if it was requested - if thirdXAxisData is not None: + if len(xCoords) == 3: ax3 = ax.twiny() ax3.set_facecolor(backgroundColor) - ax3.set_xlabel(thirdXAxisLabel, **axis_font) + ax3.set_xlabel(xlabels[2], **axis_font) ax3.set_xlim(xlimits) ax3.set_xticks(xticks) - tickValues = np.interp(xticks, x.flatten()[:num_x], thirdXAxisData) - ax3.set_xticklabels([formatString.format(member) - for member in tickValues]) - ax3.spines['top'].set_position(('outward', 36)) + if numUpperTicks is not None: + tickValues = np.interp(xticks, xCoords[0].values, + xCoords[2].values) + ax3.set_xticklabels([formatString.format(member) + for member in tickValues]) + ax3.spines['top'].set_position(('outward', 36)) return fig, ax # }}} From dcd244037d565c1e336d96ffcebc5a5f7aa0d359 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 24 Jul 2020 15:28:08 +0200 Subject: [PATCH 02/14] Switch vertical sections to use triangles Plotting is now with tripcolor, tricontour and tricontourf. This will allow generalization to plots where triangulation is done ahead of time, e.g. for sections on the native MPAS mesh. 
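
The idea, as a rough standalone sketch (hypothetical example data, not code
from this patch): each quad of the logically rectangular distance/depth
section is split into two triangles, and node values are then plotted
directly with tripcolor/tricontour, e.g.:

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.tri import Triangulation

    # hypothetical section: x is distance along the transect, y is depth
    nx, ny = 6, 4
    x, y = np.meshgrid(np.linspace(0., 100., nx),
                       np.linspace(0., -50., ny), indexing='ij')
    field = np.sin(x / 20.) * np.exp(y / 25.)

    # split each quad of the node grid into two triangles; node (i, j) of
    # the raveled (nx, ny) arrays has flat index ny*i + j
    i, j = np.meshgrid(np.arange(nx - 1), np.arange(ny - 1), indexing='ij')
    lower = np.stack([ny * i + j, ny * (i + 1) + j, ny * i + j + 1],
                     axis=-1)
    upper = np.stack([ny * i + j + 1, ny * (i + 1) + j,
                      ny * (i + 1) + j + 1], axis=-1)
    tris = np.concatenate([lower.reshape(-1, 3), upper.reshape(-1, 3)])

    triangulation = Triangulation(x.ravel(), y.ravel(), triangles=tris)

    # tripcolor and tricontour take values at the triangulation's nodes,
    # so no interpolation to cell centers is needed, unlike pcolormesh
    handle = plt.tripcolor(triangulation, field.ravel())
    plt.tricontour(triangulation, field.ravel(), colors='black',
                   linewidths=0.5)
    plt.colorbar(handle)
    plt.savefig('triangulated_section.png')

A triangulation built ahead of time (e.g. for a section on the native MPAS
mesh) can then be passed through the same plotting path.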
--- mpas_analysis/shared/plot/vertical_section.py | 135 ++++++++++++------ 1 file changed, 91 insertions(+), 44 deletions(-) diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index d6f3f0ab3..e7c1e5aa6 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -21,6 +21,7 @@ import matplotlib import matplotlib.pyplot as plt +from matplotlib.tri import Triangulation import xarray as xr import numpy as np @@ -56,6 +57,7 @@ def plot_vertical_section_comparison( lineStyle='solid', lineColor='black', backgroundColor='grey', + outlineValid=True, xLim=None, yLim=None, numUpperTicks=None, @@ -178,6 +180,10 @@ def plot_vertical_section_comparison( the background color for the plot (NaNs and masked areas will be shown in this color) + outlineValid : bool, optional + whether to outline the boundary between the valid an invalid regions + with a black contour + xLim : float array, optional x range of plot @@ -383,6 +389,7 @@ def plot_vertical_section_comparison( yearStrideXTicks=yearStrideXTicks, maxXTicks=maxXTicks, calendar=calendar, backgroundColor=backgroundColor, + outlineValid=outlineValid, plotAsContours=compareAsContours, contourComparisonField=contourComparisonField, comparisonFieldName=comparisonFieldName, @@ -428,6 +435,7 @@ def plot_vertical_section_comparison( maxXTicks=maxXTicks, calendar=calendar, backgroundColor=backgroundColor, + outlineValid=outlineValid, labelContours=labelContours, contourLabelPrecision=contourLabelPrecision, maxTitleLength=maxTitleLength) @@ -466,6 +474,7 @@ def plot_vertical_section_comparison( maxXTicks=maxXTicks, calendar=calendar, backgroundColor=backgroundColor, + outlineValid=outlineValid, labelContours=labelContours, contourLabelPrecision=contourLabelPrecision, maxTitleLength=maxTitleLength) @@ -506,6 +515,7 @@ def plot_vertical_section( lineStyle='solid', lineColor='black', backgroundColor='grey', + outlineValid=True, numUpperTicks=None, upperXAxisTickLabelPrecision=None, invertYAxis=True, @@ -628,6 +638,10 @@ def plot_vertical_section( backgroundColor : str, optional the background color for the plot (NaNs will be shown in this color) + outlineValid : bool, optional + whether to outline the boundary between the valid an invalid regions + with a black contour + numUpperTicks : int, optional the approximate number of ticks to use on the upper x axis or axes (these are the second and third x axes, which are placed above @@ -780,56 +794,49 @@ def plot_vertical_section( colormapDict = setup_colormap(config, colorMapSectionName, suffix=suffix) + mask = field.notnull() + maskedTriangulation, unmaskedTriangulation = _get_triangulation(x, y, mask) if not plotAsContours: # display a heatmap of fieldArray + fieldMasked = field.where(mask, 0.0).values.ravel() if colormapDict['levels'] is None: - # interpFieldArray contains the values at centers of grid cells, - # for pcolormesh plots (using bilinear interpolation) - - interpField = field.values - if field.sizes == x.sizes: - # interpFieldArray contains the values at centers of grid cells, - # for pcolormesh plots (using bilinear interpolation) - interpField = 0.25 * (interpField[1:, 1:] + - interpField[0:-1, 1:] + - interpField[1:, 0:-1] + - interpField[0:-1, 0:-1]) - - plotHandle = plt.pcolormesh(x.values, y.values, interpField, - cmap=colormapDict['colormap'], - norm=colormapDict['norm'], - rasterized=True) + + plotHandle = plt.tripcolor(maskedTriangulation, fieldMasked, + cmap=colormapDict['colormap'], + 
norm=colormapDict['norm'], + rasterized=True) else: - plotHandle = plt.contourf(x.values, y.values, field.values, - cmap=colormapDict['colormap'], - norm=colormapDict['norm'], - levels=colormapDict['levels'], - extend='both') + plotHandle = plt.tricontourf(maskedTriangulation, fieldMasked, + cmap=colormapDict['colormap'], + norm=colormapDict['norm'], + levels=colormapDict['levels'], + extend='both') cbar = plt.colorbar(plotHandle, orientation='vertical', spacing='uniform', aspect=9, - ticks=colormapDict['ticks'], - boundaries=colormapDict['ticks']) + ticks=colormapDict['ticks']) if colorbarLabel is not None: cbar.set_label(colorbarLabel) else: # display a white heatmap to get a white background for non-land - zeroArray = xr.zeros_like(field).where(field.nonnull()) - plt.contourf(x.values, y.values, zeroArray.values, colors='white') - - # set the color for NaN or masked regions, and draw a black - # outline around them; technically, the contour level used should - # be 1.0, but the contours don't show up when using 1.0, so 0.999 - # is used instead - ax = plt.gca() - ax.set_facecolor(backgroundColor) - landMask = np.isnan(field.values) - plt.contour(x, y, landMask, levels=[0.0001], colors='black', linewidths=1) + zeroArray = xr.zeros_like(field) + plt.tricontourf(maskedTriangulation, zeroArray.values, colors='white') + + if outlineValid: + # set the color for NaN or masked regions, and draw a black + # outline around them; technically, the contour level used should + # be 1.0, but the contours don't show up when using 1.0, so 0.999 + # is used instead + ax = plt.gca() + ax.set_facecolor(backgroundColor) + landMask = np.isnan(field.values).ravel() + plt.tricontour(unmaskedTriangulation, landMask, levels=[0.0001], + colors='black', linewidths=1) # plot contours, if they were requested contourLevels = colormapDict['contours'] @@ -841,20 +848,21 @@ def plot_vertical_section( if len(contourLevels) == 0: # automatic calculation of contour levels contourLevels = None - cs1 = plt.contour(x, y, field, - levels=contourLevels, - colors=lineColor, - linestyles=lineStyle, - linewidths=lineWidth) + cs1 = plt.tricontour(maskedTriangulation, field.values.ravel(), + levels=contourLevels, + colors=lineColor, + linestyles=lineStyle, + linewidths=lineWidth) if labelContours: fmt_string = "%%1.%df" % int(contourLabelPrecision) plt.clabel(cs1, fmt=fmt_string) if plotAsContours and contourComparisonField is not None: - cs2 = plt.contour(x, y, contourComparisonField, - levels=contourLevels, - colors=comparisonContourLineColor, - linestyles=comparisonContourLineStyle, - linewidths=lineWidth) + cs2 = plt.tricontour(maskedTriangulation, + contourComparisonField.values.ravel(), + levels=contourLevels, + colors=comparisonContourLineColor, + linestyles=comparisonContourLineStyle, + linewidths=lineWidth) if labelContours: plt.clabel(cs2, fmt=fmt_string) @@ -948,4 +956,43 @@ def plot_vertical_section( return fig, ax # }}} +def _get_triangulation(x, y, mask): + """divide each quad in the x/y mesh into 2 triangles""" + + nx = x.sizes[x.dims[0]] - 1 + ny = x.sizes[x.dims[1]] - 1 + nTriangles = 2*nx*ny + + mask = mask.values + mask = np.logical_and(np.logical_and(mask[0:-1, 0:-1], mask[1:, 0:-1]), + np.logical_and(mask[0:-1, 1:], mask[1:, 1:])) + triMask = np.zeros((nx, ny, 2), bool) + triMask[:, :, 0] = np.logical_not(mask) + triMask[:, :, 1] = triMask[:, :, 0] + + triMask = triMask.ravel() + + xIndices, yIndices = np.meshgrid(np.arange(nx), np.arange(ny), + indexing='ij') + + tris = np.zeros((nx, ny, 2, 3), int) + # upper 
triangles: + tris[:, :, 0, 0] = (ny+1)*xIndices + yIndices + tris[:, :, 0, 1] = (ny+1)*(xIndices + 1) + yIndices + tris[:, :, 0, 2] = (ny+1)*xIndices + yIndices + 1 + # lower triangle + tris[:, :, 1, 0] = (ny+1)*xIndices + yIndices + 1 + tris[:, :, 1, 1] = (ny+1)*(xIndices + 1) + yIndices + tris[:, :, 1, 2] = (ny+1)*(xIndices + 1) + yIndices + 1 + + tris = tris.reshape((nTriangles, 3)) + + x = x.values.ravel() + y = y.values.ravel() + + maskedTriangulation = Triangulation(x=x, y=y, triangles=tris, mask=triMask) + unmaskedTriangulation = Triangulation(x=x, y=y, triangles=tris) + + return maskedTriangulation, unmaskedTriangulation + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python From 2b20326650b87d20e49873c586dd6c318b199360 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 1 Aug 2020 17:21:29 +0200 Subject: [PATCH 03/14] Support passing a triangulation to vertical section --- mpas_analysis/shared/plot/vertical_section.py | 188 +++++++++++------- 1 file changed, 119 insertions(+), 69 deletions(-) diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index e7c1e5aa6..297e165ce 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -34,12 +34,13 @@ def plot_vertical_section_comparison( config, - xCoords, - zCoord, modelArray, refArray, diffArray, colorMapSectionName, + xCoords=None, + zCoord=None, + dsTransectTriangles=None, colorbarLabel=None, xlabels=None, ylabel=None, @@ -93,22 +94,28 @@ def plot_vertical_section_comparison( the configuration, containing a [plot] section with options that control plotting - xCoords : xarray.DataArray or list of xarray.DataArray - The x coordinate(s) in ``field``. Optional second + modelArray, refArray : xarray.DataArray + model and observational or control run data sets + + diffArray : float array + difference between modelArray and refArray + + xCoords : xarray.DataArray or list of xarray.DataArray, optional + The x coordinate(s) for the model, ref and diff arrays. Optional second and third entries will be used for a second and third x axis above the plot. The typical use for the second and third axis is for transects, for which the primary x axis represents distance along a transect, and the second and third x axes are used to display the corresponding latitudes and longitudes. - zCoord : xarray.DataArray - The z coordinates in ``field`` + zCoord : xarray.DataArray, optional + The z coordinates for the model, ref and diff arrays - modelArray, refArray : xarray.DataArray - model and observational or control run data sets - - diffArray : float array - difference between modelArray and refArray + dsTransectTriangles : xarray.Dataset, optional + A dataset defining the transect on triangles rather than as a logically + rectangular grid. If this option is provided, ``xCoords`` is only + used for tick marks if more than one x axis is requested, and + ``zCoord`` will be ignored. colorMapSectionName : str section name in ``config`` where color map info can be found. 
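As background for the ``_get_triangulation`` helper above: each quad of the logically rectangular x/z mesh is split into two triangles so that matplotlib can render and mask the section cell by cell. The sketch below is only an illustration with made-up sizes and values, not code from the patch, but it uses the same node-indexing idea together with ``tripcolor``:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.tri import Triangulation

# a tiny synthetic section: nx x nz nodes of along-transect distance and depth
nx, nz = 4, 3
x, z = np.meshgrid(np.linspace(0., 300., nx), np.linspace(0., -100., nz),
                   indexing='ij')
field = np.sin(0.01 * x) * np.exp(z / 50.)

# two triangles per quad; node (i, j) of the (nx, nz) arrays flattens to i*nz + j
tris = []
for i in range(nx - 1):
    for j in range(nz - 1):
        node = i * nz + j                                  # corner node of the quad
        tris.append([node, node + nz, node + 1])           # first triangle
        tris.append([node + 1, node + nz, node + nz + 1])  # second triangle

# a per-triangle boolean mask could also be passed here to hide invalid cells
triangulation = Triangulation(x=x.ravel(), y=z.ravel(), triangles=tris)

plt.tripcolor(triangulation, field.ravel(), shading='gouraud')
plt.colorbar()
plt.savefig('triangulated_section.png')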
@@ -361,10 +368,11 @@ def plot_vertical_section_comparison( _, ax = plot_vertical_section( config, - xCoords, - zCoord, modelArray, colorMapSectionName, + xCoords=xCoords, + zCoord=zCoord, + dsTransectTriangles=dsTransectTriangles, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=title, @@ -406,10 +414,11 @@ def plot_vertical_section_comparison( plt.subplot(3, 1, 2) _, ax = plot_vertical_section( config, - xCoords, - zCoord, refArray, colorMapSectionName, + xCoords=xCoords, + zCoord=zCoord, + dsTransectTriangles=dsTransectTriangles, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=refTitle, @@ -445,10 +454,11 @@ def plot_vertical_section_comparison( plt.subplot(3, 1, 3) _, ax = plot_vertical_section( config, - xCoords, - zCoord, diffArray, colorMapSectionName, + xCoords=xCoords, + zCoord=zCoord, + dsTransectTriangles=dsTransectTriangles, suffix=diffSuffix, colorbarLabel=colorbarLabel, title=diffTitle, @@ -494,10 +504,11 @@ def plot_vertical_section_comparison( def plot_vertical_section( config, - xCoords, - zCoord, field, colorMapSectionName, + xCoords=None, + zCoord=None, + dsTransectTriangles=None, suffix='', colorbarLabel=None, title=None, @@ -551,17 +562,6 @@ def plot_vertical_section( the configuration, containing a [plot] section with options that control plotting - xCoords : xarray.DataArray or list of xarray.DataArray - The x coordinate(s) in ``field``. Optional second - and third entries will be used for a second and third x axis above the - plot. The typical use for the second and third axis is for transects, - for which the primary x axis represents distance along a transect, and - the second and third x axes are used to display the corresponding - latitudes and longitudes. - - zCoord : xarray.DataArray - The z coordinates in ``field`` - field : xarray.DataArray field array to plot. For contour plots, ``xCoords`` and ``zCoords`` should broadcast to the same shape as ``field``. For heatmap plots, @@ -574,6 +574,23 @@ def plot_vertical_section( colorMapSectionName : str section name in ``config`` where color map info can be found. + xCoords : xarray.DataArray or list of xarray.DataArray, optional + The x coordinate(s) for the ``field``. Optional second + and third entries will be used for a second and third x axis above the + plot. The typical use for the second and third axis is for transects, + for which the primary x axis represents distance along a transect, and + the second and third x axes are used to display the corresponding + latitudes and longitudes. + + zCoord : xarray.DataArray, optional + The z coordinates for the ``field`` + + dsTransectTriangles : xarray.Dataset, optional + A dataset defining the transect on triangles rather than as a logically + rectangular grid. If this option is provided, ``xCoords`` is only + used for tick marks if more than one x axis is requested, and + ``zCoord`` will be ignored. 
+ suffix : str, optional the suffix used for colorbar config options @@ -745,44 +762,55 @@ def plot_vertical_section( if defaultFontSize is None: defaultFontSize = config.getint('plot', 'defaultFontSize') matplotlib.rc('font', size=defaultFontSize) - if not isinstance(xCoords, list): - xCoords = [xCoords] + mask = field.notnull() + if xCoords is not None: + if not isinstance(xCoords, list): + xCoords = [xCoords] - if not isinstance(xlabels, list): - xlabels = [xlabels] + if not isinstance(xlabels, list): + xlabels = [xlabels] - if len(xCoords) != len(xlabels): - raise ValueError('Expected the same number of xCoords and xlabels') + if len(xCoords) != len(xlabels): + raise ValueError('Expected the same number of xCoords and xlabels') - x, y = xr.broadcast(xCoords[0], zCoord) - dims_in_field = all([dim in field.dims for dim in x.dims]) + if dsTransectTriangles is None: - if dims_in_field: - x = x.transpose(*field.dims) - y = y.transpose(*field.dims) - else: - xsize = list(x.sizes.values()) - fieldsize = list(field.sizes.values()) - if xsize[0] == fieldsize[0] + 1 and xsize[1] == fieldsize[1] + 1: - pass - elif xsize[0] == fieldsize[1] + 1 and xsize[1] == fieldsize[0] + 1: - x = x.transpose(x.dims[1], x.dims[0]) - y = y.transpose(y.dims[1], y.dims[0]) - else: - raise ValueError('Sizes of coords {}x{} and field {}x{} not ' - 'compatible.'.format(xsize[0], xsize[1], - fieldsize[0], fieldsize[1])) + x, y = xr.broadcast(xCoords[0], zCoord) + dims_in_field = all([dim in field.dims for dim in x.dims]) - # compute moving averages with respect to the x dimension - if movingAveragePoints is not None and movingAveragePoints != 1: - - dim = field.dims[0] - field = field.rolling(dim={dim: movingAveragePoints}, - center=True).mean().dropna(dim) - x = x.rolling(dim={dim: movingAveragePoints}, - center=True).mean().dropna(dim) - y = y.rolling(dim={dim: movingAveragePoints}, - center=True).mean().dropna(dim) + if dims_in_field: + x = x.transpose(*field.dims) + y = y.transpose(*field.dims) + else: + xsize = list(x.sizes.values()) + fieldsize = list(field.sizes.values()) + if xsize[0] == fieldsize[0] + 1 and xsize[1] == fieldsize[1] + 1: + pass + elif xsize[0] == fieldsize[1] + 1 and xsize[1] == fieldsize[0] + 1: + x = x.transpose(x.dims[1], x.dims[0]) + y = y.transpose(y.dims[1], y.dims[0]) + else: + raise ValueError('Sizes of coords {}x{} and field {}x{} not ' + 'compatible.'.format(xsize[0], xsize[1], + fieldsize[0], + fieldsize[1])) + + # compute moving averages with respect to the x dimension + if movingAveragePoints is not None and movingAveragePoints != 1: + + dim = field.dims[0] + field = field.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim) + x = x.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim) + y = y.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim) + + maskedTriangulation, unmaskedTriangulation = _get_triangulation( + x, y, mask) + else: + maskedTriangulation, unmaskedTriangulation = _get_ds_triangulation( + dsTransectTriangles, mask) # set up figure if dpi is None: @@ -792,10 +820,9 @@ def plot_vertical_section( else: fig = plt.gcf() - colormapDict = setup_colormap(config, colorMapSectionName, suffix=suffix) + colormapDict = setup_colormap(config, colorMapSectionName, + suffix=suffix) - mask = field.notnull() - maskedTriangulation, unmaskedTriangulation = _get_triangulation(x, y, mask) if not plotAsContours: # display a heatmap of fieldArray fieldMasked = field.where(mask, 0.0).values.ravel() @@ -827,12 +854,12 @@ def 
plot_vertical_section(
         zeroArray = xr.zeros_like(field)
         plt.tricontourf(maskedTriangulation, zeroArray.values, colors='white')
 
+    ax = plt.gca()
     if outlineValid:
         # set the color for NaN or masked regions, and draw a black
         # outline around them; technically, the contour level used should
         # be 1.0, but the contours don't show up when using 1.0, so 0.999
         # is used instead
-        ax = plt.gca()
         ax.set_facecolor(backgroundColor)
         landMask = np.isnan(field.values).ravel()
         plt.tricontour(unmaskedTriangulation, landMask, levels=[0.0001],
@@ -910,7 +937,7 @@ plot_vertical_section(
     if yLim:
         ax.set_ylim(yLim)
 
-    if xCoordIsTime:
+    if xCoords is not None and xCoordIsTime:
         if firstYearXTicks is None:
             minDays = xCoords[0][0].values
         else:
@@ -921,7 +948,7 @@ plot_vertical_section(
                              yearStride=yearStrideXTicks)
 
     # add a second x-axis scale, if it was requested
-    if len(xCoords) >= 2:
+    if xCoords is not None and len(xCoords) >= 2:
         ax2 = ax.twiny()
         ax2.set_facecolor(backgroundColor)
         if xlabels[1] is not None:
@@ -956,6 +983,29 @@ plot_vertical_section(
     return fig, ax  # }}}
 
 
+def _get_ds_triangulation(dsTransectTriangles, mask):
+    """get matplotlib Triangulation from triangulation dataset"""
+
+    triMask = np.logical_not(mask)
+    # if any node of a triangle is masked, the triangle is masked
+    triMask = triMask.max(dim='nTriangleNodes')
+
+    nTransectTriangles = dsTransectTriangles.sizes['nTransectTriangles']
+    dNode = dsTransectTriangles.dNode.isel(
+        nSegments=dsTransectTriangles.segmentIndices,
+        nHorizBounds=dsTransectTriangles.nodeHorizBoundsIndices)
+    x = dNode.values.ravel()
+
+    zTransectNode = dsTransectTriangles.zTransectNode
+    y = zTransectNode.values.ravel()
+
+    tris = np.arange(3 * nTransectTriangles).reshape((nTransectTriangles, 3))
+    maskedTriangulation = Triangulation(x=x, y=y, triangles=tris, mask=triMask)
+    unmaskedTriangulation = Triangulation(x=x, y=y, triangles=tris)
+
+    return maskedTriangulation, unmaskedTriangulation
+
+
 def _get_triangulation(x, y, mask):
     """divide each quad in the x/y mesh into 2 triangles"""
 
From d7400fc7adf1b2e46ab6ddf17fc1d961e0ffc87a Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Sat, 1 Aug 2020 17:22:21 +0200
Subject: [PATCH 04/14] Update vertical section calls with keyword args

The coordinates are now keyword arguments since they are optional.
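The arrays handed to these calls through the optional ``xCoords`` and ``zCoord`` keywords are ordinary xarray DataArrays, and the ``movingAveragePoints`` option applied inside the plotting routine amounts to a centered rolling mean along the first dimension of the field and its coordinates. A minimal synthetic sketch of that smoothing (sizes and names invented here, not taken from the patch):

import numpy as np
import xarray as xr

# stand-ins for a Hovmoller field and its time coordinate (depths omitted)
nTime, nVertLevels = 120, 20
time = xr.DataArray(np.arange(nTime, dtype=float), dims='Time')
field = xr.DataArray(np.random.rand(nTime, nVertLevels),
                     dims=('Time', 'nVertLevels'))

movingAveragePoints = 12
dim = field.dims[0]
# centered rolling means; incomplete windows at the ends become NaN and are
# then dropped
field = field.rolling(dim={dim: movingAveragePoints},
                      center=True).mean().dropna(dim)
time = time.rolling(dim={dim: movingAveragePoints},
                    center=True).mean().dropna(dim)
print(field.sizes, time.sizes)  # Time shrinks by movingAveragePoints - 1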
--- mpas_analysis/ocean/meridional_heat_transport.py | 5 +++-- mpas_analysis/ocean/plot_hovmoller_subtask.py | 8 ++++---- mpas_analysis/ocean/plot_transect_subtask.py | 4 ++-- mpas_analysis/ocean/streamfunction_moc.py | 2 +- mpas_analysis/shared/plot/vertical_section.py | 3 ++- 5 files changed, 12 insertions(+), 10 deletions(-) diff --git a/mpas_analysis/ocean/meridional_heat_transport.py b/mpas_analysis/ocean/meridional_heat_transport.py index 1f7880f69..9e5e6fec3 100644 --- a/mpas_analysis/ocean/meridional_heat_transport.py +++ b/mpas_analysis/ocean/meridional_heat_transport.py @@ -356,8 +356,9 @@ def run_task(self): # {{{ filePrefix = self.filePrefixes['mhtZ'] outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) colorbarLabel = '(PW/m)' - plot_vertical_section(config, x, y, z, self.sectionName, - suffix='', colorbarLabel=colorbarLabel, + plot_vertical_section(config, z, self.sectionName, xCoords=x, + zCoord=y, suffix='', + colorbarLabel=colorbarLabel, title=title, xlabels=xLabel, ylabel=yLabel, xLim=xLimGlobal, yLim=depthLimGlobal, invertYAxis=False, diff --git a/mpas_analysis/ocean/plot_hovmoller_subtask.py b/mpas_analysis/ocean/plot_hovmoller_subtask.py index 089dc6f31..3b7a038b5 100644 --- a/mpas_analysis/ocean/plot_hovmoller_subtask.py +++ b/mpas_analysis/ocean/plot_hovmoller_subtask.py @@ -375,10 +375,10 @@ def run_task(self): # {{{ defaultFontSize = None fig, _, suptitle = plot_vertical_section_comparison( - config, Time, z, field, refField, diff, self.sectionName, - colorbarLabel=self.unitsLabel, title=title, modelTitle=mainRunName, - refTitle=refTitle, diffTitle=diffTitle, xlabels=xLabel, - ylabel=yLabel, lineWidth=1, xCoordIsTime=True, + config, field, refField, diff, self.sectionName, xCoords=Time, + zCoord=z, colorbarLabel=self.unitsLabel, title=title, + modelTitle=mainRunName, refTitle=refTitle, diffTitle=diffTitle, + xlabels=xLabel, ylabel=yLabel, lineWidth=1, xCoordIsTime=True, movingAveragePoints=movingAverageMonths, calendar=self.calendar, firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks, yLim=yLim, invertYAxis=False, titleFontSize=titleFontSize, diff --git a/mpas_analysis/ocean/plot_transect_subtask.py b/mpas_analysis/ocean/plot_transect_subtask.py index 2807bda56..b018f91d5 100644 --- a/mpas_analysis/ocean/plot_transect_subtask.py +++ b/mpas_analysis/ocean/plot_transect_subtask.py @@ -528,12 +528,12 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): fig, axes, suptitle = plot_vertical_section_comparison( config, - xs, - z, modelOutput, refOutput, bias, configSectionName, + xCoords=xs, + zCoord=z, colorbarLabel=self.unitsLabel, xlabels=xLabels, ylabel=yLabel, diff --git a/mpas_analysis/ocean/streamfunction_moc.py b/mpas_analysis/ocean/streamfunction_moc.py index b152e6181..c192e484e 100644 --- a/mpas_analysis/ocean/streamfunction_moc.py +++ b/mpas_analysis/ocean/streamfunction_moc.py @@ -769,7 +769,7 @@ def run_task(self): # {{{ diff = regionMOC - refRegionMOC plot_vertical_section_comparison( - config, x, z, regionMOC, refRegionMOC, diff, + config, regionMOC, refRegionMOC, diff, xCoords=x, zCoord=z, colorMapSectionName='streamfunctionMOC{}'.format(region), colorbarLabel=colorbarLabel, title=title, diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index 297e165ce..f5d0e44a9 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -762,7 +762,6 @@ def plot_vertical_section( if defaultFontSize is None: 
defaultFontSize = config.getint('plot', 'defaultFontSize') matplotlib.rc('font', size=defaultFontSize) - mask = field.notnull() if xCoords is not None: if not isinstance(xCoords, list): xCoords = [xCoords] @@ -806,9 +805,11 @@ def plot_vertical_section( y = y.rolling(dim={dim: movingAveragePoints}, center=True).mean().dropna(dim) + mask = field.notnull() maskedTriangulation, unmaskedTriangulation = _get_triangulation( x, y, mask) else: + mask = field.notnull() maskedTriangulation, unmaskedTriangulation = _get_ds_triangulation( dsTransectTriangles, mask) From 6c8d028bb98c1caaac2118f73f7a71f2cbd54cf7 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 30 Jun 2020 17:08:55 +0200 Subject: [PATCH 05/14] Formatting clean-up of inset code --- mpas_analysis/shared/plot/inset.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/mpas_analysis/shared/plot/inset.py b/mpas_analysis/shared/plot/inset.py index 929176264..aed9cef60 100644 --- a/mpas_analysis/shared/plot/inset.py +++ b/mpas_analysis/shared/plot/inset.py @@ -8,9 +8,9 @@ # Additional copyright and license information can be found in the LICENSE file # distributed with this code, or at # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/master/LICENSE -''' +""" Functions for plotting inset maps in plots (e.g. for transects) -''' +""" # Authors # ------- # Xylar Asay-Davis @@ -31,7 +31,7 @@ def add_inset(fig, fc, latlonbuffer=45., polarbuffer=5., width=1.0, height=1.0, lowerleft=None, xbuffer=None, ybuffer=None, maxlength=1.): - ''' + """ Plots an inset map showing the location of a transect or polygon. Shapes are plotted on a polar grid if they are entirely poleward of +/-50 deg. latitude and with a lat/lon grid if not. @@ -71,7 +71,7 @@ def add_inset(fig, fc, latlonbuffer=45., polarbuffer=5., width=1.0, ------- inset : ``matplotlib.axes.Axes`` The new inset axis - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -170,13 +170,14 @@ def add_inset(fig, fc, latlonbuffer=45., polarbuffer=5., width=1.0, def _set_circular_boundary(ax): - '''Set the boundary of the given axis to be circular (for a polar plot)''' + """Set the boundary of the given axis to be circular (for a polar plot)""" # Compute a circle in axes coordinates, which we can use as a boundary # for the map. We can pan/zoom as much as we like - the boundary will be # permanently circular. 
theta = numpy.linspace(0, 2*numpy.pi, 100) - center, radius = [0.5, 0.5], 0.5 + center = numpy.array([0.5, 0.5]) + radius = 0.5 verts = numpy.vstack([numpy.sin(theta), numpy.cos(theta)]).T circle = matplotlib.path.Path(verts * radius + center) @@ -184,12 +185,12 @@ def _set_circular_boundary(ax): def _get_bounds(fc): - '''Compute the lon/lat bounding box for all transects and regions''' + """Compute the lon/lat bounding box for all transects and regions""" bounds = shapely.geometry.GeometryCollection() for feature in fc.features: shape = shapely.geometry.shape(feature['geometry']) shape_bounds = shapely.geometry.box(*shape.bounds) - bounds = shapely.geometry.box(*(bounds.union(shape_bounds).bounds)) + bounds = shapely.geometry.box(*bounds.union(shape_bounds).bounds) return bounds.bounds From 589b3923d9e7a5bd7664ae83181f7fc1715d276a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 30 Jun 2020 18:48:01 +0200 Subject: [PATCH 06/14] Update zMid computation to require zero-based indexing --- mpas_analysis/ocean/climatology_map_ohc_anomaly.py | 2 +- mpas_analysis/ocean/regional_ts_diagrams.py | 2 +- mpas_analysis/ocean/remap_depth_slices_subtask.py | 2 +- mpas_analysis/ocean/time_series_ocean_regions.py | 2 +- mpas_analysis/ocean/utility.py | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py index 598508c67..9b2c96547 100644 --- a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py +++ b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py @@ -349,7 +349,7 @@ def _compute_ohc(self, climatology): # {{{ nVertLevels = dsRestart.sizes['nVertLevels'] - zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell, + zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell-1, dsRestart.layerThickness) vertIndex = xarray.DataArray.from_dict( diff --git a/mpas_analysis/ocean/regional_ts_diagrams.py b/mpas_analysis/ocean/regional_ts_diagrams.py index 90fd890c7..2469499bd 100644 --- a/mpas_analysis/ocean/regional_ts_diagrams.py +++ b/mpas_analysis/ocean/regional_ts_diagrams.py @@ -660,7 +660,7 @@ def _write_mpas_t_s(self, config): # {{{ ds = ds[variableList] ds['zMid'] = compute_zmid(dsRestart.bottomDepth, - dsRestart.maxLevelCell, + dsRestart.maxLevelCell-1, dsRestart.layerThickness) ds['volume'] = (dsRestart.areaCell * diff --git a/mpas_analysis/ocean/remap_depth_slices_subtask.py b/mpas_analysis/ocean/remap_depth_slices_subtask.py index 51603b757..f1cd082c7 100644 --- a/mpas_analysis/ocean/remap_depth_slices_subtask.py +++ b/mpas_analysis/ocean/remap_depth_slices_subtask.py @@ -117,7 +117,7 @@ def run_task(self): # {{{ depthNames = [str(depth) for depth in self.depths] - zMid = compute_zmid(ds.bottomDepth, ds.maxLevelCell, + zMid = compute_zmid(ds.bottomDepth, ds.maxLevelCell-1, ds.layerThickness) nVertLevels = zMid.shape[1] diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index 5b732245e..bef0375b2 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -313,7 +313,7 @@ def run_task(self): # {{{ config_zmax = None dsRestart = xarray.open_dataset(restartFileName).isel(Time=0) - zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell, + zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell-1, dsRestart.layerThickness) areaCell = dsRestart.areaCell if 'landIceMask' in dsRestart: diff --git 
a/mpas_analysis/ocean/utility.py b/mpas_analysis/ocean/utility.py index 468d3bdd8..9d59b3643 100644 --- a/mpas_analysis/ocean/utility.py +++ b/mpas_analysis/ocean/utility.py @@ -33,7 +33,7 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): # {{{ the depth of the ocean bottom (positive) maxLevelCell : ``xarray.DataArray`` - the 1-based vertical index of the bottom of the ocean + the 0-based vertical index of the bottom of the ocean layerThickness : ``xarray.DataArray`` the thickness of MPAS-Ocean layers (possibly as a function of time) @@ -54,7 +54,7 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): # {{{ xarray.DataArray.from_dict({'dims': ('nVertLevels',), 'data': numpy.arange(nVertLevels)}) - layerThickness = layerThickness.where(vertIndex < maxLevelCell) + layerThickness = layerThickness.where(vertIndex <= maxLevelCell) thicknessSum = layerThickness.sum(dim='nVertLevels') thicknessCumSum = layerThickness.cumsum(dim='nVertLevels') From b4e4cbca07c979b7faf077a5c37f2776696c30db Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 30 Jun 2020 19:04:37 +0200 Subject: [PATCH 07/14] Two minor fixes to climatology infrastructure --- mpas_analysis/shared/climatology/climatology.py | 2 +- .../shared/climatology/remap_mpas_climatology_subtask.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/shared/climatology/climatology.py b/mpas_analysis/shared/climatology/climatology.py index fab6fd57a..d3d25aa24 100644 --- a/mpas_analysis/shared/climatology/climatology.py +++ b/mpas_analysis/shared/climatology/climatology.py @@ -585,7 +585,7 @@ def get_remapped_mpas_climatology_file_name(config, season, componentName, comparisonGridName) comparisonFullMeshName = comparisonDescriptor.meshName else: - comparisonFullMeshName = comparisonGridName + comparisonFullMeshName = comparisonGridName.replace(' ', '_') stageDirectory = '{}/remapped'.format(climatologyOpDirectory) diff --git a/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py index 8f4653206..d2236ea37 100644 --- a/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py +++ b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py @@ -537,7 +537,7 @@ def _mask_climatologies(self, season, dsMask): # {{{ # add valid mask as a variable, useful for remapping later climatology['validMask'] = \ - xr.DataArray(numpy.ones(climatology.dims['nCells']), + xr.DataArray(numpy.ones(climatology.sizes['nCells']), dims=['nCells']) # mask the data set for variableName in self.variableList: From e0ede426f531d1b317564060122ba34c88d5f6ce Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 30 Jun 2020 17:12:26 +0200 Subject: [PATCH 08/14] First draft of native transect module --- mpas_analysis/default.cfg | 25 +- .../ocean/compute_transects_subtask.py | 311 +++++++++--------- mpas_analysis/ocean/plot_transect_subtask.py | 8 +- 3 files changed, 190 insertions(+), 154 deletions(-) diff --git a/mpas_analysis/default.cfg b/mpas_analysis/default.cfg index 38a2e72e7..720ab2f1f 100644 --- a/mpas_analysis/default.cfg +++ b/mpas_analysis/default.cfg @@ -2332,7 +2332,10 @@ seasons = ['ANN'] # The approximate horizontal resolution (in km) of each transect. Latitude/ # longitude between observation points will be subsampled at this interval. -# Use 'obs' to indicate no subsampling. +# Use 'obs' to indicate no subsampling. 
Use 'mpas' to indicate plotting of +# model data on the native grid, in which case comparison with observations +# will take place on the observation grid. +#horizontalResolution = mpas #horizontalResolution = obs horizontalResolution = 5 @@ -2345,13 +2348,17 @@ horizontalBounds = {'WOCE_A21': [], # The name of the vertical comparison grid. Valid values are 'mpas' for the # MPAS vertical grid, 'obs' to use the locations of observations or -# any other name if the vertical grid is defined by 'verticalComparisonGrid' +# any other name if the vertical grid is defined by 'verticalComparisonGrid'. +# If horizontalResolution is 'mpas', model data (both main and control) will be +# plotted on the MPAS vertical grid, regardless of the comparison grid. #verticalComparisonGridName = mpas #verticalComparisonGridName = obs verticalComparisonGridName = uniform_0_to_4000m_at_10m # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or # 'obs'. This should be numpy array of (typically negative) elevations (in m). +# The first and last entries are used as axis bounds for 'mpas' and 'obs' +# vertical comparison grids verticalComparisonGrid = numpy.linspace(0, -4000, 401) # The minimum weight of a destination cell after remapping. Any cell with @@ -2471,7 +2478,6 @@ normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} contourLevelsDifference = [] - [geojsonTransects] ## options related to plotting model transects at points determined by a ## geojson file. To generate your own geojson file, go to: @@ -2524,7 +2530,9 @@ seasons = ['ANN'] # The approximate horizontal resolution (in km) of each transect. Latitude/ # longitude between observation points will be subsampled at this interval. -# Use 'obs' to indicate no subsampling. +# Use 'obs' to indicate no subsampling. Use 'mpas' to indicate plotting of +# model data on the native grid. +#horizontalResolution = mpas #horizontalResolution = obs horizontalResolution = 5 @@ -2536,6 +2544,8 @@ verticalComparisonGridName = uniform_0_to_4000m_at_10m # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas'. # This should be numpy array of (typically negative) elevations (in m). +# The first and last entries are used as axis bounds for 'mpas' vertical +# comparison grids verticalComparisonGrid = numpy.linspace(0, -4000, 401) # The minimum weight of a destination cell after remapping. Any cell with @@ -2713,7 +2723,10 @@ seasons = ['ANN', 'JFM', 'JAS'] # The approximate horizontal resolution (in km) of each transect. Latitude/ # longitude between observation points will be subsampled at this interval. -# Use 'obs' to indicate no subsampling. +# Use 'obs' to indicate no subsampling. Use 'mpas' to indicate plotting of +# model data on the native grid, in which case comparison with observations +# will take place on the observation grid. +#horizontalResolution = mpas #horizontalResolution = obs horizontalResolution = 5 @@ -2726,6 +2739,8 @@ verticalComparisonGridName = uniform_10_to_1500m_at_10m # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or # 'obs'. This should be numpy array of (typically negative) elevations (in m). +# The first and last entries are used as axis bounds for 'mpas' and 'obs' +# vertical comparison grids verticalComparisonGrid = numpy.linspace(-10, -1500, 150) # The minimum weight of a destination cell after remapping. 
Any cell with diff --git a/mpas_analysis/ocean/compute_transects_subtask.py b/mpas_analysis/ocean/compute_transects_subtask.py index fd299856e..40afcd52f 100644 --- a/mpas_analysis/ocean/compute_transects_subtask.py +++ b/mpas_analysis/ocean/compute_transects_subtask.py @@ -28,6 +28,10 @@ from mpas_analysis.shared.interpolation import interp_1d +from mpas_analysis.shared.transects import subdivide_great_circle, \ + cartesian_to_great_circle_distance, find_transect_edges_and_cells, \ + get_edge_bounds + class ComputeTransectsSubtask(RemapMpasClimatologySubtask): # {{{ """ @@ -80,7 +84,7 @@ def __init__(self, mpasClimatologyTask, parentTask, climatologyName, subtaskName='remapTransects'): # {{{ - ''' + """ Construct the analysis task and adds it as a subtask of the ``parentTask``. @@ -131,7 +135,7 @@ def __init__(self, mpasClimatologyTask, parentTask, climatologyName, subtaskName : str, optional The name of the subtask - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -147,11 +151,17 @@ def __init__(self, mpasClimatologyTask, parentTask, climatologyName, self.transectCollectionName = transectCollectionName self.verticalComparisonGridName = verticalComparisonGridName self.verticalComparisonGrid = verticalComparisonGrid + self.transectNumber = None + self.x = None + self.collectionDescriptor = None + self.maxLevelCell = None + self.zMid = None + self.remap = self.obsDatasets.horizontalResolution != 'mpas' # }}} def setup_and_check(self): # {{{ - ''' + """ Creates a PointCollectionDescriptor describing all the points in the transects to remap to. Keeps track of which transects index each point belongs to. @@ -162,90 +172,91 @@ def setup_and_check(self): # {{{ If a restart file is not available from which to read mesh information or if no history files are available from which to compute the climatology in the desired time range. 
- ''' + """ # Authors # ------- # Xylar Asay-Davis - transectNumber = [] - lats = [] - lons = [] - x = [] - obsDatasets = self.obsDatasets.get_observations() - datasets = list(obsDatasets.values()) - for transectIndex, ds in enumerate(datasets): - localLats = list(ds.lat.values) - localLons = list(ds.lon.values) - localX = list(ds.x.values) - localIndices = [transectIndex for lat in localLats] - lats.extend(localLats) - lons.extend(localLons) - x.extend(localX) - transectNumber.extend(localIndices) - - self.transectNumber = xr.DataArray.from_dict( - {'dims': ('nPoints'), - 'data': transectNumber}) - - self.x = xr.DataArray.from_dict( - {'dims': ('nPoints'), - 'data': x}) - - self.collectionDescriptor = PointCollectionDescriptor( - lats, lons, collectionName=self.transectCollectionName, - units='degrees', outDimension='nPoints') - - self.add_comparison_grid_descriptor(self.transectCollectionName, - self.collectionDescriptor) - - # then, call setup_and_check from the base class + if self.remap: + transectNumber = [] + lats = [] + lons = [] + x = [] + obsDatasets = self.obsDatasets.get_observations() + datasets = list(obsDatasets.values()) + for transectIndex, ds in enumerate(datasets): + localLats = list(ds.lat.values) + localLons = list(ds.lon.values) + localX = list(ds.x.values) + localIndices = [transectIndex for _ in localLats] + lats.extend(localLats) + lons.extend(localLons) + x.extend(localX) + transectNumber.extend(localIndices) + + self.transectNumber = xr.DataArray.from_dict( + {'dims': ('nPoints',), + 'data': transectNumber}) + + self.x = xr.DataArray.from_dict( + {'dims': ('nPoints',), + 'data': x}) + + self.collectionDescriptor = PointCollectionDescriptor( + lats, lons, collectionName=self.transectCollectionName, + units='degrees', outDimension='nPoints') + + self.add_comparison_grid_descriptor(self.transectCollectionName, + self.collectionDescriptor) + + for transectName in obsDatasets: + obsDatasets[transectName].close() + + # then, call setup_and_check from the parent class # (RemapMpasClimatologySubtask) - super(ComputeTransectsSubtask, self).setup_and_check() - - for transectName in obsDatasets: - obsDatasets[transectName].close() + super().setup_and_check() def run_task(self): # {{{ - ''' + """ Compute climatologies of melt rates from E3SM/MPAS output This function has been overridden to compute ``zMid`` based on data from a restart file for later use in vertically interpolating to reference depths. 
- ''' + """ # Authors # ------- # Xylar Asay-Davis - # first, compute zMid and cell mask from the restart file - with xr.open_dataset(self.restartFileName) as ds: - ds = ds[['maxLevelCell', 'bottomDepth', 'layerThickness']] - ds = ds.isel(Time=0) + # first, get maxLevelCell and zMid, needed for masking - self.maxLevelCell = ds.maxLevelCell - 1 + dsMesh = xr.open_dataset(self.restartFileName) + dsMesh = dsMesh.isel(Time=0) - zMid = compute_zmid(ds.bottomDepth, ds.maxLevelCell, - ds.layerThickness) + self.maxLevelCell = dsMesh.maxLevelCell - 1 + zMid = compute_zmid(dsMesh.bottomDepth, dsMesh.maxLevelCell-1, + dsMesh.layerThickness) - self.zMid = \ - xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevels'), - 'data': zMid}) - ds.close() + self.zMid = \ + xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevels'), + 'data': zMid}) # then, call run from the base class (RemapMpasClimatologySubtask), - # which will perform the horizontal remapping + # which will perform masking and possibly horizontal remapping super(ComputeTransectsSubtask, self).run_task() obsDatasets = self.obsDatasets.get_observations() - self.logger.info('Interpolating each transect vertically...') - # finally, vertically interpolate and write out each transect - for season in self.seasons: + if self.remap: + + self.logger.info('Interpolating each transect vertically...') + # vertically interpolate and write out each transect + for season in self.seasons: - remappedFileName = self.get_remapped_file_name( - season, comparisonGridName=self.transectCollectionName) + remappedFileName = self.get_remapped_file_name( + season, comparisonGridName=self.transectCollectionName) - with xr.open_dataset(remappedFileName) as ds: + ds = xr.open_dataset(remappedFileName) transectNames = list(obsDatasets.keys()) for transectIndex, transectName in enumerate(transectNames): self.logger.info(' {}'.format(transectName)) @@ -258,14 +269,58 @@ def run_task(self): # {{{ outFileName, outObsFileName) ds.close() + else: + edgeBounds = get_edge_bounds(dsMesh) + + for transectName, dsTransect in obsDatasets.items(): + self.logger.info(' {}'.format(transectName)) + if 'z' in dsTransect: + transectZ = dsTransect.z + else: + transectZ = None + edgeIndices, cellIndices, X, Z, mask, lonOut, latOut, \ + obsCellIndices, obsLayerIndices = \ + find_transect_edges_and_cells( + dsTransect.lon, dsTransect.lat, edgeBounds, dsMesh, + transectZ=transectZ, degrees=True) + + for season in self.seasons: + maskedFileName = self.get_masked_file_name(season) + dsMask = xr.open_dataset(maskedFileName) + + dsOnMpas = dsMask.isel(nCells=cellIndices) + dsOnMpas = dsOnMpas.where(cellIndices>=0) + dsOnMpas.coords['x'] = X + dsOnMpas.coords['z'] = Z + dsOnMpas.coords['mask'] = mask + dsOnMpas.coords['lon'] = lonOut + dsOnMpas.coords['lat'] = latOut + + outFileName = self.get_remapped_file_name( + season, comparisonGridName=transectName) + write_netcdf(dsOnMpas, outFileName) + + if obsCellIndices is not None and \ + obsLayerIndices is not None: + dsAtObs = dsMask.isel(nCells=obsCellIndices, + nVertLevels=obsLayerIndices) + outFileName = self.get_remapped_file_name( + season, + comparisonGridName='{}AtObs'.format(transectName)) + write_netcdf(dsAtObs, outFileName) + + dsMask.close() + + dsMesh.close() + for transectName in obsDatasets: obsDatasets[transectName].close() # }}} def customize_masked_climatology(self, climatology, season): # {{{ - ''' - Add zMid to the climatologys + """ + Add zMid to the climatologies Parameters ---------- @@ -279,7 +334,7 @@ def 
customize_masked_climatology(self, climatology, season): # {{{ ------- climatology : ``xarray.Dataset`` object the modified climatology data set - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -302,7 +357,7 @@ def customize_masked_climatology(self, climatology, season): # {{{ def customize_remapped_climatology(self, climatology, comparisonGridNames, season): # {{{ - ''' + """ Add the transect index to the data set Parameters @@ -321,7 +376,7 @@ def customize_remapped_climatology(self, climatology, comparisonGridNames, climatology : ``xarray.Dataset``` The same data set with any custom fields added or modifications made - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -342,7 +397,7 @@ def customize_remapped_climatology(self, climatology, comparisonGridNames, def _vertical_interp(self, ds, transectIndex, dsObs, outFileName, outObsFileName): - ''' + """ Vertically interpolate a transect and write it to a unique file Parameters @@ -363,7 +418,7 @@ def _vertical_interp(self, ds, transectIndex, dsObs, outFileName, outObsFileName : str The name of the file to which the resulting obs data set should be written if it is interpolated - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -438,7 +493,7 @@ class TransectsObservations(object): # {{{ def __init__(self, config, obsFileNames, horizontalResolution, transectCollectionName): # {{{ - ''' + """ Construct the object, setting the observations dictionary to None. Parameters @@ -458,7 +513,7 @@ def __init__(self, config, obsFileNames, horizontalResolution, A name that describes the collection of transects (e.g. the name of the collection of observations) used to name the destination "mesh" for regridding - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -466,21 +521,21 @@ def __init__(self, config, obsFileNames, horizontalResolution, self.obsDatasets = None self.config = config self.obsFileNames = obsFileNames - if horizontalResolution != 'obs': + if horizontalResolution not in ['obs', 'mpas']: horizontalResolution = float(horizontalResolution) self.horizontalResolution = horizontalResolution self.transectCollectionName = transectCollectionName def get_observations(self): # {{{ - ''' + """ Read in and set up the observations. Returns ------- obsDatasets : OrderedDict The observational dataset - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -500,17 +555,14 @@ def get_observations(self): for coord in ['lon', 'lat']: dsObs.coords[coord] = dsObs[coord] - if self.horizontalResolution == 'obs': - dsObs = self._add_distance(dsObs) - else: - dsObs = self._subdivide_observations(dsObs) + dsObs = self._add_distance(dsObs) write_netcdf(dsObs, outFileName) obsDatasets[name] = dsObs return obsDatasets # }}} def build_observational_dataset(self, fileName, transectName): # {{{ - ''' + """ read in the data sets for observations, and possibly rename some variables and dimensions @@ -526,7 +578,7 @@ def build_observational_dataset(self, fileName, transectName): # {{{ ------- dsObs : ``xarray.Dataset`` The observational dataset - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -542,7 +594,7 @@ def build_observational_dataset(self, fileName, transectName): # {{{ def get_out_file_name(self, transectName, verticalComparisonGridName='obs'): # {{{ - ''' + """ Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for MPAS climatology files before and after remapping. 
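Earlier in this subtask's ``run_task``, ``maxLevelCell`` is shifted to zero-based indexing (``dsMesh.maxLevelCell - 1``) before it is used for masking and for ``compute_zmid``, following the convention adopted earlier in the series. The sketch below illustrates that convention for layer mid-depths with synthetic sizes; it mirrors the documented behavior but is not the package's ``compute_zmid`` itself:

import numpy as np
import xarray as xr

nCells, nVertLevels = 3, 5
layerThickness = xr.DataArray(10. * np.ones((nCells, nVertLevels)),
                              dims=('nCells', 'nVertLevels'))
bottomDepth = xr.DataArray([50., 30., 20.], dims='nCells')
# zero-based index of the deepest valid layer in each cell (hence the
# maxLevelCell - 1 when reading the one-based MPAS restart field)
maxLevelCell = xr.DataArray([4, 2, 1], dims='nCells')

vertIndex = xr.DataArray(np.arange(nVertLevels), dims='nVertLevels')
# keep layers down to and including the deepest valid one
layerThickness = layerThickness.where(vertIndex <= maxLevelCell)

thicknessSum = layerThickness.sum(dim='nVertLevels')
thicknessCumSum = layerThickness.cumsum(dim='nVertLevels')
zSurface = -bottomDepth + thicknessSum
zMid = zSurface - thicknessCumSum + 0.5 * layerThickness
print(zMid.values)  # NaN below the deepest valid layer in each cell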
@@ -564,7 +616,7 @@ def get_out_file_name(self, transectName, ------- fileName : str The path to the climatology file for the specified season. - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -578,91 +630,54 @@ def get_out_file_name(self, transectName, make_directories(remappedDirectory) + transectSuffix = transectName.replace(' ', '_') + if verticalComparisonGridName == 'obs': fileName = '{}/{}_{}.nc'.format( - remappedDirectory, self.transectCollectionName, transectName) + remappedDirectory, self.transectCollectionName, transectSuffix) else: fileName = '{}/{}_{}_{}.nc'.format( - remappedDirectory, self.transectCollectionName, transectName, + remappedDirectory, self.transectCollectionName, transectSuffix, verticalComparisonGridName) return fileName # }}} def _add_distance(self, dsObs): # {{{ - ''' - Subdivide each segment of the transect so the horizontal resolution - approximately matches the requested resolution - ''' - - lat = dsObs.lat.values - lon = dsObs.lon.values - - # compute the great circle distance between these points - dxIn = self._haversine(lon[0:-1], lat[0:-1], lon[1:], lat[1:]) - - xIn = numpy.zeros(lat.shape) - xIn[1:] = numpy.cumsum(dxIn) - - dsObs['x'] = (('nPoints',), xIn) - return dsObs # }}} - - def _subdivide_observations(self, dsObs): # {{{ - ''' - Subdivide each segment of the transect so the horizontal resolution - approximately matches the requested resolution - ''' + """ + Add a distance coordinate for the transect. If a horizontal resolution + for subdivision is provided, subdivide each segment of the transect so + the horizontal resolution is at least as high as the requested + resolution + """ - lat = dsObs.lat.values - lon = dsObs.lon.values + subdivide = self.horizontalResolution not in ['obs', 'mpas'] - # compute the great circle distance between these points - dxIn = self._haversine(lon[0:-1], lat[0:-1], lon[1:], lat[1:]) + lat = numpy.deg2rad(dsObs.lat.values) + lon = numpy.deg2rad(dsObs.lon.values) - nSegments = numpy.maximum( - (dxIn / self.horizontalResolution + 0.5).astype(int), 1) + earth_radius = 6371 # Radius of earth in kilometers - xIn = numpy.zeros(lat.shape) - xIn[1:] = numpy.cumsum(dxIn) + x = earth_radius * numpy.cos(lat) * numpy.cos(lon) + y = earth_radius * numpy.cos(lat) * numpy.sin(lon) + z = earth_radius * numpy.sin(lat) - outIndex = [] - for index in range(len(xIn) - 1): - n = nSegments[index] - outIndex.extend(index + numpy.arange(0, n) / n) - outIndex.append(len(xIn) - 1) + if subdivide: + xOut, yOut, zOut, dIn, dOut = subdivide_great_circle( + x, y, z, self.horizontalResolution, earth_radius) - xOut = numpy.interp(outIndex, numpy.arange(len(xIn)), xIn) + dsObs['xIn'] = (('nPoints',), dIn) + dsObs['xOut'] = (('nPointsOut',), dOut) - dsObs['xIn'] = (('nPoints',), xIn) - dsObs['xOut'] = (('nPointsOut',), xOut) + # interpolate fields without and with vertical dimension + dsObs = interp_1d(dsObs, inInterpDim='nPoints', + inInterpCoord='xIn', outInterpDim='nPointsOut', + outInterpCoord='xOut') + dsObs = dsObs.drop_vars(['xIn']) + dsObs = dsObs.rename({'nPointsOut': 'nPoints', 'xOut': 'x'}) + else: + dIn = cartesian_to_great_circle_distance(x, y, z, earth_radius) + dsObs['x'] = (('nPoints',), dIn) - # interpolate fields without and with vertical dimension - dsObs = interp_1d(dsObs, inInterpDim='nPoints', - inInterpCoord='xIn', outInterpDim='nPointsOut', - outInterpCoord='xOut') - dsObs = dsObs.drop_vars(['xIn']) - dsObs = dsObs.rename({'nPointsOut': 'nPoints', 'xOut': 'x'}) return dsObs # }}} - - def _haversine(self, lon1, lat1, 
lon2, lat2): # {{{ - """ - Calculate the great circle distance in km between two points on the - earth (specified in decimal degrees). Based on - https://stackoverflow.com/a/4913653 - """ - # convert decimal degrees to radians - lon1 = numpy.deg2rad(lon1) - lat1 = numpy.deg2rad(lat1) - lon2 = numpy.deg2rad(lon2) - lat2 = numpy.deg2rad(lat2) - - # haversine formula - dlon = lon2 - lon1 - dlat = lat2 - lat1 - a = numpy.sin(dlat / 2.)**2 + numpy.cos(lat1) * numpy.cos(lat2) * \ - numpy.sin(dlon / 2.)**2 - c = 2 * numpy.arcsin(numpy.sqrt(a)) - r = 6371 # Radius of earth in kilometers. Use 3956 for miles - return c * r # }}} - # }}} # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/ocean/plot_transect_subtask.py b/mpas_analysis/ocean/plot_transect_subtask.py index b018f91d5..19726d10e 100644 --- a/mpas_analysis/ocean/plot_transect_subtask.py +++ b/mpas_analysis/ocean/plot_transect_subtask.py @@ -382,6 +382,12 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): lat = remappedModelClimatology.lat lon = remappedModelClimatology.lon + if len(lat.dims) > 1: + lat = lat[:, 0] + + if len(lon.dims) > 1: + lon = lon[:, 0] + # This will do strange things at the antemeridian but there's little # we can do about that. lon_pm180 = numpy.mod(lon + 180., 360.) - 180. @@ -544,7 +550,7 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): numUpperTicks=numUpperTicks, upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, invertYAxis=False, - backgroundColor='#918167', + backgroundColor='#d9bf96', xLim=self.horizontalBounds, compareAsContours=compareAsContours, lineStyle=contourLineStyle, From 4e2c1c454c146a51d56be88cc6d89fb06178f4d1 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 3 Aug 2020 20:11:01 +0200 Subject: [PATCH 09/14] Add a function to climatology module defaults This is just for more convenient imports --- mpas_analysis/shared/climatology/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mpas_analysis/shared/climatology/__init__.py b/mpas_analysis/shared/climatology/__init__.py index 175cee269..0f9c73d17 100644 --- a/mpas_analysis/shared/climatology/__init__.py +++ b/mpas_analysis/shared/climatology/__init__.py @@ -4,7 +4,8 @@ get_unmasked_mpas_climatology_directory, \ get_unmasked_mpas_climatology_file_name, \ get_masked_mpas_climatology_file_name, \ - get_remapped_mpas_climatology_file_name + get_remapped_mpas_climatology_file_name, \ + get_climatology_op_directory from mpas_analysis.shared.climatology.mpas_climatology_task import \ MpasClimatologyTask From d526b7459f7688229af8ce82d69add79120242d9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 3 Aug 2020 20:12:23 +0200 Subject: [PATCH 10/14] Several updates to vertical sections * Pass arguments to create a triangulation rather than requiring a specific form for a data set * Update how outlines are handled if passed in as arguments --- mpas_analysis/shared/plot/vertical_section.py | 123 ++++++++++-------- 1 file changed, 71 insertions(+), 52 deletions(-) diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index f5d0e44a9..ed89d9326 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -40,7 +40,9 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=None, zCoord=None, - dsTransectTriangles=None, + triangulation_args=None, + xOutline=None, + zOutline=None, colorbarLabel=None, 
xlabels=None, ylabel=None, @@ -111,11 +113,19 @@ def plot_vertical_section_comparison( zCoord : xarray.DataArray, optional The z coordinates for the model, ref and diff arrays - dsTransectTriangles : xarray.Dataset, optional - A dataset defining the transect on triangles rather than as a logically - rectangular grid. If this option is provided, ``xCoords`` is only - used for tick marks if more than one x axis is requested, and - ``zCoord`` will be ignored. + triangulation_args : dict, optional + A dict of arguments to create a matplotlib.tri.Triangulation of the + transect that does not rely on it being on a logically rectangular grid. + The arguments rather than the triangulation itself are passed because + multiple triangulations with different masks are needed internally and + there is not an obvious mechanism for copying an existing triangulation. + If this option is provided, ``xCoords`` is only used for tick marks if + more than one x axis is requested, and ``zCoord`` will be ignored. + + xOutline, zOutline : numpy.ndarray, optional + pairs of points defining line segments that are used to outline the + valid region of the mesh if ``outlineValid = True`` and + ``triangulation_args`` is not ``None`` colorMapSectionName : str section name in ``config`` where color map info can be found. @@ -372,7 +382,9 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=xCoords, zCoord=zCoord, - dsTransectTriangles=dsTransectTriangles, + triangulation_args=triangulation_args, + xOutline=xOutline, + zOutline=zOutline, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=title, @@ -418,7 +430,9 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=xCoords, zCoord=zCoord, - dsTransectTriangles=dsTransectTriangles, + triangulation_args=triangulation_args, + xOutline=xOutline, + zOutline=zOutline, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=refTitle, @@ -458,7 +472,9 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=xCoords, zCoord=zCoord, - dsTransectTriangles=dsTransectTriangles, + triangulation_args=triangulation_args, + xOutline=xOutline, + zOutline=zOutline, suffix=diffSuffix, colorbarLabel=colorbarLabel, title=diffTitle, @@ -508,7 +524,9 @@ def plot_vertical_section( colorMapSectionName, xCoords=None, zCoord=None, - dsTransectTriangles=None, + triangulation_args=None, + xOutline=None, + zOutline=None, suffix='', colorbarLabel=None, title=None, @@ -585,11 +603,21 @@ def plot_vertical_section( zCoord : xarray.DataArray, optional The z coordinates for the ``field`` - dsTransectTriangles : xarray.Dataset, optional - A dataset defining the transect on triangles rather than as a logically - rectangular grid. If this option is provided, ``xCoords`` is only - used for tick marks if more than one x axis is requested, and - ``zCoord`` will be ignored. + triangulation_args : dict, optional + A dict of arguments to create a matplotlib.tri.Triangulation of the + transect that does not rely on it being on a logically rectangular grid. + The arguments rather than the triangulation itself are passed because + multiple triangulations with different masks are needed internally and + there is not an obvious mechanism for copying an existing triangulation. + If this option is provided, ``xCoords`` is only used for tick marks if + more than one x axis is requested, and ``zCoord`` will be ignored. 
+ + xOutline, zOutline : numpy.ndarray, optional + pairs of points defining line segments that are used to outline the + valid region of the mesh if ``outlineValid = True`` and + ``triangulation_args`` is not ``None`` + + suffix : str, optional the suffix used for colorbar config options @@ -772,7 +800,7 @@ def plot_vertical_section( if len(xCoords) != len(xlabels): raise ValueError('Expected the same number of xCoords and xlabels') - if dsTransectTriangles is None: + if triangulation_args is None: x, y = xr.broadcast(xCoords[0], zCoord) dims_in_field = all([dim in field.dims for dim in x.dims]) @@ -810,8 +838,13 @@ def plot_vertical_section( x, y, mask) else: mask = field.notnull() - maskedTriangulation, unmaskedTriangulation = _get_ds_triangulation( - dsTransectTriangles, mask) + triMask = np.logical_not(mask.values) + # if any node of a triangle is masked, the triangle is masked + triMask = np.amax(triMask, axis=1) + unmaskedTriangulation = Triangulation(**triangulation_args) + mask_args = dict(triangulation_args) + mask_args['mask'] = triMask + maskedTriangulation = Triangulation(**mask_args) # set up figure if dpi is None: @@ -824,6 +857,13 @@ def plot_vertical_section( colormapDict = setup_colormap(config, colorMapSectionName, suffix=suffix) + if outlineValid and xOutline is not None or zOutline is not None: + # we might have some invalid cells that are inside the outline. Let's + # make them white + zeroArray = xr.zeros_like(field) + plt.tricontourf(unmaskedTriangulation, zeroArray.values.ravel(), + colors='white') + if not plotAsContours: # display a heatmap of fieldArray fieldMasked = field.where(mask, 0.0).values.ravel() @@ -833,7 +873,7 @@ def plot_vertical_section( plotHandle = plt.tripcolor(maskedTriangulation, fieldMasked, cmap=colormapDict['colormap'], norm=colormapDict['norm'], - rasterized=True) + rasterized=True, shading='gouraud') else: plotHandle = plt.tricontourf(maskedTriangulation, fieldMasked, cmap=colormapDict['colormap'], @@ -853,18 +893,21 @@ def plot_vertical_section( else: # display a white heatmap to get a white background for non-land zeroArray = xr.zeros_like(field) - plt.tricontourf(maskedTriangulation, zeroArray.values, colors='white') + plt.tricontourf(maskedTriangulation, zeroArray.values.ravel(), colors='white') ax = plt.gca() + ax.set_facecolor(backgroundColor) if outlineValid: - # set the color for NaN or masked regions, and draw a black - # outline around them; technically, the contour level used should - # be 1.0, but the contours don't show up when using 1.0, so 0.999 - # is used instead - ax.set_facecolor(backgroundColor) - landMask = np.isnan(field.values).ravel() - plt.tricontour(unmaskedTriangulation, landMask, levels=[0.0001], - colors='black', linewidths=1) + if xOutline is None or zOutline is None: + # set the color for NaN or masked regions, and draw a black + # outline around them; technically, the contour level used should + # be 1.0, but the contours don't show up when using 1.0, so 0.999 + # is used instead + landMask = np.isnan(field.values).ravel() + plt.tricontour(unmaskedTriangulation, landMask, levels=[0.0001], + colors='black', linewidths=1) + else: + plt.plot(xOutline, zOutline, color='black', linewidth=1) # plot contours, if they were requested contourLevels = colormapDict['contours'] @@ -983,30 +1026,6 @@ def plot_vertical_section( return fig, ax # }}} - -def _get_ds_triangulation(dsTransectTriangles, mask): - """get matplotlib Triangulation from triangulation dataset""" - - triMask = np.logical_not(mask) - # if any node of a 
-    triMask = triMask.max(dim='nTriangleNodes')
-
-    nTransectTriangles = dsTransectTriangles.sizes['nTransectTriangles']
-    dNode = dsTransectTriangles.dNode.isel(
-        nSegments=dsTransectTriangles.segmentIndices,
-        nHorizBounds=dsTransectTriangles.nodeHorizBoundsIndices)
-    x = dNode.values.ravel()
-
-    zTransectNode = dsTransectTriangles.zTransectNode
-    y = zTransectNode.values.ravel()
-
-    tris = np.arange(3 * nTransectTriangles).reshape((nTransectTriangles, 3))
-    maskedTriangulation = Triangulation(x=x, y=y, triangles=tris, mask=triMask)
-    unmaskedTriangulation = Triangulation(x=x, y=y, triangles=tris)
-
-    return maskedTriangulation, unmaskedTriangulation
-
-
 def _get_triangulation(x, y, mask):
     """divide each quad in the x/y mesh into 2 triangles"""
 

From ac6c920f31adb5785acc9902992ae09b4f31dc81 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Mon, 3 Aug 2020 20:15:04 +0200
Subject: [PATCH 11/14] Update ocean transects to handle native MPAS grid

Substantial changes are made to how transects are computed and plotted
to accommodate this move.
---
 mpas_analysis/default.cfg                     |   9 +
 .../ocean/compute_transects_subtask.py        | 256 +++++++++++++-----
 mpas_analysis/ocean/geojson_transects.py      |   5 +-
 mpas_analysis/ocean/plot_transect_subtask.py  | 114 ++++++--
 mpas_analysis/ocean/sose_transects.py         |  14 +-
 mpas_analysis/ocean/woce_transects.py         |   7 +-
 6 files changed, 315 insertions(+), 90 deletions(-)

diff --git a/mpas_analysis/default.cfg b/mpas_analysis/default.cfg
index 720ab2f1f..6abd4e481 100644
--- a/mpas_analysis/default.cfg
+++ b/mpas_analysis/default.cfg
@@ -2361,6 +2361,9 @@ verticalComparisonGridName = uniform_0_to_4000m_at_10m
 # vertical comparison grids
 verticalComparisonGrid = numpy.linspace(0, -4000, 401)
 
+# A range for the y axis (if any)
+verticalBounds = []
+
 # The minimum weight of a destination cell after remapping. Any cell with
 # weights lower than this threshold will therefore be masked out.
 renormalizationThreshold = 0.01
@@ -2548,6 +2551,9 @@ verticalComparisonGridName = uniform_0_to_4000m_at_10m
 # comparison grids
 verticalComparisonGrid = numpy.linspace(0, -4000, 401)
 
+# A range for the y axis (if any)
+verticalBounds = []
+
 # The minimum weight of a destination cell after remapping. Any cell with
 # weights lower than this threshold will therefore be masked out.
 renormalizationThreshold = 0.01
@@ -2743,6 +2749,9 @@ verticalComparisonGridName = uniform_10_to_1500m_at_10m
 # vertical comparison grids
 verticalComparisonGrid = numpy.linspace(-10, -1500, 150)
 
+# A range for the y axis (if any)
+verticalBounds = []
+
 # The minimum weight of a destination cell after remapping. Any cell with
 # weights lower than this threshold will therefore be masked out.
renormalizationThreshold = 0.01 diff --git a/mpas_analysis/ocean/compute_transects_subtask.py b/mpas_analysis/ocean/compute_transects_subtask.py index 40afcd52f..5b0c6fbab 100644 --- a/mpas_analysis/ocean/compute_transects_subtask.py +++ b/mpas_analysis/ocean/compute_transects_subtask.py @@ -18,7 +18,17 @@ from pyremap import PointCollectionDescriptor -from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask +from mpas_tools.viz import mesh_to_triangles +from mpas_tools.transects import subdivide_great_circle, \ + cartesian_to_great_circle_distance +from mpas_tools.viz.transects import find_transect_cells_and_weights, \ + make_triangle_tree +from mpas_tools.ocean.transects import find_transect_levels_and_weights, \ + interp_mpas_to_transect_triangle_nodes, \ + interp_transect_grid_to_transect_triangle_nodes + +from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \ + get_climatology_op_directory from mpas_analysis.shared.io.utility import build_config_full_path, \ make_directories @@ -28,10 +38,6 @@ from mpas_analysis.shared.interpolation import interp_1d -from mpas_analysis.shared.transects import subdivide_great_circle, \ - cartesian_to_great_circle_distance, find_transect_edges_and_cells, \ - get_edge_bounds - class ComputeTransectsSubtask(RemapMpasClimatologySubtask): # {{{ """ @@ -157,6 +163,10 @@ def __init__(self, mpasClimatologyTask, parentTask, climatologyName, self.maxLevelCell = None self.zMid = None self.remap = self.obsDatasets.horizontalResolution != 'mpas' + if self.obsDatasets.horizontalResolution == 'mpas' and \ + self.verticalComparisonGridName != 'mpas': + raise ValueError('If the horizontal comparison grid is "mpas", the ' + 'vertical grid must also be "mpas".') # }}} @@ -234,12 +244,14 @@ def run_task(self): # {{{ dsMesh = dsMesh.isel(Time=0) self.maxLevelCell = dsMesh.maxLevelCell - 1 - zMid = compute_zmid(dsMesh.bottomDepth, dsMesh.maxLevelCell-1, - dsMesh.layerThickness) - self.zMid = \ - xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevels'), - 'data': zMid}) + if self.remap: + zMid = compute_zmid(dsMesh.bottomDepth, dsMesh.maxLevelCell-1, + dsMesh.layerThickness) + + self.zMid = \ + xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevels'), + 'data': zMid}) # then, call run from the base class (RemapMpasClimatologySubtask), # which will perform masking and possibly horizontal remapping @@ -269,53 +281,14 @@ def run_task(self): # {{{ outFileName, outObsFileName) ds.close() - else: - edgeBounds = get_edge_bounds(dsMesh) - - for transectName, dsTransect in obsDatasets.items(): - self.logger.info(' {}'.format(transectName)) - if 'z' in dsTransect: - transectZ = dsTransect.z - else: - transectZ = None - edgeIndices, cellIndices, X, Z, mask, lonOut, latOut, \ - obsCellIndices, obsLayerIndices = \ - find_transect_edges_and_cells( - dsTransect.lon, dsTransect.lat, edgeBounds, dsMesh, - transectZ=transectZ, degrees=True) - - for season in self.seasons: - maskedFileName = self.get_masked_file_name(season) - dsMask = xr.open_dataset(maskedFileName) - - dsOnMpas = dsMask.isel(nCells=cellIndices) - dsOnMpas = dsOnMpas.where(cellIndices>=0) - dsOnMpas.coords['x'] = X - dsOnMpas.coords['z'] = Z - dsOnMpas.coords['mask'] = mask - dsOnMpas.coords['lon'] = lonOut - dsOnMpas.coords['lat'] = latOut - - outFileName = self.get_remapped_file_name( - season, comparisonGridName=transectName) - write_netcdf(dsOnMpas, outFileName) - - if obsCellIndices is not None and \ - obsLayerIndices is not None: - dsAtObs = dsMask.isel(nCells=obsCellIndices, - 
nVertLevels=obsLayerIndices) - outFileName = self.get_remapped_file_name( - season, - comparisonGridName='{}AtObs'.format(transectName)) - write_netcdf(dsAtObs, outFileName) + for transectName in obsDatasets: + obsDatasets[transectName].close() - dsMask.close() + else: + self._compute_mpas_transects(dsMesh) dsMesh.close() - for transectName in obsDatasets: - obsDatasets[transectName].close() - # }}} def customize_masked_climatology(self, climatology, season): # {{{ @@ -343,13 +316,14 @@ def customize_masked_climatology(self, climatology, season): # {{{ {'dims': ('nVertLevels',), 'data': numpy.arange(climatology.sizes['nVertLevels'])}) - cellMask = zIndex < self.maxLevelCell + cellMask = zIndex <= self.maxLevelCell for variableName in self.variableList: climatology[variableName] = \ climatology[variableName].where(cellMask) - climatology['zMid'] = self.zMid + if self.remap: + climatology['zMid'] = self.zMid climatology = climatology.transpose('nVertLevels', 'nCells') @@ -458,6 +432,149 @@ def _vertical_interp(self, ds, transectIndex, dsObs, outFileName, ds = ds.drop_vars(['validMask', 'transectNumber']) write_netcdf(ds, outFileName) # }}} + def get_mpas_transect_file_name(self, transectName): # {{{ + """Get the file name for a masked MPAS transect info""" + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + mpasMeshName = config.get('input', 'mpasMeshName') + + climatologyOpDirectory = get_climatology_op_directory(config, 'avg') + + comparisonFullMeshName = transectName.replace(' ', '_') + + stageDirectory = '{}/remapped'.format(climatologyOpDirectory) + + directory = '{}/{}_{}_to_{}'.format( + stageDirectory, self.climatologyName, mpasMeshName, + comparisonFullMeshName) + + make_directories(directory) + + fileName = '{}/mpas_transect_info.nc'.format(directory) + + return fileName # }}} + + def _compute_mpas_transects(self, dsMesh): # {{{ + + # see if all transects have already been computed + allExist = True + transectNames = list(self.obsDatasets.obsFileNames.keys()) + for transectName in transectNames: + transectInfoFileName = self.get_mpas_transect_file_name( + transectName) + if not os.path.exists(transectInfoFileName): + allExist = False + break + obsFileName = self.obsDatasets.get_out_file_name( + transectName, self.verticalComparisonGridName) + if not os.path.exists(obsFileName): + allExist = False + break + + if allExist: + return + + dsTris = mesh_to_triangles(dsMesh) + + triangleTree = make_triangle_tree(dsTris) + + for transectName in transectNames: + obsFileName = self.obsDatasets.get_out_file_name( + transectName, self.verticalComparisonGridName) + transectInfoFileName = self.get_mpas_transect_file_name( + transectName) + if not os.path.exists(obsFileName) or \ + not os.path.exists(transectInfoFileName): + dsTransect = self.obsDatasets.build_observational_dataset( + self.obsDatasets.obsFileNames[transectName], transectName) + + dsTransect.load() + # make sure lat and lon are coordinates + for coord in ['lon', 'lat']: + dsTransect.coords[coord] = dsTransect[coord] + + if 'z' in dsTransect: + transectZ = dsTransect.z + else: + transectZ = None + + dsMpasTransect = find_transect_cells_and_weights( + dsTransect.lon, dsTransect.lat, dsTris, dsMesh, + triangleTree, degrees=True) + + dsMpasTransect = find_transect_levels_and_weights( + dsMpasTransect, dsMesh.layerThickness, + dsMesh.bottomDepth, dsMesh.maxLevelCell - 1, + transectZ) + + if 'landIceFraction' in dsMesh: + interpCellIndices = dsMpasTransect.interpHorizCellIndices + interpCellWeights = 
dsMpasTransect.interpHorizCellWeights + landIceFraction = dsMesh.landIceFraction.isel( + nCells=interpCellIndices) + landIceFraction = (landIceFraction * interpCellWeights).sum( + dim='nHorizWeights') + dsMpasTransect['landIceFraction'] = landIceFraction + + # use to_netcdf rather than write_netcdf because integer indices + # are getting converted to floats when xarray reads them back + # because of _FillValue + dsMpasTransect.to_netcdf(transectInfoFileName) + + dsTransectOnMpas = xr.Dataset(dsMpasTransect) + dsTransectOnMpas['x'] = dsMpasTransect.dNode.isel( + nSegments=dsMpasTransect.segmentIndices, + nHorizBounds=dsMpasTransect.nodeHorizBoundsIndices) + + dsTransectOnMpas['z'] = dsMpasTransect.zTransectNode + + for var in dsTransect.data_vars: + dims = dsTransect[var].dims + if 'nPoints' in dims and 'nz' in dims: + da = dsTransect[var] + da = self._interp_obs_to_mpas(da, dsMpasTransect) + dsTransectOnMpas[var] = da + + dsTransectOnMpas.to_netcdf(obsFileName) + + for transectName in transectNames: + transectInfoFileName = self.get_mpas_transect_file_name( + transectName) + dsMpasTransect = xr.open_dataset(transectInfoFileName) + + for season in self.seasons: + maskedFileName = self.get_masked_file_name(season) + with xr.open_dataset(maskedFileName) as dsMask: + dsOnMpas = xr.Dataset(dsMpasTransect) + for var in dsMask.data_vars: + dims = dsMask[var].dims + if 'nCells' in dims and 'nVertLevels' in dims: + dsOnMpas[var] = \ + interp_mpas_to_transect_triangle_nodes( + dsMpasTransect, dsMask[var]) + + outFileName = self.get_remapped_file_name( + season, comparisonGridName=transectName) + dsOnMpas.to_netcdf(outFileName) + + # }}} + + def _interp_obs_to_mpas(self, da, dsMpasTransect, threshold=0.1): # {{{ + """ + Interpolate observations to the native MPAS transect with masking + """ + daMask = da.notnull() + da = da.where(daMask, 0.) + da = interp_transect_grid_to_transect_triangle_nodes( + dsMpasTransect, da) + daMask = interp_transect_grid_to_transect_triangle_nodes( + dsMpasTransect, daMask) + da = (da / daMask).where(daMask > threshold) + return da # }}} + # }}} @@ -476,8 +593,8 @@ class TransectsObservations(object): # {{{ observations for a transect horizontalResolution : str - 'obs' for the obs as they are or a size in km if subdivision is - desired. + 'obs' for the obs as they are, 'mpas' for the native MPAS mesh, or a + size in km if subdivision of the observational transect is desired. transectCollectionName : str A name that describes the collection of transects (e.g. the name @@ -506,8 +623,8 @@ def __init__(self, config, obsFileNames, horizontalResolution, observations for a transect horizontalResolution : str - 'obs' for the obs as they are or a size in km if subdivision is - desired. + 'obs' for the obs as they are, 'mpas' for the native MPAS mesh, or a + size in km if subdivision of the observational transect is desired. transectCollectionName : str A name that describes the collection of transects (e.g. 
the name @@ -540,6 +657,11 @@ def get_observations(self): # ------- # Xylar Asay-Davis + if self.horizontalResolution == 'mpas': + # by default, we don't do anything for transects on the native grid + # but subclasses might need to do something + return None + obsDatasets = OrderedDict() for name in self.obsFileNames: outFileName = self.get_out_file_name(name) @@ -649,20 +771,22 @@ def _add_distance(self, dsObs): # {{{ resolution """ - subdivide = self.horizontalResolution not in ['obs', 'mpas'] - lat = numpy.deg2rad(dsObs.lat.values) lon = numpy.deg2rad(dsObs.lon.values) - earth_radius = 6371 # Radius of earth in kilometers + earth_radius = 6.371e6 # Radius of earth in meters x = earth_radius * numpy.cos(lat) * numpy.cos(lon) y = earth_radius * numpy.cos(lat) * numpy.sin(lon) z = earth_radius * numpy.sin(lat) - if subdivide: + if self.horizontalResolution == 'obs': + dIn = cartesian_to_great_circle_distance(x, y, z, earth_radius) + dsObs['x'] = (('nPoints',), dIn) + elif self.horizontalResolution != 'mpas': + # subdivide xOut, yOut, zOut, dIn, dOut = subdivide_great_circle( - x, y, z, self.horizontalResolution, earth_radius) + x, y, z, 1e3*self.horizontalResolution, earth_radius) dsObs['xIn'] = (('nPoints',), dIn) dsObs['xOut'] = (('nPointsOut',), dOut) @@ -673,11 +797,9 @@ def _add_distance(self, dsObs): # {{{ outInterpCoord='xOut') dsObs = dsObs.drop_vars(['xIn']) dsObs = dsObs.rename({'nPointsOut': 'nPoints', 'xOut': 'x'}) - else: - dIn = cartesian_to_great_circle_distance(x, y, z, earth_radius) - dsObs['x'] = (('nPoints',), dIn) return dsObs # }}} + # }}} # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/ocean/geojson_transects.py b/mpas_analysis/ocean/geojson_transects.py index a10778f88..4ccbbeda9 100644 --- a/mpas_analysis/ocean/geojson_transects.py +++ b/mpas_analysis/ocean/geojson_transects.py @@ -81,6 +81,8 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): verticalComparisonGrid = config.getExpression( sectionName, 'verticalComparisonGrid', usenumpyfunc=True) + verticalBounds = config.getExpression(sectionName, 'verticalBounds') + fields = config.getExpression(sectionName, 'fields') obsFileNames = OrderedDict() @@ -163,7 +165,8 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): groupLink='geojson', galleryName=field['titleName'], configSectionName='geojson{}Transects'.format( - fieldPrefixUpper)) + fieldPrefixUpper), + verticalBounds=verticalBounds) self.add_subtask(subtask) # }}} diff --git a/mpas_analysis/ocean/plot_transect_subtask.py b/mpas_analysis/ocean/plot_transect_subtask.py index 19726d10e..843badb7a 100644 --- a/mpas_analysis/ocean/plot_transect_subtask.py +++ b/mpas_analysis/ocean/plot_transect_subtask.py @@ -21,9 +21,12 @@ import xarray as xr import numpy +from matplotlib.tri import Triangulation from geometric_features import FeatureCollection +from mpas_tools.ocean.transects import get_outline_segments + from mpas_analysis.shared.plot import plot_vertical_section_comparison, \ savefig, add_inset @@ -182,7 +185,7 @@ def __init__(self, parentTask, season, transectName, fieldName, def set_plot_info(self, outFileLabel, fieldNameInTitle, mpasFieldName, refFieldName, refTitleLabel, unitsLabel, imageCaption, galleryGroup, groupSubtitle, groupLink, - galleryName, configSectionName, + galleryName, configSectionName, verticalBounds, diffTitleLabel='Model - Observations'): # {{{ ''' @@ -229,8 +232,13 @@ def set_plot_info(self, outFileLabel, fieldNameInTitle, mpasFieldName, configSectionName : str 
the name of the section where the color map and range is defined + verticalBounds : list + the min and max for the vertical axis, or an emtpy list if the + range automatically determined by matplotlib should be used + diffTitleLabel : str, optional the title of the difference subplot + ''' # Authors # ------- @@ -255,6 +263,10 @@ def set_plot_info(self, outFileLabel, fieldNameInTitle, mpasFieldName, self.thumbnailDescription = self.season self.configSectionName = configSectionName + if len(verticalBounds) == 0: + self.verticalBounds = None + else: + self.verticalBounds = verticalBounds # }}} def setup_and_check(self): # {{{ @@ -328,7 +340,7 @@ def run_task(self): # {{{ verticalComparisonGridName) remappedRefClimatology = xr.open_dataset(remappedFileName) - # if Time is an axis, take the appropriate avarage to get the + # if Time is an axis, take the appropriate average to get the # climatology if 'Time' in remappedRefClimatology.dims: monthValues = constants.monthDictionary[season] @@ -375,18 +387,40 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): mainRunName = config.get('runs', 'mainRunName') - x = remappedModelClimatology.x - z = remappedModelClimatology.z + remap = self.remapMpasClimatologySubtask.remap + + if remap: + x = 1e-3*remappedModelClimatology.x + z = remappedModelClimatology.z - # set lat and lon in case we want to plot versus these quantities - lat = remappedModelClimatology.lat - lon = remappedModelClimatology.lon + # set lat and lon in case we want to plot versus these quantities + lat = remappedModelClimatology.lat + lon = remappedModelClimatology.lon + + if len(lat.dims) > 1: + lat = lat[:, 0] + + if len(lon.dims) > 1: + lon = lon[:, 0] + + # z is masked out with NaNs in some locations (where there is land) + # but this makes pcolormesh unhappy so we'll zero out those + # locations + z = z.where(z.notnull(), 0.) + + else: + x = 1e-3*remappedModelClimatology.dNode + z = None + lon = remappedModelClimatology.lonNode + lat = remappedModelClimatology.latNode - if len(lat.dims) > 1: - lat = lat[:, 0] + remappedModelClimatology['dNode'] = x - if len(lon.dims) > 1: - lon = lon[:, 0] + # flatten the x, lon and lat arrays because this is what + # vertical_section is expecting + x = xr.DataArray(data=x.values.ravel(), dims=('nx',)) + lon = xr.DataArray(data=lon.values.ravel(), dims=('nx',)) + lat = xr.DataArray(data=lat.values.ravel(), dims=('nx',)) # This will do strange things at the antemeridian but there's little # we can do about that. @@ -394,13 +428,14 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): if self.horizontalBounds is not None: mask = numpy.logical_and( - remappedModelClimatology.x.values >= self.horizontalBounds[0], - remappedModelClimatology.x.values <= self.horizontalBounds[1]) + x.values >= self.horizontalBounds[0], + x.values <= self.horizontalBounds[1]) inset_lon = lon_pm180.values[mask] inset_lat = lat.values[mask] else: inset_lon = lon_pm180.values inset_lat = lat.values + fc = FeatureCollection() fc.add_feature( {"type": "Feature", @@ -413,12 +448,17 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): "type": "LineString", "coordinates": list(map(list, zip(inset_lon, inset_lat)))}}) - # z is masked out with NaNs in some locations (where there is land) but - # this makes pcolormesh unhappy so we'll zero out those locations - z = z.where(z.notnull(), 0.) 
- modelOutput = remappedModelClimatology[self.mpasFieldName] + if remap: + triangulation_args = None + dOutline = None + zOutline = None + else: + triangulation_args = self._get_ds_triangulation( + remappedModelClimatology) + dOutline, zOutline = get_outline_segments(remappedModelClimatology) + if remappedRefClimatology is None: refOutput = None bias = None @@ -540,6 +580,9 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): configSectionName, xCoords=xs, zCoord=z, + triangulation_args=triangulation_args, + xOutline=dOutline, + zOutline=zOutline, colorbarLabel=self.unitsLabel, xlabels=xLabels, ylabel=yLabel, @@ -552,6 +595,7 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): invertYAxis=False, backgroundColor='#d9bf96', xLim=self.horizontalBounds, + yLim=self.verticalBounds, compareAsContours=compareAsContours, lineStyle=contourLineStyle, lineColor=contourLineColor, @@ -567,6 +611,24 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): pos = suptitle.get_position() suptitle.set_position((pos[0] - 0.05, pos[1])) + if not remap: + # add open air ice shelves + d = remappedModelClimatology.dNode.values.ravel() + ssh = remappedModelClimatology.ssh.values.ravel() + mask = ssh < 0. + for ax in axes: + ax.fill_between(d, ssh, numpy.zeros(ssh.shape), where=mask, + interpolate=True, color='white', + edgecolor='black', linewidth=1.) + if 'landIceFraction' in remappedModelClimatology: + landIceFraction = remappedModelClimatology.landIceFraction + landIceMask = landIceFraction.values.ravel() > 0.25 + mask = numpy.logical_and(landIceMask, ssh < 0.) + for ax in axes: + ax.fill_between(d, ssh, numpy.zeros(ssh.shape), where=mask, + interpolate=False, color='#e1eaf7', + edgecolor='black', linewidth=1.) 
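+            # where land ice covers the column (landIceFraction > 0.25) and
+            # the surface is below sea level, re-shade the gap between the
+            # surface and z=0 in light blue so floating ice shelves stand out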
+ # make a red start axis and green end axis to correspond to the dots # in the inset for ax in axes: @@ -783,6 +845,24 @@ def _lat_fewest_direction_changes(self, lat, lon): return False # }}} + def _get_ds_triangulation(self, dsTransectTriangles): + """get matplotlib Triangulation from triangulation dataset""" + + nTransectTriangles = dsTransectTriangles.sizes['nTransectTriangles'] + dNode = dsTransectTriangles.dNode.isel( + nSegments=dsTransectTriangles.segmentIndices, + nHorizBounds=dsTransectTriangles.nodeHorizBoundsIndices) + x = dNode.values.ravel() + + zTransectNode = dsTransectTriangles.zTransectNode + y = zTransectNode.values.ravel() + + tris = numpy.arange(3 * nTransectTriangles).reshape( + (nTransectTriangles, 3)) + triangulation_args = dict(x=x, y=y, triangles=tris) + + return triangulation_args + # }}} diff --git a/mpas_analysis/ocean/sose_transects.py b/mpas_analysis/ocean/sose_transects.py index 46d62e5a3..31e7caf90 100644 --- a/mpas_analysis/ocean/sose_transects.py +++ b/mpas_analysis/ocean/sose_transects.py @@ -86,6 +86,8 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): verticalComparisonGrid = config.getExpression( sectionName, 'verticalComparisonGrid', usenumpyfunc=True) + verticalBounds = config.getExpression(sectionName, 'verticalBounds') + longitudes = sorted(config.getExpression(sectionName, 'longitudes', usenumpyfunc=True)) @@ -134,7 +136,7 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): if field['mpas'] != 'velMag'] transectCollectionName = 'SOSE_transects' - if horizontalResolution != 'obs': + if horizontalResolution not in ['obs', 'mpas']: transectCollectionName = '{}_{}km'.format(transectCollectionName, horizontalResolution) @@ -203,7 +205,8 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): groupLink='sose_transects', galleryName=field['titleName'], configSectionName='sose{}Transects'.format( - fieldPrefixUpper)) + fieldPrefixUpper), + verticalBounds=verticalBounds) self.add_subtask(subtask) # }}} @@ -378,6 +381,11 @@ def combine_observations(self): # {{{ dsObs.velMag.attrs['units'] = 'm s$^{-1}$' dsObs.velMag.attrs['description'] = description + # make a copy of the top set of data at z=0 + dsObs = xr.concat((dsObs.isel(z=0), dsObs), dim='z') + z = dsObs.z.values + z[0] = 0. 
+ dsObs['z'] = ('z', z) write_netcdf(dsObs, combinedFileName) print(' Done.') @@ -413,7 +421,7 @@ def build_observational_dataset(self, fileName, transectName): # {{{ dsObs = dsObs.sel(method=str('nearest'), lon=lon) lon = dsObs.lon.values - # do some dropping and renaming so we end up wiht the right coordinates + # do some dropping and renaming so we end up with the right coordinates # and dimensions dsObs = dsObs.rename({'lat': 'nPoints', 'z': 'nz'}) dsObs['lat'] = dsObs.nPoints diff --git a/mpas_analysis/ocean/woce_transects.py b/mpas_analysis/ocean/woce_transects.py index c9434786b..f19c7d4b3 100644 --- a/mpas_analysis/ocean/woce_transects.py +++ b/mpas_analysis/ocean/woce_transects.py @@ -77,6 +77,8 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): verticalComparisonGrid = config.getExpression( sectionName, 'verticalComparisonGrid', usenumpyfunc=True) + verticalBounds = config.getExpression(sectionName, 'verticalBounds') + horizontalBounds = config.getExpression( sectionName, 'horizontalBounds') @@ -115,7 +117,7 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): 'units': r'kg m$^{-3}$'}} transectCollectionName = 'WOCE_transects' - if horizontalResolution != 'obs': + if horizontalResolution not in ['obs', 'mpas']: transectCollectionName = '{}_{}km'.format(transectCollectionName, horizontalResolution) @@ -187,7 +189,8 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): groupLink='woce', galleryName=titleName, configSectionName='woce{}Transects'.format( - fieldNameUpper)) + fieldNameUpper), + verticalBounds=verticalBounds) self.add_subtask(subtask) # }}} From 09070c8d0966f5ed6dd6fa33eb83804d25c85d2e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 3 Aug 2020 20:16:48 +0200 Subject: [PATCH 12/14] Update "polar regions" config to use native MPAS transects --- mpas_analysis/polar_regions.cfg | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/mpas_analysis/polar_regions.cfg b/mpas_analysis/polar_regions.cfg index 54a1fdd06..c8aa4abd3 100644 --- a/mpas_analysis/polar_regions.cfg +++ b/mpas_analysis/polar_regions.cfg @@ -133,6 +133,22 @@ regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", longitudes = [318., 325., 0., 75., 117., 145., 160., 184., 187., 198., 253., 280., 288.] +# The approximate horizontal resolution (in km) of each transect. Latitude/ +# longitude between observation points will be subsampled at this interval. +# Use 'obs' to indicate no subsampling. Use 'mpas' to indicate plotting of +# model data on the native grid, in which case comparison with observations +# will take place on the observation grid. +horizontalResolution = mpas + +# The name of the vertical comparison grid. Valid values are 'mpas' for the +# MPAS vertical grid, 'obs' to use the locations of observations or +# any other name if the vertical grid is defined by 'verticalComparisonGrid' +verticalComparisonGridName = mpas + +# A range for the y axis (if any) +verticalBounds = [-1500., 0.] + + [soseTemperatureTransects] # options related to plotting SOSE transects of potential temperature @@ -178,6 +194,18 @@ colorbarLevelsDifference = [-0.5, -0.2, -0.1, -0.05, -0.02, 0, 0.02, 0.05, ## options related to plotting model vs. World Ocean Circulation Experiment ## (WOCE) transects. +# The approximate horizontal resolution (in km) of each transect. Latitude/ +# longitude between observation points will be subsampled at this interval. +# Use 'obs' to indicate no subsampling. 
Use 'mpas' to indicate plotting of +# model data on the native grid, in which case comparison with observations +# will take place on the observation grid. +horizontalResolution = mpas + +# The name of the vertical comparison grid. Valid values are 'mpas' for the +# MPAS vertical grid, 'obs' to use the locations of observations or +# any other name if the vertical grid is defined by 'verticalComparisonGrid' +verticalComparisonGridName = mpas + # Horizontal bounds of the plot (in km), or an empty list for automatic bounds # The bounds are a 2-element list of the minimum and maximum distance along the # transect @@ -185,6 +213,9 @@ horizontalBounds = {'WOCE_A21': [630., 830.], 'WOCE_A23': [0., 200.], 'WOCE_A12': [4620., 4820.]} +# A range for the y axis (if any) +verticalBounds = [-4000., 0.] + [woceTemperatureTransects] ## options related to plotting WOCE transects of potential temperature From 14806b87395228c76abdf1e6b30d674f176ff8a2 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 3 Aug 2020 20:17:15 +0200 Subject: [PATCH 13/14] Update docs to describe new native MPAS transects --- docs/api.rst | 1 + docs/config/transects.rst | 54 ++++++++++++++++++++++++--------------- 2 files changed, 35 insertions(+), 20 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 430183b79..7c0e18d60 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -268,6 +268,7 @@ Plotting plot_polar_comparison plot_global_comparison plot_1D + plot_vertical_section_comparison plot_vertical_section colormap.setup_colormap ticks.plot_xtick_format diff --git a/docs/config/transects.rst b/docs/config/transects.rst index 4408c149c..d466bfc71 100644 --- a/docs/config/transects.rst +++ b/docs/config/transects.rst @@ -8,39 +8,49 @@ the comparison grid for each transect:: # The approximate horizontal resolution (in km) of each transect. Latitude/ # longitude between observation points will be subsampled at this interval. - # Use 'obs' to indicate no subsampling. - horizontalResolution = obs + # Use 'obs' to indicate no subsampling. Use 'mpas' to indicate plotting of + # model data on the native grid, in which case comparison with observations + # will take place on the observation grid. + #horizontalResolution = mpas + #horizontalResolution = obs + horizontalResolution = 5 # The name of the vertical comparison grid. Valid values are 'mpas' for the # MPAS vertical grid, 'obs' to use the locations of observations or - # any other name if the vertical grid is defined by 'verticalComparisonGrid' - # verticalComparisonGridName = obs - verticalComparisonGridName = uniform_0_to_4000m_at_10m + # any other name if the vertical grid is defined by 'verticalComparisonGrid'. + # If horizontalResolution is 'mpas', model data (both main and control) will be + # plotted on the MPAS vertical grid, regardless of the comparison grid. #verticalComparisonGridName = mpas + #verticalComparisonGridName = obs + verticalComparisonGridName = uniform_0_to_4000m_at_10m # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or # 'obs'. This should be numpy array of (typically negative) elevations (in m). verticalComparisonGrid = numpy.linspace(0, -4000, 401) -The ``horizontalResolution`` of all transects can be either ``obs`` or a number -of kilometers. If ``obs``, model data are sampled at latitute and longitude -points corresponding to WOCE stations. It a number of kilometers is given, -linear interpolation between observation points is performed with approximately -the requested resolution. 
The distance between stations is always divided into
-an integer number of segments of equal length so the resolution may be slightly
-above or below ``horizontalResolution``.
+The ``horizontalResolution`` of all transects can be ``obs``, ``mpas`` or a
+number of kilometers.  If ``obs``, model data are sampled at latitude and
+longitude points corresponding to the observations.  If the horizontal grid
+is ``mpas``, then the native MPAS-Ocean mesh is used for both the horizontal and
+vertical grids.  If a number of kilometers is given, linear interpolation
+between observation points is performed with approximately the requested
+resolution.  The distance between observation points is always divided into an
+integer number of segments of equal length so the resolution may be slightly
+above or below ``horizontalResolution``.
 
 The vertical grid is determined by two parameters,
 ``verticalComparisonGridName`` and ``verticalComparisonGrid``.  If
-``verticalComparisonGridName = mpas``, the MPAS-Ocean vertical coordinate will
-be interpolated horitontally from grid cell centers to the latitude and
-longitude of each point along the transect, and the observations will be
-interpolated vertically to the resulting grid.  If
-``verticalComparisonGridName = obs``, the vertical grid of the observations
-is used instead.  If ``verticalComparisonGridName`` is anything else, it is
-taken to be the name of a user-defined vertical grid (best to make it
-descriptive and unique, e.g. ``uniform_0_to_4000m_at_10m``) and
+``verticalComparisonGridName = mpas``, but ``horizontalResolution`` is not
+``mpas``, the MPAS-Ocean vertical coordinate will be interpolated horizontally
+from grid cell centers to the latitude and longitude of each point along the
+transect, and the observations will be interpolated vertically to the resulting
+grid.  If ``verticalComparisonGridName = obs``, the vertical grid of the
+observations is used instead.  If ``verticalComparisonGridName`` is anything
+else, it is taken to be the name of a user-defined vertical grid (best to make
+it descriptive and unique, e.g. ``uniform_0_to_4000m_at_10m``) and
 ``verticalComparisonGrid`` should be assigned a valid array of positive-up
 depth values (in the form of a python list or numpy array), e.g.::
 
@@ -48,6 +58,10 @@ depth values (in the form of a python list or numpy array), e.g.::
 
 produces points between 0 and -4000 m sampled every 10 m.
 
+``verticalBounds`` is a list of minimum and maximum limits for the vertical axis
+of the transect.  The default is an empty list, which means ``matplotlib``
+selects the axis limits to encompass the full range of the vertical grid.
+
 .. note::
 
   Some types of transects (e.g.
those produce with geojson files) do not have From 005a43c83356e9c630e1d96a15d068a8dad51a92 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 3 Aug 2020 20:53:21 +0200 Subject: [PATCH 14/14] Fix PEP8 --- mpas_analysis/ocean/plot_transect_subtask.py | 1 - mpas_analysis/shared/plot/vertical_section.py | 8 +++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/mpas_analysis/ocean/plot_transect_subtask.py b/mpas_analysis/ocean/plot_transect_subtask.py index 843badb7a..0c212ab58 100644 --- a/mpas_analysis/ocean/plot_transect_subtask.py +++ b/mpas_analysis/ocean/plot_transect_subtask.py @@ -657,7 +657,6 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): # }}} - def _lat_greater_extent(self, lat, lon): # {{{ """ diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index ed89d9326..eea14f770 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -708,8 +708,8 @@ def plot_vertical_section( movingAveragePoints : int, optional the number of points over which to perform a moving average - NOTE: this option is mostly intended for use when ``xCoordIsTime`` is True, - although it will work with other data as well. Also, the moving + NOTE: this option is mostly intended for use when ``xCoordIsTime`` is + True, although it will work with other data as well. Also, the moving average calculation is based on number of points, not actual x axis values, so for best results, the values in the first entry in ``xCoords`` should be equally spaced. @@ -893,7 +893,8 @@ def plot_vertical_section( else: # display a white heatmap to get a white background for non-land zeroArray = xr.zeros_like(field) - plt.tricontourf(maskedTriangulation, zeroArray.values.ravel(), colors='white') + plt.tricontourf(maskedTriangulation, zeroArray.values.ravel(), + colors='white') ax = plt.gca() ax.set_facecolor(backgroundColor) @@ -1026,6 +1027,7 @@ def plot_vertical_section( return fig, ax # }}} + def _get_triangulation(x, y, mask): """divide each quad in the x/y mesh into 2 triangles"""
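
The reason ``plot_vertical_section`` takes ``triangulation_args`` rather than a
ready-made triangulation is that it needs to build several triangulations from
the same arguments.  A minimal, standalone sketch of that pattern (not taken
from the patches above; the node coordinates, field values and the final
``triplot`` call are invented for illustration) is::

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.tri import Triangulation

    # hypothetical transect: 4 nodes (distance along transect, depth) forming
    # 2 triangles, the kind of dict passed as ``triangulation_args``
    triangulation_args = dict(
        x=np.array([0., 10., 0., 10.]),
        y=np.array([0., 0., -100., -100.]),
        triangles=np.array([[0, 1, 2], [1, 3, 2]]))

    # one node carries an invalid (NaN) value, e.g. below the bathymetry
    field = np.array([1.0, 2.0, 3.0, np.nan])

    # a triangle is masked if any of its nodes is masked
    triMask = np.amax(np.isnan(field)[triangulation_args['triangles']], axis=1)

    unmaskedTriangulation = Triangulation(**triangulation_args)
    maskedTriangulation = Triangulation(**dict(triangulation_args,
                                               mask=triMask))

    # the masked triangulation is used for the color plot, while the unmasked
    # one remains available for outlining or contouring the full mesh
    plt.tripcolor(maskedTriangulation, np.nan_to_num(field), shading='gouraud')
    plt.triplot(unmaskedTriangulation, color='black', linewidth=0.5)
    plt.show()

This mirrors how the patched ``plot_vertical_section`` builds
``maskedTriangulation`` and ``unmaskedTriangulation`` from the same
``triangulation_args`` dict supplied by the transect subtasks.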