Commit
Use unified interpolation and framework functions for GIS mesh_gen.
andrewdnolan committed Apr 16, 2024
1 parent ae51f06 commit 32bb814
Showing 2 changed files with 79 additions and 125 deletions.
181 changes: 59 additions & 122 deletions compass/landice/tests/greenland/mesh.py
@@ -1,14 +1,14 @@
from os.path import exists
from shutil import copyfile
import os

import netCDF4
import numpy as np
from mpas_tools.logging import check_call
from mpas_tools.scrip.from_mpas import scrip_from_mpas

from compass.landice.mesh import (
build_cell_width,
build_mali_mesh,
clean_up_after_interp,
interp_gridded2mali,
make_region_masks,
)
from compass.model import make_graph_file
@@ -21,8 +21,8 @@ class Mesh(Step):
Attributes
----------
mesh_type : str
The resolution or mesh type of the test case
mesh_filename : str
File name of the MALI mesh
"""
def __init__(self, test_case):
"""
@@ -33,14 +33,17 @@ def __init__(self, test_case):
test_case : compass.TestCase
The test case this step belongs to
mesh_type : str
The resolution or mesh type of the test case
"""
super().__init__(test_case=test_case, name='mesh', cpus_per_task=128,
min_cpus_per_task=1)

# output files
self.mesh_filename = 'GIS.nc'
self.add_output_file(filename='graph.info')
self.add_output_file(filename='GIS.nc')
self.add_output_file(filename=self.mesh_filename)
self.add_output_file(filename=f'{self.mesh_filename[:-3]}_'
f'regionMasks.nc')
# input files
self.add_input_file(
filename='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc',
target='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc',
@@ -56,136 +59,67 @@ def run(self):
Run this step of the test case
"""
logger = self.logger
mesh_name = 'GIS.nc'
section_name = 'mesh'
config = self.config
section = config[section_name]
data_path = section.get('data_path')
nProcs = section.get('nProcs')

section_gis = config['greenland']

nProcs = section_gis.get('nProcs')
src_proj = section_gis.get("src_proj")
data_path = section_gis.get('data_path')
measures_filename = section_gis.get("measures_filename")
bedmachine_filename = section_gis.get("bedmachine_filename")

measures_dataset = os.path.join(data_path, measures_filename)
bedmachine_dataset = os.path.join(data_path, bedmachine_filename)

section_name = 'mesh'

source_gridded_dataset_1km = 'greenland_1km_2024_01_29.epsg3413.icesheetonly.nc' # noqa: E501
source_gridded_dataset_2km = 'greenland_2km_2024_01_29.epsg3413.nc'

logger.info('calling build_cell_width')
cell_width, x1, y1, geom_points, geom_edges, floodMask = \
build_cell_width(
self, section_name=section_name,
gridded_dataset='greenland_2km_2024_01_29.epsg3413.nc',
gridded_dataset=source_gridded_dataset_2km,
flood_fill_start=[100, 700])
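# NOTE: flood_fill_start is interpreted here as the (x, y) starting index
# of the flood fill on the 2 km gridded dataset, used to retain only ice
# connected to the main ice sheet; see compass.landice.mesh.build_cell_width
# for the authoritative definition.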

# Now build the base mesh and perform the standard interpolation
build_mali_mesh(
self, cell_width, x1, y1, geom_points, geom_edges,
mesh_name=mesh_name, section_name=section_name,
gridded_dataset='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc', # noqa
projection='gis-gimp', geojson_file=None)

# Create scrip files if they don't already exist
if exists(data_path + '/BedMachineGreenland-v5.scrip.nc'):
logger.info('BedMachine scrip file exists;'
' skipping file creation')
else:
logger.info('creating scrip file for BedMachine dataset')
args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
'-i', data_path + '/BedMachineGreenland-v5_edits_floodFill_extrap.nc', # noqa
'-s', data_path + '/BedMachineGreenland-v5.scrip.nc',
'-p', 'gis-gimp', '-r', '2']
check_call(args, logger=logger)
if exists(data_path + '/greenland_vel_mosaic500.scrip.nc'):
logger.info('Measures scrip file exists; skipping file creation')
else:
logger.info('creating scrip file for 2006-2010 velocity dataset')
args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
'-i', data_path + '/greenland_vel_mosaic500_extrap.nc',
'-s', data_path + '/greenland_vel_mosaic500.scrip.nc',
'-p', 'gis-gimp', '-r', '2']
check_call(args, logger=logger)

logger.info('calling set_lat_lon_fields_in_planar_grid.py')
args = ['set_lat_lon_fields_in_planar_grid.py', '-f',
'GIS.nc', '-p', 'gis-gimp']
check_call(args, logger=logger)
mesh_name=self.mesh_filename, section_name=section_name,
gridded_dataset=source_gridded_dataset_1km,
projection=src_proj, geojson_file=None)

# Create scrip file for the newly generated mesh
logger.info('creating scrip file for destination mesh')
scrip_from_mpas('GIS.nc', 'GIS.scrip.nc')
args = ['create_SCRIP_file_from_MPAS_mesh.py',
'-m', 'GIS.nc',
'-s', 'GIS.scrip.nc']
check_call(args, logger=logger)

# Create weight files from datasets to mesh
if exists('BedMachine_to_MPAS_weights.nc'):
logger.info('BedMachine_to_MPAS_weights.nc exists; skipping')
else:
logger.info('generating gridded dataset -> MPAS weights')
args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen', '--source',
data_path + 'BedMachineGreenland-v5.scrip.nc',
'--destination',
'GIS.scrip.nc',
'--weight', 'BedMachine_to_MPAS_weights.nc',
'--method', 'conserve',
"-i", "-64bit_offset",
"--dst_regional", "--src_regional", '--netcdf4']
check_call(args, logger=logger)

if exists('measures_to_MPAS_weights.nc'):
logger.info('measures_to_MPAS_weights.nc exists; skipping')
else:
logger.info('generating gridded dataset -> MPAS weights')
args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen', '--source',
data_path + 'greenland_vel_mosaic500.scrip.nc',
'--destination',
'GIS.scrip.nc',
'--weight', 'measures_to_MPAS_weights.nc',
'--method', 'conserve',
"-i", "-64bit_offset", '--netcdf4',
"--dst_regional", "--src_regional", '--ignore_unmapped']
check_call(args, logger=logger)

# interpolate fields from BedMachine and Measures
# Using conservative remapping
logger.info('calling interpolate_to_mpasli_grid.py')
args = ['interpolate_to_mpasli_grid.py', '-s',
data_path + '/BedMachineGreenland-v5_edits_floodFill_extrap.nc', # noqa
'-d', 'GIS.nc', '-m', 'e',
'-w', 'BedMachine_to_MPAS_weights.nc']
check_call(args, logger=logger)

logger.info('calling interpolate_to_mpasli_grid.py')
args = ['interpolate_to_mpasli_grid.py', '-s',
data_path + '/greenland_vel_mosaic500_extrap.nc',
'-d', 'GIS.nc', '-m', 'e',
'-w', 'measures_to_MPAS_weights.nc',
'-v', 'observedSurfaceVelocityX',
'observedSurfaceVelocityY',
'observedSurfaceVelocityUncertainty']
check_call(args, logger=logger)

logger.info('Marking domain boundaries dirichlet')
args = ['mark_domain_boundaries_dirichlet.py',
'-f', 'GIS.nc']
check_call(args, logger=logger)

dst_scrip_file = f"{self.mesh_filename.split('.')[:-1][0]}_scrip.nc"
scrip_from_mpas(self.mesh_filename, dst_scrip_file)
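# for the default mesh_filename of 'GIS.nc', dst_scrip_file evaluates
# to 'GIS_scrip.nc'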

# Now perform bespoke interpolation of geometry and velocity data
# from their respective sources
interp_gridded2mali(self, bedmachine_dataset, dst_scrip_file, nProcs,
self.mesh_filename, src_proj, variables="all")

# only interpolate a subset of MEASURES variables onto the MALI mesh
measures_vars = ['observedSurfaceVelocityX',
'observedSurfaceVelocityY',
'observedSurfaceVelocityUncertainty']
interp_gridded2mali(self, measures_dataset, dst_scrip_file, nProcs,
self.mesh_filename, src_proj,
variables=measures_vars)

# perform some final cleanup details
clean_up_after_interp(self.mesh_filename)

# create graph file
logger.info('creating graph.info')
make_graph_file(mesh_filename=mesh_name,
make_graph_file(mesh_filename=self.mesh_filename,
graph_filename='graph.info')
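# graph.info is the cell-connectivity (Metis) graph file that compass and
# MALI use to partition the mesh across MPI tasks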
# Create a backup in case clean-up goes awry
copyfile('GIS.nc', 'GIS_backup.nc')

# Clean up: trim to iceMask and set large velocity
# uncertainties where appropriate.
data = netCDF4.Dataset('GIS.nc', 'r+')
data.set_auto_mask(False)
data.variables['thickness'][:] *= (data.variables['iceMask'][:] > 1.5)

mask = np.logical_or(
np.isnan(
data.variables['observedSurfaceVelocityUncertainty'][:]),
data.variables['thickness'][:] < 1.0)
mask = np.logical_or(
mask,
data.variables['observedSurfaceVelocityUncertainty'][:] == 0.0)
data.variables['observedSurfaceVelocityUncertainty'][0, mask[0, :]] = 1.0 # noqa
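# In other words: wherever the observed uncertainty is NaN or zero, or the
# ice is thinner than 1 m, a placeholder uncertainty of 1.0 (model units) is
# assigned so that, per the comment above, those observations are treated as
# highly uncertain.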

# create region masks
mask_filename = f'{mesh_name[:-3]}_regionMasks.nc'
make_region_masks(self, mesh_name, mask_filename,
mask_filename = f'{self.mesh_filename[:-3]}_regionMasks.nc'
make_region_masks(self, self.mesh_filename, mask_filename,
self.cpus_per_task,
tags=['eastCentralGreenland',
'northEastGreenland',
@@ -196,6 +130,9 @@ def run(self):
'southWestGreenland',
'westCentralGreenland'])

# is there a way to encompass this in an existing framework function?
data = netCDF4.Dataset('GIS.nc', 'r+')
data.set_auto_mask(False)
# Ensure basalHeatFlux is positive
data.variables['basalHeatFlux'][:] = np.abs(
data.variables['basalHeatFlux'][:])
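Taken together, the refactored run() delegates all remapping to the shared landice framework helpers. Below is a minimal sketch of the new flow inside Mesh.run(), using only the helper signatures as they are called above; bedmachine_dataset, measures_dataset and nProcs stand in for the values read from the [greenland] config section, and the final clean-up comment is an assumption about what that helper does.

from mpas_tools.scrip.from_mpas import scrip_from_mpas

from compass.landice.mesh import (
    build_cell_width,
    build_mali_mesh,
    clean_up_after_interp,
    interp_gridded2mali,
)

# 1. cell-spacing field derived from the 2 km gridded dataset
cell_width, x1, y1, geom_points, geom_edges, flood_mask = build_cell_width(
    self, section_name='mesh',
    gridded_dataset='greenland_2km_2024_01_29.epsg3413.nc',
    flood_fill_start=[100, 700])

# 2. base MALI mesh plus the standard interpolation from the 1 km dataset
build_mali_mesh(
    self, cell_width, x1, y1, geom_points, geom_edges,
    mesh_name='GIS.nc', section_name='mesh',
    gridded_dataset='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc',
    projection='gis-gimp', geojson_file=None)

# 3. SCRIP description of the new mesh, then remapping of BedMachine
#    (all variables) and MEASURES (velocity fields only)
scrip_from_mpas('GIS.nc', 'GIS_scrip.nc')
interp_gridded2mali(self, bedmachine_dataset, 'GIS_scrip.nc', nProcs,
                    'GIS.nc', 'gis-gimp', variables='all')
interp_gridded2mali(self, measures_dataset, 'GIS_scrip.nc', nProcs,
                    'GIS.nc', 'gis-gimp',
                    variables=['observedSurfaceVelocityX',
                               'observedSurfaceVelocityY',
                               'observedSurfaceVelocityUncertainty'])

# 4. remove intermediate interpolation files (assumed behavior of the helper)
clean_up_after_interp('GIS.nc')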
23 changes: 20 additions & 3 deletions compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
@@ -1,9 +1,6 @@
# config options for high_res_mesh test case
[mesh]

# path to directory containing BedMachine and Measures datasets
data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/

# number of levels in the mesh
levels = 10

@@ -50,3 +47,23 @@ use_speed = True
use_dist_to_grounding_line = False
use_dist_to_edge = True
use_bed = True

[greenland]
# path to directory containing BedMachine and Measures datasets
# (default value is for Perlmutter)
data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/

# filename of the BedMachine thickness and bedTopography dataset
# (default value is for Perlmutter)
bedmachine_filename = BedMachineGreenland-v5_edits_floodFill_extrap.nc

# filename of the MEASURES ice velocity dataset
# (default value is for Perlmutter)
measures_filename = greenland_vel_mosaic500_extrap.nc

# projection of the source datasets, according to the dictionary keys in
# create_SCRIP_file_from_planar_rectangular_grid.py from MPAS_Tools
src_proj = gis-gimp

# number of processors to use for ESMF_RegridWeightGen
nProcs = 128
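Since the data_path default only exists on Perlmutter, other machines will need to point these options at local copies of the datasets, for example in a user config file supplied at setup time (the paths and task count below are illustrative, not defaults):

[greenland]
# local copies of the BedMachine and MEASURES datasets (illustrative paths)
data_path = /path/to/GIS_datasets/
bedmachine_filename = BedMachineGreenland-v5_edits_floodFill_extrap.nc
measures_filename = greenland_vel_mosaic500_extrap.nc

# match the number of MPI tasks available for ESMF_RegridWeightGen
nProcs = 36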
