
Review updates
Pperezhogin committed Aug 8, 2023
1 parent 7e1c02e commit 4beb12a
Showing 12 changed files with 13,432 additions and 3,233 deletions.
1,565 changes: 1,565 additions & 0 deletions notebooks/3-2-dealiasing-online.ipynb

1,967 changes: 1,967 additions & 0 deletions notebooks/3-2-dealiasing.ipynb

3,364 changes: 3,364 additions & 0 deletions notebooks/JAMES_figures-review.ipynb

15 changes: 3 additions & 12 deletions notebooks/JAMES_figures.ipynb
@@ -4,16 +4,7 @@
    "cell_type": "code",
    "execution_count": 2,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/ext3/miniconda3/lib/python3.9/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
-      " from .autonotebook import tqdm as notebook_tqdm\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "import xarray as xr\n",
     "import numpy as np\n",
@@ -2644,7 +2635,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "env_07_04",
+   "display_name": "environment_13_Jul_2023",
    "language": "python",
    "name": "my_env"
   },
@@ -2658,7 +2649,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.9.12"
+   "version": "3.11.4"
  },
  "vscode": {
   "interpreter": {
1,063 changes: 1,059 additions & 4 deletions notebooks/Shallow_CNNs.ipynb

1,327 changes: 1,327 additions & 0 deletions notebooks/generalization.ipynb

3,393 changes: 190 additions & 3,203 deletions notebooks/grid_convergence.ipynb

1,974 changes: 1,974 additions & 0 deletions notebooks/hires-subgrid-forcing.ipynb

1,881 changes: 1,881 additions & 0 deletions notebooks/molecular-viscosity.ipynb

107 changes: 98 additions & 9 deletions pyqg_generative/tools/comparison_tools.py
@@ -75,11 +75,14 @@ def coarsegrain_reference_dataset(ds, resolution, operator):
         operator = op.Operator2
     elif operator == 'Operator4':
         operator = op.Operator4
+    elif operator == 'Operator5':
+        operator = op.Operator5
     else:
         raise ValueError('operator must be Operator1 or Operator2')
 
     dsf = xr.Dataset()
     for var in ['q', 'u', 'v', 'psi']:
+        print(f'var={var}')
         dsf[var] = operator(ds[var], resolution)
 
     # Coarsegrain spectral flux
@@ -191,22 +194,98 @@ def compute_Eflux(ds):
 
     return normalized_differences, differences, scales
 
+def dataset_statistics(ds, delta=0.25, **kw_ispec):
+    '''
+    If path is given, the dataset is returned as is
+    If dataset is given, statistics are computed
+    '''
+    if isinstance(ds, str):
+        ds = xr.open_mfdataset(ds, combine='nested', concat_dim='run', decode_times=False, chunks={'time':1, 'run':1})
+        if 'years' not in ds['time'].attrs:
+            ds['time'] = ds['time'] / 360
+            ds['time'].attrs = {'long_name':'time [$years$]'}
+        return ds
+
+    def KE(ds):
+        return (ds.u**2 + ds.v**2) * 0.5
+
+    def KE_time(ds):
+        if 'run' in ds.dims:
+            dims = ['run', 'x', 'y']
+        else:
+            dims = ['x', 'y']
+        return op.ave_lev(KE(ds), delta=delta).mean(dims)
+
+    stats = xr.Dataset()
+
+    m = pyqg.QGModel(nx=len(ds.x), log_level=0)
+    for key in ['APEflux', 'APEgenspec', 'Dissspec', 'ENSDissspec',
+                'ENSflux', 'ENSfrictionspec', 'ENSgenspec', 'ENSparamspec',
+                'Ensspec', 'KEflux', 'KEfrictionspec', 'KEspec', 'entspec',
+                'paramspec', 'paramspec_APEflux', 'paramspec_KEflux']:
+        if key not in ds.keys():
+            continue
+        var = ds[key]
+        if 'run' in ds.dims:
+            var = var.mean(dim='run')
+        if 'lev' in var.dims:
+            sps = []
+            for z in [0,1]:
+                k, sp = calc_ispec(m, var.isel(lev=z).values, **kw_ispec)
+                sps.append(sp)
+            sp = np.stack(sps, axis=0)
+            stats[key+'r'] = \
+                xr.DataArray(sp, dims=['lev', 'kr'],
+                    coords=[[1,2], coord(k, 'wavenumber, $m^{-1}$')])
+
+            var_mean = op.ave_lev(var, delta)
+            k, sp = calc_ispec(m, var_mean.values, **kw_ispec)
+            stats[key+'r_mean'] = \
+                xr.DataArray(sp, dims=['kr'],
+                    coords=[coord(k, 'wavenumber, $m^{-1}$')])
+        else:
+            k, sp = calc_ispec(m, var.values, **kw_ispec)
+            stats[key+'r'] = \
+                xr.DataArray(sp, dims=['kr'],
+                    coords=[coord(k, 'wavenumber, $m^{-1}$')])
+
+    budget_sum = 0
+    for key in ['KEfluxr', 'APEfluxr', 'APEgenspecr', 'KEfrictionspecr',
+                'paramspec_APEfluxr', 'paramspec_KEfluxr']:
+        if key in stats.keys():
+            budget_sum += stats[key]
+    stats['Energysumr'] = budget_sum
+
+    Eflux = 0
+    for key in ['KEfluxr', 'APEfluxr', 'paramspec_KEfluxr', 'paramspec_APEfluxr']:
+        if key in stats.keys():
+            Eflux = Eflux + stats[key]
+    stats['Efluxr'] = Eflux
+
+    stats['KE_time'] = KE_time(ds)
+
+    if 'years' not in stats['time'].attrs:
+        stats['time'] = stats['time'] / 360
+        stats['time'].attrs = {'long_name':'time [$years$]'}
+
+    return stats
+
 def cache_path(path):
     dir = os.path.dirname(path)
     files = os.path.basename(path)
     #https://www.delftstack.com/howto/python/str-to-hex-python/
     cachename = files.encode('utf-8').hex() + '.cache_netcdf'
     return os.path.join(dir,cachename)
 
-def dataset_smart_read(path, delta=0.25, read_cache=True):
+def dataset_smart_read(path, delta=0.25, read_cache=True, compute_all=True):
     #print(path)
     nfiles = len(glob.glob(path))
     #if nfiles < 10:
     #print('Warning! Computations are unstable. Number of files is less than 10 and equal to', nfiles)
     cache = cache_path(path)
     if os.path.exists(cache) and read_cache:
         #print('Read cache ' + cache)
-        ds1 = xr.open_mfdataset(path, combine='nested', concat_dim='run', decode_times=False)
+        ds1 = xr.open_mfdataset(path, combine='nested', concat_dim='run', decode_times=False, chunks={'time':1, 'run':1})
         ds2 = xr.open_dataset(cache)
         ds1['time'] = ds1['time'] / 360
         ds1['time'].attrs = {'long_name':'time [$years$]'}
@@ -216,7 +295,7 @@ def dataset_smart_read(path, delta=0.25, read_cache=True):
         #print('Delete cache ' + cache)
         os.remove(cache)
 
-    ds = xr.open_mfdataset(path, combine='nested', concat_dim='run', decode_times=False)
+    ds = xr.open_mfdataset(path, combine='nested', concat_dim='run', decode_times=False, chunks={'time':1, 'run':1})
     ds['time'] = ds['time'] / 360
     ds['time'].attrs = {'long_name':'time [$years$]'}
 
@@ -238,13 +317,18 @@ def KE_time(ds):
     def Vabs(ds):
         return np.sqrt(2*KE(ds))
 
-    stats['omega'] = relative_vorticity(ds)
-    stats['KE'] = KE(ds)
-    stats['Ens'] = Ens(ds)
-    stats['Vabs'] = Vabs(ds)
+    if compute_all:
+        stats['omega'] = relative_vorticity(ds)
+        stats['KE'] = KE(ds)
+        stats['Ens'] = Ens(ds)
+        stats['Vabs'] = Vabs(ds)
 
     def PDF_var(ds, var, lev):
-        ds_ = ds.isel(time=AVERAGE_SLICE_ANDREW).isel(lev=lev)
+        if compute_all:
+            time = AVERAGE_SLICE_ANDREW
+        else:
+            time = slice(-1, None)
+        ds_ = ds.isel(time=time).isel(lev=lev)
         if var == 'KE':
             values = KE(ds_)
         elif var == 'Ens':
@@ -268,7 +352,12 @@ def PDF_var(ds, var, lev):
         points, density = PDF_histogram(values, xmin=xmin, xmax=xmax)
         return xr.DataArray(density, dims=f'{var}_{lev}', coords=[points])
 
-    for var in ['q', 'u', 'v', 'KE', 'Ens']:
+    if compute_all:
+        variables = ['q', 'u', 'v', 'KE', 'Ens']
+    else:
+        variables = ['q', 'u', 'v', 'KE']
+
+    for var in variables:
         for lev in [0,1]:
             stats[f'PDF_{var}{lev+1}'] = PDF_var(ds, var, lev)
 
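For orientation, a minimal usage sketch of the helpers touched in comparison_tools.py above. The import path mirrors the file location (pyqg_generative/tools/comparison_tools.py); the glob pattern and keyword values are placeholders, not settings taken from this commit.

import xarray as xr
from pyqg_generative.tools.comparison_tools import dataset_smart_read, dataset_statistics

# Read an ensemble of runs matching a glob pattern; statistics are cached next to
# the data, and compute_all=False (new in this commit) skips the heavier snapshot
# fields (omega, KE, Ens, Vabs) and the Ens PDF.
stats = dataset_smart_read('runs/eval/*.nc', read_cache=True, compute_all=False)

# dataset_statistics (also new in this commit) computes isotropized spectra, the
# spectral energy budget and a KE time series from an already opened dataset;
# given a path string it only opens and returns the dataset.
ds = xr.open_mfdataset('runs/eval/*.nc', combine='nested',
                       concat_dim='run', decode_times=False)
spectra = dataset_statistics(ds, delta=0.25)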
7 changes: 3 additions & 4 deletions pyqg_generative/tools/plot_helpers.py
@@ -24,16 +24,15 @@ def default_rcParams(kw={}):
     - without plotting something as initialization,
     inline does not work
     '''
-    plt.plot()
-    plt.close()
     rcParams = matplotlib.rcParamsDefault.copy()
 
     # We do not change backend because it can break
     # inlining; Also, 'backend' key is broken and
     # we cannot use pop method
     for key, val in rcParams.items():
-        if key != 'backend':
-            rcParams[key] = val
+        if 'backend' not in key and 'interactive' not in key:
+            #print(key, val)
+            matplotlib.rcParams[key] = val
 
     matplotlib.rcParams.update({
         'font.family': 'MathJax_Main',
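The rewritten loop in plot_helpers.py above now writes the defaults into matplotlib.rcParams directly instead of mutating the local copy, and also leaves interactive-related keys alone. A standalone sketch of the same idiom, independent of this repository:

import matplotlib

# Reset every rcParam to its library default, but keep backend- and
# interactive-related keys untouched so that inline plotting in Jupyter
# keeps working after the reset.
defaults = matplotlib.rcParamsDefault.copy()
for key, val in defaults.items():
    if 'backend' not in key and 'interactive' not in key:
        matplotlib.rcParams[key] = val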
2 changes: 1 addition & 1 deletion pyqg_generative/tools/simulate.py
@@ -190,7 +190,7 @@ def set_initial_condition(m):
 print(args)
 
 if args.forcing == "yes":
-    Nc = [48, 64, 96]
+    Nc = [8, 12, 16, 24, 32, 48, 64, 96]
 
     datasets = generate_subgrid_forcing(Nc, eval(args.pyqg_params), args.sampling_freq)
     for key in datasets.keys():
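With the extended Nc list, a single forcing run now produces coarse-grained training data for every target grid in the list (8 up to 96 points per side) in one pass. A sketch of the corresponding call, assuming generate_subgrid_forcing is importable from pyqg_generative.tools.simulate (matching the file path above); the model parameters and sampling frequency below are illustrative placeholders, not values from this commit.

from pyqg_generative.tools.simulate import generate_subgrid_forcing

Nc = [8, 12, 16, 24, 32, 48, 64, 96]   # coarse grids, as extended in this commit
pyqg_params = dict(nx=256, dt=3600)    # illustrative high-resolution pyqg settings
sampling_freq = 1000                   # illustrative sampling interval

datasets = generate_subgrid_forcing(Nc, pyqg_params, sampling_freq)
for key in datasets.keys():
    print(key)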
