From 20fb88c057b22991ed3b889c7388a271cc5db99d Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA
Date: Wed, 28 Aug 2024 17:01:06 +0000
Subject: [PATCH 001/157] replace separate JEDI radiance bias correction files with tarball (#2862)

---
 parm/archive/gdas_restarta.yaml.j2    |  2 ++
 parm/stage/analysis.yaml.j2           |  9 +----
 ush/python/pygfs/task/analysis.py     | 36 ++++++++++----------
 ush/python/pygfs/task/atm_analysis.py | 48 ++++++++++-----------------
 4 files changed, 38 insertions(+), 57 deletions(-)

diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2
index 9d86292065..7a011671be 100644
--- a/parm/archive/gdas_restarta.yaml.j2
+++ b/parm/archive/gdas_restarta.yaml.j2
@@ -32,6 +32,8 @@ gdas_restarta:
   - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_int"
   - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc"
   - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt"
+  {% else %}
+  - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radbcor"
   {% endif %}

   # Snow surface data
diff --git a/parm/stage/analysis.yaml.j2 b/parm/stage/analysis.yaml.j2
index d30389644a..4068f1e928 100644
--- a/parm/stage/analysis.yaml.j2
+++ b/parm/stage/analysis.yaml.j2
@@ -10,17 +10,10 @@ analysis:
   {% for mem in range(first_mem, last_mem + 1) %}
   {% set imem = mem - first_mem %}
   {% set COMOUT_ATMOS_ANALYSIS_MEM = COMOUT_ATMOS_ANALYSIS_MEM_list[imem] %}
-  {% for ftype in ["abias", "abias_air", "abias_int", "abias_pc", "atminc.nc", "radstat", "ratminc.nc"] %}
+  {% for ftype in ["abias", "abias_air", "abias_int", "abias_pc", "atminc.nc", "radstat", "ratminc.nc", "radbcor"] %}
   {% if path_exists(ICSDIR ~ "/" ~ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) ~ "/" ~ RUN ~ ".t" ~ current_cycle_HH ~ "z." ~ ftype) %}
   - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ RUN }}.t{{ current_cycle_HH }}z.{{ ftype }}", "{{ COMOUT_ATMOS_ANALYSIS_MEM }}"]
   {% endif %}
   {% endfor %}
-  {% if DO_JEDIATMVAR %}
-  {% for ftype in ["satbias.nc", "satbias_cov.nc", "tlapse.txt"] %}
-  {% for file in glob(ICSDIR ~ "/" ~ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) ~ "/" ~ RUN ~ ".t" ~ current_cycle_HH ~ "z.atms_*." ~ ftype) %}
-  - ["{{ file }}", "{{ COMOUT_ATMOS_ANALYSIS_MEM }}"]
-  {% endfor %}
-  {% endfor %}
-  {% endif %}
   {% endfor %} # mem loop
 {% endif %}
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index bf47b9a950..b8e7809263 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -42,8 +42,7 @@ def initialize(self) -> None:
         FileHandler(obs_dict).sync()

         # some analyses need to stage bias corrections
-        bias_dict = self.get_bias_dict()
-        FileHandler(bias_dict).sync()
+        self.get_bias()

         # link jedi executable to run directory
         self.link_jediexe()
@@ -128,25 +127,21 @@ def get_obs_dict(self) -> Dict[str, Any]:
         return obs_dict

     @logit(logger)
-    def get_bias_dict(self) -> Dict[str, Any]:
-        """Compile a dictionary of observation files to copy
+    def get_bias(self) -> None:
+        """Stage radiance bias correction files

-        This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of
-        observation bias correction files that are to be copied to the run directory
-        from the component directory.
-        TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in
-        `analysis.py` and should be implemented in the component where this is applicable.
+        This method stages radiance bias correction files in the obs sub-directory of the run directory

         Parameters
         ----------
+        Task: GDAS task

         Returns
         ----------
-        bias_dict: Dict
-            a dictionary containing the list of observation bias files to copy for FileHandler
+        None
         """

-        logger.info(f"Extracting a list of bias correction files from Jedi config file")
+        logger.info(f"Copy radiance bias correction tarball if Jedi config processes bias corrected radiances")
         observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations')
         logger.debug(f"observations:\n{pformat(observations)}")

@@ -156,17 +151,22 @@ def get_bias_dict(self) -> Dict[str, Any]:
             obfile = ob['obs bias']['input file']
             obdir = os.path.dirname(obfile)
             basename = os.path.basename(obfile)
-            prefix = '.'.join(basename.split('.')[:-2])
-            for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']:
-                bfile = f"{prefix}.{file}"
-                copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)])
-            # TODO: Why is this specific to ATMOS?
+            prefix = '.'.join(basename.split('.')[:-3])
+            bfile = f"{prefix}.radbcor"
+            copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)])
+            break

         bias_dict = {
             'mkdir': [os.path.join(self.task_config.DATA, 'bc')],
             'copy': copylist
         }
-        return bias_dict
+        FileHandler(bias_dict).sync()
+
+        radtar = os.path.join(obdir, bfile)
+        with tarfile.open(radtar, "r") as radbcor:
+            radbcor.extractall(path=os.path.join(self.task_config.DATA, 'obs'))
+            logger.info(f"Extract {radbcor.getnames()}")
+            radbcor.close()

     @logit(logger)
     def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: List) -> None:
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 4e9d37335c..e32dcdf815 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -207,37 +207,23 @@ def finalize(self: Analysis) -> None:
         }
         FileHandler(yaml_copy).sync()

-        # copy bias correction files to ROTDIR
-        logger.info("Copy bias correction files from DATA/ to COM/")
-        biasdir = os.path.join(self.task_config.DATA, 'bc')
-        biasls = os.listdir(biasdir)
-        biaslist = []
-        for bfile in biasls:
-            src = os.path.join(biasdir, bfile)
-            dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, bfile)
-            biaslist.append([src, dest])
-
-        gprefix = f"{self.task_config.GPREFIX}"
-        gsuffix = f"{to_YMDH(self.task_config.previous_cycle)}" + ".txt"
-        aprefix = f"{self.task_config.APREFIX}"
-        asuffix = f"{to_YMDH(self.task_config.current_cycle)}" + ".txt"
-
-        logger.info(f"Copying {gprefix}*{gsuffix} from DATA/ to COM/ as {aprefix}*{asuffix}")
-        obsdir = os.path.join(self.task_config.DATA, 'obs')
-        obsls = os.listdir(obsdir)
-        for ofile in obsls:
-            if ofile.endswith(".txt"):
-                src = os.path.join(obsdir, ofile)
-                tfile = ofile.replace(gprefix, aprefix)
-                tfile = tfile.replace(gsuffix, asuffix)
-                dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, tfile)
-                biaslist.append([src, dest])
-
-        bias_copy = {
-            'mkdir': [self.task_config.COM_ATMOS_ANALYSIS],
-            'copy': biaslist,
-        }
-        FileHandler(bias_copy).sync()
+        # path of output radiance bias correction tarfile
+        bfile = f"{self.task_config.APREFIX}radbcor"
+        radtar = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, bfile)
+
+        # get lists of radiance bias correction files to put in tarball
+        satlist = glob.glob(os.path.join(self.task_config.DATA, 'bc', '*satbias*nc'))
+        tlaplist = glob.glob(os.path.join(self.task_config.DATA, 
'obs', '*tlapse.txt')) + + # tar radiance bias correction files to ROTDIR + logger.info(f"Creating radiance bias correction tar file {radtar}") + with tarfile.open(radtar, 'w') as radbcor: + for satfile in satlist: + radbcor.add(satfile, arcname=os.path.basename(satfile)) + for tlapfile in tlaplist: + radbcor.add(tlapfile, arcname=os.path.basename(tlapfile)) + logger.info(f"Add {radbcor.getnames()}") + radbcor.close() # Copy FV3 atm increment to comrot directory logger.info("Copy UFS model readable atm increment file") From 73aa233f743ccd9232481e34fd5a3c72ace17a2f Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Thu, 29 Aug 2024 10:26:14 +0000 Subject: [PATCH 002/157] rename JEDI radiance bias correction tarball file to be more self-describing (#2862) --- parm/archive/gdas_restarta.yaml.j2 | 2 +- parm/stage/analysis.yaml.j2 | 2 +- ush/python/pygfs/task/analysis.py | 2 +- ush/python/pygfs/task/atm_analysis.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2 index 7a011671be..fc5ce9478d 100644 --- a/parm/archive/gdas_restarta.yaml.j2 +++ b/parm/archive/gdas_restarta.yaml.j2 @@ -33,7 +33,7 @@ gdas_restarta: - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt" {% else %} - - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radbcor" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}rad_varbc_params.tar" {% endif %} # Snow surface data diff --git a/parm/stage/analysis.yaml.j2 b/parm/stage/analysis.yaml.j2 index 4068f1e928..2d4b7d2854 100644 --- a/parm/stage/analysis.yaml.j2 +++ b/parm/stage/analysis.yaml.j2 @@ -10,7 +10,7 @@ analysis: {% for mem in range(first_mem, last_mem + 1) %} {% set imem = mem - first_mem %} {% set COMOUT_ATMOS_ANALYSIS_MEM = COMOUT_ATMOS_ANALYSIS_MEM_list[imem] %} - {% for ftype in ["abias", "abias_air", "abias_int", "abias_pc", "atminc.nc", "radstat", "ratminc.nc", "radbcor"] %} + {% for ftype in ["abias", "abias_air", "abias_int", "abias_pc", "atminc.nc", "radstat", "ratminc.nc", "rad_varbc_params.tar"] %} {% if path_exists(ICSDIR ~ "/" ~ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) ~ "/" ~ RUN ~ ".t" ~ current_cycle_HH ~ "z." 
~ ftype) %} - ["{{ ICSDIR }}/{{ COMOUT_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ RUN }}.t{{ current_cycle_HH }}z.{{ ftype }}", "{{ COMOUT_ATMOS_ANALYSIS_MEM }}"] {% endif %} diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index b8e7809263..ee674f8ef0 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -152,7 +152,7 @@ def get_bias(self) -> None: obdir = os.path.dirname(obfile) basename = os.path.basename(obfile) prefix = '.'.join(basename.split('.')[:-3]) - bfile = f"{prefix}.radbcor" + bfile = f"{prefix}.rad_varbc_params.tar" copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) break diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index e32dcdf815..99c6e600cb 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -208,7 +208,7 @@ def finalize(self: Analysis) -> None: FileHandler(yaml_copy).sync() # path of output radiance bias correction tarfile - bfile = f"{self.task_config.APREFIX}radbcor" + bfile = f"{self.task_config.APREFIX}rad_varbc_params.tar" radtar = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, bfile) # get lists of radiance bias correction files to put in tarball From 1203866f068eea2a9ccd28ad9d65b4c6c7d62311 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Sat, 7 Sep 2024 11:36:35 +0000 Subject: [PATCH 003/157] update gdas.cd hash to include recent commits to GDASApp develop (#2862) --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index faa95efb18..554c55a6ad 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit faa95efb18f0f52acab2cf09b17f78406f9b48b1 +Subproject commit 554c55a6ad1a4fc7d5868122a9b0147af3b300a2 From 5985e9f718ded5316007f6d38ac16b5abb2c9649 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Mon, 9 Sep 2024 14:22:13 +0000 Subject: [PATCH 004/157] update pygfs jedi class and atmospheric tasks to extract JEDI radiance bias correction files from tarball (#2862) --- ush/python/pygfs/jedi/jedi.py | 9 +++-- ush/python/pygfs/task/analysis.py | 45 ------------------------ ush/python/pygfs/task/atm_analysis.py | 16 +++++++++ ush/python/pygfs/task/atmens_analysis.py | 16 +++++++++ 4 files changed, 36 insertions(+), 50 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 62dcb517ca..dace86c4fe 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -216,11 +216,10 @@ def get_bias_dict(self, task_config: AttrDict) -> Dict[str, Any]: obfile = ob['obs bias']['input file'] obdir = os.path.dirname(obfile) basename = os.path.basename(obfile) - prefix = '.'.join(basename.split('.')[:-2]) - for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']: - bfile = f"{prefix}.{file}" - copylist.append([os.path.join(task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) - # TODO: Why is this specific to ATMOS? 
+ prefix = '.'.join(basename.split('.')[:-3]) + bfile = f"{prefix}.rad_varbc_params.tar" + copylist.append([os.path.join(task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) + break bias_dict = { 'mkdir': [os.path.join(task_config.DATA, 'bc')], diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index d9cd358219..1d8b38483b 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -41,9 +41,6 @@ def initialize(self) -> None: obs_dict = self.get_obs_dict() FileHandler(obs_dict).sync() - # some analyses need to stage bias corrections - self.get_bias() - # link jedi executable to run directory self.link_jediexe() @@ -126,48 +123,6 @@ def get_obs_dict(self) -> Dict[str, Any]: } return obs_dict - @logit(logger) - def get_bias(self) -> None: - """Stage radiance bias correciton files - - This method stages radiance bias correction files in the obs sub-diretory of the run directory - - Parameters - ---------- - Task: GDAS task - - Returns - ---------- - None - """ - - logger.info(f"Copy radiance bias correction tarball if Jedi config processes bias corrected radiances") - observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') - logger.debug(f"observations:\n{pformat(observations)}") - - copylist = [] - for ob in observations['observers']: - if 'obs bias' in ob.keys(): - obfile = ob['obs bias']['input file'] - obdir = os.path.dirname(obfile) - basename = os.path.basename(obfile) - prefix = '.'.join(basename.split('.')[:-3]) - bfile = f"{prefix}.rad_varbc_params.tar" - copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) - break - - bias_dict = { - 'mkdir': [os.path.join(self.task_config.DATA, 'bc')], - 'copy': copylist - } - FileHandler(bias_dict).sync() - - radtar = os.path.join(obdir, bfile) - with tarfile.open(radtar, "r") as radbcor: - radbcor.extractall(path=os.path.join(self.task_config.DATA, 'obs')) - logger.info(f"Extract {radbcor.getnames()}") - radbcor.close() - @logit(logger) def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: List) -> None: """Add cubed-sphere increments to cubed-sphere backgrounds diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index ad082063bb..36f768fa13 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import os +import re import glob import gzip import tarfile @@ -143,6 +144,21 @@ def initialize_analysis(self) -> None: FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") + # Extract radiance bias correction files from tarball + for action, filelist in bias_dict.items(): + if 'copy' in action: + for sublist in filelist: + if len(sublist) != 2: + raise Exception( + f"List must be of the form ['src', 'dest'], not {sublist}") + src = sublist[0] + if re.search(".tar", src): + radtar = src + with tarfile.open(radtar, "r") as radbcor: + radbcor.extractall(path=os.path.join(self.task_config.DATA, 'obs')) + logger.info(f"Extract {radbcor.getnames()}") + radbcor.close() + # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") crtm_fix_dict = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 55e72702b1..7004bf059c 100644 --- 
a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import os +import re import glob import gzip import tarfile @@ -142,6 +143,21 @@ def initialize_analysis(self) -> None: FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") + # Extract radiance bias correction files from tarball + for action, filelist in bias_dict.items(): + if 'copy' in action: + for sublist in filelist: + if len(sublist) != 2: + raise Exception( + f"List must be of the form ['src', 'dest'], not {sublist}") + src = sublist[0] + if re.search(".tar", src): + radtar = src + with tarfile.open(radtar, "r") as radbcor: + radbcor.extractall(path=os.path.join(self.task_config.DATA, 'obs')) + logger.info(f"Extract {radbcor.getnames()}") + radbcor.close() + # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") crtm_fix_dict = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) From 4098f108a6e48df1822b1da42e9ad307845e5ef5 Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Mon, 9 Sep 2024 10:58:38 -0400 Subject: [PATCH 005/157] Remove the GTS BUFR2IODA part of the snow obs prep job and Replace it with a direct read from BUFR at runtime. --- parm/config/gfs/config.prepsnowobs | 3 - parm/config/gfs/config.snowanl | 1 + scripts/exglobal_prep_snow_obs.py | 1 - ush/python/pygfs/task/snow_analysis.py | 92 ++++---------------------- 4 files changed, 15 insertions(+), 82 deletions(-) diff --git a/parm/config/gfs/config.prepsnowobs b/parm/config/gfs/config.prepsnowobs index 60ca16ce9e..20bdd89ddf 100644 --- a/parm/config/gfs/config.prepsnowobs +++ b/parm/config/gfs/config.prepsnowobs @@ -8,11 +8,8 @@ echo "BEGIN: config.prepsnowobs" # Get task specific resources . "${EXPDIR}/config.resources" prepsnowobs -export GTS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_gts.yaml.j2" export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" -export BUFR2IODAX="${EXECgfs}/bufr2ioda.x" - export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe" export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index a2984f190b..c1a99f99ea 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -9,6 +9,7 @@ echo "BEGIN: config.snowanl" source "${EXPDIR}/config.resources" snowanl export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" +export GTS_LIST="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2" # Name of the JEDI executable and its yaml template export JEDIEXE="${EXECgfs}/gdas.x" diff --git a/scripts/exglobal_prep_snow_obs.py b/scripts/exglobal_prep_snow_obs.py index a6a9070151..aa1eb1bb7d 100755 --- a/scripts/exglobal_prep_snow_obs.py +++ b/scripts/exglobal_prep_snow_obs.py @@ -20,6 +20,5 @@ # Instantiate the snow prepare task SnowAnl = SnowAnalysis(config) - SnowAnl.prepare_GTS() if SnowAnl.task_config.cyc == 0: SnowAnl.prepare_IMS() diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 9656b00a8e..ca2d4fd49b 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -54,83 +54,6 @@ def __init__(self, config): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) - @logit(logger) - def prepare_GTS(self) -> None: - """Prepare the GTS data for a global snow analysis - - This method will prepare GTS data for a global snow analysis using JEDI. 
- This includes: - - processing GTS bufr snow depth observation data to IODA format - - Parameters - ---------- - Analysis: parent class for GDAS task - - Returns - ---------- - None - """ - - # create a temporary dict of all keys needed in this method - localconf = AttrDict() - keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', - 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] - for key in keys: - localconf[key] = self.task_config[key] - - # Read and render the GTS_OBS_LIST yaml - logger.info(f"Reading {self.task_config.GTS_OBS_LIST}") - prep_gts_config = parse_j2yaml(self.task_config.GTS_OBS_LIST, localconf) - logger.debug(f"{self.task_config.GTS_OBS_LIST}:\n{pformat(prep_gts_config)}") - - # copy the GTS obs files from COM_OBS to DATA/obs - logger.info("Copying GTS obs for bufr2ioda.x") - FileHandler(prep_gts_config.gtsbufr).sync() - - logger.info("Link BUFR2IODAX into DATA/") - exe_src = self.task_config.BUFR2IODAX - exe_dest = os.path.join(localconf.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - - # Create executable instance - exe = Executable(self.task_config.BUFR2IODAX) - - def _gtsbufr2iodax(exe, yaml_file): - if not os.path.isfile(yaml_file): - logger.exception(f"FATAL ERROR: {yaml_file} not found") - raise FileNotFoundError(yaml_file) - - logger.info(f"Executing {exe}") - try: - exe(yaml_file) - except OSError: - raise OSError(f"Failed to execute {exe} {yaml_file}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exe} {yaml_file}") - - # Loop over entries in prep_gts_config.bufr2ioda keys - # 1. generate bufr2ioda YAML files - # 2. execute bufr2ioda.x - for name in prep_gts_config.bufr2ioda.keys(): - gts_yaml = os.path.join(self.task_config.DATA, f"bufr_{name}_snow.yaml") - logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}") - temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf) - save_as_yaml(temp_yaml, gts_yaml) - logger.info(f"Wrote bufr2ioda YAML to: {gts_yaml}") - - # execute BUFR2IODAX to convert {name} bufr data into IODA format - _gtsbufr2iodax(exe, gts_yaml) - - # Ensure the IODA snow depth GTS file is produced by the IODA converter - # If so, copy to COM_OBS/ - try: - FileHandler(prep_gts_config.gtsioda).sync() - except OSError as err: - logger.exception(f"{self.task_config.BUFR2IODAX} failed to produce GTS ioda files") - raise OSError(err) - @logit(logger) def prepare_IMS(self) -> None: """Prepare the IMS data for a global snow analysis @@ -248,7 +171,7 @@ def initialize(self) -> None: # create a temporary dict of all keys needed in this method localconf = AttrDict() - keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', + keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] for key in keys: localconf[key] = self.task_config[key] @@ -272,6 +195,19 @@ def initialize(self) -> None: save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}") + # Read and render the GTS_LIST yaml + logger.info(f"Reading {self.task_config.GTS_LIST}") + gts_config = parse_j2yaml(self.task_config.GTS_LIST, localconf) + logger.debug(f"{self.task_config.GTS_LIST}:\n{pformat(gts_config)}") + + # Generate bufr2ioda mapping YAML files + for name in gts_config.bufr2ioda.keys(): + mapping_yaml = os.path.join(self.task_config.DATA, "obs", f"bufr_{name}_mapping.yaml") + logger.info(f"Generate 
BUFR2IODA YAML file: {mapping_yaml}") + temp_yaml = parse_j2yaml(gts_config.bufr2ioda[name], localconf) + save_as_yaml(temp_yaml, mapping_yaml) + logger.info(f"Wrote bufr2ioda YAML to: {mapping_yaml}") + # need output dir for diags and anl logger.info("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ From 8d1bc8743c30b8db006d7e0731c7bd94eb1baf73 Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Mon, 9 Sep 2024 20:38:56 -0400 Subject: [PATCH 006/157] Change HOMEgfs to PARMgfs. --- ush/python/pygfs/task/snow_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index ca2d4fd49b..aa51a65bf6 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -171,7 +171,7 @@ def initialize(self) -> None: # create a temporary dict of all keys needed in this method localconf = AttrDict() - keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', + keys = ['PARMgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] for key in keys: localconf[key] = self.task_config[key] From 73e56a7fa14c553f0d64003b88c8e9118f088bea Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Tue, 10 Sep 2024 10:50:33 +0000 Subject: [PATCH 007/157] move radiance bias correction staging to jedi class (#2868) --- ush/python/pygfs/jedi/jedi.py | 29 ++++++++++++++++-------- ush/python/pygfs/task/atm_analysis.py | 20 +--------------- ush/python/pygfs/task/atmens_analysis.py | 20 +--------------- 3 files changed, 21 insertions(+), 48 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index dace86c4fe..4e527460ab 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -1,10 +1,13 @@ #!/usr/bin/env python3 import os +import tarfile from logging import getLogger +from pprint import pformat from typing import List, Dict, Any, Optional from jcb import render from wxflow import (AttrDict, + FileHandler, chdir, rm_p, parse_j2yaml, logit, @@ -188,14 +191,10 @@ def get_obs_dict(self, task_config: AttrDict) -> Dict[str, Any]: return obs_dict @logit(logger) - def get_bias_dict(self, task_config: AttrDict) -> Dict[str, Any]: + def get_bias(self, task_config: AttrDict) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of - observation bias correction files that are to be copied to the run directory - from the component directory. - TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in - `analysis.py` and should be implemented in the component where this is applicable. 
+        This method stages radiance bias correction files in the obs sub-directory of the run directory

         Parameters
         ----------
@@ -204,8 +203,7 @@ def get_bias_dict(self, task_config: AttrDict) -> Dict[str, Any]:

         Returns
         ----------
-        bias_dict: Dict
-            a dictionary containing the list of observation bias files to copy for FileHandler
+        None
         """

         observations = find_value_in_nested_dict(self.config, 'observations')
@@ -218,14 +216,25 @@ def get_bias_dict(self, task_config: AttrDict) -> Dict[str, Any]:
             basename = os.path.basename(obfile)
             prefix = '.'.join(basename.split('.')[:-3])
             bfile = f"{prefix}.rad_varbc_params.tar"
-            copylist.append([os.path.join(task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)])
+            radtar = os.path.join(obdir, bfile)
+            copylist.append([os.path.join(task_config.COM_ATMOS_ANALYSIS_PREV, bfile), radtar])
             break

         bias_dict = {
             'mkdir': [os.path.join(task_config.DATA, 'bc')],
             'copy': copylist
         }
-        return bias_dict
+
+        # stage bias corrections
+        FileHandler(bias_dict).sync()
+        logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
+
+        # extract radiance bias correction files from tarball
+        radtar = os.path.join(obdir, bfile)
+        with tarfile.open(radtar, "r") as radbcor:
+            radbcor.extractall(path=os.path.join(task_config.DATA, 'obs'))
+            logger.info(f"Extract {radbcor.getnames()}")
+            radbcor.close()


     @logit(logger)
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 36f768fa13..c1a6558ade 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3

 import os
-import re
 import glob
 import gzip
 import tarfile
@@ -140,24 +139,7 @@ def initialize_analysis(self) -> None:

         # stage bias corrections
         logger.info(f"Staging list of bias correction files generated from JEDI config")
-        bias_dict = self.jedi.get_bias_dict(self.task_config)
-        FileHandler(bias_dict).sync()
-        logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
-
-        # Extract radiance bias correction files from tarball
-        for action, filelist in bias_dict.items():
-            if 'copy' in action:
-                for sublist in filelist:
-                    if len(sublist) != 2:
-                        raise Exception(
-                            f"List must be of the form ['src', 'dest'], not {sublist}")
-                    src = sublist[0]
-                    if re.search(".tar", src):
-                        radtar = src
-                        with tarfile.open(radtar, "r") as radbcor:
-                            radbcor.extractall(path=os.path.join(self.task_config.DATA, 'obs'))
-                            logger.info(f"Extract {radbcor.getnames()}")
-                            radbcor.close()
+        self.jedi.get_bias(self.task_config)

         # stage CRTM fix files
         logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 7004bf059c..1f3a065133 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3

 import os
-import re
 import glob
 import gzip
 import tarfile
@@ -139,24 +138,7 @@ def initialize_analysis(self) -> None:

         # stage bias corrections
         logger.info(f"Staging list of bias correction files generated from JEDI config")
-        bias_dict = self.jedi.get_bias_dict(self.task_config)
-        FileHandler(bias_dict).sync()
-        logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
-
-        # Extract radiance bias correction files from tarball
-        for action, filelist in bias_dict.items():
-            if 'copy' in action:
-                for sublist in filelist:
-                    if len(sublist) != 2:
-                        raise Exception(
-                            f"List must be of the form ['src', 'dest'], not {sublist}")
-                    src = 
sublist[0] - if re.search(".tar", src): - radtar = src - with tarfile.open(radtar, "r") as radbcor: - radbcor.extractall(path=os.path.join(self.task_config.DATA, 'obs')) - logger.info(f"Extract {radbcor.getnames()}") - radbcor.close() + self.jedi.get_bias(self.task_config) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From 666d7d6d89c8c17ef9627367a3efdb76454d07f7 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Tue, 10 Sep 2024 15:03:43 +0000 Subject: [PATCH 008/157] initial attempt to generalize processing of variational bias correction files using jedi class (#2862) --- ush/python/pygfs/jedi/jedi.py | 6 +++--- ush/python/pygfs/task/atm_analysis.py | 6 ++++-- ush/python/pygfs/task/atmens_analysis.py | 6 ++++-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 4e527460ab..2b899df4e3 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -191,7 +191,7 @@ def get_obs_dict(self, task_config: AttrDict) -> Dict[str, Any]: return obs_dict @logit(logger) - def get_bias(self, task_config: AttrDict) -> Dict[str, Any]: + def get_bias(self, task_config: AttrDict, bias_file) -> Dict[str, Any]: """Compile a dictionary of observation files to copy This method stages radiance bias correction files in the obs sub-diretory of the run directory @@ -215,9 +215,9 @@ def get_bias(self, task_config: AttrDict) -> Dict[str, Any]: obdir = os.path.dirname(obfile) basename = os.path.basename(obfile) prefix = '.'.join(basename.split('.')[:-3]) - bfile = f"{prefix}.rad_varbc_params.tar" + bfile = f"{prefix}.{bias_file}" radtar = os.path.join(obdir, bfile) - copylist.append([os.path.join(task_config.COM_ATMOS_ANALYSIS_PREV, bfile), radtar]) + copylist.append([os.path.join(task_config.VarBcDir, bfile), radtar]) break bias_dict = { diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index c1a6558ade..454d557e87 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -66,7 +66,8 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'atm_obsdatain_path': f"{self.task_config.DATA}/obs/", 'atm_obsdataout_path': f"{self.task_config.DATA}/diags/", - 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications + 'BKG_TSTEP': "PT1H", # Placeholder for 4D applications + 'VarBcDir': f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}", } ) @@ -139,7 +140,8 @@ def initialize_analysis(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files generated from JEDI config") - self.jedi.get_bias(self.task_config) + bias_file = f"rad_varbc_params.tar" + self.jedi.get_bias(self.task_config, bias_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 1f3a065133..01fc4faac0 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -66,7 +66,8 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'atm_obsdatain_path': f"./obs/", 'atm_obsdataout_path': f"./diags/", - 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications + 'BKG_TSTEP': "PT1H", # Placeholder for 4D applications + 'VarBcDir': 
f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}", } ) @@ -138,7 +139,8 @@ def initialize_analysis(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files generated from JEDI config") - self.jedi.get_bias(self.task_config) + bias_file = f"rad_varbc_params.tar" + self.jedi.get_bias(self.task_config, bias_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From 1e4a28434dc475cf533250219383bf20c347977e Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Tue, 10 Sep 2024 17:09:42 +0000 Subject: [PATCH 009/157] move VarBcDir inside initialize_analysis method (#2862) --- ush/python/pygfs/task/atm_analysis.py | 4 ++-- ush/python/pygfs/task/atmens_analysis.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 454d557e87..2cc738008a 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -66,8 +66,7 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'atm_obsdatain_path': f"{self.task_config.DATA}/obs/", 'atm_obsdataout_path': f"{self.task_config.DATA}/diags/", - 'BKG_TSTEP': "PT1H", # Placeholder for 4D applications - 'VarBcDir': f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) @@ -140,6 +139,7 @@ def initialize_analysis(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files generated from JEDI config") + self.task_config.VarBcDir = f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}" bias_file = f"rad_varbc_params.tar" self.jedi.get_bias(self.task_config, bias_file) diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 01fc4faac0..b57c80e22c 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -66,8 +66,7 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'atm_obsdatain_path': f"./obs/", 'atm_obsdataout_path': f"./diags/", - 'BKG_TSTEP': "PT1H", # Placeholder for 4D applications - 'VarBcDir': f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) @@ -139,6 +138,7 @@ def initialize_analysis(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files generated from JEDI config") + self.task_config.VarBcDir = f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}" bias_file = f"rad_varbc_params.tar" self.jedi.get_bias(self.task_config, bias_file) From b48e126341795a11ad3941e0beb56c95644fac99 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Wed, 11 Sep 2024 18:29:24 +0000 Subject: [PATCH 010/157] update sorc/gdas.cd hash to bring in radiance tarball ctest changes (#2862) --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 554c55a6ad..032b708f6a 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 554c55a6ad1a4fc7d5868122a9b0147af3b300a2 +Subproject commit 032b708f6a476ae4726d1533b82feb21fc8daa92 From 5074d11abf0fce18b4fca2b4c31623cb0582516a Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Thu, 12 Sep 2024 07:13:51 -0400 Subject: [PATCH 011/157] Made changes to stage the static GTS mapping files. 
--- ush/python/pygfs/task/snow_analysis.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index aa51a65bf6..4a6c6efd8a 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -191,23 +191,15 @@ def initialize(self) -> None: logger.info("Staging ensemble backgrounds") FileHandler(self.get_ens_bkg_dict(localconf)).sync() + # stage GTS bufr2ioda mapping YAML files + logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_LIST}") + gts_mapping_list = parse_j2yaml(self.task_config.GTS_LIST, localconf) + FileHandler(gts_mapping_list).sync() + # Write out letkfoi YAML file save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}") - # Read and render the GTS_LIST yaml - logger.info(f"Reading {self.task_config.GTS_LIST}") - gts_config = parse_j2yaml(self.task_config.GTS_LIST, localconf) - logger.debug(f"{self.task_config.GTS_LIST}:\n{pformat(gts_config)}") - - # Generate bufr2ioda mapping YAML files - for name in gts_config.bufr2ioda.keys(): - mapping_yaml = os.path.join(self.task_config.DATA, "obs", f"bufr_{name}_mapping.yaml") - logger.info(f"Generate BUFR2IODA YAML file: {mapping_yaml}") - temp_yaml = parse_j2yaml(gts_config.bufr2ioda[name], localconf) - save_as_yaml(temp_yaml, mapping_yaml) - logger.info(f"Wrote bufr2ioda YAML to: {mapping_yaml}") - # need output dir for diags and anl logger.info("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ From f122edf47da50c78a0dee9e5dd2f00514456eebd Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Thu, 12 Sep 2024 09:35:22 -0400 Subject: [PATCH 012/157] Change the name to be more descriptive. 
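
The same concern drives file naming on the radiance side of this series, where the bias
correction tarball name is derived from the first 'obs bias' input file in the rendered
JEDI config. A short illustration of that prefix logic in get_bias_dict; the input file
name here is hypothetical:

    import os

    obfile = "/path/to/rundir/bc/gdas.t00z.atms_n20.satbias.nc"  # hypothetical 'obs bias: input file' entry
    basename = os.path.basename(obfile)          # 'gdas.t00z.atms_n20.satbias.nc'
    prefix = '.'.join(basename.split('.')[:-3])  # 'gdas.t00z' (drops sensor, file type, extension)
    bfile = f"{prefix}.rad_varbc_params.tar"     # 'gdas.t00z.rad_varbc_params.tar'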
--- parm/config/gfs/config.snowanl | 2 +- ush/python/pygfs/task/snow_analysis.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index c1a99f99ea..b1460dfa67 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -9,7 +9,7 @@ echo "BEGIN: config.snowanl" source "${EXPDIR}/config.resources" snowanl export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" -export GTS_LIST="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2" +export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2" # Name of the JEDI executable and its yaml template export JEDIEXE="${EXECgfs}/gdas.x" diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 4a6c6efd8a..4b991d2b34 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -192,8 +192,8 @@ def initialize(self) -> None: FileHandler(self.get_ens_bkg_dict(localconf)).sync() # stage GTS bufr2ioda mapping YAML files - logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_LIST}") - gts_mapping_list = parse_j2yaml(self.task_config.GTS_LIST, localconf) + logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_SNOW_STAGE_YAML}") + gts_mapping_list = parse_j2yaml(self.task_config.GTS_SNOW_STAGE_YAML, localconf) FileHandler(gts_mapping_list).sync() # Write out letkfoi YAML file From ba77d4ec1954968de8e88ebc002073ff85ab6953 Mon Sep 17 00:00:00 2001 From: "russ.treadon" Date: Thu, 12 Sep 2024 16:05:43 +0000 Subject: [PATCH 013/157] add extract_tar method to jedi class, use extract_tar in atm and atmens analysis scripts (#2862) --- ush/python/pygfs/jedi/jedi.py | 51 +++++++++++++++++------- ush/python/pygfs/task/atm_analysis.py | 8 +++- ush/python/pygfs/task/atmens_analysis.py | 8 +++- 3 files changed, 50 insertions(+), 17 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 2b899df4e3..b5fdb42c7c 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -3,7 +3,6 @@ import os import tarfile from logging import getLogger -from pprint import pformat from typing import List, Dict, Any, Optional from jcb import render from wxflow import (AttrDict, @@ -191,19 +190,24 @@ def get_obs_dict(self, task_config: AttrDict) -> Dict[str, Any]: return obs_dict @logit(logger) - def get_bias(self, task_config: AttrDict, bias_file) -> Dict[str, Any]: + def get_bias_dict(self, task_config: AttrDict, bias_file) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method stages radiance bias correction files in the obs sub-diretory of the run directory + This method extracts 'observers' from the JEDI yaml and determines from that list + if bias correction tar files are to be copied to the run directory + from the component directory. Parameters ---------- task_config: AttrDict Attribute-dictionary of all configuration variables associated with a GDAS task. 
+ bias_file + name of bias correction tar file Returns ---------- - None + bias_dict: Dict + a dictionary containing the list of observation bias files to copy for FileHandler """ observations = find_value_in_nested_dict(self.config, 'observations') @@ -216,8 +220,8 @@ def get_bias(self, task_config: AttrDict, bias_file) -> Dict[str, Any]: basename = os.path.basename(obfile) prefix = '.'.join(basename.split('.')[:-3]) bfile = f"{prefix}.{bias_file}" - radtar = os.path.join(obdir, bfile) - copylist.append([os.path.join(task_config.VarBcDir, bfile), radtar]) + tar_file = os.path.join(obdir, bfile) + copylist.append([os.path.join(task_config.VarBcDir, bfile), tar_file]) break bias_dict = { @@ -225,16 +229,33 @@ def get_bias(self, task_config: AttrDict, bias_file) -> Dict[str, Any]: 'copy': copylist } - # stage bias corrections - FileHandler(bias_dict).sync() - logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") + return bias_dict + + @logit(logger) + def extract_tar(self, task_config: AttrDict, tar_dict) -> Dict[str, Any]: + """Extract files from list of tarfiles + + This method extract bias correction files from tarball(s) + + Parameters + ---------- + task_config: AttrDict + Attribute-dictionary of all configuration variables associated with a GDAS task. + tar_dict + a dictionary containing the list of tar files + + Returns + ---------- + None + """ - # extract radiance bias correction files from tarball - radtar = os.path.join(obdir, bfile) - with tarfile.open(radtar, "r") as radbcor: - radbcor.extractall(path=os.path.join(task_config.DATA, 'obs')) - logger.info(f"Extract {radbcor.getnames()}") - radbcor.close() + # extract bias correction files from tar file + for tar_file in tar_dict['copy']: + if ".tar" in tar_file[1]: + with tarfile.open(tar_file[1], "r") as tarball: + tarball.extractall(path=os.path.join(task_config.DATA, 'obs')) + logger.info(f"Extract {tarball.getnames()}") + tarball.close() @logit(logger) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 2cc738008a..900e78ba1c 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -141,7 +141,13 @@ def initialize_analysis(self) -> None: logger.info(f"Staging list of bias correction files generated from JEDI config") self.task_config.VarBcDir = f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}" bias_file = f"rad_varbc_params.tar" - self.jedi.get_bias(self.task_config, bias_file) + bias_dict = self.jedi.get_bias_dict(self.task_config, bias_file) + FileHandler(bias_dict).sync() + logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") + + # extract bias corrections + logger.info(f"Extract bias correction files from tarball") + self.jedi.extract_tar(self.task_config, bias_dict) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index b57c80e22c..a02932d8f5 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -140,7 +140,13 @@ def initialize_analysis(self) -> None: logger.info(f"Staging list of bias correction files generated from JEDI config") self.task_config.VarBcDir = f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}" bias_file = f"rad_varbc_params.tar" - self.jedi.get_bias(self.task_config, bias_file) + bias_dict = self.jedi.get_bias(self.task_config, bias_file) + FileHandler(bias_dict).sync() + logger.debug(f"Bias correction 
files:\n{pformat(bias_dict)}") + + # extract bias corrections + logger.info(f"Extract bias correction files from tarball") + self.jedi.extract_tar(self.task_config, bias_dict) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From bc3a6ddab05faaea4466719e96bc36133901a199 Mon Sep 17 00:00:00 2001 From: "russ.treadon" Date: Thu, 12 Sep 2024 16:15:21 +0000 Subject: [PATCH 014/157] correct typo in atmens_analysis.py (2862) --- ush/python/pygfs/task/atmens_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index a02932d8f5..8bab631495 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -140,7 +140,7 @@ def initialize_analysis(self) -> None: logger.info(f"Staging list of bias correction files generated from JEDI config") self.task_config.VarBcDir = f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}" bias_file = f"rad_varbc_params.tar" - bias_dict = self.jedi.get_bias(self.task_config, bias_file) + bias_dict = self.jedi.get_bias_dict(self.task_config, bias_file) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") From 086158dba05b6f152a1e54f838865817fd3ba1fd Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Sun, 15 Sep 2024 12:43:11 -0400 Subject: [PATCH 015/157] Update IMS preprocessing job. --- scripts/exglobal_prep_snow_obs.py | 3 +-- workflow/rocoto/gfs_cycled_xml.py | 11 +++++++++++ workflow/rocoto/gfs_tasks.py | 8 ++++++-- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/scripts/exglobal_prep_snow_obs.py b/scripts/exglobal_prep_snow_obs.py index aa1eb1bb7d..b544d6327c 100755 --- a/scripts/exglobal_prep_snow_obs.py +++ b/scripts/exglobal_prep_snow_obs.py @@ -20,5 +20,4 @@ # Instantiate the snow prepare task SnowAnl = SnowAnalysis(config) - if SnowAnl.task_config.cyc == 0: - SnowAnl.prepare_IMS() + SnowAnl.prepare_IMS() diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py index afd663c337..fda1385798 100644 --- a/workflow/rocoto/gfs_cycled_xml.py +++ b/workflow/rocoto/gfs_cycled_xml.py @@ -24,6 +24,17 @@ def get_cycledefs(self): sdate_str = sdate.strftime("%Y%m%d%H%M") strings.append(f'\t{sdate_str} {edate_str} {interval_str}') + if self._app_config.do_jedisnowda: + sdate_snocvr = self._base['SDATE'] + edate_snocvr = self._base['EDATE'] + interval_snocvr = to_timedelta(f"24:00:00H") + sdate_snocvr = sdate_snocvr + interval_snocvr + sdate_snocvr_str = sdate_snocvr.replace(hour=0, minute=0, second=0).strftime("%Y%m%d%H%M") + edate_snocvr_str = edate_snocvr.strftime("%Y%m%d%H%M") + interval_str = timedelta_to_HMS(interval_snocvr) + if sdate_snocvr <= edate_snocvr: + strings.append(f'\t{sdate_snocvr_str} {edate_snocvr_str} {interval_str}') + if self._app_config.gfs_cyc != 0: sdate_gfs = self._base['SDATE_GFS'] edate_gfs = self._base['EDATE_GFS'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 89da933d00..4b0c98e9a2 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -571,13 +571,14 @@ def prepsnowobs(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) + cycledef = 'gdas_prep_snocvr' resources = self.get_resource('prepsnowobs') task_name = f'{self.run}prepsnowobs' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, - 'cycledef': 
self.run.replace('enkf', ''), + 'cycledef': cycledef, 'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowobs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', @@ -591,7 +592,10 @@ def prepsnowobs(self): def snowanl(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowobs'} + if f'@H' == '00': + dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowobs'} + else: + dep_dict = {'type': 'task', 'name': f'{self.run}prep'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) From 2df23be2d536e9f4bf457d700065742c3b8869ca Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Mon, 16 Sep 2024 08:05:06 -0400 Subject: [PATCH 016/157] Rename IMS snow preprocessing jobs/names to be snowcover --- env/HERA.env | 2 +- env/HERCULES.env | 2 +- env/JET.env | 2 +- env/ORION.env | 2 +- env/S4.env | 2 +- env/WCOSS2.env | 2 +- ...{JGLOBAL_PREP_SNOW_OBS => JGLOBAL_PREP_SNOWCOVER} | 4 ++-- jobs/rocoto/{prepsnowobs.sh => prepsnowcover.sh} | 4 ++-- .../gfs/{config.prepsnowobs => config.prepsnowcover} | 8 ++++---- parm/config/gfs/config.resources | 4 ++-- ...l_prep_snow_obs.py => exglobal_prep_snowcover.py} | 6 +++--- workflow/applications/gfs_cycled.py | 4 ++-- workflow/rocoto/gfs_tasks.py | 12 ++++++------ workflow/rocoto/tasks.py | 2 +- 14 files changed, 28 insertions(+), 28 deletions(-) rename jobs/{JGLOBAL_PREP_SNOW_OBS => JGLOBAL_PREP_SNOWCOVER} (90%) rename jobs/rocoto/{prepsnowobs.sh => prepsnowcover.sh} (91%) rename parm/config/gfs/{config.prepsnowobs => config.prepsnowcover} (66%) rename scripts/{exglobal_prep_snow_obs.py => exglobal_prep_snowcover.py} (80%) diff --git a/env/HERA.env b/env/HERA.env index 0d77547b5b..c602490d60 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -52,7 +52,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="HERA" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowobs" ]]; then +elif [[ "${step}" = "prepsnowcover" ]]; then export APRUN_CALCFIMS="${APRUN_default}" diff --git a/env/HERCULES.env b/env/HERCULES.env index 0138e33645..2df873648a 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -50,7 +50,7 @@ case ${step} in export sys_tp="HERCULES" export launcher_PREP="srun" ;; - "prepsnowobs") + "prepsnowcover") export APRUN_CALCFIMS="${APRUN_default}" ;; diff --git a/env/JET.env b/env/JET.env index f2b018d2d7..930ff3ff37 100755 --- a/env/JET.env +++ b/env/JET.env @@ -40,7 +40,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="JET" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowobs" ]]; then +elif [[ "${step}" = "prepsnowcover" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" diff --git a/env/ORION.env b/env/ORION.env index e8c1bcbf58..b81e08cad8 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -47,7 +47,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="ORION" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowobs" ]]; then +elif [[ "${step}" = "prepsnowcover" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" diff --git a/env/S4.env b/env/S4.env index 5d5ffd23b1..d17e3bf452 100755 --- a/env/S4.env +++ b/env/S4.env @@ -40,7 +40,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="S4" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowobs" ]]; then +elif [[ "${step}" = "prepsnowcover" ]]; then export APRUN_CALCFIMS="${APRUN_default}" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 
d2dae3ba93..385a3787f1 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -34,7 +34,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="wcoss2" export launcher_PREP="mpiexec" -elif [[ "${step}" = "prepsnowobs" ]]; then +elif [[ "${step}" = "prepsnowcover" ]]; then export APRUN_CALCFIMS="${APRUN_default}" diff --git a/jobs/JGLOBAL_PREP_SNOW_OBS b/jobs/JGLOBAL_PREP_SNOWCOVER similarity index 90% rename from jobs/JGLOBAL_PREP_SNOW_OBS rename to jobs/JGLOBAL_PREP_SNOWCOVER index 0e3557697d..e208cd0362 100755 --- a/jobs/JGLOBAL_PREP_SNOW_OBS +++ b/jobs/JGLOBAL_PREP_SNOWCOVER @@ -2,7 +2,7 @@ source "${HOMEgfs}/ush/preamble.sh" export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "prepsnowobs" -c "base prepsnowobs" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prepsnowcover" -c "base prepsnowcover" ############################################## # Set variables used in the script @@ -25,7 +25,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ ############################################################### # Run relevant script -EXSCRIPT=${GDASSNOWPREPPY:-${SCRgfs}/exglobal_prep_snow_obs.py} +EXSCRIPT=${GDASSNOWPREPPY:-${SCRgfs}/exglobal_prep_snowcover.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && (echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}") diff --git a/jobs/rocoto/prepsnowobs.sh b/jobs/rocoto/prepsnowcover.sh similarity index 91% rename from jobs/rocoto/prepsnowobs.sh rename to jobs/rocoto/prepsnowcover.sh index 3f23bc16a5..6f4bf51a5f 100755 --- a/jobs/rocoto/prepsnowobs.sh +++ b/jobs/rocoto/prepsnowcover.sh @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="prepsnowobs" +export job="prepsnowcover" export jobid="${job}.$$" ############################################################### @@ -21,6 +21,6 @@ export PYTHONPATH ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_PREP_SNOW_OBS" +"${HOMEgfs}/jobs/JGLOBAL_PREP_SNOWCOVER" status=$? exit "${status}" diff --git a/parm/config/gfs/config.prepsnowobs b/parm/config/gfs/config.prepsnowcover similarity index 66% rename from parm/config/gfs/config.prepsnowobs rename to parm/config/gfs/config.prepsnowcover index 20bdd89ddf..12b97ca470 100644 --- a/parm/config/gfs/config.prepsnowobs +++ b/parm/config/gfs/config.prepsnowcover @@ -1,12 +1,12 @@ #! /usr/bin/env bash -########## config.prepsnowobs ########## +########## config.prepsnowcover ########## # Snow Obs Prep specific -echo "BEGIN: config.prepsnowobs" +echo "BEGIN: config.prepsnowcover" # Get task specific resources -. "${EXPDIR}/config.resources" prepsnowobs +. "${EXPDIR}/config.resources" prepsnowcover export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" @@ -15,4 +15,4 @@ export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py" -echo "END: config.prepsnowobs" +echo "END: config.prepsnowcover" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index a89c72e951..39f9919441 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -12,7 +12,7 @@ if (( $# != 1 )); then echo "Must specify an input task argument to set resource variables!" 
echo "argument can be any one of the following:" echo "stage_ic aerosol_init" - echo "prep prepsnowobs prepatmiodaobs" + echo "prep prepsnowcover prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlobs atmensanlsol atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl esnowrecen" @@ -151,7 +151,7 @@ case ${step} in memory="40GB" ;; - "prepsnowobs") + "prepsnowcover") walltime="00:05:00" ntasks=1 threads_per_task=1 diff --git a/scripts/exglobal_prep_snow_obs.py b/scripts/exglobal_prep_snowcover.py similarity index 80% rename from scripts/exglobal_prep_snow_obs.py rename to scripts/exglobal_prep_snowcover.py index b544d6327c..c7be33a2cf 100755 --- a/scripts/exglobal_prep_snow_obs.py +++ b/scripts/exglobal_prep_snowcover.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 -# exglobal_prep_snow_obs.py +# exglobal_prep_snowcover.py # This script creates a SnowAnalysis object -# and runs the prepare_GTS and prepare_IMS method -# which perform the pre-processing for GTS and IMS data +# and runs the prepare_IMS method which perform +# the pre-processing for IMS data import os from wxflow import Logger, cast_strdict_as_dtypedict diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 4bb473f454..dab222ed26 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -113,7 +113,7 @@ def _get_app_configs(self): configs += ['prepobsaero'] if self.do_jedisnowda: - configs += ['prepsnowobs', 'snowanl'] + configs += ['prepsnowcover', 'snowanl'] if self.do_hybvar: configs += ['esnowrecen'] @@ -156,7 +156,7 @@ def get_task_names(self): gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] if self.do_jedisnowda: - gdas_gfs_common_tasks_before_fcst += ['prepsnowobs', 'snowanl'] + gdas_gfs_common_tasks_before_fcst += ['prepsnowcover', 'snowanl'] wave_prep_tasks = ['waveinit', 'waveprep'] wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 4b0c98e9a2..d539c8e8f9 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -564,7 +564,7 @@ def aeroanlfinal(self): return task - def prepsnowobs(self): + def prepsnowcover(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.run}prep'} @@ -572,14 +572,14 @@ def prepsnowobs(self): dependencies = rocoto.create_dependency(dep=deps) cycledef = 'gdas_prep_snocvr' - resources = self.get_resource('prepsnowobs') - task_name = f'{self.run}prepsnowobs' + resources = self.get_resource('prepsnowcover') + task_name = f'{self.run}prepsnowcover' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowobs.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowcover.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -593,7 +593,7 @@ def snowanl(self): deps = [] if f'@H' == '00': - dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowobs'} + dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowcover'} else: dep_dict = {'type': 'task', 'name': f'{self.run}prep'} deps.append(rocoto.add_dependency(dep_dict)) @@ -618,7 +618,7 @@ def snowanl(self): def esnowrecen(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prepsnowobs'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prepsnowcover'} 
deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}snowanl'} deps.append(rocoto.add_dependency(dep_dict)) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index df2b0467db..5ac321817b 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal', 'aeroanlgenb', - 'prepsnowobs', 'snowanl', 'esnowrecen', + 'prepsnowcover', 'snowanl', 'esnowrecen', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', 'atmos_prod', 'ocean_prod', 'ice_prod', From 0ab7bdd6083051f5cb9b9bc687d2a4bbbe5e2e34 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Tue, 17 Sep 2024 18:20:47 +0000 Subject: [PATCH 017/157] improve error handling for jedi class tarfile.extractall (#2862) --- ush/python/pygfs/jedi/jedi.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index b5fdb42c7c..08cfbc02af 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -252,10 +252,19 @@ def extract_tar(self, task_config: AttrDict, tar_dict) -> Dict[str, Any]: # extract bias correction files from tar file for tar_file in tar_dict['copy']: if ".tar" in tar_file[1]: - with tarfile.open(tar_file[1], "r") as tarball: - tarball.extractall(path=os.path.join(task_config.DATA, 'obs')) - logger.info(f"Extract {tarball.getnames()}") - tarball.close() + try: + with tarfile.open(tar_file[1], "r") as tarball: + tarball.extractall(path=os.path.join(task_config.DATA, 'obs')) + logger.info(f"Extract {tarball.getnames()}") + except tarfile.ReadError as err: + if tarfile.is_tarfile(tarfile[1]): + logger.error(f"FATAL ERROR: {tarfile[1]} could not be read") + raise tarfile.ReadError(f"FATAL ERROR: unable to read {tarfile[1]}") + else: + logger.info() + except tarfile.ExtractError as err: + logger.exception(f"FATAL ERROR: unable to extract from {tarfile[1]}") + raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tarfile[1]}") @logit(logger) From 8b627a159fdef4bc67e5877b8d75f9a696699675 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Tue, 17 Sep 2024 18:23:52 +0000 Subject: [PATCH 018/157] remove unnecessary close following tarfile add (#2862) --- ush/python/pygfs/task/atm_analysis.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 900e78ba1c..27243159fa 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -287,7 +287,6 @@ def finalize(self) -> None: for tlapfile in tlaplist: radbcor.add(tlapfile, arcname=os.path.basename(tlapfile)) logger.info(f"Add {radbcor.getnames()}") - radbcor.close() # Copy FV3 atm increment to comrot directory logger.info("Copy UFS model readable atm increment file") From eb4fc374b83b693ac767587452a8d63aca77675f Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Tue, 17 Sep 2024 18:45:14 +0000 Subject: [PATCH 019/157] use endswith to clean up scripting (#2862) --- ush/python/pygfs/jedi/jedi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 08cfbc02af..61ad820302 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -251,7 +251,7 @@ def 
extract_tar(self, task_config: AttrDict, tar_dict) -> Dict[str, Any]:

         # extract bias correction files from tar file
         for tar_file in tar_dict['copy']:
-            if ".tar" in tar_file[1]:
+            if tar_file[1].endswith('.tar'):
                 try:
                     with tarfile.open(tar_file[1], "r") as tarball:
                         tarball.extractall(path=os.path.join(task_config.DATA, 'obs'))
                         logger.info(f"Extract {tarball.getnames()}")

From ed52b724ef8762f9c1eea2c347a5276f49c9469b Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA
Date: Wed, 18 Sep 2024 17:13:24 +0000
Subject: [PATCH 020/157] refactor jedi class method extract_tar (#2862)

---
 ush/python/pygfs/jedi/jedi.py            | 43 +++++++++++-------------
 ush/python/pygfs/task/atm_analysis.py    |  5 +--
 ush/python/pygfs/task/atmens_analysis.py |  5 +--
 3 files changed, 26 insertions(+), 27 deletions(-)

diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py
index 61ad820302..9e69065bb2 100644
--- a/ush/python/pygfs/jedi/jedi.py
+++ b/ush/python/pygfs/jedi/jedi.py
@@ -232,39 +232,36 @@ def get_bias_dict(self, task_config: AttrDict, bias_file) -> Dict[str, Any]:
         return bias_dict

     @logit(logger)
-    def extract_tar(self, task_config: AttrDict, tar_dict) -> Dict[str, Any]:
-        """Extract files from list of tarfiles
+    def extract_tar(self, tar_file: str) -> None:
+        """Extract bias correction files from a tarball

-        This method extract bias correction files from tarball(s)
+        This method extracts files from a tarball

         Parameters
         ----------
-        task_config: AttrDict
-            Attribute-dictionary of all configuration variables associated with a GDAS task.
-        tar_dict
-            a dictionary containing the list of tar files
+        tar_file
+            path/name of tarball

         Returns
         ----------
         None
         """

-        # extract bias correction files from tar file
-        for tar_file in tar_dict['copy']:
-            if tar_file[1].endswith('.tar'):
-                try:
-                    with tarfile.open(tar_file[1], "r") as tarball:
-                        tarball.extractall(path=os.path.join(task_config.DATA, 'obs'))
-                        logger.info(f"Extract {tarball.getnames()}")
+        # extract files from tar file
+        tar_path = os.path.dirname(tar_file)
+        try:
+            with tarfile.open(tar_file, "r") as tarball:
+                tarball.extractall(path=tar_path)
+                logger.info(f"Extract {tarball.getnames()}")
         except tarfile.ReadError as err:
             if tarfile.is_tarfile(tarfile[1]):
                 logger.error(f"FATAL ERROR: {tarfile[1]} could not be read")
                 raise tarfile.ReadError(f"FATAL ERROR: unable to read {tarfile[1]}")
             else:
                 logger.info()
         except tarfile.ExtractError as err:
             logger.exception(f"FATAL ERROR: unable to extract from {tarfile[1]}")
             raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tarfile[1]}")

     @logit(logger)
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 27243159fa..badb01a74a 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -146,8 +146,9 @@ def initialize_analysis(self) -> None:
         logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")

         # extract bias corrections
-        logger.info(f"Extract bias correction files from tarball")
-        self.jedi.extract_tar(self.task_config, bias_dict)
+        tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}")
+        logger.info(f"Extract bias correction files from {tar_file}")
+        self.jedi.extract_tar(tar_file)

         # stage CRTM fix files
         logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 8bab631495..4b2f8ebbf4 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -145,8 +145,9 @@ def initialize_analysis(self) -> None:
         logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")

         # extract bias corrections
-        logger.info(f"Extract bias correction files from tarball")
-        self.jedi.extract_tar(self.task_config, bias_dict)
+        tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}")
+        logger.info(f"Extract bias correction files from {tar_file}")
+        self.jedi.extract_tar(tar_file)

         # stage CRTM fix files
         logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")

From 80305b67cfbf941e40604d71b7f1413106b2d807 Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA
Date: Wed, 18 Sep 2024 18:04:24 +0000
Subject: [PATCH 021/157] update sorc/gdas.cd to current head of GDASApp
 develop (#2862)

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 032b708f6a..55e895f1dc 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 032b708f6a476ae4726d1533b82feb21fc8daa92
+Subproject commit 55e895f1dcf4e6be36eb0eb4c8a7995d429157e0

From 821ee760945bbd38b5a4c6c7db28d9fa08de7fb2 Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA
Date: Wed, 18 Sep 2024 18:21:10 +0000
Subject: [PATCH 022/157] make jedi method extract_tar static, clean up
 comments, correct typo (#2862)

---
 ush/python/pygfs/jedi/jedi.py | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py
index 9e69065bb2..415a0a3c08 100644
--- a/ush/python/pygfs/jedi/jedi.py
+++ b/ush/python/pygfs/jedi/jedi.py
@@ -231,9 +231,10 @@ def get_bias_dict(self, task_config: AttrDict, bias_file) -> Dict[str, Any]:

         return bias_dict

+    @staticmethod
     @logit(logger)
-    def extract_tar(self, tar_file: str) -> None:
-        """Extract bias correction files from a tarball
+    def extract_tar(tar_file: str) -> None:
+        """Extract files from a tarball

         This method extracts files from a tarball

         Parameters
         ----------
@@ -254,14 +255,14 @@ def extract_tar(self, tar_file: str) -> None:
                 tarball.extractall(path=tar_path)
                 logger.info(f"Extract {tarball.getnames()}")
         except tarfile.ReadError as err:
-            if tarfile.is_tarfile(tarfile[1]):
-                logger.error(f"FATAL ERROR: {tarfile[1]} could not be read")
-                raise tarfile.ReadError(f"FATAL ERROR: unable to read {tarfile[1]}")
+            if tarfile.is_tarfile(tar_file):
+                logger.error(f"FATAL ERROR: {tar_file} could not be read")
+                raise tarfile.ReadError(f"FATAL ERROR: unable to read {tar_file}")
             else:
                 logger.info()
         except tarfile.ExtractError as err:
-            logger.exception(f"FATAL ERROR: unable to extract from {tarfile[1]}")
-            raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tarfile[1]}")
+            logger.exception(f"FATAL ERROR: unable to extract from {tar_file}")
+            raise tarfile.ExtractError(f"FATAL ERROR: unable to extract from {tar_file}")

     @logit(logger)

From 042ec5094cd62dff9d5abc1e4770dd2451284d0c Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA
Date: Thu, 19 Sep 2024 18:17:17 +0000
Subject: [PATCH 023/157] correctly prefix tlapse radiance bias correction
 files for cycling (#2862)

---
ush/python/pygfs/task/atm_analysis.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index badb01a74a..5f67ea9d72 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -276,9 +276,21 @@ def finalize(self) -> None: bfile = f"{self.task_config.APREFIX}rad_varbc_params.tar" radtar = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, bfile) - # get lists of radiance bias correction files to put in tarball + # rename and copy tlapse radiance bias correction files from obs to bc + tlapobs = glob.glob(os.path.join(self.task_config.DATA, 'obs', '*tlapse.txt')) + copylist = [] + for tlapfile in tlapobs: + obsfile = os.path.basename(tlapfile).split('.', 2) + newfile = f"{self.task_config.APREFIX}{obsfile[2]}" + copylist.append([tlapfile, os.path.join(self.task_config.DATA, 'bc', newfile)]) + tlapse_dict = { + 'copy': copylist + } + FileHandler(tlapse_dict).sync() + + # get lists of radiance bias correction files to add to tarball satlist = glob.glob(os.path.join(self.task_config.DATA, 'bc', '*satbias*nc')) - tlaplist = glob.glob(os.path.join(self.task_config.DATA, 'obs', '*tlapse.txt')) + tlaplist = glob.glob(os.path.join(self.task_config.DATA, 'bc', '*tlapse.txt')) # tar radiance bias correction files to ROTDIR logger.info(f"Creating radiance bias correction tar file {radtar}") From 5306b069727777d814af4433c9b57d46c47f2269 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Fri, 20 Sep 2024 10:01:29 +0000 Subject: [PATCH 024/157] extend duration of C96C48_ufs_hybatmDA CI by one cycle to 2024022406 (#2862) --- ci/cases/pr/C96C48_ufs_hybatmDA.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml index 0b5aa7b6ac..b1566d77a0 100644 --- a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml @@ -11,7 +11,7 @@ arguments: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48/20240610 idate: 2024022318 - edate: 2024022400 + edate: 2024022406 nens: 2 gfs_cyc: 1 start: warm From 3a812884ab7b65fee4c966700088cab82778db0c Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 27 Sep 2024 12:54:34 +0000 Subject: [PATCH 025/157] Initial commit --- ush/python/pygfs/task/atm_analysis.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 8d340a5b73..62de16ffbe 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -129,8 +129,24 @@ def initialize_analysis(self) -> None: ---------- None """ - super().initialize() + # stage observations + logger.info(f"Staging list of observation files generated from JEDI config") + jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config) + jcb_config.update(parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config)) + jcb_config['algorithm'] = 'atm_obs_staging' + obs_dict = render(jcb_config) + FileHandler(obs_dict).sync() + logger.debug(f"Observation files:\n{pformat(obs_dict)}") + + # Test + jcb_config = {} + jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config) + jcb_config.update(parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config)) + jcb_config['algorithm'] = 'atm_bias_staging' + bias_dict = render(jcb_config) + logger.debug(f"foo:\n{pformat(bias_dict)}") + # 
stage observations logger.info(f"Staging list of observation files generated from JEDI config") obs_dict = self.jedi.get_obs_dict(self.task_config) From 43c8075b39e9134c0567dd7a4c06c9879a0010a1 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA Date: Fri, 27 Sep 2024 13:10:12 +0000 Subject: [PATCH 026/157] correct ORION.env typo, adjust JEDI ORION job configurations (#2862) --- env/ORION.env | 2 +- parm/config/gfs/config.resources.ORION | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/env/ORION.env b/env/ORION.env index 1bc7eb60d4..3b8053d060 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -142,7 +142,7 @@ elif [[ "${step}" = "marineanlchkpt" ]]; then export NTHREADS_OCNANAL=${NTHREADSmax} - export APRUN_MARINEANLCHKPT="${APRUN} --cpus-per-task=${NTHREADS_OCNANAL}" + export APRUN_MARINEANLCHKPT="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "ocnanalecen" ]]; then diff --git a/parm/config/gfs/config.resources.ORION b/parm/config/gfs/config.resources.ORION index d761df7b73..461b6f14f7 100644 --- a/parm/config/gfs/config.resources.ORION +++ b/parm/config/gfs/config.resources.ORION @@ -23,6 +23,16 @@ case ${step} in # Remove this block once the GSI issue is resolved. export walltime="00:45:00" ;; + "atmanlvar") + # Run on 8 nodes for memory requirement + export tasks_per_node=8 + export walltime="00:45:00" + ;; + "atmensanlobs") + # Run on 8 nodes for memory requirement + export tasks_per_node=8 + export walltime="00:45:00" + ;; *) ;; esac From 4f0446a08488a48bd1f82ae04b0c7e42331ae20b Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 27 Sep 2024 18:01:00 +0000 Subject: [PATCH 027/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 55e895f1dc..d39bf61570 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 55e895f1dcf4e6be36eb0eb4c8a7995d429157e0 +Subproject commit d39bf61570394730e17cd6508307ff7a624cd3cd From 3c08705845c1d36399520d89226370e350c67ff3 Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Fri, 27 Sep 2024 19:22:26 -0400 Subject: [PATCH 028/157] Address reviewer's comments. 
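The cycledef change is easiest to sanity-check in isolation. A rough sketch of the
arithmetic, using datetime in place of wxflow's to_timedelta helper and an illustrative
SDATE value:

    from datetime import datetime, timedelta

    sdate = datetime(2024, 2, 23, 18)              # experiment SDATE (example value)
    interval_snocvr = timedelta(hours=24)          # equivalent of to_timedelta('24H')
    sdate_snocvr = (sdate + interval_snocvr).replace(hour=0, minute=0, second=0)
    print(sdate_snocvr.strftime("%Y%m%d%H%M"))     # 202402240000: first once-daily cycle

With the snowanl dependency switched to an 'or' condition, snowanl can proceed off prep
alone at cycles where prepsnowcover does not run, since prepsnowcover is restricted to
the once-daily gdas_prep_snocvr cycledef.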
--- workflow/rocoto/gfs_cycled_xml.py | 2 +- workflow/rocoto/gfs_tasks.py | 12 +++++------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py index fda1385798..c5411c108a 100644 --- a/workflow/rocoto/gfs_cycled_xml.py +++ b/workflow/rocoto/gfs_cycled_xml.py @@ -27,7 +27,7 @@ def get_cycledefs(self): if self._app_config.do_jedisnowda: sdate_snocvr = self._base['SDATE'] edate_snocvr = self._base['EDATE'] - interval_snocvr = to_timedelta(f"24:00:00H") + interval_snocvr = to_timedelta('24H') sdate_snocvr = sdate_snocvr + interval_snocvr sdate_snocvr_str = sdate_snocvr.replace(hour=0, minute=0, second=0).strftime("%Y%m%d%H%M") edate_snocvr_str = edate_snocvr.strftime("%Y%m%d%H%M") diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index d539c8e8f9..35a348f2aa 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -571,14 +571,13 @@ def prepsnowcover(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) - cycledef = 'gdas_prep_snocvr' resources = self.get_resource('prepsnowcover') task_name = f'{self.run}prepsnowcover' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, - 'cycledef': cycledef, + 'cycledef': 'gdas_prep_snocvr', 'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowcover.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', @@ -592,12 +591,11 @@ def prepsnowcover(self): def snowanl(self): deps = [] - if f'@H' == '00': - dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowcover'} - else: - dep_dict = {'type': 'task', 'name': f'{self.run}prep'} + dep_dict = {'type': 'task', 'name': f'{self.run}prep'} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowcover'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) resources = self.get_resource('snowanl') task_name = f'{self.run}snowanl' From 7c30e482e1fe1d939ec53c1bec5e203739298466 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 4 Oct 2024 16:31:45 +0000 Subject: [PATCH 029/157] Update --- .../exglobal_atm_analysis_fv3_increment.py | 3 +- scripts/exglobal_atm_analysis_initialize.py | 4 +- scripts/exglobal_atm_analysis_variational.py | 2 +- .../exglobal_atmens_analysis_initialize.py | 4 +- scripts/exglobal_atmens_analysis_obs.py | 2 +- scripts/exglobal_atmens_analysis_sol.py | 2 +- ush/python/pygfs/jedi/jedi.py | 203 +++++++----------- ush/python/pygfs/task/atm_analysis.py | 96 ++------- ush/python/pygfs/task/atmens_analysis.py | 76 ++----- 9 files changed, 118 insertions(+), 274 deletions(-) diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index 72413ddbd4..f1422cca89 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -21,5 +21,4 @@ AtmAnl = AtmAnalysis(config, 'atmanlfv3inc') # Initialize and execute FV3 increment converter - AtmAnl.initialize_jedi() - AtmAnl.execute(config.APRUN_ATMANLFV3INC) + AtmAnl.jedi.execute(config.APRUN_ATMANLFV3INC) diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py index 9deae07bb3..d9af271235 100755 --- a/scripts/exglobal_atm_analysis_initialize.py +++ 
b/scripts/exglobal_atm_analysis_initialize.py @@ -23,5 +23,5 @@ AtmAnl = AtmAnalysis(config, 'atmanlvar') # Initialize JEDI variational analysis - AtmAnl.initialize_jedi() - AtmAnl.initialize_analysis() + AtmAnl.jedi.initialize(AtmAnl.task_config) + AtmAnl.initialize() diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index 8359532069..cba7a33a5d 100755 --- a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config, 'atmanlvar') # Execute JEDI variational analysis - AtmAnl.execute(config.APRUN_ATMANLVAR, ['fv3jedi', 'variational']) + AtmAnl.jedi.execute(config.APRUN_ATMANLVAR, ['fv3jedi', 'variational']) diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py index 326fe80628..26bb9a6dab 100755 --- a/scripts/exglobal_atmens_analysis_initialize.py +++ b/scripts/exglobal_atmens_analysis_initialize.py @@ -26,5 +26,5 @@ AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs') # Initialize JEDI ensemble DA analysis - AtmEnsAnl.initialize_jedi() - AtmEnsAnl.initialize_analysis() + AtmEnsAnl.jedi.initialize(AtmEnsAnl.task_config) + AtmEnsAnl.initialize() diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py index c701f8cb4e..ac3271272e 100755 --- a/scripts/exglobal_atmens_analysis_obs.py +++ b/scripts/exglobal_atmens_analysis_obs.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs') # Initialize and execute JEDI ensembler DA analysis in observer mode - AtmEnsAnl.execute(config.APRUN_ATMENSANLOBS, ['fv3jedi', 'localensembleda']) + AtmEnsAnl.jedi.execute(config.APRUN_ATMENSANLOBS, ['fv3jedi', 'localensembleda']) diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py index be78e694b1..d93c42ddef 100755 --- a/scripts/exglobal_atmens_analysis_sol.py +++ b/scripts/exglobal_atmens_analysis_sol.py @@ -21,5 +21,5 @@ AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlsol') # Initialize and execute JEDI ensemble DA analysis in solver mode - AtmEnsAnl.initialize_jedi() + AtmEnsAnl.jedi.initialize(AtmEnsAnl.task_config) AtmEnsAnl.execute(config.APRUN_ATMENSANLSOL, ['fv3jedi', 'localensembleda']) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 415a0a3c08..366f0f7470 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -4,14 +4,12 @@ import tarfile from logging import getLogger from typing import List, Dict, Any, Optional +from pprint import pformat from jcb import render -from wxflow import (AttrDict, - FileHandler, +from wxflow import (AttrDict, FileHandler, Task, Executable, chdir, rm_p, - parse_j2yaml, + parse_j2yaml, save_as_yaml, logit, - Task, - Executable, WorkflowException) logger = getLogger(__name__.split('.')[-1]) @@ -22,7 +20,7 @@ class Jedi: Class for initializing and executing JEDI applications """ @logit(logger, name="Jedi") - def __init__(self, task_config: AttrDict, yaml_name: Optional[str] = None) -> None: + def __init__(self, DATA: str, JEDIEXE: str, yaml_name: Optional[str]) -> None: """Constructor for JEDI objects This method will construct a Jedi object. 
@@ -44,69 +42,51 @@ def __init__(self, task_config: AttrDict, yaml_name: Optional[str] = None) -> No None """ - # For provenance, save incoming task_config as a private attribute of JEDI object - self._task_config = task_config + _exe_name = os.path.basename(JEDIEXE) - _exe_name = os.path.basename(task_config.JEDIEXE) - - self.exe = os.path.join(task_config.DATA, _exe_name) + self.exe_src = JEDIEXE + self.rundir = DATA + self.exe = os.path.join(DATA, _exe_name) if yaml_name: - self.yaml = os.path.join(task_config.DATA, yaml_name + '.yaml') + self.yaml = os.path.join(DATA, yaml_name + '.yaml') else: - self.yaml = os.path.join(task_config.DATA, os.path.splitext(_exe_name)[0] + '.yaml') + self.yaml = os.path.join(DATA, os.path.splitext(_exe_name)[0] + '.yaml') + + # Initialize empty JEDI input config attribute-dictionary self.config = AttrDict() - self.j2tmpl_dir = os.path.join(task_config.PARMgfs, 'gdas') + +# self.j2tmpl_dir = os.path.join(task_config.PARMgfs, 'gdas') @logit(logger) - def set_config(self, task_config: AttrDict, algorithm: Optional[str] = None) -> AttrDict: - """Compile a JEDI configuration dictionary from a template file and save to a YAML file + def initialize(self, task_config: AttrDict) -> None: + """Initialize JEDI application - Parameters - ---------- - task_config : AttrDict - Dictionary of all configuration variables associated with a GDAS task. - algorithm (optional) : str - Name of the algorithm used to generate the JEDI configuration dictionary. - It will override the algorithm set in the task_config.JCB_<>_YAML file. - - Returns - ---------- - None + This method will initialize a JEDI application. + This includes: + - generating JEDI YAML config + - saving JEDI YAML config to run directory + - linking the JEDI executable to run directory """ - if 'JCB_BASE_YAML' in task_config.keys(): - # Step 1: Fill templates of the JCB base YAML file - jcb_config = parse_j2yaml(task_config.JCB_BASE_YAML, task_config) - - # Step 2: If algorithm is present then override the algorithm in the JEDI - # config. Otherwise, if the algorithm J2-YAML is present, fill - # its templates and merge. 
- if algorithm: - jcb_config['algorithm'] = algorithm - elif 'JCB_ALGO' in task_config.keys(): - jcb_config['algorithm'] = task_config.JCB_ALGO - elif 'JCB_ALGO_YAML' in task_config.keys(): - jcb_algo_config = parse_j2yaml(task_config.JCB_ALGO_YAML, task_config) - jcb_config.update(jcb_algo_config) - - # Step 3: Generate the JEDI YAML using JCB - self.config = render(jcb_config) - elif 'JEDIYAML' in task_config.keys(): - # Generate JEDI YAML without using JCB - self.config = parse_j2yaml(task_config.JEDIYAML, task_config, - searchpath=self.j2tmpl_dir) - else: - logger.exception(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") - raise KeyError(f"FATAL ERROR: Task config must contain JCB_BASE_YAML or JEDIYAML") + # Render JEDI config dictionary + logger.info(f"Generating JEDI YAML config: {self.yaml}") + self.config = self.get_config(task_config) + logger.debug(f"JEDI config:\n{pformat(self.config)}") + + # Save JEDI config dictionary to YAML in run directory + logger.debug(f"Writing JEDI YAML config to: {self.yaml}") + save_as_yaml(self.config, self.yaml) + # Link JEDI executable to run directory + logger.info(f"Linking JEDI executable {self.exe_src} to {self.exe}") + self.link_exe() + @logit(logger) - def execute(self, task_config: AttrDict, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: + def execute(self, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: """Execute JEDI application Parameters ---------- - task_config: AttrDict - Attribute-dictionary of all configuration variables associated with a GDAS task. aprun_cmd: str String comprising the run command for the JEDI executable. jedi_args (optional): List @@ -118,7 +98,7 @@ def execute(self, task_config: AttrDict, aprun_cmd: str, jedi_args: Optional[Lis Attribute-dictionary of JEDI configuration rendered from a template. """ - chdir(task_config.DATA) + chdir(self.rundir) exec_cmd = Executable(aprun_cmd) exec_cmd.add_default_arg(self.exe) @@ -127,6 +107,7 @@ def execute(self, task_config: AttrDict, aprun_cmd: str, jedi_args: Optional[Lis exec_cmd.add_default_arg(arg) exec_cmd.add_default_arg(self.yaml) + logger.info(f"Executing {exec_cmd}") try: exec_cmd() except OSError: @@ -134,103 +115,82 @@ def execute(self, task_config: AttrDict, aprun_cmd: str, jedi_args: Optional[Lis except Exception: raise WorkflowException(f"FATAL ERROR: An error occurred during execution of {exec_cmd}") - @staticmethod @logit(logger) - def link_exe(task_config: AttrDict) -> None: - """Link JEDI executable to run directory + def get_config(self, task_config: AttrDict, algorithm: Optional[str] = None) -> AttrDict: + """Compile a JEDI configuration dictionary from a template file and save to a YAML file Parameters ---------- - task_config: AttrDict - Attribute-dictionary of all configuration variables associated with a GDAS task. + task_config : AttrDict + Dictionary of all configuration variables associated with a GDAS task. + algorithm (optional) : str + Name of the algorithm used to generate the JEDI configuration dictionary. + It will override the algorithm set in the task_config.JCB_ALGO_YAML file. Returns ---------- None """ - # TODO: linking is not permitted per EE2. - # Needs work in JEDI to be able to copy the exec. 
[NOAA-EMC/GDASApp#1254]
-        logger.warn("Linking is not permitted per EE2.")
-        exe_dest = os.path.join(task_config.DATA, os.path.basename(task_config.JEDIEXE))
-        if os.path.exists(exe_dest):
-            rm_p(exe_dest)
-        os.symlink(task_config.JEDIEXE, exe_dest)
+        # Fill JCB base YAML template and build JCB config dictionary
+        jcb_config = parse_j2yaml(task_config.JCB_BASE_YAML, task_config)
+
+        # Add JCB algorithm YAML, if it exists, to JCB config dictionary
+        if 'JCB_ALGO_YAML' in task_config.keys():
+            jcb_config.update(parse_j2yaml(task_config.JCB_ALGO_YAML, task_config))

-    @logit(logger)
-    def get_obs_dict(self, task_config: AttrDict) -> Dict[str, Any]:
-        """Compile a dictionary of observation files to copy
+        # Set algorithm in JCB config dictionary or override the one set by JCB_ALGO_YAML
+        if algorithm:
+            jcb_config['algorithm'] = algorithm

-        This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of
-        observation files that are to be copied to the run directory
-        from the observation input directory
+        # Generate JEDI YAML config by rendering JCB config dictionary
+        jedi_config = render(jcb_config)
+
+        return jedi_config
+
+    @logit(logger)
+    def link_exe(self) -> None:
+        """Link JEDI executable to run directory

         Parameters
         ----------
-        task_config: AttrDict
-            Attribute-dictionary of all configuration variables associated with a GDAS task.
+        None

         Returns
         ----------
-        obs_dict: Dict
-            a dictionary containing the list of observation files to copy for FileHandler
+        None
         """
-        observations = find_value_in_nested_dict(self.config, 'observations')
-
-        copylist = []
-        for ob in observations['observers']:
-            obfile = ob['obs space']['obsdatain']['engine']['obsfile']
-            basename = os.path.basename(obfile)
-            copylist.append([os.path.join(task_config.COM_OBS, basename), obfile])
-        obs_dict = {
-            'mkdir': [os.path.join(task_config.DATA, 'obs')],
-            'copy': copylist
-        }
-        return obs_dict

+        # TODO: linking is not permitted per EE2.
+        # Needs work in JEDI to be able to copy the exec. [NOAA-EMC/GDASApp#1254]
+        logger.warn("Linking is not permitted per EE2.")
+        if not os.path.exists(self.exe):
+            os.symlink(self.exe_src, self.exe)
+
+    @staticmethod
     @logit(logger)
-    def get_bias_dict(self, task_config: AttrDict, bias_file) -> Dict[str, Any]:
-        """Compile a dictionary of observation files to copy
-
-        This method extracts 'observers' from the JEDI yaml and determines from that list
-        if bias correction tar files are to be copied to the run directory
-        from the component directory.
+    def remove_redundant(input_list: List) -> List:
+        """Remove redundancies from list with possible redundant, non-mutable elements

         Parameters
         ----------
-        task_config: AttrDict
-            Attribute-dictionary of all configuration variables associated with a GDAS task.
-        bias_file
-            name of bias correction tar file
+        input_list : List
+            List with possible redundant, non-mutable elements

         Returns
         ----------
-        bias_dict: Dict
-            a dictionary containing the list of observation bias files to copy for FileHandler
+        output_list : List
+            Input list but with redundancies removed
         """
-        observations = find_value_in_nested_dict(self.config, 'observations')
-
-        copylist = []
-        for ob in observations['observers']:
-            if 'obs bias' in ob.keys():
-                obfile = ob['obs bias']['input file']
-                obdir = os.path.dirname(obfile)
-                basename = os.path.basename(obfile)
-                prefix = '.'.join(basename.split('.')[:-3])
-                bfile = f"{prefix}.{bias_file}"
-                tar_file = os.path.join(obdir, bfile)
-                copylist.append([os.path.join(task_config.VarBcDir, bfile), tar_file])
-                break
-
-        bias_dict = {
-            'mkdir': [os.path.join(task_config.DATA, 'bc')],
-            'copy': copylist
-        }
-
-        return bias_dict
+        output_list = []
+        for item in input_list:
+            if item not in output_list:
+                output_list.append(item)
+        return output_list

     @staticmethod
     @logit(logger)
     def extract_tar(tar_file: str) -> None:
@@ -264,7 +224,6 @@ def extract_tar(tar_file: str) -> None:
             logger.exception(f"FATAL ERROR: unable to extract from {tar_file}")
             raise tarfile.ExtractError(f"FATAL ERROR: unable to extract from {tar_file}")

-    @logit(logger)
+# TODO: remove since no longer used
 @logit(logger)
 def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any:
     """
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 6582db100d..8a21b1552f 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -6,8 +6,7 @@
 import tarfile
 from logging import getLogger
 from pprint import pformat
-from typing import Optional, Dict, Any
-
+from typing import Any, Dict, List, Optional
 from wxflow import (AttrDict,
                     FileHandler,
                     add_to_datetime, to_fv3time, to_timedelta, to_YMDH,
@@ -74,45 +73,15 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None):
         self.task_config = AttrDict(**self.task_config, **local_dict)

         # Create JEDI object
-        self.jedi = Jedi(self.task_config, yaml_name)
-
-    @logit(logger)
-    def initialize_jedi(self):
-        """Initialize JEDI application
-
-        This method will initialize a JEDI application used in the global atm analysis.
-        This includes:
-        - generating and saving JEDI YAML config
-        - linking the JEDI executable
-
-        Parameters
-        ----------
-        None
-
-        Returns
-        ----------
-        None
-        """
-
-        # get JEDI-to-FV3 increment converter config and save to YAML file
-        logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}")
-        self.jedi.set_config(self.task_config)
-        logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}")
-
-        # save JEDI config to YAML file
-        logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}")
-        save_as_yaml(self.jedi.config, self.jedi.yaml)
-
-        # link JEDI executable
-        logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}")
-        self.jedi.link_exe(self.task_config)
+        self.jedi = Jedi(self.task_config.DATA, self.task_config.JEDIEXE, yaml_name)

     @logit(logger)
-    def initialize_analysis(self) -> None:
+    def initialize(self) -> None:
         """Initialize a global atm analysis

         This method will initialize a global atm analysis.
         This includes:
+        - initializing JEDI variational application
         - staging observation files
         - staging bias correction files
         - staging CRTM fix files
@@ -130,36 +99,30 @@ def initialize_analysis(self) -> None:
         None
         """

-        # stage observations
-        logger.info(f"Staging list of observation files generated from JEDI config")
-        jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config)
-        jcb_config.update(parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config))
-        jcb_config['algorithm'] = 'atm_obs_staging'
-        obs_dict = render(jcb_config)
-        FileHandler(obs_dict).sync()
-        logger.debug(f"Observation files:\n{pformat(obs_dict)}")
-
-        # Test
-        jcb_config = {}
-        jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config)
-        jcb_config.update(parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config))
-        jcb_config['algorithm'] = 'atm_bias_staging'
-        bias_dict = render(jcb_config)
-        logger.debug(f"foo:\n{pformat(bias_dict)}")
-
         # stage observations
-        logger.info(f"Staging list of observation files generated from JEDI config")
-        obs_dict = self.jedi.get_obs_dict(self.task_config)
+        logger.info(f"Staging list of observation files")
+        obs_dict = self.jedi.get_config(self.task_config, 'atm_obs_staging')
         FileHandler(obs_dict).sync()
         logger.debug(f"Observation files:\n{pformat(obs_dict)}")

+        # stage bias corrections
+        logger.info(f"Staging list of bias correction files")
+        bias_dict = self.jedi.get_config(self.task_config, 'atm_bias_staging')
+        bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy'])
+        FileHandler(bias_dict).sync()
+        logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
+
         # stage bias corrections
         logger.info(f"Staging list of bias correction files generated from JEDI config")
         self.task_config.VarBcDir = f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}"
         bias_file = f"rad_varbc_params.tar"
         bias_dict = self.jedi.get_bias_dict(self.task_config, bias_file)
         FileHandler(bias_dict).sync()
-        logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
+        logger.debug(f"Bias correction files (legacy staging):\n{pformat(bias_dict)}")

         # extract bias corrections
         tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}")
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 859372e6e5..1ca1248451 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -74,45 +74,15 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None):
         self.task_config = AttrDict(**self.task_config, **local_dict)

         # Create JEDI object
-        self.jedi = Jedi(self.task_config, yaml_name)
+        self.jedi = Jedi(self.task_config.DATA, self.task_config.JEDIEXE, yaml_name)

     @logit(logger)
-    def initialize_jedi(self):
-        """Initialize JEDI application
-
-        This method will initialize a JEDI application used in the global atmens analysis.
-        This includes:
-        - generating and saving JEDI YAML config
-        - linking the JEDI executable
-
-        Parameters
-        ----------
-        None
-
-        Returns
-        ----------
-        None
-        """
-
-        # get JEDI config and save to YAML file
-        logger.info(f"Generating JEDI config: {self.jedi.yaml}")
-        self.jedi.set_config(self.task_config)
-        logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}")
-
-        # save JEDI config to YAML file
-        logger.info(f"Writing JEDI config to YAML file: {self.jedi.yaml}")
-        save_as_yaml(self.jedi.config, self.jedi.yaml)
-
-        # link JEDI-to-FV3 increment converter executable
-        logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}")
-        self.jedi.link_exe(self.task_config)
-
-    @logit(logger)
-    def initialize_analysis(self) -> None:
+    def initialize(self) -> None:
        """Initialize a global atmens analysis

        This method will initialize a global atmens analysis.
        This includes:
+        - initializing JEDI ensemble DA application
        - staging observation files
        - staging bias correction files
        - staging CRTM fix files
@@ -128,19 +98,21 @@ def initialize_analysis(self) -> None:
        ----------
        None
        """
-        super().initialize()
+
+        # initialize JEDI ensemble DA application
+        logger.info(f"Initializing JEDI ensemble DA application")
+        self.jedi.initialize(self.task_config)

        # stage observations
-        logger.info(f"Staging list of observation files generated from JEDI config")
-        obs_dict = self.jedi.get_obs_dict(self.task_config)
+        logger.info(f"Staging list of observation files")
+        obs_dict = self.jedi.get_config(self.task_config, 'atm_obs_staging')
        FileHandler(obs_dict).sync()
        logger.debug(f"Observation files:\n{pformat(obs_dict)}")

        # stage bias corrections
-        logger.info(f"Staging list of bias correction files generated from JEDI config")
-        self.task_config.VarBcDir = f"{self.task_config.COM_ATMOS_ANALYSIS_PREV}"
-        bias_file = f"rad_varbc_params.tar"
-        bias_dict = self.jedi.get_bias_dict(self.task_config, bias_file)
+        logger.info(f"Staging list of bias correction files")
+        bias_dict = self.jedi.get_config(self.task_config, 'atm_bias_staging')
+        bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy'])
        FileHandler(bias_dict).sync()
        logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")

@@ -175,30 +147,6 @@ def initialize_analysis(self) -> None:
        ]
        FileHandler({'mkdir': newdirs}).sync()

-    @logit(logger)
-    def execute(self, aprun_cmd: str, jedi_args: Optional[str] = None) -> None:
-        """Run JEDI executable
-
-        This method will run JEDI executables for the global atmens analysis
-
-        Parameters
-        ----------
-        aprun_cmd : str
-            Run command for JEDI application on HPC system
-        jedi_args : List
-            List of additional optional arguments for JEDI application
-        Returns
-        ----------
-        None
-        """
-
-        if jedi_args:
-            logger.info(f"Executing {self.jedi.exe} {' '.join(jedi_args)} {self.jedi.yaml}")
-        else:
-            logger.info(f"Executing {self.jedi.exe} {self.jedi.yaml}")
-
-        self.jedi.execute(self.task_config, aprun_cmd, jedi_args)
-
     @logit(logger)
     def finalize(self) -> None:
         """Finalize a global atmens analysis

From f18d4ce5e585053a64c6b3a933997a472f09feae Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Mon, 7 Oct 2024 17:48:53 +0000
Subject: [PATCH 030/157] Initialize all JEDI applications on
the initialize jobs --- parm/config/gfs/config.atmanl | 6 +- parm/config/gfs/config.atmanlfv3inc | 3 - parm/config/gfs/config.atmensanl | 13 +-- parm/config/gfs/config.atmensanlfv3inc | 3 - parm/config/gfs/config.atmensanlobs | 2 - parm/config/gfs/config.atmensanlsol | 2 - parm/config/gfs/yaml/defaults.yaml | 9 +- .../exglobal_atm_analysis_fv3_increment.py | 4 +- scripts/exglobal_atm_analysis_initialize.py | 3 +- scripts/exglobal_atm_analysis_variational.py | 4 +- .../exglobal_atmens_analysis_fv3_increment.py | 5 +- .../exglobal_atmens_analysis_initialize.py | 10 +-- scripts/exglobal_atmens_analysis_letkf.py | 9 +- scripts/exglobal_atmens_analysis_obs.py | 4 +- scripts/exglobal_atmens_analysis_sol.py | 5 +- ush/python/pygfs/jedi/jedi.py | 75 ++++++++-------- ush/python/pygfs/task/atm_analysis.py | 51 +++++++++-- ush/python/pygfs/task/atmens_analysis.py | 89 +++++++++++++++++-- 18 files changed, 195 insertions(+), 102 deletions(-) diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 9a06088ecc..a2baadde7b 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -6,7 +6,8 @@ echo "BEGIN: config.atmanl" export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" -export JCB_ALGO_YAML=@JCB_ALGO_YAML@ +export JCB_ALGO_YAML_VAR=@JCB_ALGO_YAML_VAR@ +export JCB_ALGO_FV3INC="fv3jedi_fv3inc_variational" export STATICB_TYPE=@STATICB_TYPE@ export LOCALIZATION_TYPE="bump" @@ -33,6 +34,7 @@ export layout_y_atmanl=@LAYOUT_Y_ATMANL@ export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/gdas.x +export JEDIEXE_VAR="${EXECgfs}/gdas.x" +export JEDIEXE_FV3INC="${EXECgfs}/fv3jedi_fv3inc.x" echo "END: config.atmanl" diff --git a/parm/config/gfs/config.atmanlfv3inc b/parm/config/gfs/config.atmanlfv3inc index ab7efa3a60..4e7714628e 100644 --- a/parm/config/gfs/config.atmanlfv3inc +++ b/parm/config/gfs/config.atmanlfv3inc @@ -8,7 +8,4 @@ echo "BEGIN: config.atmanlfv3inc" # Get task specific resources . "${EXPDIR}/config.resources" atmanlfv3inc -export JCB_ALGO=fv3jedi_fv3inc_variational -export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x - echo "END: config.atmanlfv3inc" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index f5a1278248..2c57525834 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -6,11 +6,11 @@ echo "BEGIN: config.atmensanl" export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" -if [[ ${lobsdiag_forenkf} = ".false." ]] ; then - export JCB_ALGO_YAML=@JCB_ALGO_YAML_LETKF@ -else - export JCB_ALGO_YAML=@JCB_ALGO_YAML_OBS@ -fi + +export JCB_ALGO_YAML_LETKF=@JCB_ALGO_YAML_LETKF@ +export JCB_ALGO_YAML_OBS=@JCB_ALGO_YAML_OBS@ +export JCB_ALGO_YAML_SOL=@JCB_ALGO_YAML_SOL@ +export JCB_ALGO_FV3INC="fv3jedi_fv3inc_lgetkf" export INTERP_METHOD='barycentric' @@ -24,6 +24,7 @@ export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@ export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/gdas.x +export JEDIEXE_LETKF=${EXECgfs}/gdas.x +export JEDIEXE_FV3INC=${EXECgfs}/fv3jedi_fv3inc.x echo "END: config.atmensanl" diff --git a/parm/config/gfs/config.atmensanlfv3inc b/parm/config/gfs/config.atmensanlfv3inc index 2dc73f3f6e..fe3337e5a2 100644 --- a/parm/config/gfs/config.atmensanlfv3inc +++ b/parm/config/gfs/config.atmensanlfv3inc @@ -8,7 +8,4 @@ echo "BEGIN: config.atmensanlfv3inc" # Get task specific resources . 
"${EXPDIR}/config.resources" atmensanlfv3inc -export JCB_ALGO=fv3jedi_fv3inc_lgetkf -export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x - echo "END: config.atmensanlfv3inc" diff --git a/parm/config/gfs/config.atmensanlobs b/parm/config/gfs/config.atmensanlobs index dff3fa3095..c7e050b009 100644 --- a/parm/config/gfs/config.atmensanlobs +++ b/parm/config/gfs/config.atmensanlobs @@ -8,6 +8,4 @@ echo "BEGIN: config.atmensanlobs" # Get task specific resources . "${EXPDIR}/config.resources" atmensanlobs -export JCB_ALGO_YAML=@JCB_ALGO_YAML@ - echo "END: config.atmensanlobs" diff --git a/parm/config/gfs/config.atmensanlsol b/parm/config/gfs/config.atmensanlsol index dac161373b..8ef905d1bd 100644 --- a/parm/config/gfs/config.atmensanlsol +++ b/parm/config/gfs/config.atmensanlsol @@ -8,6 +8,4 @@ echo "BEGIN: config.atmensanlsol" # Get task specific resources . "${EXPDIR}/config.resources" atmensanlsol -export JCB_ALGO_YAML=@JCB_ALGO_YAML@ - echo "END: config.atmensanlsol" diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index dfc67d1237..caadf23464 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -23,7 +23,7 @@ base: FHMAX_ENKF_GFS: 12 atmanl: - JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2" + JCB_ALGO_YAML_VAR: "${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2" STATICB_TYPE: "gsibec" LAYOUT_X_ATMANL: 8 LAYOUT_Y_ATMANL: 8 @@ -33,16 +33,11 @@ atmanl: atmensanl: JCB_ALGO_YAML_LETKF: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2" JCB_ALGO_YAML_OBS: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf_observer.yaml.j2" + JCB_ALGO_YAML_SOL: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf_solver.yaml.j2" LAYOUT_X_ATMENSANL: 8 LAYOUT_Y_ATMENSANL: 8 IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 - -atmensanlobs: - JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf_observer.yaml.j2" - -atmensanlsol: - JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_lgetkf_solver.yaml.j2" aeroanl: IO_LAYOUT_X: 1 diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index f1422cca89..d49dbe103c 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -18,7 +18,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atm analysis object - AtmAnl = AtmAnalysis(config, 'atmanlfv3inc') + AtmAnl = AtmAnalysis(config) # Initialize and execute FV3 increment converter - AtmAnl.jedi.execute(config.APRUN_ATMANLFV3INC) + AtmAnl.jedi_fv3inc.execute() diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py index d9af271235..444a532f6c 100755 --- a/scripts/exglobal_atm_analysis_initialize.py +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -20,8 +20,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atm analysis task - AtmAnl = AtmAnalysis(config, 'atmanlvar') + AtmAnl = AtmAnalysis(config) # Initialize JEDI variational analysis - AtmAnl.jedi.initialize(AtmAnl.task_config) AtmAnl.initialize() diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index cba7a33a5d..1200fe147c 100755 --- a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -18,7 +18,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atm analysis task - AtmAnl = AtmAnalysis(config, 'atmanlvar') + AtmAnl = AtmAnalysis(config) # Execute JEDI variational analysis - 
AtmAnl.jedi.execute(config.APRUN_ATMANLVAR, ['fv3jedi', 'variational'])
+    AtmAnl.jedi_var.execute()
diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py
index 48eb6a6a1e..dab8803d9c 100755
--- a/scripts/exglobal_atmens_analysis_fv3_increment.py
+++ b/scripts/exglobal_atmens_analysis_fv3_increment.py
@@ -18,8 +18,7 @@
     config = cast_strdict_as_dtypedict(os.environ)

     # Instantiate the atmens analysis object
-    AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlfv3inc')
+    AtmEnsAnl = AtmEnsAnalysis(config)

     # Initialize and execute JEDI FV3 increment converter
-    AtmEnsAnl.initialize_jedi()
-    AtmEnsAnl.execute(config.APRUN_ATMENSANLFV3INC)
+    AtmEnsAnl.jedi_fv3inc.execute()
diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py
index 26bb9a6dab..010a6f1075 100755
--- a/scripts/exglobal_atmens_analysis_initialize.py
+++ b/scripts/exglobal_atmens_analysis_initialize.py
@@ -20,11 +20,11 @@
     config = cast_strdict_as_dtypedict(os.environ)

     # Instantiate the atmens analysis task
-    if not config.lobsdiag_forenkf:
-        AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlletkf')
-    else:
-        AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs')
+    AtmEnsAnl = AtmEnsAnalysis(config)
+#    if not config.lobsdiag_forenkf:
+#        AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlletkf')
+#    else:
+#        AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs')

     # Initialize JEDI ensemble DA analysis
-    AtmEnsAnl.jedi.initialize(AtmEnsAnl.task_config)
     AtmEnsAnl.initialize()
diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py
index 45b06524fe..33d2fd3b8f 100755
--- a/scripts/exglobal_atmens_analysis_letkf.py
+++ b/scripts/exglobal_atmens_analysis_letkf.py
@@ -18,7 +18,12 @@
     config = cast_strdict_as_dtypedict(os.environ)

     # Instantiate the atmens analysis task
-    AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlletkf')
+    AtmEnsAnl = AtmEnsAnalysis(config)

+    # Initialize JEDI ensemble DA application
+    # Note: This is normally done in AtmEnsAnl.initialize(), but that now
+    # initializes the split observer-solver. This case is just for testing.
+ AtmEnsAnl.jedi_letkf.initialize() + # Execute the JEDI ensemble DA analysis - AtmEnsAnl.execute(config.APRUN_ATMENSANLLETKF, ['fv3jedi', 'localensembleda']) + AtmEnsAnl.jedi_letkf.execute() diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py index ac3271272e..660276d3e4 100755 --- a/scripts/exglobal_atmens_analysis_obs.py +++ b/scripts/exglobal_atmens_analysis_obs.py @@ -18,7 +18,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atmens analysis task - AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs') + AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI ensembler DA analysis in observer mode - AtmEnsAnl.jedi.execute(config.APRUN_ATMENSANLOBS, ['fv3jedi', 'localensembleda']) + AtmEnsAnl.jedi_letkf_obs.execute() diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py index d93c42ddef..a1e71dc1cf 100755 --- a/scripts/exglobal_atmens_analysis_sol.py +++ b/scripts/exglobal_atmens_analysis_sol.py @@ -18,8 +18,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the atmens analysis task - AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlsol') + AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI ensemble DA analysis in solver mode - AtmEnsAnl.jedi.initialize(AtmEnsAnl.task_config) - AtmEnsAnl.execute(config.APRUN_ATMENSANLSOL, ['fv3jedi', 'localensembleda']) + AtmEnsAnl.jedi_letkf_sol.execute() diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 366f0f7470..a57871d619 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -20,12 +20,11 @@ class Jedi: Class for initializing and executing JEDI applications """ @logit(logger, name="Jedi") - def __init__(self, DATA: str, JEDIEXE: str, yaml_name: Optional[str]) -> None: + def __init__(self, config) -> None: """Constructor for JEDI objects This method will construct a Jedi object. This includes: - - save a copy of task_config for provenance - set the default JEDI YAML and executable names - set an empty AttrDict for the JEDI config - set the default directory for J2-YAML templates @@ -34,28 +33,23 @@ def __init__(self, DATA: str, JEDIEXE: str, yaml_name: Optional[str]) -> None: ---------- task_config: AttrDict Attribute-dictionary of all configuration variables associated with a GDAS task. 
- yaml_name: str, optional - Name of YAML file for JEDI configuration Returns ---------- None """ - - _exe_name = os.path.basename(JEDIEXE) - - self.exe_src = JEDIEXE - self.rundir = DATA - self.exe = os.path.join(DATA, _exe_name) - if yaml_name: - self.yaml = os.path.join(DATA, yaml_name + '.yaml') - else: - self.yaml = os.path.join(DATA, os.path.splitext(_exe_name)[0] + '.yaml') - - # Initialize empty JEDI input config attribute-dictionary - self.config = AttrDict() -# self.j2tmpl_dir = os.path.join(task_config.PARMgfs, 'gdas') + # Create the configuration dictionary for JEDI object + self.jedi_config = config.deepcopy() + + local_dict = AttrDict( + { + 'exe': os.path.join(self.config.run_dir, os.path.basename(self.config.exe_src)), + 'yaml': os.path.join(DATA, config.yaml_name + '.yaml'), + 'input_config': None + } + ) + self.jedi_config.update(local_dict) @logit(logger) def initialize(self, task_config: AttrDict) -> None: @@ -69,16 +63,16 @@ def initialize(self, task_config: AttrDict) -> None: """ # Render JEDI config dictionary - logger.info(f"Generating JEDI YAML config: {self.yaml}") - self.config = self.get_config(task_config) - logger.debug(f"JEDI config:\n{pformat(self.config)}") + logger.info(f"Generating JEDI YAML config: {self.jedi_config.yaml}") + self.jedi_config.input_config = self.render_jcb(task_config) + logger.debug(f"JEDI config:\n{pformat(self.jedi_config.input_config)}") # Save JEDI config dictionary to YAML in run directory - logger.debug(f"Writing JEDI YAML config to: {self.yaml}") - save_as_yaml(self.config, self.yaml) + logger.debug(f"Writing JEDI YAML config to: {self.jedi_config.yaml}") + save_as_yaml(self.jedi_config.input_config, self.jedi_config.yaml) # Link JEDI executable to run directory - logger.info(f"Linking JEDI executable {self.exe_src} to {self.exe}") + logger.info(f"Linking JEDI executable {self.jedi_config.exe_src} to {self.jedi_config.exe}") self.link_exe() @logit(logger) @@ -100,12 +94,12 @@ def execute(self, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: chdir(self.rundir) - exec_cmd = Executable(aprun_cmd) - exec_cmd.add_default_arg(self.exe) - if jedi_args: - for arg in jedi_args: + exec_cmd = Executable(self.jedi_config.aprun_cmd) + exec_cmd.add_default_arg(self.jedi_config.exe) + if self.jedi_config.jedi_args: + for arg in self.jedi_config.jedi_args: exec_cmd.add_default_arg(arg) - exec_cmd.add_default_arg(self.yaml) + exec_cmd.add_default_arg(self.jedi_config.yaml) logger.info(f"Executing {exec_cmd}") try: @@ -116,7 +110,7 @@ def execute(self, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: raise WorkflowException(f"FATAL ERROR: An error occurred during execution of {exec_cmd}") @logit(logger) - def get_config(self, task_config: AttrDict, algorithm: Optional[str] = None) -> AttrDict: + def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> AttrDict: """Compile a JEDI configuration dictionary from a template file and save to a YAML file Parameters @@ -125,19 +119,24 @@ def get_config(self, task_config: AttrDict, algorithm: Optional[str] = None) -> Dictionary of all configuration variables associated with a GDAS task. algorithm (optional) : str Name of the algorithm used to generate the JEDI configuration dictionary. - It will override the algorithm set in the task_config.JCB_ALGO_YAML file. + It will override the algorithm set in the jedi_config.jcb_algo_yaml file. 
Returns ---------- None - """ + """ + if not self.jedi_config.jcb_algo_yaml and not.algorithm: + logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") + logger.error(f"FATAL ERROR: JEDI config must contain jcb_algo_yaml or algorithm be + specified as an input to jedi.render_jcb") + # Fill JCB base YAML template and build JCB config dictionary - jcb_config = parse_j2yaml(task_config.JCB_BASE_YAML, task_config) + jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) # Add JCB algorithm YAML, if it exists, to JCB config dictionary - if 'JCB_ALGO_YAML' in task_config.keys(): - jcb_config.update(parse_j2yaml(task_config.JCB_ALGO_YAML, task_config)) + if self.jedi_config.jcb_algo_yaml: + jcb_config.update(parse_j2yaml(self.jedi_config.jcb_algo_yaml, task_config)) # Set algorithm in JCB config dictionary or override the one set by JCB_ALGO_YAML if algorithm: @@ -164,9 +163,8 @@ def link_exe(self) -> None: # TODO: linking is not permitted per EE2. # Needs work in JEDI to be able to copy the exec. [NOAA-EMC/GDASApp#1254] logger.warn("Linking is not permitted per EE2.") - if os.path.exists(self.exe): - rm_p(self.exe) - os.symlink(self.exe_src, self.exe) + if not os.path.exists(self.jedi_config.exe): + os.symlink(self.jedi_config.exe_src, self.jedi_config.exe) @staticmethod @logit(logger) @@ -224,6 +222,7 @@ def extract_tar(tar_file: str) -> None: logger.exception(f"FATAL ERROR: unable to extract from {tar_file}") raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}") +# TODO: remove since no longer used @logit(logger) def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: """ diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index f9c81d5456..0e038165f7 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -23,7 +23,7 @@ class AtmAnalysis(Task): Class for JEDI-based global atm analysis tasks """ @logit(logger, name="AtmAnalysis") - def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): + def __init__(self, config: Dict[str, Any]): """Constructor global atm analysis task This method will construct a global atm analysis task. 
@@ -35,8 +35,6 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): ---------- config: Dict dictionary object containing task configuration - yaml_name: str, optional - name of YAML file for JEDI configuration Returns ---------- @@ -72,8 +70,35 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) - # Create JEDI object - self.jedi = Jedi(self.task_config.DATA, self.task_config.JEDIEXE, yaml_name) + # Create JEDI variational object + jedi_config = AttrDict( + { + 'exe_src': self.task_config.JEDIEXE_VAR, + 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_algo': None, + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_VAR, + 'rundir': self.task_config.DATA, + 'aprun_cmd': self.task_config.APRUN_ATMANLVAR, + 'yaml_name': 'atmanlvar', + 'jedi_args': ['fv3jedi', 'variational'] + } + ) + self.jedi_var = Jedi(jedi_config) + + # Create JEDI FV3 increment converter object + jedi_config = AttrDict( + { + 'exe_src': self.task_config.JEDIEXE_FV3INC, + 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_algo': self.task_config.JCB_ALGO_FV3INC + 'jcb_algo_yaml': None, + 'rundir': self.task_config.DATA, + 'aprun_cmd': self.task_config.APRUN_ATMANLFV3INC, + 'yaml_name': 'atmanlfv3inc', + 'jedi_args': None + } + ) + self.jedi_fv3inc = Jedi(jedi_config) @logit(logger) def initialize(self) -> None: @@ -81,7 +106,7 @@ def initialize(self) -> None: This method will initialize a global atm analysis. This includes: - - initializing JEDI variational application + - initialize JEDI applications - staging observation files - staging bias correction files - staging CRTM fix files @@ -98,16 +123,24 @@ def initialize(self) -> None: ---------- None """ + + # initialize JEDI variational application + logger.info(f"Initializing JEDI variational DA application") + self.jedi_var.initialize() + + # initialize JEDI FV3 increment conversion application + logger.info(f"Initializing JEDI FV3 increment conversion application") + self.jedi_fv3inc.initialize() # stage observations logger.info(f"Staging list of observation files") - obs_dict = self.jedi.get_config(self.task_config, 'atm_obs_staging') + obs_dict = self.jedi_var.render_jcb(self.task_config, 'atm_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") # stage bias corrections logger.info(f"Staging list of bias correction files") - bias_dict = self.jedi.get_config(self.task_config, 'atm_bias_staging') + bias_dict = self.jedi_var.render_jcb(self.task_config, 'atm_bias_staging') bias_dict['copy'] = jedi.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") @@ -115,7 +148,7 @@ def initialize(self) -> None: # extract bias corrections tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}") logger.info(f"Extract bias correction files from {tar_file}") - self.jedi.extract_tar(tar_file) + self.jedi_var.extract_tar(tar_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 859372e6e5..1ca1248451 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -28,7 +28,7 @@ class AtmEnsAnalysis(Task): Class for JEDI-based global atmens analysis tasks """ @logit(logger, 
name="AtmEnsAnalysis") - def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): + def __init__(self, config: Dict[str, Any]): """Constructor global atmens analysis task This method will construct a global atmens analysis task. @@ -40,8 +40,6 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): ---------- config: Dict dictionary object containing task configuration - yaml_name: str, optional - name of YAML file for JEDI configuration Returns ---------- @@ -73,15 +71,76 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) - # Create JEDI object - self.jedi = Jedi(self.task_config.DATA, self.task_config.JEDIEXE, yaml_name) + # Create JEDI LETKF observer object + jedi_config = AttrDict( + { + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_LETKF, + 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_algo': None + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_OBS + 'aprun_cmd': self.task_config.APRUN_ATMENSANLOBS, + 'yaml_name': 'atmensanlobs', + 'jedi_args': ['fv3jedi', 'localensembleda'] + } + ) + self.jedi_letkf_obs = Jedi(jedi_config) + # Create JEDI LETKF solver object + jedi_config = AttrDict( + { + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_LETKF, + 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_algo': None + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_SOL + 'aprun_cmd': self.task_config.APRUN_ATMENSANLSOL, + 'yaml_name': 'atmensanlsol', + 'jedi_args': ['fv3jedi', 'localensembleda'] + } + ) + self.jedi_letkf_sol = Jedi(jedi_config) + + # Create JEDI FV3 increment converter + jedi_config = AttrDict( + { + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_FV3INC, + 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_algo': self.task_config.JCB_ALGO_FV3INC + 'jcb_algo_yaml': None + 'aprun_cmd': self.task_config.APRUN_ATMENSANLFV3INC, + 'yaml_name': 'atmensanlfv3inc', + 'jedi_args': None + } + ) + self.jedi_fv3inc = Jedi(jedi_config) + + # Note: Since we now use the split observer-solvers, the following + # is only for testing. + + # Create JEDI LETKF object + jedi_config = AttrDict( + { + 'exe_src': self.task_config.JEDIEXE_LETKF, + 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_algo': None + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF + 'rundir': self.task_config.DATA, + 'aprun_cmd': self.task_config.APRUN_ATMENSANLLETKF, + 'yaml_name': 'atmensanlletkf', + 'jedi_args': ['fv3jedi', 'localensembleda'] + } + ) + self.jedi_letkf = Jedi(jedi_config) + @logit(logger) def initialize(self) -> None: """Initialize a global atmens analysis This method will initialize a global atmens analysis. 
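The four AttrDicts above (LETKF observer, LETKF solver, FV3 increment converter, and the monolithic LETKF kept for testing) differ only in executable, algorithm YAML, launch command, and name. Purely as an illustration, and not part of the patch itself, the repetition could be folded into a small factory; the key names mirror the dictionaries above, with the capitalized JCB_BASE_YAML spelling that a later fix in this series adopts:

    from typing import List, Optional

    def make_jedi_config(task_config: dict, exe_key: str, aprun_key: str,
                         yaml_name: str, algo_yaml_key: Optional[str] = None,
                         algo: Optional[str] = None,
                         jedi_args: Optional[List[str]] = None) -> dict:
        # Stamp out one per-application JEDI configuration dictionary
        return {
            'rundir': task_config['DATA'],
            'exe_src': task_config[exe_key],
            'jcb_base_yaml': task_config['JCB_BASE_YAML'],
            'jcb_algo': algo,
            'jcb_algo_yaml': task_config[algo_yaml_key] if algo_yaml_key else None,
            'aprun_cmd': task_config[aprun_key],
            'yaml_name': yaml_name,
            'jedi_args': jedi_args,
        }

    # e.g. the LETKF observer configuration above would become:
    # make_jedi_config(task_config, 'JEDIEXE_LETKF', 'APRUN_ATMENSANLOBS',
    #                  'atmensanlobs', 'JCB_ALGO_YAML_LETKF_OBS',
    #                  jedi_args=['fv3jedi', 'localensembleda'])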
This includes: + - initialize JEDI applications - staging observation files - staging bias correction files - staging CRTM fix files @@ -98,23 +157,35 @@ def initialize(self) -> None: None """ + # initialize JEDI LETKF observer application + logger.info(f"Initializing JEDI LETKF observer application") + self.jedi_letkf_obs.initialize() + + # initialize JEDI LETKF solver application + logger.info(f"Initializing JEDI LETKF solver application") + self.jedi_letkf_sol.initialize() + + # initialize JEDI FV3 increment conversion application + logger.info(f"Initializing JEDI FV3 increment conversion application") + self.jedi_fv3inc.initialize() + # stage observations logger.info(f"Staging list of observation files") - obs_dict = self.jedi.get_config(self.task_config, 'atm_obs_staging') + obs_dict = self.jedi_letkf_obs.render_jcb(self.task_config, 'atm_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") # stage bias corrections logger.info(f"Staging list of bias correction files") - bias_dict = self.jedi.get_config(self.task_config, 'atm_bias_staging') - bias_dict['copy'] = jedi.remove_redundant(bias_dict['copy']) + bias_dict = self.jedi_letkf_obs.render_jcb(self.task_config, 'atm_bias_staging') + bias_dict['copy'] = jedi_letkf_obs.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") # extract bias corrections tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}") logger.info(f"Extract bias correction files from {tar_file}") - self.jedi.extract_tar(tar_file) + self.jedi_letkf_obs..extract_tar(tar_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From d5e476b7a8a8ba4406e3cbb62dd13463c0708508 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 7 Oct 2024 17:57:38 +0000 Subject: [PATCH 031/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index d39bf61570..12633b1fdd 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit d39bf61570394730e17cd6508307ff7a624cd3cd +Subproject commit 12633b1fddd230bb16a7d88b438542c88b4623f2 From cec645fca86e57d540eb789d1727dec586832533 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 7 Oct 2024 22:08:46 +0000 Subject: [PATCH 032/157] pynorms --- ush/python/pygfs/jedi/jedi.py | 27 ++++++++++++------------ ush/python/pygfs/task/atm_analysis.py | 4 ++-- ush/python/pygfs/task/atmens_analysis.py | 8 +++---- 3 files changed, 20 insertions(+), 19 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index a57871d619..ff92569229 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -38,10 +38,10 @@ def __init__(self, config) -> None: ---------- None """ - + # Create the configuration dictionary for JEDI object self.jedi_config = config.deepcopy() - + local_dict = AttrDict( { 'exe': os.path.join(self.config.run_dir, os.path.basename(self.config.exe_src)), @@ -74,7 +74,7 @@ def initialize(self, task_config: AttrDict) -> None: # Link JEDI executable to run directory logger.info(f"Linking JEDI executable {self.jedi_config.exe_src} to {self.jedi_config.exe}") self.link_exe() - + @logit(logger) def execute(self, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: """Execute JEDI application @@ -101,7 +101,7 @@ def execute(self, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: 
exec_cmd.add_default_arg(arg) exec_cmd.add_default_arg(self.jedi_config.yaml) - logger.info(f"Executing {exec_cmd}") + logger.info(f"Executing {exec_cmd}") try: exec_cmd() except OSError: @@ -124,16 +124,16 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> Returns ---------- None - """ + """ - if not self.jedi_config.jcb_algo_yaml and not.algorithm: + if not self.jedi_config.jcb_algo_yaml and not algorithm: logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") logger.error(f"FATAL ERROR: JEDI config must contain jcb_algo_yaml or algorithm be - specified as an input to jedi.render_jcb") - + specified as an input to jedi.render_jcb()") + # Fill JCB base YAML template and build JCB config dictionary jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) - + # Add JCB algorithm YAML, if it exists, to JCB config dictionary if self.jedi_config.jcb_algo_yaml: jcb_config.update(parse_j2yaml(self.jedi_config.jcb_algo_yaml, task_config)) @@ -146,7 +146,7 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> jedi_config = render(jcb_config) return jedi_config - + @logit(logger) def link_exe(self) -> None: """Link JEDI executable to run directory @@ -185,10 +185,10 @@ def remove_redundant(input_list: List) -> List: output_list = [] for item in input_list: if item not in output_list: - output_list.append(item); + output_list.append(item) return output_list - + @staticmethod @logit(logger) def extract_tar(tar_file: str) -> None: @@ -222,7 +222,8 @@ def extract_tar(tar_file: str) -> None: logger.exception(f"FATAL ERROR: unable to extract from {tar_file}") raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}") -# TODO: remove since no longer used + +# TODO: remove since no longer used @logit(logger) def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: """ diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 0e038165f7..b5fedbf994 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -130,8 +130,8 @@ def initialize(self) -> None: # initialize JEDI FV3 increment conversion application logger.info(f"Initializing JEDI FV3 increment conversion application") - self.jedi_fv3inc.initialize() - + self.jedi_fv3inc.initialize() + # stage observations logger.info(f"Staging list of observation files") obs_dict = self.jedi_var.render_jcb(self.task_config, 'atm_obs_staging') diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 1ca1248451..ebd1aede62 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -118,7 +118,7 @@ def __init__(self, config: Dict[str, Any]): # Note: Since we now use the split observer-solvers, the following # is only for testing. 
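A design note on remove_redundant, whose stray semicolon is dropped above: the quadratic membership loop is deliberate. FileHandler 'copy' entries are two-element lists, which are unhashable, so set() or dict.fromkeys() would raise TypeError; the loop keeps first-seen order at O(n^2) cost, negligible for staging lists of this size. A standalone restatement with an invented example:

    def remove_redundant(input_list: list) -> list:
        # Order-preserving de-duplication that tolerates unhashable items
        output_list = []
        for item in input_list:
            if item not in output_list:
                output_list.append(item)
        return output_list

    copies = [['bias.tar', 'obs/'], ['bias.tar', 'obs/'], ['ob1.nc', 'obs/']]
    assert remove_redundant(copies) == [['bias.tar', 'obs/'], ['ob1.nc', 'obs/']]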
- + # Create JEDI LETKF object jedi_config = AttrDict( { @@ -133,7 +133,7 @@ def __init__(self, config: Dict[str, Any]): } ) self.jedi_letkf = Jedi(jedi_config) - + @logit(logger) def initialize(self) -> None: """Initialize a global atmens analysis @@ -157,11 +157,11 @@ def initialize(self) -> None: None """ - # initialize JEDI LETKF observer application + # initialize JEDI LETKF observer application logger.info(f"Initializing JEDI LETKF observer application") self.jedi_letkf_obs.initialize() - # initialize JEDI LETKF solver application + # initialize JEDI LETKF solver application logger.info(f"Initializing JEDI LETKF solver application") self.jedi_letkf_sol.initialize() From bc7fbbd9414ba8ffc461346e94d07d644eb45435 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 7 Oct 2024 22:11:33 +0000 Subject: [PATCH 033/157] pynorms #2 --- ush/python/pygfs/jedi/jedi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index ff92569229..47d4ba0993 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -129,7 +129,7 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> if not self.jedi_config.jcb_algo_yaml and not algorithm: logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") logger.error(f"FATAL ERROR: JEDI config must contain jcb_algo_yaml or algorithm be - specified as an input to jedi.render_jcb()") + specified as an input to jedi.render_jcb()") # Fill JCB base YAML template and build JCB config dictionary jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) From b3d323994e92290f64e03cc5cbf8db7a74111e7a Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 7 Oct 2024 22:14:06 +0000 Subject: [PATCH 034/157] pynorms #3 --- scripts/exglobal_atmens_analysis_letkf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py index 33d2fd3b8f..b6b47a2264 100755 --- a/scripts/exglobal_atmens_analysis_letkf.py +++ b/scripts/exglobal_atmens_analysis_letkf.py @@ -21,9 +21,9 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initalize JEDI ensemble DA application - # Note: This is normally done in AtmEnsAnl.initialize(), but the that now + # Note: This is normally done in AtmEnsAnl.initialize(), but that method now # initializes the split observer-solver. This case is just for testing. 
AtmEnsAnl.jedi_letkf.initialize() - + # Execute the JEDI ensemble DA analysis AtmEnsAnl.jedi_letkf.execute() From afada5d224d9b0873b60688481e180018f56f7d2 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 8 Oct 2024 00:21:57 +0000 Subject: [PATCH 035/157] Fix some python bugs --- ush/python/pygfs/jedi/jedi.py | 19 +++++++++++++------ ush/python/pygfs/task/atm_analysis.py | 2 +- ush/python/pygfs/task/atmens_analysis.py | 18 +++++++++--------- 3 files changed, 23 insertions(+), 16 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 47d4ba0993..ec0cd8050f 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -126,13 +126,12 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> None """ - if not self.jedi_config.jcb_algo_yaml and not algorithm: - logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") - logger.error(f"FATAL ERROR: JEDI config must contain jcb_algo_yaml or algorithm be - specified as an input to jedi.render_jcb()") - # Fill JCB base YAML template and build JCB config dictionary - jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) + if self.jedi_config.jcb_base_yaml: + jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) + else: + logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") + logger.error(f"FATAL ERROR: JEDI config must contain jcb_base_yaml.") # Add JCB algorithm YAML, if it exists, to JCB config dictionary if self.jedi_config.jcb_algo_yaml: @@ -141,6 +140,14 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> # Set algorithm in JCB config dictionary or override the one set by JCB_ALGO_YAML if algorithm: jcb_config['algorithm'] = algorithm + elif self.jedi_config.jcb_algo: + jcb_config['algorithm'] = self.jedi_config.jcb_algo + elif 'algorithm' in jcb_config: + pass + else: + logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") + logger.error(f"FATAL ERROR: algorithm must be specified in JEDI config, " + + "JCB algorithm YAML, or as input to jedi.render_jcb()") # Generate JEDI YAML config by rendering JCB config dictionary jedi_config = render(jcb_config) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index b5fedbf994..02aeb47e47 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -90,7 +90,7 @@ def __init__(self, config: Dict[str, Any]): { 'exe_src': self.task_config.JEDIEXE_FV3INC, 'jcb_base_yaml': self.task_config.jcb_base_yaml, - 'jcb_algo': self.task_config.JCB_ALGO_FV3INC + 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, 'jcb_algo_yaml': None, 'rundir': self.task_config.DATA, 'aprun_cmd': self.task_config.APRUN_ATMANLFV3INC, diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index ebd1aede62..410244138b 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -77,8 +77,8 @@ def __init__(self, config: Dict[str, Any]): 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_LETKF, 'jcb_base_yaml': self.task_config.jcb_base_yaml, - 'jcb_algo': None - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_OBS + 'jcb_algo': None, + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_OBS, 'aprun_cmd': self.task_config.APRUN_ATMENSANLOBS, 'yaml_name': 'atmensanlobs', 'jedi_args': ['fv3jedi', 'localensembleda'] @@ 
-92,8 +92,8 @@ def __init__(self, config: Dict[str, Any]): 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_LETKF, 'jcb_base_yaml': self.task_config.jcb_base_yaml, - 'jcb_algo': None - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_SOL + 'jcb_algo': None, + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_SOL, 'aprun_cmd': self.task_config.APRUN_ATMENSANLSOL, 'yaml_name': 'atmensanlsol', 'jedi_args': ['fv3jedi', 'localensembleda'] @@ -107,8 +107,8 @@ def __init__(self, config: Dict[str, Any]): 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_FV3INC, 'jcb_base_yaml': self.task_config.jcb_base_yaml, - 'jcb_algo': self.task_config.JCB_ALGO_FV3INC - 'jcb_algo_yaml': None + 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, + 'jcb_algo_yaml': None, 'aprun_cmd': self.task_config.APRUN_ATMENSANLFV3INC, 'yaml_name': 'atmensanlfv3inc', 'jedi_args': None @@ -124,8 +124,8 @@ def __init__(self, config: Dict[str, Any]): { 'exe_src': self.task_config.JEDIEXE_LETKF, 'jcb_base_yaml': self.task_config.jcb_base_yaml, - 'jcb_algo': None - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF + 'jcb_algo': None, + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF, 'rundir': self.task_config.DATA, 'aprun_cmd': self.task_config.APRUN_ATMENSANLLETKF, 'yaml_name': 'atmensanlletkf', @@ -185,7 +185,7 @@ def initialize(self) -> None: # extract bias corrections tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}") logger.info(f"Extract bias correction files from {tar_file}") - self.jedi_letkf_obs..extract_tar(tar_file) + self.jedi_letkf_obs.extract_tar(tar_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From 9e57e52ae0e4e78f169e75109c4fa2fb65629e9d Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 8 Oct 2024 01:12:30 +0000 Subject: [PATCH 036/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 12633b1fdd..3bf7b50f32 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 12633b1fddd230bb16a7d88b438542c88b4623f2 +Subproject commit 3bf7b50f324d4073cb357d0088bd11d324038593 From 25fad0e4587f3de7faa6af1c7de62d00bcbab5b7 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 8 Oct 2024 03:05:50 +0000 Subject: [PATCH 037/157] Fixing bugs --- .../exglobal_atm_analysis_fv3_increment.py | 2 +- scripts/exglobal_atm_analysis_variational.py | 2 +- .../exglobal_atmens_analysis_fv3_increment.py | 2 +- scripts/exglobal_atmens_analysis_letkf.py | 4 +-- scripts/exglobal_atmens_analysis_obs.py | 2 +- scripts/exglobal_atmens_analysis_sol.py | 2 +- ush/python/pygfs/jedi/jedi.py | 27 ++++++++-------- ush/python/pygfs/task/atm_analysis.py | 21 +++++++------ ush/python/pygfs/task/atmens_analysis.py | 31 +++++++++---------- 9 files changed, 46 insertions(+), 47 deletions(-) diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index d49dbe103c..594ae6e7f7 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Initialize and execute FV3 increment converter - AtmAnl.jedi_fv3inc.execute() + AtmAnl.jedi_fv3inc.execute(config.APRUN_ATMANLFV3INC) diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index 1200fe147c..c7929f6b19 100755 --- 
a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Execute JEDI variational analysis - AtmAnl.jedi_var.execute() + AtmAnl.jedi_var.execute(config.APRUN_ATMANLVAR) diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py index dab8803d9c..42d0afceed 100755 --- a/scripts/exglobal_atmens_analysis_fv3_increment.py +++ b/scripts/exglobal_atmens_analysis_fv3_increment.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI FV3 increment converter - AtmEnsAnl.jedi_fv3inc.execute() + AtmEnsAnl.jedi_fv3inc.execute(config.APRUN_ATMENSANLFV3INC) diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py index b6b47a2264..050449334e 100755 --- a/scripts/exglobal_atmens_analysis_letkf.py +++ b/scripts/exglobal_atmens_analysis_letkf.py @@ -23,7 +23,7 @@ # Initalize JEDI ensemble DA application # Note: This is normally done in AtmEnsAnl.initialize(), but that method now # initializes the split observer-solver. This case is just for testing. - AtmEnsAnl.jedi_letkf.initialize() + AtmEnsAnl.jedi_letkf.initialize(AtmEnsAnl.task_config) # Execute the JEDI ensemble DA analysis - AtmEnsAnl.jedi_letkf.execute() + AtmEnsAnl.jedi_letkf.execute(config.APRUN_ATMENSANLLETKF) diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py index 660276d3e4..6cd961c99f 100755 --- a/scripts/exglobal_atmens_analysis_obs.py +++ b/scripts/exglobal_atmens_analysis_obs.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI ensembler DA analysis in observer mode - AtmEnsAnl.jedi_letkf_obs.execute() + AtmEnsAnl.jedi_letkf_obs.execute(config.APRUN_ATMENSANLOBS) diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py index a1e71dc1cf..dab5206daf 100755 --- a/scripts/exglobal_atmens_analysis_sol.py +++ b/scripts/exglobal_atmens_analysis_sol.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI ensemble DA analysis in solver mode - AtmEnsAnl.jedi_letkf_sol.execute() + AtmEnsAnl.jedi_letkf_sol.execute(config.APRUN_ATMENSANLSOL) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index ec0cd8050f..dfbd167407 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -40,16 +40,17 @@ def __init__(self, config) -> None: """ # Create the configuration dictionary for JEDI object - self.jedi_config = config.deepcopy() - local_dict = AttrDict( { - 'exe': os.path.join(self.config.run_dir, os.path.basename(self.config.exe_src)), - 'yaml': os.path.join(DATA, config.yaml_name + '.yaml'), + 'exe': os.path.join(config.rundir, os.path.basename(config.exe_src)), + 'yaml': os.path.join(config.rundir, config.yaml_name + '.yaml'), 'input_config': None } ) - self.jedi_config.update(local_dict) + self.jedi_config = AttrDict(**config, **local_dict) + + # Save a copy of jedi_config + self._jedi_config = self.jedi_config.deepcopy() @logit(logger) def initialize(self, task_config: AttrDict) -> None: @@ -76,15 +77,13 @@ def initialize(self, task_config: AttrDict) -> None: self.link_exe() @logit(logger) - def execute(self, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: + def execute(self, aprun_cmd: str) -> None: """Execute JEDI application Parameters ---------- aprun_cmd: str String comprising the run command for the JEDI executable. 
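With jedi_args now read from jedi_config rather than passed in, execute reduces to assembling one command line: launcher, executable, positional JEDI arguments, rendered YAML, in that order. A rough standard-library equivalent, with subprocess standing in for wxflow's Executable wrapper (an assumption of this sketch):

    import subprocess

    def run_jedi(aprun_cmd: str, exe: str, yaml_path: str, jedi_args=None) -> None:
        # e.g. 'srun -n 120' ./gdas.x fv3jedi variational atmanlvar.yaml
        cmd = aprun_cmd.split() + [exe] + list(jedi_args or []) + [yaml_path]
        subprocess.run(cmd, check=True)  # raises CalledProcessError on failure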
- jedi_args (optional): List - List of strings comprising optional input arguments for the JEDI executable. Returns ---------- @@ -92,9 +91,9 @@ def execute(self, aprun_cmd: str, jedi_args: Optional[List] = None) -> None: Attribute-dictionary of JEDI configuration rendered from a template. """ - chdir(self.rundir) + chdir(self.jedi_config.rundir) - exec_cmd = Executable(self.jedi_config.aprun_cmd) + exec_cmd = Executable(aprun_cmd) exec_cmd.add_default_arg(self.jedi_config.exe) if self.jedi_config.jedi_args: for arg in self.jedi_config.jedi_args: @@ -131,13 +130,13 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) else: logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") - logger.error(f"FATAL ERROR: JEDI config must contain jcb_base_yaml.") + logger.error(f"FATAL ERROR: JEDI configuration dictionary must contain jcb_base_yaml.") # Add JCB algorithm YAML, if it exists, to JCB config dictionary if self.jedi_config.jcb_algo_yaml: jcb_config.update(parse_j2yaml(self.jedi_config.jcb_algo_yaml, task_config)) - # Set algorithm in JCB config dictionary or override the one set by JCB_ALGO_YAML + # Set algorithm in JCB config dictionary if algorithm: jcb_config['algorithm'] = algorithm elif self.jedi_config.jcb_algo: @@ -146,8 +145,8 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> pass else: logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") - logger.error(f"FATAL ERROR: algorithm must be specified in JEDI config, " + - "JCB algorithm YAML, or as input to jedi.render_jcb()") + logger.error(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " + + "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML") # Generate JEDI YAML config by rendering JCB config dictionary jedi_config = render(jcb_config) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 02aeb47e47..04f161c81d 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -74,11 +74,10 @@ def __init__(self, config: Dict[str, Any]): jedi_config = AttrDict( { 'exe_src': self.task_config.JEDIEXE_VAR, - 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_VAR, 'rundir': self.task_config.DATA, - 'aprun_cmd': self.task_config.APRUN_ATMANLVAR, 'yaml_name': 'atmanlvar', 'jedi_args': ['fv3jedi', 'variational'] } @@ -89,11 +88,10 @@ def __init__(self, config: Dict[str, Any]): jedi_config = AttrDict( { 'exe_src': self.task_config.JEDIEXE_FV3INC, - 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, 'jcb_algo_yaml': None, 'rundir': self.task_config.DATA, - 'aprun_cmd': self.task_config.APRUN_ATMANLFV3INC, 'yaml_name': 'atmanlfv3inc', 'jedi_args': None } @@ -126,11 +124,11 @@ def initialize(self) -> None: # initialize JEDI variational application logger.info(f"Initializing JEDI variational DA application") - self.jedi_var.initialize() + self.jedi_var.initialize(self.task_config) # initialize JEDI FV3 increment conversion application logger.info(f"Initializing JEDI FV3 increment conversion application") - self.jedi_fv3inc.initialize() + self.jedi_fv3inc.initialize(self.task_config) # stage observations logger.info(f"Staging 
list of observation files") @@ -141,14 +139,17 @@ def initialize(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files") bias_dict = self.jedi_var.render_jcb(self.task_config, 'atm_bias_staging') - bias_dict['copy'] = jedi.remove_redundant(bias_dict['copy']) + bias_dict['copy'] = self.jedi_var.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") # extract bias corrections - tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}") - logger.info(f"Extract bias correction files from {tar_file}") - self.jedi_var.extract_tar(tar_file) + for item in bias_dict['copy']: + bias_file = os.path.basename(item[0]) + if os.path.splitext(bias_file)[1] == '.tar': + tar_file = f"{os.path.dirname(item[1])}/{bias_file}" + logger.info(f"Extract bias correction files from {tar_file}") + self.jedi_var.extract_tar(tar_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 410244138b..332172fa8d 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -76,10 +76,9 @@ def __init__(self, config: Dict[str, Any]): { 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_LETKF, - 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_OBS, - 'aprun_cmd': self.task_config.APRUN_ATMENSANLOBS, + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_OBS, 'yaml_name': 'atmensanlobs', 'jedi_args': ['fv3jedi', 'localensembleda'] } @@ -91,10 +90,9 @@ def __init__(self, config: Dict[str, Any]): { 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_LETKF, - 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF_SOL, - 'aprun_cmd': self.task_config.APRUN_ATMENSANLSOL, + 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_SOL, 'yaml_name': 'atmensanlsol', 'jedi_args': ['fv3jedi', 'localensembleda'] } @@ -106,10 +104,9 @@ def __init__(self, config: Dict[str, Any]): { 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_FV3INC, - 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, 'jcb_algo_yaml': None, - 'aprun_cmd': self.task_config.APRUN_ATMENSANLFV3INC, 'yaml_name': 'atmensanlfv3inc', 'jedi_args': None } @@ -123,11 +120,10 @@ def __init__(self, config: Dict[str, Any]): jedi_config = AttrDict( { 'exe_src': self.task_config.JEDIEXE_LETKF, - 'jcb_base_yaml': self.task_config.jcb_base_yaml, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF, 'rundir': self.task_config.DATA, - 'aprun_cmd': self.task_config.APRUN_ATMENSANLLETKF, 'yaml_name': 'atmensanlletkf', 'jedi_args': ['fv3jedi', 'localensembleda'] } @@ -159,15 +155,15 @@ def initialize(self) -> None: # initialize JEDI LETKF observer application logger.info(f"Initializing JEDI LETKF observer application") - self.jedi_letkf_obs.initialize() + self.jedi_letkf_obs.initialize(self.task_config) # initialize JEDI LETKF solver application logger.info(f"Initializing JEDI LETKF solver application") - 
self.jedi_letkf_sol.initialize() + self.jedi_letkf_sol.initialize(self.task_config) # initialize JEDI FV3 increment conversion application logger.info(f"Initializing JEDI FV3 increment conversion application") - self.jedi_fv3inc.initialize() + self.jedi_fv3inc.initialize(self.task_config) # stage observations logger.info(f"Staging list of observation files") @@ -183,9 +179,12 @@ def initialize(self) -> None: logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") # extract bias corrections - tar_file = os.path.join(self.task_config.DATA, 'obs', f"{self.task_config.GPREFIX}{bias_file}") - logger.info(f"Extract bias correction files from {tar_file}") - self.jedi_letkf_obs.extract_tar(tar_file) + for item in bias_dict['copy']: + bias_file = os.path.basename(item[0]) + if os.path.splitext(bias_file)[1] == '.tar': + tar_file = f"{os.path.dirname(item[1])}/{bias_file}" + logger.info(f"Extract bias correction files from {tar_file}") + self.jedi_var.extract_tar(tar_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From 78bf67d1fe991d71c22adaad9d9da0741d56a569 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 8 Oct 2024 03:12:37 +0000 Subject: [PATCH 038/157] Bug fix --- ush/python/pygfs/task/atm_analysis.py | 4 ++-- ush/python/pygfs/task/atmens_analysis.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 04f161c81d..60c904396f 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -139,7 +139,7 @@ def initialize(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files") bias_dict = self.jedi_var.render_jcb(self.task_config, 'atm_bias_staging') - bias_dict['copy'] = self.jedi_var.remove_redundant(bias_dict['copy']) + bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") @@ -149,7 +149,7 @@ def initialize(self) -> None: if os.path.splitext(bias_file)[1] == '.tar': tar_file = f"{os.path.dirname(item[1])}/{bias_file}" logger.info(f"Extract bias correction files from {tar_file}") - self.jedi_var.extract_tar(tar_file) + Jedi.extract_tar(tar_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 332172fa8d..178a8ee097 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -174,7 +174,7 @@ def initialize(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files") bias_dict = self.jedi_letkf_obs.render_jcb(self.task_config, 'atm_bias_staging') - bias_dict['copy'] = jedi_letkf_obs.remove_redundant(bias_dict['copy']) + bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") @@ -184,7 +184,7 @@ def initialize(self) -> None: if os.path.splitext(bias_file)[1] == '.tar': tar_file = f"{os.path.dirname(item[1])}/{bias_file}" logger.info(f"Extract bias correction files from {tar_file}") - self.jedi_var.extract_tar(tar_file) + Jedi.extract_tar(tar_file) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From dadda23f2a5f6cff16ec5556b03e80ebda471cd9 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA 
Date: Tue, 8 Oct 2024 16:37:15 +0000
Subject: [PATCH 039/157] Remove redundant code for tarball extraction, update comments, and update GDAS hash

---
 sorc/gdas.cd                             |   2 +-
 ush/python/pygfs/jedi/jedi.py            | 107 ++++++++++++++++-------
 ush/python/pygfs/task/atm_analysis.py    |   9 +-
 ush/python/pygfs/task/atmens_analysis.py |   9 +-
 4 files changed, 79 insertions(+), 48 deletions(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 3bf7b50f32..8a501f7905 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 3bf7b50f324d4073cb357d0088bd11d324038593
+Subproject commit 8a501f7905497f5b65649fd05c825f5869b2a6c8
diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py
index dfbd167407..bda70e86d4 100644
--- a/ush/python/pygfs/jedi/jedi.py
+++ b/ush/python/pygfs/jedi/jedi.py
@@ -25,14 +25,13 @@ def __init__(self, config) -> None:
 
         This method will construct a Jedi object.
         This includes:
-        - set the default JEDI YAML and executable names
-        - set an empty AttrDict for the JEDI config
-        - set the default directory for J2-YAML templates
+        - create the jedi_config AttrDict and extend it with additional required entries
+        - save a copy of jedi_config
 
         Parameters
         ----------
-        task_config: AttrDict
-            Attribute-dictionary of all configuration variables associated with a GDAS task.
+        config: AttrDict
+            Attribute-dictionary of all configuration variables required for the Jedi class
 
         Returns
         ----------
@@ -58,9 +57,18 @@ def initialize(self, task_config: AttrDict) -> None:
 
         This method will initialize a JEDI application.
         This includes:
-        - generating JEDI YAML config
-        - saving JEDI YAML config to run directory
+        - generating JEDI input YAML config
+        - saving JEDI input YAML config to run directory
         - linking the JEDI executable to run directory
+
+        Parameters
+        ----------
+        task_config: AttrDict
+            Attribute-dictionary of all configuration variables associated with a GDAS task.
+
+        Returns
+        ----------
+        None
         """
 
         # Render JEDI config dictionary
@@ -87,8 +95,7 @@ def execute(self, aprun_cmd: str) -> None:
 
         Returns
         ----------
-        jedi_config: AttrDict
-            Attribute-dictionary of JEDI configuration rendered from a template.
+        None
         """
 
         chdir(self.jedi_config.rundir)
@@ -122,7 +129,8 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) ->
 
         Returns
        ----------
-        None
+        jedi_input_config: AttrDict
+            Attribute-dictionary of JEDI configuration rendered from a template.
         """
 
         # Fill JCB base YAML template and build JCB config dictionary
@@ -149,9 +157,9 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) ->
                          "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML")
 
         # Generate JEDI YAML config by rendering JCB config dictionary
-        jedi_config = render(jcb_config)
+        jedi_input_config = render(jcb_config)
 
-        return jedi_config
+        return jedi_input_config
 
     @logit(logger)
     def link_exe(self) -> None:
@@ -197,37 +205,70 @@ def remove_redundant(input_list: List) -> List:
 
     @staticmethod
     @logit(logger)
-    def extract_tar(tar_file: str) -> None:
-        """Extract files from a tarball
-
-        This method extract files from a tarball
+    def extract_tar_from_fh_dict(fh_dict) -> None:
+        """Extract tarballs from FileHandler input dictionary
 
+        This method extracts files from tarballs specified in a FileHandler
+        input dictionary for the 'copy' action.
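The docstrings rewritten in this patch pin down the initialize contract: render the input config, write it to the run directory as YAML, and link the executable beside it. In outline, with PyYAML's safe_dump standing in for wxflow's save_as_yaml (an assumption of this sketch):

    import os
    import yaml

    def initialize(jedi_config: dict, input_config: dict) -> None:
        # persist the rendered JEDI input configuration in the run directory
        with open(jedi_config['yaml'], 'w') as f:
            yaml.safe_dump(input_config, f)
        # idempotent link of the executable into the run directory
        # (per the EE2 TODO earlier in the series, this may eventually become a copy)
        if not os.path.exists(jedi_config['exe']):
            os.symlink(jedi_config['exe_src'], jedi_config['exe'])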
+
         Parameters
         ----------
-        tar_file
-            path/name of tarball
+        fh_dict
+            Input dictionary for FileHandler
 
         Returns
         ----------
         None
         """
-
-        # extract files from tar file
-        tar_path = os.path.dirname(tar_file)
-        try:
-            with tarfile.open(tar_file, "r") as tarball:
-                tarball.extractall(path=tar_path)
-                logger.info(f"Extract {tarball.getnames()}")
-        except tarfile.ReadError as err:
-            if tarfile.is_tarfile(tar_file):
-                logger.error(f"FATAL ERROR: {tar_file} could not be read")
-                raise tarfile.ReadError(f"FATAL ERROR: unable to read {tar_file}")
+
+        for item in fh_dict['copy']:
+            # Use the filename from the destination entry if it's a file path
+            # Otherwise, it's a directory, so use the source entry filename
+            if os.path.isfile(item[1]):
+                filename = os.path.basename(item[1])
             else:
-                logger.info()
-        except tarfile.ExtractError as err:
-            logger.exception(f"FATAL ERROR: unable to extract from {tar_file}")
-            raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}")
+                filename = os.path.basename(item[0])
+
+            # Extract if file is a tarball
+            if os.path.splitext(filename)[1] == '.tar':
+                tar_file = f"{os.path.dirname(item[1])}/{filename}"
+                if os.path.isfile(tar_file):
+                    logger.info(f"Extract files from {tar_file}")
+                    extract_tar(tar_file)
+                else:
+                    logger.error(f"FATAL ERROR: {tar_file} could not be read")
+                    logger.error(f"FATAL ERROR: {tar_file} does not exist!")
+@logit(logger)
+def extract_tar(tar_file: str) -> None:
+    """Extract files from a tarball
+
+    This method extracts files from a tarball
+
+    Parameters
+    ----------
+    tar_file
+        path/name of tarball
+    Returns
+    ----------
+    None
+    """
+
+    # extract files from tar file
+    tar_path = os.path.dirname(tar_file)
+    try:
+        with tarfile.open(tar_file, "r") as tarball:
+            tarball.extractall(path=tar_path)
+            logger.info(f"Extract {tarball.getnames()}")
+    except tarfile.ReadError as err:
+        if tarfile.is_tarfile(tar_file):
+            logger.error(f"FATAL ERROR: {tar_file} could not be read")
+            raise tarfile.ReadError(f"FATAL ERROR: unable to read {tar_file}")
+        else:
+            logger.info()
+    except tarfile.ExtractError as err:
+        logger.exception(f"FATAL ERROR: unable to extract from {tar_file}")
+        raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}")
 
 # TODO: remove since no longer used
 @logit(logger)
 def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any:
     """
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 60c904396f..20d5264420 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -29,7 +29,7 @@ def __init__(self, config: Dict[str, Any]):
 
         This method will construct a global atm analysis task.
         This includes:
         - extending the task_config attribute AttrDict to include parameters required for this task
-        - instantiate the Jedi attribute object
+        - instantiate the Jedi attribute objects
 
         Parameters
         ----------
@@ -144,12 +144,7 @@ def initialize(self) -> None:
         logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
 
         # extract bias corrections
-        for item in bias_dict['copy']:
-            bias_file = os.path.basename(item[0])
-            if os.path.splitext(bias_file)[1] == '.tar':
-                tar_file = f"{os.path.dirname(item[1])}/{bias_file}"
-                logger.info(f"Extract bias correction files from {tar_file}")
-                Jedi.extract_tar(tar_file)
+        Jedi.extract_tar_from_fh_dict(bias_dict)
 
         # stage CRTM fix files
         logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 178a8ee097..b00dc4d9e8 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -34,7 +34,7 @@ def __init__(self, config: Dict[str, Any]):
 
         This method will construct a global atmens analysis task.
         This includes:
         - extending the task_config attribute AttrDict to include parameters required for this task
-        - instantiate the Jedi attribute object
+        - instantiate the Jedi attribute objects
 
         Parameters
         ----------
@@ -179,12 +179,7 @@ def initialize(self) -> None:
         logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
 
         # extract bias corrections
-        for item in bias_dict['copy']:
-            bias_file = os.path.basename(item[0])
-            if os.path.splitext(bias_file)[1] == '.tar':
-                tar_file = f"{os.path.dirname(item[1])}/{bias_file}"
-                logger.info(f"Extract bias correction files from {tar_file}")
-                Jedi.extract_tar(tar_file)
+        Jedi.extract_tar_from_fh_dict(bias_dict)
 
         # stage CRTM fix files
         logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")

From ee22db3861f99e063141b9b25982038dc68c2a55 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Tue, 8 Oct 2024 16:39:54 +0000
Subject: [PATCH 040/157] pynorms

---
 ush/python/pygfs/jedi/jedi.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py
index bda70e86d4..f64dacf9bb 100644
--- a/ush/python/pygfs/jedi/jedi.py
+++ b/ush/python/pygfs/jedi/jedi.py
@@ -208,9 +208,9 @@ def remove_redundant(input_list: List) -> List:
     def extract_tar_from_fh_dict(fh_dict) -> None:
         """Extract tarballs from FileHandler input dictionary
 
-        This method extracts files from tarballs specified in a FileHandler 
+        This method extracts files from tarballs specified in a FileHandler
         input dictionary for the 'copy' action.
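For readers tracking the new tarball flow: the dictionary consumed by extract_tar_from_fh_dict has the same shape FileHandler itself takes, a 'mkdir' list plus 'copy' entries of [source, destination], and any copied file ending in .tar is subsequently expanded next to its destination. A hypothetical example of such a dictionary (all paths invented):

    bias_dict = {
        'mkdir': ['/tmp/rundir/obs'],
        'copy': [
            # ordinary file: copied and left alone
            ['/com/prev/ob_any.nc', '/tmp/rundir/obs/'],
            # tarball: copied, then extracted in place by extract_tar_from_fh_dict
            ['/com/prev/bias_corrections.tar', '/tmp/rundir/obs/bias_corrections.tar'],
        ],
    }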
- + Parameters ---------- fh_dict @@ -220,7 +220,7 @@ def extract_tar_from_fh_dict(fh_dict) -> None: ---------- None """ - + for item in fh_dict['copy']: # Use the filename from the destination entry if it's a file path # Otherwise, it's a directory, so use the source entry filename @@ -238,6 +238,8 @@ def extract_tar_from_fh_dict(fh_dict) -> None: else: logger.error(f"FATAL ERROR: {tar_file} could not be read") logger.error(f"FATAL ERROR: {tar_file} does not exist!") + + @logit(logger) def extract_tar(tar_file: str) -> None: """Extract files from a tarball @@ -254,7 +256,7 @@ def extract_tar(tar_file: str) -> None: None """ - # extract files from tar file + # extract files from tar file tar_path = os.path.dirname(tar_file) try: with tarfile.open(tar_file, "r") as tarball: From 057a5e95a61109bf4c5fd583e3c0903cedace5b9 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 8 Oct 2024 16:41:56 +0000 Subject: [PATCH 041/157] pynorms #2 --- ush/python/pygfs/jedi/jedi.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index f64dacf9bb..b062abb43f 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -229,7 +229,7 @@ def extract_tar_from_fh_dict(fh_dict) -> None: else: filename = os.path.basename(item[0]) - # Extract if file is a tarball + # Extract if file is a tarball if os.path.splitext(filename)[1] == '.tar': tar_file = f"{os.path.dirname(item[1])}/{filename}" if os.path.isfile(tar_file): @@ -272,6 +272,7 @@ def extract_tar(tar_file: str) -> None: logger.exception(f"FATAL ERROR: unable to extract from {tar_file}") raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}") + # TODO: remove since no longer used @logit(logger) def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: From ccd11fc3b8eccee88668a967babde0020f0f5c53 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 9 Oct 2024 15:27:10 +0000 Subject: [PATCH 042/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 8a501f7905..29696c82ab 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 8a501f7905497f5b65649fd05c825f5869b2a6c8 +Subproject commit 29696c82ab994e02dcfbeefc4b30953de352605b From c8f38bed573ba21138ca5fbfb0be45abcb1bb0e0 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 9 Oct 2024 23:56:36 +0000 Subject: [PATCH 043/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 29696c82ab..140dc0e6a6 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 29696c82ab994e02dcfbeefc4b30953de352605b +Subproject commit 140dc0e6a6550321b6cf1db6630f3d6ca067dc12 From 700204d6c0c5cf77f36a4c66939d449f7d22c77e Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 10 Oct 2024 01:40:13 +0000 Subject: [PATCH 044/157] Address Cory's comments --- ush/python/pygfs/jedi/jedi.py | 20 ++++++++++---------- ush/python/pygfs/task/atm_analysis.py | 2 +- ush/python/pygfs/task/atmens_analysis.py | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index b062abb43f..6a1ec3855c 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -205,7 +205,7 @@ def remove_redundant(input_list: List) -> List: @staticmethod @logit(logger) - def extract_tar_from_fh_dict(fh_dict) -> None: + def 
extract_tar_from_filehandler_dict(filehandler_dict) -> None:
         """Extract tarballs from FileHandler input dictionary
 
         This method extracts files from tarballs specified in a FileHandler
@@ -213,7 +213,7 @@ def extract_tar_from_fh_dict(fh_dict) -> None:
 
         Parameters
         ----------
-        fh_dict
+        filehandler_dict
             Input dictionary for FileHandler
 
         Returns
@@ -221,7 +221,7 @@ def extract_tar_from_fh_dict(fh_dict) -> None:
         None
         """
 
-        for item in fh_dict['copy']:
+        for item in filehandler_dict['copy']:
             # Use the filename from the destination entry if it's a file path
             # Otherwise, it's a directory, so use the source entry filename
             if os.path.isfile(item[1]):
@@ -229,16 +229,13 @@ def extract_tar_from_fh_dict(fh_dict) -> None:
             else:
                 filename = os.path.basename(item[0])
 
-            # Extract if file is a tarball
+            # Check if file is a tar ball
             if os.path.splitext(filename)[1] == '.tar':
                 tar_file = f"{os.path.dirname(item[1])}/{filename}"
-                if os.path.isfile(tar_file):
-                    logger.info(f"Extract files from {tar_file}")
-                    extract_tar(tar_file)
-                else:
-                    logger.error(f"FATAL ERROR: {tar_file} could not be read")
-                    logger.error(f"FATAL ERROR: {tar_file} does not exist!")
+                # Extract tarball
+                logger.info(f"Extract files from {tar_file}")
+                extract_tar(tar_file)
 
 @logit(logger)
 def extract_tar(tar_file: str) -> None:
@@ -262,6 +259,9 @@ def extract_tar(tar_file: str) -> None:
         with tarfile.open(tar_file, "r") as tarball:
             tarball.extractall(path=tar_path)
             logger.info(f"Extract {tarball.getnames()}")
+    except tarfile.FileExistsError as err:
+        logger.exception(f"FATAL ERROR: {tar_file} does not exist")
+        raise tarfile.FileExistsError(f"FATAL ERROR: {tar_file} does not exist")
     except tarfile.ReadError as err:
         if tarfile.is_tarfile(tar_file):
             logger.error(f"FATAL ERROR: {tar_file} could not be read")
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 20d5264420..7b91ecc909 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -144,7 +144,7 @@ def initialize(self) -> None:
         logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
 
         # extract bias corrections
-        Jedi.extract_tar_from_fh_dict(bias_dict)
+        Jedi.extract_tar_from_filehandler_dict(bias_dict)
 
         # stage CRTM fix files
         logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index b00dc4d9e8..c532dcb9f5 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -179,7 +179,7 @@ def initialize(self) -> None:
         logger.debug(f"Bias correction files:\n{pformat(bias_dict)}")
 
         # extract bias corrections
-        Jedi.extract_tar_from_fh_dict(bias_dict)
+        Jedi.extract_tar_from_filehandler_dict(bias_dict)
 
         # stage CRTM fix files
         logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")

From 62783ce5af36226efb59f9ddf10b90beda92e7db Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Thu, 10 Oct 2024 01:44:29 +0000
Subject: [PATCH 045/157] pynorms

---
 ush/python/pygfs/jedi/jedi.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py
index 6a1ec3855c..c9871e4ae3 100644
--- a/ush/python/pygfs/jedi/jedi.py
+++ b/ush/python/pygfs/jedi/jedi.py
@@ -237,6 +237,7 @@ def extract_tar_from_filehandler_dict(filehandler_dict) -> None:
             logger.info(f"Extract files from {tar_file}")
             extract_tar(tar_file)
 
+
 @logit(logger)
 def extract_tar(tar_file: str) -> None:
     """Extract files from a tarball
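One API note on the exception handling added in the patch above: the tarfile module itself defines ReadError and ExtractError, but no FileExistsError; a missing archive surfaces from tarfile.open as the builtin FileNotFoundError. Stripped of logging, the extract-in-place behavior is just:

    import os
    import tarfile

    def extract_tar(tar_file: str) -> None:
        # unpack the archive into the directory that contains it
        with tarfile.open(tar_file, 'r') as tarball:
            tarball.extractall(path=os.path.dirname(tar_file))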
From d41a385e17828b1ecca631650005a209080d1f85 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 11 Oct 2024 15:40:32 +0000 Subject: [PATCH 046/157] commit before lunch --- scripts/exglobal_snow_analysis.py | 17 +++- ush/python/pygfs/task/snow_analysis.py | 135 +++++++++++++++---------- 2 files changed, 96 insertions(+), 56 deletions(-) diff --git a/scripts/exglobal_snow_analysis.py b/scripts/exglobal_snow_analysis.py index fe050f5af5..a8af6d8919 100755 --- a/scripts/exglobal_snow_analysis.py +++ b/scripts/exglobal_snow_analysis.py @@ -18,7 +18,16 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the snow analysis task - anl = SnowAnalysis(config) - anl.initialize() - anl.execute() - anl.finalize() + SnowAnl = SnowAnalysis(config) + + # Initialize JEDI 2DVar snow analysis + SnowAnl.initialize_jedi() + SnowAnl.initialize_analysis() + + # Run IMS preprocessing + + # Execute JEDI snow analysis + SnowAnl.execute(config.APRUN_SNOWANLVAR, ['fv3jedi', 'variational']) + + # Finalize JEDI snow analysis + SnowAnl.finalize() \ No newline at end of file diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 4b991d2b34..f1ce2f3801 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -13,28 +13,44 @@ rm_p, parse_j2yaml, save_as_yaml, Jinja, + Task, logit, Executable, WorkflowException) -from pygfs.task.analysis import Analysis +from pygfs.jedi import Jedi logger = getLogger(__name__.split('.')[-1]) -class SnowAnalysis(Analysis): +class SnowAnalysis(Task): """ - Class for global snow analysis tasks + Class for JEDI-based global snow analysis tasks """ - NMEM_SNOWENS = 2 - @logit(logger, name="SnowAnalysis") - def __init__(self, config): + def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): + """Constructor global snow analysis task + + This method will construct a global snow analysis task. + This includes: + - extending the task_config attribute AttrDict to include parameters required for this task + - instantiate the Jedi attribute object + + Parameters + ---------- + config: Dict + dictionary object containing task configuration + yaml_name: str, optional + name of YAML file for JEDI configuration + + Returns + ---------- + None + """ super().__init__(config) _res = int(self.task_config['CASE'][1:]) _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) - _letkfoi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.letkfoi.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -47,13 +63,72 @@ def __init__(self, config): 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", - 'jedi_yaml': _letkfoi_yaml + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'snow_obsdatain_path': f"{self.task_config.DATA}/obs/", + 'snow_obsdataout_path': f"{self.task_config.DATA}/diags/", } ) # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) + # Create JEDI object + self.jedi = Jedi(self.task_config, yaml_name) + + @logit(logger) + def initialize_jedi(self): + """Initialize JEDI application + + This method will initialize a JEDI application used in the global snow analysis. 
+ This includes: + - generating and saving JEDI YAML config + - linking the JEDI executable + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + # get JEDI-to-FV3 increment converter config and save to YAML file + logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") + self.jedi.set_config(self.task_config) + logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") + + # save JEDI config to YAML file + logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") + save_as_yaml(self.jedi.config, self.jedi.yaml) + + # link JEDI executable + logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") + self.jedi.link_exe(self.task_config) + + @logit(logger) + def initialize_analysis(self) -> None: + """Initialize a global snow analysis + + This method will initialize a global snow analysis. + This includes: + - staging observation files + - preprocessing IMS snow cover + - staging FV3-JEDI fix files + - staging B error files + - staging model backgrounds + - creating output directories + + Parameters + ---------- + None + + Returns + ---------- + None + """ + super().initialize() + @logit(logger) def prepare_IMS(self) -> None: """Prepare the IMS data for a global snow analysis @@ -412,50 +487,6 @@ def get_ens_bkg_dict(config: Dict) -> Dict: return bkg_dict - @staticmethod - @logit(logger) - def create_ensemble(vname: str, bestddev: float, config: Dict) -> None: - """Create a 2-member ensemble for Snow Depth analysis by perturbing snow depth with a prescribed variance. - Additionally, remove glacier locations - - Parameters - ---------- - vname : str - snow depth variable to perturb: "snodl" - bestddev : float - Background Error Standard Deviation to perturb around to create ensemble - config: Dict - Dictionary of key-value pairs needed in this method. 
It must contain the following keys: - DATA - current_cycle - ntiles - """ - - # 2 ens members - offset = bestddev / np.sqrt(SnowAnalysis.NMEM_SNOWENS) - - logger.info(f"Creating ensemble for LETKFOI by offsetting with {offset}") - - workdir = os.path.join(config.DATA, 'bkg') - - sign = [1, -1] - ens_dirs = ['mem001', 'mem002'] - - for (memchar, value) in zip(ens_dirs, sign): - logger.debug(f"creating ensemble member {memchar} with sign {value}") - for tt in range(1, config.ntiles + 1): - logger.debug(f"perturbing tile {tt}") - # open file - out_netcdf = os.path.join(workdir, memchar, 'RESTART', f"{to_fv3time(config.current_cycle)}.sfc_data.tile{tt}.nc") - logger.debug(f"creating member {out_netcdf}") - with Dataset(out_netcdf, "r+") as ncOut: - slmsk_array = ncOut.variables['slmsk'][:] - vtype_array = ncOut.variables['vtype'][:] - slmsk_array[vtype_array == 15] = 0 # remove glacier locations - var_array = ncOut.variables[vname][:] - var_array[slmsk_array == 1] = var_array[slmsk_array == 1] + value * offset - ncOut.variables[vname][0, :, :] = var_array[:] - @staticmethod @logit(logger) def add_increments(config: Dict) -> None: From bc01b0ca81c61e5e1012c5f48014099053395b35 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 11 Oct 2024 18:55:11 +0000 Subject: [PATCH 047/157] WIP for snow 2dvar --- jobs/JGLOBAL_SNOW_ANALYSIS | 1 - parm/config/gfs/config.snowanl | 7 ++++--- scripts/exglobal_snow_analysis.py | 6 ++---- sorc/gdas.cd | 2 +- ush/python/pygfs/task/snow_analysis.py | 2 +- 5 files changed, 8 insertions(+), 10 deletions(-) diff --git a/jobs/JGLOBAL_SNOW_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS index e0f24fa624..9ed7369611 100755 --- a/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ANALYSIS @@ -1,7 +1,6 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}} source "${HOMEgfs}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl" ############################################## diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index b1460dfa67..d342306fb5 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -11,9 +11,8 @@ source "${EXPDIR}/config.resources" snowanl export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2" -# Name of the JEDI executable and its yaml template -export JEDIEXE="${EXECgfs}/gdas.x" -export JEDIYAML="${PARMgfs}/gdas/snow/letkfoi/letkfoi.yaml.j2" +export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-prototype_2dvar.yaml.j2" # Ensemble member properties export SNOWDEPTHVAR="snodl" @@ -25,6 +24,8 @@ export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" +export JEDIEXE=${EXECgfs}/gdas.x + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/scripts/exglobal_snow_analysis.py b/scripts/exglobal_snow_analysis.py index a8af6d8919..7f059149c7 100755 --- a/scripts/exglobal_snow_analysis.py +++ b/scripts/exglobal_snow_analysis.py @@ -24,10 +24,8 @@ SnowAnl.initialize_jedi() SnowAnl.initialize_analysis() - # Run IMS preprocessing - # Execute JEDI snow analysis - SnowAnl.execute(config.APRUN_SNOWANLVAR, ['fv3jedi', 'variational']) + SnowAnl.execute(config.APRUN_SNOWANL, ['fv3jedi', 'variational']) # Finalize JEDI snow analysis - SnowAnl.finalize() \ No newline at end of file + SnowAnl.finalize() 
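Note on the JCB switch above: config.snowanl now points the task at a JCB base YAML (JCB_BASE_YAML) plus an algorithm YAML (JCB_ALGO_YAML) instead of the single letkfoi.yaml.j2 template, and JCB assembles the final JEDI configuration from the two. A minimal sketch of that assembly, mirroring the Jedi.render_jcb method that appears later in this series (the jcb render() import and the wxflow helpers are assumed from their use there; the function name is illustrative only):

    # Illustrative sketch, not part of any patch in this series.
    from jcb import render                      # assumed: used this way by pygfs.jedi
    from wxflow import parse_j2yaml

    def render_jcb_sketch(jcb_base_yaml, task_config, jcb_algo_yaml=None, algorithm=None):
        # Template the base JCB dictionary with the task configuration
        jcb_config = parse_j2yaml(jcb_base_yaml, task_config)
        # Overlay the application-specific (algorithm) dictionary, if one is given
        if jcb_algo_yaml is not None:
            jcb_config.update(parse_j2yaml(jcb_algo_yaml, task_config))
        # An explicitly requested algorithm takes precedence over the YAMLs
        if algorithm is not None:
            jcb_config['algorithm'] = algorithm
        # JCB renders the assembled dictionary into the JEDI input configuration
        return render(jcb_config)

Keeping the base and algorithm YAMLs separate is what lets a single task drive several JEDI applications (variational, LETKF observer/solver, increment conversion) from one base configuration, as the atm and marine patches below do.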
diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 55e895f1dc..54654489cd 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 55e895f1dcf4e6be36eb0eb4c8a7995d429157e0 +Subproject commit 54654489cd32f16df6b282ccb6acc533692b3eb6 diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index f1ce2f3801..3d7b706312 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -2,7 +2,7 @@ import os from logging import getLogger -from typing import Dict, List +from typing import Dict, List, Optional, Any from pprint import pformat import numpy as np from netCDF4 import Dataset From 4c261453629871bfec63773b72ce0240189ad258 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 11 Oct 2024 19:20:02 +0000 Subject: [PATCH 048/157] end of day save --- parm/config/gfs/config.snowanl | 2 ++ parm/gdas/staging/snow_berror.yaml.j2 | 8 ++++++++ parm/gdas/staging/snow_var_bkg.yaml.j2 | 7 +++++++ sorc/gdas.cd | 2 +- ush/python/pygfs/task/snow_analysis.py | 16 +++++++++++++++- 5 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 parm/gdas/staging/snow_berror.yaml.j2 create mode 100644 parm/gdas/staging/snow_var_bkg.yaml.j2 diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index d342306fb5..f90730f4de 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -23,6 +23,8 @@ export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" +export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/snow_var_bkg.yaml.j2" +export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/snow_berror.yaml.j2" export JEDIEXE=${EXECgfs}/gdas.x diff --git a/parm/gdas/staging/snow_berror.yaml.j2 b/parm/gdas/staging/snow_berror.yaml.j2 new file mode 100644 index 0000000000..e6c5e41609 --- /dev/null +++ b/parm/gdas/staging/snow_berror.yaml.j2 @@ -0,0 +1,8 @@ +{% set fname_list = ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4'] %} + +mkdir: +- '{{ DATA }}/berror' +copy: +{% for fname in fname_list %} +- ['{{ HOMEgfs }}/fix/gdas/gsibec/{{ CASE_ANL }}/{{ fname }}', '{{ DATA }}/berror'] +{% endfor %} diff --git a/parm/gdas/staging/snow_var_bkg.yaml.j2 b/parm/gdas/staging/snow_var_bkg.yaml.j2 new file mode 100644 index 0000000000..920817b1db --- /dev/null +++ b/parm/gdas/staging/snow_var_bkg.yaml.j2 @@ -0,0 +1,7 @@ +mkdir: +- '{{ DATA }}/bkg' +copy: +- ['{{ COM_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/'] +{% for tile in range(1, ntiles+1) %} +- ['{{ COM_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc', '{{ DATA }}/bkg/'] +{% endfor %} \ No newline at end of file diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 54654489cd..51aec43378 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 54654489cd32f16df6b282ccb6acc533692b3eb6 +Subproject commit 51aec433784584fbd30a8707a8736c09006fbc27 diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 3d7b706312..dc222636e1 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -112,11 +112,11 @@ def initialize_analysis(self) -> None: This method will initialize a global snow analysis. 
This includes: + - staging model backgrounds - staging observation files - preprocessing IMS snow cover - staging FV3-JEDI fix files - staging B error files - - staging model backgrounds - creating output directories Parameters @@ -129,6 +129,20 @@ def initialize_analysis(self) -> None: """ super().initialize() + # stage backgrounds + logger.info(f"Staging background files from {self.task_config.VAR_BKG_STAGING_YAML}") + bkg_staging_dict = parse_j2yaml(self.task_config.VAR_BKG_STAGING_YAML, self.task_config) + FileHandler(bkg_staging_dict).sync() + logger.debug(f"Background files:\n{pformat(bkg_staging_dict)}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + @logit(logger) def prepare_IMS(self) -> None: """Prepare the IMS data for a global snow analysis From f2a7a5585d27521d93352d53eb38de809b405394 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 12:46:21 +0000 Subject: [PATCH 049/157] Slight change for readability --- ush/python/pygfs/task/atm_analysis.py | 14 ++++++-------- ush/python/pygfs/task/atmens_analysis.py | 20 ++++++++------------ 2 files changed, 14 insertions(+), 20 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 7b91ecc909..6d7b1145ff 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -71,32 +71,30 @@ def __init__(self, config: Dict[str, Any]): self.task_config = AttrDict(**self.task_config, **local_dict) # Create JEDI variational object - jedi_config = AttrDict( + self.jedi_var = Jedi(AttrDict( { + 'yaml_name': 'atmanlvar', + 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_VAR, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_VAR, - 'rundir': self.task_config.DATA, - 'yaml_name': 'atmanlvar', 'jedi_args': ['fv3jedi', 'variational'] } ) - self.jedi_var = Jedi(jedi_config) # Create JEDI FV3 increment converter object - jedi_config = AttrDict( + self.jedi_fv3inc = Jedi(AttrDict( { + 'yaml_name': 'atmanlfv3inc', + 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_FV3INC, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, 'jcb_algo_yaml': None, - 'rundir': self.task_config.DATA, - 'yaml_name': 'atmanlfv3inc', 'jedi_args': None } ) - self.jedi_fv3inc = Jedi(jedi_config) @logit(logger) def initialize(self) -> None: diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index c532dcb9f5..334b2622c3 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -72,63 +72,59 @@ def __init__(self, config: Dict[str, Any]): self.task_config = AttrDict(**self.task_config, **local_dict) # Create JEDI LETKF observer object - jedi_config = AttrDict( + self.jedi_letkf_obs = Jedi(AttrDict( { + 'yaml_name': 'atmensanlobs', 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_LETKF, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_OBS, - 'yaml_name': 'atmensanlobs', 'jedi_args': ['fv3jedi', 'localensembleda'] } ) - self.jedi_letkf_obs = Jedi(jedi_config) # Create JEDI LETKF solver object - jedi_config = AttrDict( + 
self.jedi_letkf_sol = Jedi(AttrDict( { + 'yaml_name': 'atmensanlsol', 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_LETKF, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_SOL, - 'yaml_name': 'atmensanlsol', 'jedi_args': ['fv3jedi', 'localensembleda'] } ) - self.jedi_letkf_sol = Jedi(jedi_config) # Create JEDI FV3 increment converter - jedi_config = AttrDict( + self.jedi_fv3inc = Jedi(AttrDict( { + 'yaml_name': 'atmensanlfv3inc', 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_FV3INC, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, 'jcb_algo_yaml': None, - 'yaml_name': 'atmensanlfv3inc', 'jedi_args': None } ) - self.jedi_fv3inc = Jedi(jedi_config) # Note: Since we now use the split observer-solvers, the following # is only for testing. # Create JEDI LETKF object - jedi_config = AttrDict( + self.jedi_letkf = Jedi(AttrDict( { + 'yaml_name': 'atmensanlletkf', 'exe_src': self.task_config.JEDIEXE_LETKF, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF, 'rundir': self.task_config.DATA, - 'yaml_name': 'atmensanlletkf', 'jedi_args': ['fv3jedi', 'localensembleda'] } ) - self.jedi_letkf = Jedi(jedi_config) @logit(logger) def initialize(self) -> None: From 4baa1d53f9ce6fc9d42bc51c3bbc149b2618a730 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 13:28:52 +0000 Subject: [PATCH 050/157] Add key checking to JEDI class constructor --- ush/python/pygfs/jedi/jedi.py | 7 ++++++- ush/python/pygfs/task/atm_analysis.py | 15 +++++++++------ ush/python/pygfs/task/atmens_analysis.py | 23 +++++++++++++---------- 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index c9871e4ae3..fea1c176d7 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -38,6 +38,11 @@ def __init__(self, config) -> None: None """ + _key_list = ['yaml_name', 'rundir', 'exe_src', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml', 'jedi_args'] + for key in _key_list: + if key not in config.keys: + raise KeyError(f"Key '{key}' not found in the nested dictionary") + # Create the configuration dictionary for JEDI object local_dict = AttrDict( { @@ -46,7 +51,7 @@ def __init__(self, config) -> None: 'input_config': None } ) - self.jedi_config = AttrDict(**config, **local_dict) + self.jedi_config.update(local_dict) # Save a copy of jedi_config self._jedi_config = self.jedi_config.deepcopy() diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 6d7b1145ff..f08c7e5589 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -70,8 +70,11 @@ def __init__(self, config: Dict[str, Any]): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) + # Create dictionary of JEDI objects + self.jedi = AttrDict() + # Create JEDI variational object - self.jedi_var = Jedi(AttrDict( + self.jedi['atmanlvar'] = Jedi(AttrDict( { 'yaml_name': 'atmanlvar', 'rundir': self.task_config.DATA, @@ -84,7 +87,7 @@ def __init__(self, config: Dict[str, Any]): ) # Create JEDI FV3 increment converter object - self.jedi_fv3inc = Jedi(AttrDict( + self.jedi['atmanlfv3inc'] = Jedi(AttrDict( { 'yaml_name': 'atmanlfv3inc', 'rundir': self.task_config.DATA, @@ -122,21 +125,21 @@ def initialize(self) -> None: # initialize JEDI 
variational application logger.info(f"Initializing JEDI variational DA application") - self.jedi_var.initialize(self.task_config) + self.jedi['atmanlvar'].initialize(self.task_config) # initialize JEDI FV3 increment conversion application logger.info(f"Initializing JEDI FV3 increment conversion application") - self.jedi_fv3inc.initialize(self.task_config) + self.jedi['atmanlfv3inc'].initialize(self.task_config) # stage observations logger.info(f"Staging list of observation files") - obs_dict = self.jedi_var.render_jcb(self.task_config, 'atm_obs_staging') + obs_dict = self.jedi['atmanlvar'].render_jcb(self.task_config, 'atm_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") # stage bias corrections logger.info(f"Staging list of bias correction files") - bias_dict = self.jedi_var.render_jcb(self.task_config, 'atm_bias_staging') + bias_dict = self.jedi['atmanlvar'].render_jcb(self.task_config, 'atm_bias_staging') bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 334b2622c3..dc4075971a 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -71,8 +71,11 @@ def __init__(self, config: Dict[str, Any]): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) + # Create dictionary of JEDI objects + self.jedi = AttrDict() + # Create JEDI LETKF observer object - self.jedi_letkf_obs = Jedi(AttrDict( + self.jedi['atmensanlobs'] = Jedi(AttrDict( { 'yaml_name': 'atmensanlobs', 'rundir': self.task_config.DATA, @@ -85,7 +88,7 @@ def __init__(self, config: Dict[str, Any]): ) # Create JEDI LETKF solver object - self.jedi_letkf_sol = Jedi(AttrDict( + self.jedi['atmensanlsol'] = Jedi(AttrDict( { 'yaml_name': 'atmensanlsol', 'rundir': self.task_config.DATA, @@ -98,7 +101,7 @@ def __init__(self, config: Dict[str, Any]): ) # Create JEDI FV3 increment converter - self.jedi_fv3inc = Jedi(AttrDict( + self.jedi['atmensanlfv3inc'] = Jedi(AttrDict( { 'yaml_name': 'atmensanlfv3inc', 'rundir': self.task_config.DATA, @@ -114,14 +117,14 @@ def __init__(self, config: Dict[str, Any]): # is only for testing. 
# Create JEDI LETKF object - self.jedi_letkf = Jedi(AttrDict( + self.jedi['atmensanlletkf'] = Jedi(AttrDict( { 'yaml_name': 'atmensanlletkf', + 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_LETKF, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': None, 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF, - 'rundir': self.task_config.DATA, 'jedi_args': ['fv3jedi', 'localensembleda'] } ) @@ -151,25 +154,25 @@ def initialize(self) -> None: # initialize JEDI LETKF observer application logger.info(f"Initializing JEDI LETKF observer application") - self.jedi_letkf_obs.initialize(self.task_config) + self.jedi['atmensanlobs'].initialize(self.task_config) # initialize JEDI LETKF solver application logger.info(f"Initializing JEDI LETKF solver application") - self.jedi_letkf_sol.initialize(self.task_config) + self.jedi['atmensanlsol'].initialize(self.task_config) # initialize JEDI FV3 increment conversion application logger.info(f"Initializing JEDI FV3 increment conversion application") - self.jedi_fv3inc.initialize(self.task_config) + self.jedi['atmensanlfv3inc'].initialize(self.task_config) # stage observations logger.info(f"Staging list of observation files") - obs_dict = self.jedi_letkf_obs.render_jcb(self.task_config, 'atm_obs_staging') + obs_dict = self.jedi['atmensanlobs'].render_jcb(self.task_config, 'atm_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") # stage bias corrections logger.info(f"Staging list of bias correction files") - bias_dict = self.jedi_letkf_obs.render_jcb(self.task_config, 'atm_bias_staging') + bias_dict = self.jedi['atmensanlobs'].render_jcb(self.task_config, 'atm_bias_staging') bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") From 2f72ecccfd1c09702faeeefdfeac9e73d54e3945 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 13:33:19 +0000 Subject: [PATCH 051/157] Update --- ush/python/pygfs/task/atm_analysis.py | 8 ++++---- ush/python/pygfs/task/atmens_analysis.py | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index f08c7e5589..698567cb60 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -73,7 +73,7 @@ def __init__(self, config: Dict[str, Any]): # Create dictionary of JEDI objects self.jedi = AttrDict() - # Create JEDI variational object + # atmanlvar self.jedi['atmanlvar'] = Jedi(AttrDict( { 'yaml_name': 'atmanlvar', @@ -84,9 +84,9 @@ def __init__(self, config: Dict[str, Any]): 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_VAR, 'jedi_args': ['fv3jedi', 'variational'] } - ) + )) - # Create JEDI FV3 increment converter object + # atmanlfv3inc self.jedi['atmanlfv3inc'] = Jedi(AttrDict( { 'yaml_name': 'atmanlfv3inc', @@ -97,7 +97,7 @@ def __init__(self, config: Dict[str, Any]): 'jcb_algo_yaml': None, 'jedi_args': None } - ) + )) @logit(logger) def initialize(self) -> None: diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index dc4075971a..e3196b7e98 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -74,7 +74,7 @@ def __init__(self, config: Dict[str, Any]): # Create dictionary of JEDI objects self.jedi = AttrDict() - # Create JEDI LETKF observer object + # atmensanlobs self.jedi['atmensanlobs'] = 
Jedi(AttrDict( { 'yaml_name': 'atmensanlobs', @@ -85,9 +85,9 @@ def __init__(self, config: Dict[str, Any]): 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_OBS, 'jedi_args': ['fv3jedi', 'localensembleda'] } - ) + )) - # Create JEDI LETKF solver object + # atmensanlsol self.jedi['atmensanlsol'] = Jedi(AttrDict( { 'yaml_name': 'atmensanlsol', @@ -98,9 +98,9 @@ def __init__(self, config: Dict[str, Any]): 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_SOL, 'jedi_args': ['fv3jedi', 'localensembleda'] } - ) + )) - # Create JEDI FV3 increment converter + # atmensanlfv3inc self.jedi['atmensanlfv3inc'] = Jedi(AttrDict( { 'yaml_name': 'atmensanlfv3inc', @@ -111,12 +111,12 @@ def __init__(self, config: Dict[str, Any]): 'jcb_algo_yaml': None, 'jedi_args': None } - ) + )) # Note: Since we now use the split observer-solvers, the following # is only for testing. - # Create JEDI LETKF object + # atmensanlletkf self.jedi['atmensanlletkf'] = Jedi(AttrDict( { 'yaml_name': 'atmensanlletkf', @@ -127,7 +127,7 @@ def __init__(self, config: Dict[str, Any]): 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF, 'jedi_args': ['fv3jedi', 'localensembleda'] } - ) + )) @logit(logger) def initialize(self) -> None: From e3ffaf0b68b181d2819b54a0611a520da4717839 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 13:49:16 +0000 Subject: [PATCH 052/157] Update --- scripts/exglobal_atm_analysis_fv3_increment.py | 6 +++--- scripts/exglobal_atm_analysis_initialize.py | 4 ++-- scripts/exglobal_atm_analysis_variational.py | 4 ++-- scripts/exglobal_atmens_analysis_fv3_increment.py | 4 ++-- scripts/exglobal_atmens_analysis_initialize.py | 8 ++------ scripts/exglobal_atmens_analysis_letkf.py | 8 ++++---- scripts/exglobal_atmens_analysis_obs.py | 4 ++-- scripts/exglobal_atmens_analysis_sol.py | 4 ++-- 8 files changed, 19 insertions(+), 23 deletions(-) diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index 594ae6e7f7..39a9b4e25a 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atm_analysis_fv3_increment.py # This script creates an AtmAnalysis object -# and runs the initialize_fv3inc and execute methods -# which convert the JEDI increment into an FV3 increment +# and runs the execute method of its Jedi +# object attribute import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Initialize and execute FV3 increment converter - AtmAnl.jedi_fv3inc.execute(config.APRUN_ATMANLFV3INC) + AtmAnl.jedi['atmanlfv3inc'].execute(config.APRUN_ATMANLFV3INC) diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py index 444a532f6c..749d320111 100755 --- a/scripts/exglobal_atm_analysis_initialize.py +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -2,8 +2,8 @@ # exglobal_atm_analysis_initialize.py # This script creates an AtmAnalysis class # and runs the initialize method -# which create and stage the runtime directory -# and create the YAML configuration +# which creates and stages the runtime directory +# and creates the YAML configuration # for a global atm variational analysis import os diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index c7929f6b19..21d99da3a2 100755 --- a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -1,7 +1,7 @@ #!/usr/bin/env 
python3
 # exglobal_atm_analysis_variational.py
 # This script creates an AtmAnalysis object
-# and runs the execute method
+# and runs the execute method of its Jedi object attribute
 # which executes the global atm variational analysis

 import os
@@ -21,4 +21,4 @@
     AtmAnl = AtmAnalysis(config)

     # Execute JEDI variational analysis
-    AtmAnl.jedi_var.execute(config.APRUN_ATMANLVAR)
+    AtmAnl.jedi['atmanlvar'].execute(config.APRUN_ATMANLVAR)
diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py
index 42d0afceed..288c043adc 100755
--- a/scripts/exglobal_atmens_analysis_fv3_increment.py
+++ b/scripts/exglobal_atmens_analysis_fv3_increment.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # exglobal_atmens_analysis_fv3_increment.py
 # This script creates an AtmEnsAnalysis object
-# and runs the initialize_fv3inc and execute methods
+# and runs the execute method of its Jedi object attribute
 # which converts the JEDI increment into an FV3 increment

 import os
@@ -21,4 +21,4 @@
     AtmEnsAnl = AtmEnsAnalysis(config)

     # Initialize and execute JEDI FV3 increment converter
-    AtmEnsAnl.jedi_fv3inc.execute(config.APRUN_ATMENSANLFV3INC)
+    AtmEnsAnl.jedi['atmensanlfv3inc'].execute(config.APRUN_ATMENSANLFV3INC)
diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py
index 010a6f1075..124e755594 100755
--- a/scripts/exglobal_atmens_analysis_initialize.py
+++ b/scripts/exglobal_atmens_analysis_initialize.py
@@ -2,8 +2,8 @@
 # exglobal_atmens_analysis_initialize.py
 # This script creates an AtmEnsAnalysis class
 # and runs the initialize method
-# which create and stage the runtime directory
-# and create the YAML configuration
+# which creates and stages the runtime directory
+# and creates the YAML configuration
 # for a global atm local ensemble analysis

 import os
@@ -21,10 +21,6 @@

     # Instantiate the atmens analysis task
     AtmEnsAnl = AtmEnsAnalysis(config)
-    # if not config.lobsdiag_forenkf:
-    #     AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlletkf')
-    # else:
-    #     AtmEnsAnl = AtmEnsAnalysis(config, 'atmensanlobs')

     # Initialize JEDI ensemble DA analysis
     AtmEnsAnl.initialize()
diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py
index 050449334e..c0516003d6 100755
--- a/scripts/exglobal_atmens_analysis_letkf.py
+++ b/scripts/exglobal_atmens_analysis_letkf.py
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 # exglobal_atmens_analysis_letkf.py
 # This script creates an AtmEnsAnalysis object
-# and runs the execute method which executes
-# the global atm local ensemble analysis
+# and runs the execute method of its Jedi object attribute
+# which executes the global atm local ensemble analysis

 import os
 from wxflow import Logger, cast_strdict_as_dtypedict
@@ -23,7 +23,7 @@
     # Initialize JEDI ensemble DA application
     # Note: This is normally done in AtmEnsAnl.initialize(), but that method now
     # initializes the split observer-solver. This case is just for testing.
-    AtmEnsAnl.jedi_letkf.initialize(AtmEnsAnl.task_config)
+    AtmEnsAnl.jedi['atmensanlletkf'].initialize(AtmEnsAnl.task_config)

     # Execute the JEDI ensemble DA analysis
-    AtmEnsAnl.jedi_letkf.execute(config.APRUN_ATMENSANLLETKF)
+    AtmEnsAnl.jedi['atmensanlletkf'].execute(config.APRUN_ATMENSANLLETKF)
diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py
index 6cd961c99f..84b1f28096 100755
--- a/scripts/exglobal_atmens_analysis_obs.py
+++ b/scripts/exglobal_atmens_analysis_obs.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # exglobal_atmens_analysis_obs.py
 # This script creates an AtmEnsAnalysis object
-# and runs the execute method
+# and runs the execute method of its Jedi object attribute
 # which executes the global atm local ensemble analysis in observer mode

 import os
@@ -21,4 +21,4 @@
     AtmEnsAnl = AtmEnsAnalysis(config)

     # Initialize and execute JEDI ensemble DA analysis in observer mode
-    AtmEnsAnl.jedi_letkf_obs.execute(config.APRUN_ATMENSANLOBS)
+    AtmEnsAnl.jedi['atmensanlobs'].execute(config.APRUN_ATMENSANLOBS)
diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py
index dab5206daf..e6c112f97f 100755
--- a/scripts/exglobal_atmens_analysis_sol.py
+++ b/scripts/exglobal_atmens_analysis_sol.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # exglobal_atmens_analysis_sol.py
 # This script creates an AtmEnsAnalysis object
-# and runs the execute method
+# and runs the execute method of its Jedi object attribute
 # which executes the global atm local ensemble analysis in solver mode

 import os
@@ -21,4 +21,4 @@
     AtmEnsAnl = AtmEnsAnalysis(config)

     # Initialize and execute JEDI ensemble DA analysis in solver mode
-    AtmEnsAnl.jedi_letkf_sol.execute(config.APRUN_ATMENSANLSOL)
+    AtmEnsAnl.jedi['atmensanlsol'].execute(config.APRUN_ATMENSANLSOL)

From 955f19112700b31422634f46d355c7953821d481 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Sun, 13 Oct 2024 13:53:18 +0000
Subject: [PATCH 053/157] Update GDAS hash

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 140dc0e6a6..c71d3b1721 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 140dc0e6a6550321b6cf1db6630f3d6ca067dc12
+Subproject commit c71d3b17214007d59f91cc9c0988d4a8a279b22d

From e8baba30c32bee0d01fcb0b3d1e48bce1b6be23e Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Sun, 13 Oct 2024 13:57:03 +0000
Subject: [PATCH 054/157] pynorms

---
 ush/python/pygfs/jedi/jedi.py            | 2 +-
 ush/python/pygfs/task/atm_analysis.py    | 2 +-
 ush/python/pygfs/task/atmens_analysis.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py
index fea1c176d7..89c81c93dc 100644
--- a/ush/python/pygfs/jedi/jedi.py
+++ b/ush/python/pygfs/jedi/jedi.py
@@ -42,7 +42,7 @@ def __init__(self, config) -> None:
         for key in _key_list:
             if key not in config.keys:
                 raise KeyError(f"Key '{key}' not found in the nested dictionary")
-
+
         # Create the configuration dictionary for JEDI object
         local_dict = AttrDict(
             {
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 698567cb60..5ddb678036 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -90,7 +90,7 @@ def __init__(self, config: Dict[str, Any]):
         self.jedi['atmanlfv3inc'] = Jedi(AttrDict(
             {
                 'yaml_name': 'atmanlfv3inc',
-                'rundir': self.task_config.DATA,
+                'rundir': self.task_config.DATA,
                 'exe_src':
self.task_config.JEDIEXE_FV3INC, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index e3196b7e98..46072dfe8b 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -73,7 +73,7 @@ def __init__(self, config: Dict[str, Any]): # Create dictionary of JEDI objects self.jedi = AttrDict() - + # atmensanlobs self.jedi['atmensanlobs'] = Jedi(AttrDict( { From 0805a08e9ceb1f75162394b6f23c58d0d5b1bfe3 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 18:29:14 +0000 Subject: [PATCH 055/157] Initial commit --- parm/config/gfs/config.marineanl | 21 +- parm/config/gfs/config.marinebmat | 8 - ush/python/pygfs/task/marine_bmat.py | 282 ++++++++++++--------------- 3 files changed, 144 insertions(+), 167 deletions(-) diff --git a/parm/config/gfs/config.marineanl b/parm/config/gfs/config.marineanl index a19fc015e2..cf056387ca 100644 --- a/parm/config/gfs/config.marineanl +++ b/parm/config/gfs/config.marineanl @@ -15,6 +15,25 @@ export SOCA_FIX_YAML_TMPL="${PARMgfs}/gdas/soca/soca_fix_stage_${OCNRES}.yaml.j2 export MARINE_UTILITY_YAML_TMPL="${PARMgfs}/gdas/soca/soca_utils_stage.yaml.j2" export MARINE_ENSDA_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/ensda/stage_ens_mem.yaml.j2" export MARINE_DET_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/soca_det_bkg_stage.yaml.j2" -export MARINE_JCB_GDAS_ALGO="${PARMgfs}/gdas/jcb-gdas/algorithm/marine" + +export JCB_BASE_YAML="${PARMgfs}/gdas/soca/marine-jcb-base.yaml" +#export MARINE_JCB_GDAS_ALGO="${PARMgfs}/gdas/jcb-gdas/algorithm/marine" + +export JCB_ALGO_GRIDGEN="soca_gridgen" +export JCB_ALGO_DIAGB="soca_diagb" +export JCB_ALGO_SETCORSCALES="soca_setcorscales" +export JCB_ALGO_PARAMETERS_DIFFUSION_HZ="soca_parameters_diffusion_hz" +export JCB_ALGO_VTSCALES="soca_vtscales" +export JCB_ALGO_PARAMETERS_DIFFUSION_VT="soca_parameters_diffusion_vt" +export JCB_ALGO_ENSB="soca_ensb" +export JCB_ALGO_ENSWEIGHTS="soca_ensweights" + +export JEDIEXE_GRIDGEN="${EXECgfs}/gdas_soca_gridgen.x" +export JEDIEXE_DIAGB="${EXECgfs}/gdas_soca_diagb.x" +export JEDIEXE_SETCORSCALES="${EXECgfs}/soca_setcorscales.x" +export JEDIEXE_PARAMETERS_DIFFUSION_HZ="${EXECgfs}/gdas_soca_error_covariance_toolbox.x" +export JEDIEXE_PARAMETERS_DIFFUSION_VT="${EXECgfs}/gdas_soca_error_covariance_toolbox.x" +export JEDIEXE_ENSB="${EXECgfs}/gdas_ens_handler.x" +export JEDIEXE_ENSWEIGHTS="${EXECgfs}/gdas_socahybridweights.x" echo "END: config.marineanl" diff --git a/parm/config/gfs/config.marinebmat b/parm/config/gfs/config.marinebmat index 00352737d0..d88739dced 100644 --- a/parm/config/gfs/config.marinebmat +++ b/parm/config/gfs/config.marinebmat @@ -8,12 +8,4 @@ echo "BEGIN: config.marinebmat" # Get task specific resources . 
"${EXPDIR}/config.resources" marinebmat -export BERROR_DIAGB_YAML="${PARMgfs}/gdas/soca/berror/soca_diagb.yaml.j2" -export BERROR_VTSCALES_YAML="${PARMgfs}/gdas/soca/berror/soca_vtscales.yaml.j2" -export BERROR_DIFFV_YAML="${PARMgfs}/gdas/soca/berror/soca_parameters_diffusion_vt.yaml.j2" -export BERROR_HZSCALES_YAML="${PARMgfs}/gdas/soca/berror/soca_setcorscales.yaml" -export BERROR_DIFFH_YAML="${PARMgfs}/gdas/soca/berror/soca_parameters_diffusion_hz.yaml.j2" -export BERROR_ENS_RECENTER_YAML="${PARMgfs}/gdas/soca/berror/soca_ensb.yaml.j2" -export BERROR_HYB_WEIGHTS_YAML="${PARMgfs}/gdas/soca/berror/soca_ensweights.yaml.j2" - echo "END: config.marinebmat" diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 93329f05ac..b316b221f9 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -50,6 +50,100 @@ def __init__(self, config): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) + # Create dictionary of Jedi objects + self.jedi = AttrDict() + + # gridgen + self.jedi['gridgen'] = Jedi(AttrDict( + { + 'yaml_name': 'gridgen', + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_GRIDGEN, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, + 'jcb_algo': self.task_config.JCB_ALGO_GRIDGEN, + 'jcb_algo_yaml': None, + 'jedi_args': None + } + )) + + # soca_diagb + self.jedi['soca_diagb'] = Jedi(AttrDict( + { + 'yaml_name': 'soca_diagb', + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_DIAGB, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, + 'jcb_algo': self.task_config.JCB_ALGO_DIAGB, + 'jcb_algo_yaml': None, + 'jedi_args': None + } + )) + + # soca_parameters_diffusion_vt + self.jedi['soca_parameters_diffusion_vt'] = Jedi(AttrDict( + { + 'yaml_name': 'soca_parameters_diffusion_vt', + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_PARAMETERS_DIFFUSION_VT, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, + 'jcb_algo': self.task_config.JCB_ALGO_PARAMETERS_DIFFUSION_VT, + 'jcb_algo_yaml': None, + 'jedi_args': None + } + ) + + # soca_setcorscales + self.jedi['soca_setcorscales'] = Jedi(AttrDict( + { + 'yaml_name': 'soca_setcorscales', + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_SETCORSCALES, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, + 'jcb_algo': self.task_config.JCB_ALGO_SETCORSCALES, + 'jcb_algo_yaml': None, + 'jedi_args': None + } + ) + + # soca_parameters_diffusion_hz + self.jedi['soca_parameters_diffusion_hz'] = Jedi(AttrDict( + { + 'yaml_name': 'soca_parameters_diffusion_hz', + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_PARAMETERS_DIFFUSION_HZ, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, + 'jcb_algo': self.task_config.JCB_ALGO_PARAMETERS_DIFFUSION_HZ, + 'jcb_algo_yaml': None, + 'jedi_args': None + } + ) + + # soca_ensb + self.jedi['soca_ensb'] = Jedi(AttrDict( + { + 'yaml_name': 'soca_ensb', + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_ENSB, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, + 'jcb_algo': self.task_config.JCB_ALGO_ENSB, + 'jcb_algo_yaml': None, + 'jedi_args': None + } + ) + + # soca_ensweights + self.jedi['soca_ensb'] = Jedi(AttrDict( + { + 'yaml_name': 'soca_ensb', + 'rundir': self.task_config.DATA, + 'exe_src': self.task_config.JEDIEXE_ENSWEIGHTS, + 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, + 'jcb_algo': self.task_config.JCB_ALGO_ENSWEIGHTS, + 'jcb_algo_yaml': None, + 
'jedi_args': None + } + ) + @logit(logger) def initialize(self: Task) -> None: """Initialize a global B-matrix @@ -63,7 +157,6 @@ def initialize(self: Task) -> None: - generating the YAML files for the JEDI and GDASApp executables - creating output directories """ - super().initialize() # stage fix files logger.info(f"Staging SOCA fix files from {self.task_config.SOCA_INPUT_FIX_DIR}") @@ -78,54 +171,27 @@ def initialize(self: Task) -> None: bkg_list = parse_j2yaml(self.task_config.MARINE_DET_STAGE_BKG_YAML_TMPL, self.task_config) FileHandler(bkg_list).sync() - # stage the soca utility yamls (gridgen, fields and ufo mapping yamls) - logger.info(f"Staging SOCA utility yaml files") - soca_utility_list = parse_j2yaml(self.task_config.MARINE_UTILITY_YAML_TMPL, self.task_config) - FileHandler(soca_utility_list).sync() - - # generate the variance partitioning YAML file - logger.info(f"Generate variance partitioning YAML file from {self.task_config.BERROR_DIAGB_YAML}") - diagb_config = parse_j2yaml(path=self.task_config.BERROR_DIAGB_YAML, data=self.task_config) - diagb_config.save(os.path.join(self.task_config.DATA, 'soca_diagb.yaml')) - - # generate the vertical decorrelation scale YAML file - logger.info(f"Generate the vertical correlation scale YAML file from {self.task_config.BERROR_VTSCALES_YAML}") - vtscales_config = parse_j2yaml(path=self.task_config.BERROR_VTSCALES_YAML, data=self.task_config) - vtscales_config.save(os.path.join(self.task_config.DATA, 'soca_vtscales.yaml')) - - # generate vertical diffusion scale YAML file - logger.info(f"Generate vertical diffusion YAML file from {self.task_config.BERROR_DIFFV_YAML}") - diffvz_config = parse_j2yaml(path=self.task_config.BERROR_DIFFV_YAML, data=self.task_config) - diffvz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_vt.yaml')) - - # generate the horizontal diffusion YAML files - if True: # TODO(G): skip this section once we have optimized the scales - # stage the correlation scale configuration - logger.info(f"Generate correlation scale YAML file from {self.task_config.BERROR_HZSCALES_YAML}") - FileHandler({'copy': [[self.task_config.BERROR_HZSCALES_YAML, - os.path.join(self.task_config.DATA, 'soca_setcorscales.yaml')]]}).sync() - - # generate horizontal diffusion scale YAML file - logger.info(f"Generate horizontal diffusion scale YAML file from {self.task_config.BERROR_DIFFH_YAML}") - diffhz_config = parse_j2yaml(path=self.task_config.BERROR_DIFFH_YAML, data=self.task_config) - diffhz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_hz.yaml')) - - # hybrid EnVAR case + # initialize vtscales python script + vtscales_config = self.render_jcb(task_config, 'vtscales') + save_as_yaml(vtscales_config, os.path.join(self.task_config.DATA, 'soca_vtscales.yaml') + FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC), + os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() + + # initialize JEDI applications + self.jedi['gridgen'].initialize(self.task_config) + self.jedi['soca_diagb'].initialize(self.task_config) + self.jedi['soca_parameters_diffusion_vt'].initialize(self.task_config) + self.jedi['soca_setcorscales'].initialize(self.task_config) + self.jedi['soca_parameters_diffusion_hz'].initialize(self.task_config) + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + self.jedi['soca_ensb'.initialize(self.task_config) + self.jedi['soca_ensweights'].initialize(self.task_config) + + # stage ensemble members for the hybrid background error if 
self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - # stage ensemble membersfiles for use in hybrid background error logger.debug(f"Stage ensemble members for the hybrid background error") mdau.stage_ens_mem(self.task_config) - - # generate ensemble recentering/rebalancing YAML file - logger.debug("Generate ensemble recentering YAML file") - ensrecenter_config = parse_j2yaml(path=self.task_config.BERROR_ENS_RECENTER_YAML, data=self.task_config) - ensrecenter_config.save(os.path.join(self.task_config.DATA, 'soca_ensb.yaml')) - - # generate ensemble weights YAML file - logger.debug("Generate hybrid-weigths YAML file") - hybridweights_config = parse_j2yaml(path=self.task_config.BERROR_HYB_WEIGHTS_YAML, data=self.task_config) - hybridweights_config.save(os.path.join(self.task_config.DATA, 'soca_ensweights.yaml')) - + # create the symbolic link to the static B-matrix directory link_target = os.path.join(self.task_config.DATAstaticb) link_name = os.path.join(self.task_config.DATA, 'staticb') @@ -134,130 +200,30 @@ def initialize(self: Task) -> None: os.symlink(link_target, link_name) @logit(logger) - def gridgen(self: Task) -> None: - # link gdas_soca_gridgen.x - mdau.link_executable(self.task_config, 'gdas_soca_gridgen.x') - exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) - exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_gridgen.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('gridgen.yaml') - - mdau.run(exec_cmd) - - @logit(logger) - def variance_partitioning(self: Task) -> None: - # link the variance partitioning executable, gdas_soca_diagb.x - mdau.link_executable(self.task_config, 'gdas_soca_diagb.x') - exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) - exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_diagb.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('soca_diagb.yaml') - - mdau.run(exec_cmd) - - @logit(logger) - def horizontal_diffusion(self: Task) -> None: - """Generate the horizontal diffusion coefficients - """ - # link the executable that computes the correlation scales, gdas_soca_setcorscales.x, - # and prepare the command to run it - mdau.link_executable(self.task_config, 'gdas_soca_setcorscales.x') - exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) - exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_setcorscales.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('soca_setcorscales.yaml') - - # create a files containing the correlation scales - mdau.run(exec_cmd) - - # link the executable that computes the correlation scales, gdas_soca_error_covariance_toolbox.x, - # and prepare the command to run it - mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x') - exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) - exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('soca_parameters_diffusion_hz.yaml') - - # compute the coefficients of the diffusion operator - mdau.run(exec_cmd) - - @logit(logger) - def vertical_diffusion(self: Task) -> None: + def execute_vtscales(self: Task) -> None: """Generate the vertical diffusion coefficients """ - # compute the vertical correlation scales based on the MLD - FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC), - os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() + # compute the vertical correlation scales based on the MLD exec_cmd = Executable("python") 
exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x') exec_cmd.add_default_arg(exec_name) exec_cmd.add_default_arg('soca_vtscales.yaml') - mdau.run(exec_cmd) - - # link the executable that computes the correlation scales, gdas_soca_error_covariance_toolbox.x, - # and prepare the command to run it - mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x') - exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) - exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('soca_parameters_diffusion_vt.yaml') - - # compute the coefficients of the diffusion operator - mdau.run(exec_cmd) - - @logit(logger) - def ensemble_perturbations(self: Task) -> None: - """Generate the 3D ensemble of perturbation for the 3DEnVAR - - This method will generate ensemble perturbations re-balanced w.r.t the - deterministic background. - This includes: - - computing a storing the unbalanced ensemble perturbations' statistics - - recentering the ensemble members around the deterministic background and - accounting for the nonlinear steric recentering - - saving the recentered ensemble statistics - """ - mdau.link_executable(self.task_config, 'gdas_ens_handler.x') - exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) - exec_name = os.path.join(self.task_config.DATA, 'gdas_ens_handler.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('soca_ensb.yaml') - - # generate the ensemble perturbations - mdau.run(exec_cmd) - - @logit(logger) - def hybrid_weight(self: Task) -> None: - """Generate the hybrid weights for the 3DEnVAR - - This method will generate the 3D fields hybrid weights for the 3DEnVAR for each - variables. - TODO(G): Currently implemented for the specific case of the static ensemble members only - """ - mdau.link_executable(self.task_config, 'gdas_socahybridweights.x') - exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) - exec_name = os.path.join(self.task_config.DATA, 'gdas_socahybridweights.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('soca_ensweights.yaml') - - # compute the ensemble weights - mdau.run(exec_cmd) - + mdau.run(exec_cmd) + @logit(logger) - def execute(self: Task) -> None: + def execute(self, aprun_cmd: str) -> None: """Generate the full B-matrix This method will generate the full B-matrix according to the configuration. 
""" - chdir(self.task_config.DATA) - self.gridgen() # TODO: This should be optional in case the geometry file was staged - self.variance_partitioning() - self.horizontal_diffusion() # TODO: Make this optional once we've converged on an acceptable set of scales - self.vertical_diffusion() - # hybrid EnVAR case + self.jedi['gridgen'].execute(aprun_cmd) # TODO: This should be optional in case the geometry file was staged + self.execute_vtscales() + self.jedi['soca_parameters_diffusion_vt'].execute(aprun_cmd) + self.jedi['soca_setcorscales'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi['soca_parameters_diffusion_hz'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - self.ensemble_perturbations() # TODO: refactor this from the old scripts - self.hybrid_weight() # TODO: refactor this from the old scripts + self.jedi['soca_ensb'].execute(self.task_config) # TODO: refactor this from the old scripts + self.jedi['soca_ensweights'].execute(self.task_config) # TODO: refactor this from the old scripts @logit(logger) def finalize(self: Task) -> None: From 8cd354a496360230126f4fce7cfcfdafda9bdfaa Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 18:35:10 +0000 Subject: [PATCH 056/157] Debug --- ush/python/pygfs/jedi/jedi.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 89c81c93dc..f873be1077 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -40,7 +40,7 @@ def __init__(self, config) -> None: _key_list = ['yaml_name', 'rundir', 'exe_src', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml', 'jedi_args'] for key in _key_list: - if key not in config.keys: + if key not in config: raise KeyError(f"Key '{key}' not found in the nested dictionary") # Create the configuration dictionary for JEDI object @@ -51,7 +51,7 @@ def __init__(self, config) -> None: 'input_config': None } ) - self.jedi_config.update(local_dict) + self.jedi_config = AttrDict(**config, **local_dict) # Save a copy of jedi_config self._jedi_config = self.jedi_config.deepcopy() From 8bf1a4293a329f145e921374a060cf6bc78f0521 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 20:25:25 +0000 Subject: [PATCH 057/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index c71d3b1721..6e85be032c 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit c71d3b17214007d59f91cc9c0988d4a8a279b22d +Subproject commit 6e85be032c18419fa245d7d0743264c70bd0592c From 8d53fe663e3abe8bde4aa7d5f63705da2cb75799 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 15 Oct 2024 01:22:53 +0000 Subject: [PATCH 058/157] Saving progress --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/marine_analysis.py | 2 +- ush/python/pygfs/task/marine_bmat.py | 18 ++++++++++-------- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 6e85be032c..b70652625a 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 6e85be032c18419fa245d7d0743264c70bd0592c +Subproject commit b70652625a7e87f277e8dc5c0b08588e61ee95ff diff --git a/ush/python/pygfs/task/marine_analysis.py b/ush/python/pygfs/task/marine_analysis.py index 4e4311b906..964ea4a0d3 100644 --- a/ush/python/pygfs/task/marine_analysis.py +++ 
b/ush/python/pygfs/task/marine_analysis.py @@ -200,7 +200,7 @@ def _prep_variational_yaml(self: Task) -> None: # Add the things to the envconfig in order to template JCB files envconfig_jcb['PARMgfs'] = self.task_config.PARMgfs - envconfig_jcb['nmem_ens'] = self.task_config.NMEM_ENS + envconfig_jcb['NMEM_ENS'] = self.task_config.NMEM_ENS envconfig_jcb['berror_model'] = 'marine_background_error_static_diffusion' if self.task_config.NMEM_ENS > 3: envconfig_jcb['berror_model'] = 'marine_background_error_hybrid_diffusion_diffusion' diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index b316b221f9..dee992dc0e 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -14,6 +14,8 @@ Executable, Task) +from pygfs.jedi import Jedi + logger = getLogger(__name__.split('.')[-1]) @@ -90,7 +92,7 @@ def __init__(self, config): 'jcb_algo_yaml': None, 'jedi_args': None } - ) + )) # soca_setcorscales self.jedi['soca_setcorscales'] = Jedi(AttrDict( @@ -103,7 +105,7 @@ def __init__(self, config): 'jcb_algo_yaml': None, 'jedi_args': None } - ) + )) # soca_parameters_diffusion_hz self.jedi['soca_parameters_diffusion_hz'] = Jedi(AttrDict( @@ -116,7 +118,7 @@ def __init__(self, config): 'jcb_algo_yaml': None, 'jedi_args': None } - ) + )) # soca_ensb self.jedi['soca_ensb'] = Jedi(AttrDict( @@ -129,7 +131,7 @@ def __init__(self, config): 'jcb_algo_yaml': None, 'jedi_args': None } - ) + )) # soca_ensweights self.jedi['soca_ensb'] = Jedi(AttrDict( @@ -142,7 +144,7 @@ def __init__(self, config): 'jcb_algo_yaml': None, 'jedi_args': None } - ) + )) @logit(logger) def initialize(self: Task) -> None: @@ -172,8 +174,8 @@ def initialize(self: Task) -> None: FileHandler(bkg_list).sync() # initialize vtscales python script - vtscales_config = self.render_jcb(task_config, 'vtscales') - save_as_yaml(vtscales_config, os.path.join(self.task_config.DATA, 'soca_vtscales.yaml') + vtscales_config = self.jedi['soca_parameters_diffusion_vt'].render_jcb(self.task_config, 'soca_vtscales') + save_as_yaml(vtscales_config, os.path.join(self.task_config.DATA, 'soca_vtscales.yaml')) FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC), os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() @@ -184,7 +186,7 @@ def initialize(self: Task) -> None: self.jedi['soca_setcorscales'].initialize(self.task_config) self.jedi['soca_parameters_diffusion_hz'].initialize(self.task_config) if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - self.jedi['soca_ensb'.initialize(self.task_config) + self.jedi['soca_ensb'].initialize(self.task_config) self.jedi['soca_ensweights'].initialize(self.task_config) # stage ensemble members for the hybrid background error From 694ad55b656761992f6ff307a5f2ee903d64ffbb Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 15 Oct 2024 02:27:29 +0000 Subject: [PATCH 059/157] Saving progress --- scripts/exglobal_marinebmat.py | 2 +- sorc/gdas.cd | 2 +- ush/python/pygfs/task/marine_bmat.py | 10 ++++++---- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/scripts/exglobal_marinebmat.py b/scripts/exglobal_marinebmat.py index e285e646ac..fd8770c18e 100755 --- a/scripts/exglobal_marinebmat.py +++ b/scripts/exglobal_marinebmat.py @@ -20,5 +20,5 @@ # Create an instance of the MarineBMat task marineBMat = MarineBMat(config) marineBMat.initialize() - marineBMat.execute() + marineBMat.execute(config.APRUN_MARINEBMAT) marineBMat.finalize() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 
b70652625a..52bc8d5ef7 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit b70652625a7e87f277e8dc5c0b08588e61ee95ff +Subproject commit 52bc8d5ef7ad436e07da6baf5f15bbd8dac03104 diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index dee992dc0e..a6384f38b9 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -9,7 +9,7 @@ FileHandler, add_to_datetime, to_timedelta, chdir, - parse_j2yaml, + parse_j2yaml, save_as_yaml, logit, Executable, Task) @@ -40,12 +40,14 @@ def __init__(self, config): local_dict = AttrDict( { 'PARMsoca': os.path.join(self.task_config.PARMgfs, 'gdas', 'soca'), + 'CALC_SCALE_EXEC': _calc_scale_exec, 'MARINE_WINDOW_BEGIN': _window_begin, - 'MARINE_WINDOW_END': _window_end, 'MARINE_WINDOW_MIDDLE': self.task_config.current_cycle, + 'MARINE_WINDOW_END': _window_end, + 'MARINE_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", 'ENSPERT_RELPATH': _enspert_relpath, - 'CALC_SCALE_EXEC': _calc_scale_exec, - 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z." + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z." } ) From cc4a0d80a46714b028d41d82d3e41ffb950a8804 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 15 Oct 2024 13:10:59 +0000 Subject: [PATCH 060/157] Update GDAS hash to develop --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index c71d3b1721..e024564f72 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit c71d3b17214007d59f91cc9c0988d4a8a279b22d +Subproject commit e024564f72e8b8b617e2a6a1cc06053e6dfb5786 From 9d8aa9382569daea83fb61e52f426c457e30de87 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 15 Oct 2024 15:03:14 +0000 Subject: [PATCH 061/157] Clean up exception handling --- ush/python/pygfs/jedi/jedi.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index f873be1077..f4692e82d5 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -142,8 +142,7 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> if self.jedi_config.jcb_base_yaml: jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) else: - logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") - logger.error(f"FATAL ERROR: JEDI configuration dictionary must contain jcb_base_yaml.") + raise KeyError(f"FATAL ERROR: JEDI configuration dictionary must contain jcb_base_yaml.") # Add JCB algorithm YAML, if it exists, to JCB config dictionary if self.jedi_config.jcb_algo_yaml: @@ -157,9 +156,8 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> elif 'algorithm' in jcb_config: pass else: - logger.error(f"FATAL ERROR: Unable to compile JEDI configuration dictionary, ABORT!") - logger.error(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " + - "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML") + raise KeyError(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " + + "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML") # Generate JEDI YAML config by rendering JCB config dictionary jedi_input_config = render(jcb_config) From 715279c45ec67b2d3d0729a798a8e427439b8e74 Mon Sep 17 00:00:00 2001 From: 
CoryMartin-NOAA Date: Tue, 15 Oct 2024 19:52:13 +0000 Subject: [PATCH 062/157] 4pm tuesday commit before JCB deep dive --- parm/gdas/staging/snow_berror.yaml.j2 | 6 +- parm/gdas/staging/snow_var_bkg.yaml.j2 | 1 + scripts/exglobal_snow_analysis.py | 2 +- sorc/gdas.cd | 2 +- ush/python/pygfs/task/snow_analysis.py | 134 ++++++++----------------- 5 files changed, 44 insertions(+), 101 deletions(-) diff --git a/parm/gdas/staging/snow_berror.yaml.j2 b/parm/gdas/staging/snow_berror.yaml.j2 index e6c5e41609..42ed94e138 100644 --- a/parm/gdas/staging/snow_berror.yaml.j2 +++ b/parm/gdas/staging/snow_berror.yaml.j2 @@ -1,8 +1,4 @@ -{% set fname_list = ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4'] %} - mkdir: - '{{ DATA }}/berror' copy: -{% for fname in fname_list %} -- ['{{ HOMEgfs }}/fix/gdas/gsibec/{{ CASE_ANL }}/{{ fname }}', '{{ DATA }}/berror'] -{% endfor %} +- ['{{ HOMEgfs }}/fix/gdas/snow/snow_bump_nicas_300km_fakelevels_nicas.nc', '{{ DATA }}/berror'] diff --git a/parm/gdas/staging/snow_var_bkg.yaml.j2 b/parm/gdas/staging/snow_var_bkg.yaml.j2 index 920817b1db..d629d9f23d 100644 --- a/parm/gdas/staging/snow_var_bkg.yaml.j2 +++ b/parm/gdas/staging/snow_var_bkg.yaml.j2 @@ -4,4 +4,5 @@ copy: - ['{{ COM_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/'] {% for tile in range(1, ntiles+1) %} - ['{{ COM_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc', '{{ DATA }}/bkg/'] +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] {% endfor %} \ No newline at end of file diff --git a/scripts/exglobal_snow_analysis.py b/scripts/exglobal_snow_analysis.py index 7f059149c7..2a292db3b8 100755 --- a/scripts/exglobal_snow_analysis.py +++ b/scripts/exglobal_snow_analysis.py @@ -18,7 +18,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the snow analysis task - SnowAnl = SnowAnalysis(config) + SnowAnl = SnowAnalysis(config, 'snowanl') # Initialize JEDI 2DVar snow analysis SnowAnl.initialize_jedi() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 51aec43378..ba093508c0 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 51aec433784584fbd30a8707a8736c09006fbc27 +Subproject commit ba093508c0fb0971ba9116359d83f6df197bbd25 diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index dc222636e1..95d02c3b76 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -114,7 +114,6 @@ def initialize_analysis(self) -> None: This includes: - staging model backgrounds - staging observation files - - preprocessing IMS snow cover - staging FV3-JEDI fix files - staging B error files - creating output directories @@ -135,6 +134,29 @@ def initialize_analysis(self) -> None: FileHandler(bkg_staging_dict).sync() logger.debug(f"Background files:\n{pformat(bkg_staging_dict)}") + # stage observations + logger.info(f"Staging list of observation files generated from JEDI config") + obs_dict = self.jedi.get_obs_dict(self.task_config) + FileHandler(obs_dict).sync() + logger.debug(f"Observation files:\n{pformat(obs_dict)}") + + # stage GTS bufr2ioda mapping YAML files + logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_SNOW_STAGE_YAML}") + gts_mapping_list = parse_j2yaml(self.task_config.GTS_SNOW_STAGE_YAML, self.task_config) + FileHandler(gts_mapping_list).sync() + + # stage FV3-JEDI fix files + logger.info(f"Staging 
JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_dict = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_dict).sync() + logger.debug(f"JEDI fix files:\n{pformat(jedi_fix_dict)}") + + # staging B error files + logger.info("Stage files for static background error") + berror_staging_dict = parse_j2yaml(self.task_config.BERROR_STAGING_YAML, self.task_config) + FileHandler(berror_staging_dict).sync() + logger.debug(f"Background error files:\n{pformat(berror_staging_dict)}") + # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ @@ -243,107 +265,31 @@ def prepare_IMS(self) -> None: FileHandler(prep_ims_config.ims2ioda).sync() @logit(logger) - def initialize(self) -> None: - """Initialize method for snow analysis - This method: - - creates artifacts in the DATA directory by copying fix files - - creates the JEDI LETKF yaml from the template - - stages backgrounds, observations and ensemble members + def execute(self, aprun_cmd: str, jedi_args: Optional[str] = None) -> None: + """Run JEDI executable + + This method will run JEDI executables for the global snow analysis Parameters ---------- - self : Analysis - Instance of the SnowAnalysis object - """ - - super().initialize() - - # create a temporary dict of all keys needed in this method - localconf = AttrDict() - keys = ['PARMgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', - 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] - for key in keys: - localconf[key] = self.task_config[key] - - # Make member directories in DATA for background - dirlist = [] - for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1): - dirlist.append(os.path.join(localconf.DATA, 'bkg', f'mem{imem:03d}')) - FileHandler({'mkdir': dirlist}).sync() - - # stage fix files - logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") - jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) - FileHandler(jedi_fix_list).sync() + aprun_cmd : str + Run command for JEDI application on HPC system + jedi_args : List + List of additional optional arguments for JEDI application - # stage backgrounds - logger.info("Staging ensemble backgrounds") - FileHandler(self.get_ens_bkg_dict(localconf)).sync() - - # stage GTS bufr2ioda mapping YAML files - logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_SNOW_STAGE_YAML}") - gts_mapping_list = parse_j2yaml(self.task_config.GTS_SNOW_STAGE_YAML, localconf) - FileHandler(gts_mapping_list).sync() - - # Write out letkfoi YAML file - save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) - logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}") - - # need output dir for diags and anl - logger.info("Create empty output [anl, diags] directories to receive output from executable") - newdirs = [ - os.path.join(localconf.DATA, "anl"), - os.path.join(localconf.DATA, "diags"), - ] - FileHandler({'mkdir': newdirs}).sync() - - @logit(logger) - def execute(self) -> None: - """Run a series of tasks to create Snow analysis - This method: - - creates an 2 member ensemble - - runs the JEDI LETKF executable to produce increments - - creates analysis from increments - - Parameters + Returns ---------- - self : Analysis - Instance of the SnowAnalysis object + None """ - # create a temporary dict of all keys needed in this method - localconf = AttrDict() - keys = ['HOMEgfs', 'DATA', 'current_cycle', - 'COM_ATMOS_RESTART_PREV', 
'COM_SNOW_ANALYSIS', 'APREFIX', - 'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'OCNRES', 'ntiles', - 'APRUN_SNOWANL', 'JEDIEXE', 'jedi_yaml', 'DOIAU', 'SNOW_WINDOW_BEGIN', - 'APPLY_INCR_NML_TMPL', 'APPLY_INCR_EXE', 'APRUN_APPLY_INCR'] - for key in keys: - localconf[key] = self.task_config[key] - - logger.info("Creating ensemble") - self.create_ensemble(localconf.SNOWDEPTHVAR, - localconf.BESTDDEV, - AttrDict({key: localconf[key] for key in ['DATA', 'ntiles', 'current_cycle']})) - - logger.info("Running JEDI LETKF") - exec_cmd = Executable(localconf.APRUN_SNOWANL) - exec_name = os.path.join(localconf.DATA, 'gdas.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('fv3jedi') - exec_cmd.add_default_arg('localensembleda') - exec_cmd.add_default_arg(localconf.jedi_yaml) - - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") + if jedi_args: + logger.info(f"Executing {self.jedi.exe} {' '.join(jedi_args)} {self.jedi.yaml}") + else: + logger.info(f"Executing {self.jedi.exe} {self.jedi.yaml}") - logger.info("Creating analysis from backgrounds and increments") - self.add_increments(localconf) + self.jedi.execute(self.task_config, aprun_cmd, jedi_args) + #logger.info("Creating analysis from backgrounds and increments") + #self.add_increments(localconf) @logit(logger) def finalize(self) -> None: From e55858e24603423676592dae58e6d8978a20094a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 16 Oct 2024 13:29:11 +0000 Subject: [PATCH 063/157] update wxflow hash --- sorc/wxflow | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/wxflow b/sorc/wxflow index e1ef697430..799d55b30e 160000 --- a/sorc/wxflow +++ b/sorc/wxflow @@ -1 +1 @@ -Subproject commit e1ef697430c09d2b1a0560f21f11c7a32ed5f3e2 +Subproject commit 799d55b30ec75f6d8a3c39ec8281dc6b28069a07 From b83ff6e59779cc68d7e143d2404891b717ff510f Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 16 Oct 2024 19:18:54 +0000 Subject: [PATCH 064/157] Save progress --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/marine_bmat.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 52bc8d5ef7..9b2eabd108 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 52bc8d5ef7ad436e07da6baf5f15bbd8dac03104 +Subproject commit 9b2eabd10879357c3354623306b2d516becdab03 diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index a6384f38b9..748197a02c 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -182,7 +182,8 @@ def initialize(self: Task) -> None: os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() # initialize JEDI applications - self.jedi['gridgen'].initialize(self.task_config) + self.jedi['gridgen'].initialize(self.task_config) + logger.error('foobar') # Test self.jedi['soca_diagb'].initialize(self.task_config) self.jedi['soca_parameters_diffusion_vt'].initialize(self.task_config) self.jedi['soca_setcorscales'].initialize(self.task_config) @@ -221,10 +222,11 @@ def execute(self, aprun_cmd: str) -> None: This method will generate the full B-matrix according to the configuration. 
""" self.jedi['gridgen'].execute(aprun_cmd) # TODO: This should be optional in case the geometry file was staged - self.execute_vtscales() - self.jedi['soca_parameters_diffusion_vt'].execute(aprun_cmd) + self.jedi['soca_diagb'].execute(aprun_cmd) self.jedi['soca_setcorscales'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales self.jedi['soca_parameters_diffusion_hz'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales + self.execute_vtscales() + self.jedi['soca_parameters_diffusion_vt'].execute(aprun_cmd) if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: self.jedi['soca_ensb'].execute(self.task_config) # TODO: refactor this from the old scripts self.jedi['soca_ensweights'].execute(self.task_config) # TODO: refactor this from the old scripts From 0e5f08a7d14e7133030c4e49fc8c59ba8db05a0d Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 16 Oct 2024 20:59:53 +0000 Subject: [PATCH 065/157] snow 2dvar completes --- scripts/exglobal_snow_analysis.py | 3 + sorc/gdas.cd | 2 +- ush/python/pygfs/jedi/jedi.py | 2 +- ush/python/pygfs/task/snow_analysis.py | 223 +++++++------------------ 4 files changed, 68 insertions(+), 162 deletions(-) diff --git a/scripts/exglobal_snow_analysis.py b/scripts/exglobal_snow_analysis.py index 2a292db3b8..eec652ec2f 100755 --- a/scripts/exglobal_snow_analysis.py +++ b/scripts/exglobal_snow_analysis.py @@ -27,5 +27,8 @@ # Execute JEDI snow analysis SnowAnl.execute(config.APRUN_SNOWANL, ['fv3jedi', 'variational']) + # Add increments + SnowAnl.add_increments() + # Finalize JEDI snow analysis SnowAnl.finalize() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index ba093508c0..6abb37c7e8 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit ba093508c0fb0971ba9116359d83f6df197bbd25 +Subproject commit 6abb37c7e88d9f9d017aecf78e531e49d1388fee diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 415a0a3c08..935cdf2381 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -185,7 +185,7 @@ def get_obs_dict(self, task_config: AttrDict) -> Dict[str, Any]: copylist.append([os.path.join(task_config.COM_OBS, basename), obfile]) obs_dict = { 'mkdir': [os.path.join(task_config.DATA, 'obs')], - 'copy': copylist + 'copy_opt': copylist } return obs_dict diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 95d02c3b76..e301959c29 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -4,6 +4,9 @@ from logging import getLogger from typing import Dict, List, Optional, Any from pprint import pformat +import glob +import gzip +import tarfile import numpy as np from netCDF4 import Dataset @@ -306,18 +309,41 @@ def finalize(self) -> None: Instance of the SnowAnalysis object """ - logger.info("Create diagnostic tarball of diag*.nc4 files") - statfile = os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat.tgz") - self.tgz_diags(statfile, self.task_config.DATA) - - logger.info("Copy full YAML to COM") - src = os.path.join(self.task_config['DATA'], f"{self.task_config.APREFIX}letkfoi.yaml") - dest = os.path.join(self.task_config.COM_CONF, f"{self.task_config.APREFIX}letkfoi.yaml") - yaml_copy = { - 'mkdir': [self.task_config.COM_CONF], - 'copy': [[src, dest]] - } - FileHandler(yaml_copy).sync() + # ---- tar up diags + # path of output tar statfile + snowstat = 
os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc')) + + logger.info(f"Compressing {len(diags)} diag files to {snowstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {snowstat} with {len(diags)} gzipped diag files") + with tarfile.open(snowstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # get list of yamls to copy to ROTDIR + yamls = glob.glob(os.path.join(self.task_config.DATA, '*snow*yaml')) + + # copy full YAML from executable to ROTDIR + for src in yamls: + yaml_base = os.path.splitext(os.path.basename(src))[0] + dest_yaml_name = f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.{yaml_base}.yaml" + dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, dest_yaml_name) + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() logger.info("Copy analysis to COM") bkgtimes = [] @@ -345,166 +371,43 @@ def finalize(self) -> None: inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() - @staticmethod @logit(logger) - def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: - """Compile a dictionary of model background files to copy - - This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) - that are needed for global snow DA and returns said dictionary for use by the FileHandler class. - - Parameters - ---------- - config: Dict - Dictionary of key-value pairs needed in this method - Should contain the following keys: - COM_ATMOS_RESTART_PREV - DATA - current_cycle - ntiles - - Returns - ---------- - bkg_dict: Dict - a dictionary containing the list of model background files to copy for FileHandler - """ - # NOTE for now this is FV3 RESTART files and just assumed to be fh006 - - # get FV3 sfc_data RESTART files, this will be a lot simpler when using history files - rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) # for now, option later? - run_dir = os.path.join(config.DATA, 'bkg') - - # Start accumulating list of background files to copy - bkglist = [] - - # snow DA needs coupler - basename = f'{to_fv3time(config.current_cycle)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - # snow DA only needs sfc_data - for ftype in ['sfc_data']: - template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - bkg_dict = { - 'mkdir': [run_dir], - 'copy': bkglist - } - return bkg_dict - - @staticmethod - @logit(logger) - def get_ens_bkg_dict(config: Dict) -> Dict: - """Compile a dictionary of model background files to copy for the ensemble - Note that a "Fake" 2-member ensemble backgroud is being created by copying FV3 RESTART files (coupler, sfc_data) - from the deterministic background to DATA/bkg/mem001, 002. 
- - Parameters - ---------- - config: Dict - Dictionary of key-value pairs needed in this method - Should contain the following keys: - COM_ATMOS_RESTART_PREV - DATA - current_cycle - ntiles - - Returns - ---------- - bkg_dict: Dict - a dictionary containing the list of model background files to copy for FileHandler - """ - - dirlist = [] - bkglist = [] - - # get FV3 sfc_data RESTART files; Note an ensemble is being created - rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) - - for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1): - memchar = f"mem{imem:03d}" - - run_dir = os.path.join(config.DATA, 'bkg', memchar, 'RESTART') - dirlist.append(run_dir) - - # Snow DA needs coupler - basename = f'{to_fv3time(config.current_cycle)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - # Snow DA only needs sfc_data - for ftype in ['sfc_data']: - template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - bkg_dict = { - 'mkdir': dirlist, - 'copy': bkglist - } - - return bkg_dict - - @staticmethod - @logit(logger) - def add_increments(config: Dict) -> None: + def add_increments(self) -> None: """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds Parameters ---------- - config: Dict - Dictionary of key-value pairs needed in this method - Should contain the following keys: - HOMEgfs - COM_ATMOS_RESTART_PREV - DATA - current_cycle - CASE - OCNRES - ntiles - APPLY_INCR_NML_TMPL - APPLY_INCR_EXE - APRUN_APPLY_INCR - DOIAU - SNOW_WINDOW_BEGIN - - Raises - ------ - OSError - Failure due to OS issues - WorkflowException - All other exceptions + self : Analysis + Instance of the SnowAnalysis object """ # need backgrounds to create analysis from increments after LETKF logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") bkgtimes = [] - if config.DOIAU: + if self.task_config.DOIAU: # want analysis at beginning and middle of window - bkgtimes.append(config.SNOW_WINDOW_BEGIN) - bkgtimes.append(config.current_cycle) + bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) + bkgtimes.append(self.task_config.current_cycle) anllist = [] for bkgtime in bkgtimes: template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): + for itile in range(1, self.task_config.ntiles + 1): filename = template.format(tilenum=itile) - src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) - dest = os.path.join(config.DATA, "anl", filename) + src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, filename) + dest = os.path.join(self.task_config.DATA, "anl", filename) anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() - if config.DOIAU: + if self.task_config.DOIAU: logger.info("Copying increments to beginning of window") - template_in = f'snowinc.{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' - template_out = f'snowinc.{to_fv3time(config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' + template_in = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + template_out = f'snowinc.{to_fv3time(self.task_config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' inclist = [] - for itile in range(1, config.ntiles + 1): + for itile in range(1, self.task_config.ntiles + 1): filename_in = 
template_in.format(tilenum=itile) filename_out = template_out.format(tilenum=itile) - src = os.path.join(config.DATA, 'anl', filename_in) - dest = os.path.join(config.DATA, 'anl', filename_out) + src = os.path.join(self.task_config.DATA, 'anl', filename_in) + dest = os.path.join(self.task_config.DATA, 'anl', filename_out) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() @@ -512,31 +415,31 @@ def add_increments(config: Dict) -> None: for bkgtime in bkgtimes: logger.info("Processing analysis valid: {bkgtime}") logger.info("Create namelist for APPLY_INCR_EXE") - nml_template = config.APPLY_INCR_NML_TMPL + nml_template = self.task_config.APPLY_INCR_NML_TMPL nml_config = { 'current_cycle': bkgtime, - 'CASE': config.CASE, - 'DATA': config.DATA, - 'HOMEgfs': config.HOMEgfs, - 'OCNRES': config.OCNRES, + 'CASE': self.task_config.CASE, + 'DATA': self.task_config.DATA, + 'HOMEgfs': self.task_config.HOMEgfs, + 'OCNRES': self.task_config.OCNRES, } nml_data = Jinja(nml_template, nml_config).render logger.debug(f"apply_incr_nml:\n{nml_data}") - nml_file = os.path.join(config.DATA, "apply_incr_nml") + nml_file = os.path.join(self.task_config.DATA, "apply_incr_nml") with open(nml_file, "w") as fho: fho.write(nml_data) logger.info("Link APPLY_INCR_EXE into DATA/") - exe_src = config.APPLY_INCR_EXE - exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) + exe_src = self.task_config.APPLY_INCR_EXE + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) if os.path.exists(exe_dest): rm_p(exe_dest) os.symlink(exe_src, exe_dest) # execute APPLY_INCR_EXE to create analysis files - exe = Executable(config.APRUN_APPLY_INCR) - exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) + exe = Executable(self.task_config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(self.task_config.DATA, os.path.basename(exe_src))) logger.info(f"Executing {exe}") try: exe() From f7dc7eaccfe6e7d7f5f96a81c0b7a03e98200059 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 17 Oct 2024 14:39:38 +0000 Subject: [PATCH 066/157] start of ensemble mean snow analysis --- jobs/JGDAS_ENKF_SNOW_ANALYSIS | 59 ++++++++++++++++++++++++++++ jobs/rocoto/esnowanl.sh | 18 +++++++++ scripts/exgdas_enkf_snow_analysis.py | 38 ++++++++++++++++++ workflow/applications/gfs_cycled.py | 5 ++- workflow/rocoto/gfs_tasks.py | 25 +++++++++++- workflow/rocoto/tasks.py | 2 +- 6 files changed, 143 insertions(+), 4 deletions(-) create mode 100755 jobs/JGDAS_ENKF_SNOW_ANALYSIS create mode 100755 jobs/rocoto/esnowanl.sh create mode 100755 scripts/exgdas_enkf_snow_analysis.py diff --git a/jobs/JGDAS_ENKF_SNOW_ANALYSIS b/jobs/JGDAS_ENKF_SNOW_ANALYSIS new file mode 100755 index 0000000000..f0d3610bc5 --- /dev/null +++ b/jobs/JGDAS_ENKF_SNOW_ANALYSIS @@ -0,0 +1,59 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDUMP="gdas" +export GDUMP + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL \ + COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ + COMOUT_CONF:COM_CONF_TMPL +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + +mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" + +for imem in $(seq 1 "${NMEM_ENS}"); do + memchar="mem$(printf %03i "${imem}")" + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + mkdir -p "${COMOUT_SNOW_ANALYSIS}" +done + +############################################################### +# Run relevant script + +EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exgdas_enkf_snow_analysis.py} +${EXSCRIPT} +status=$? +(( status != 0 )) && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/rocoto/esnowanl.sh b/jobs/rocoto/esnowanl.sh new file mode 100755 index 0000000000..1e488f8618 --- /dev/null +++ b/jobs/rocoto/esnowanl.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="esnowanl" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_ENKF_SNOW_ANALYSIS" +status=$? 
+exit "${status}"
diff --git a/scripts/exgdas_enkf_snow_analysis.py b/scripts/exgdas_enkf_snow_analysis.py
new file mode 100755
index 0000000000..82ce3d3c09
--- /dev/null
+++ b/scripts/exgdas_enkf_snow_analysis.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+# exgdas_enkf_snow_analysis.py
+# This script creates a SnowEnsAnalysis class,
+# which will compute the ensemble mean of the snow forecast,
+# run a 2DVar analysis,
+# then recenter the ensemble mean to the
+# deterministic analysis and provide increments
+# to create an ensemble of snow analyses
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.snowens_analysis import SnowEnsAnalysis
+
+# Initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as a python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the snow ensemble analysis task
+    SnowEnsAnl = SnowEnsAnalysis(config, 'snowanl')
+
+    # Initialize JEDI 2DVar snow analysis
+    SnowEnsAnl.initialize_jedi()
+    SnowEnsAnl.initialize_analysis()
+
+    # anl = SnowEnsAnalysis(config)
+    # anl.initialize()
+    # anl.genWeights()
+    # anl.genMask()
+    # anl.regridDetBkg()
+    # anl.regridDetInc()
+    # anl.recenterEns()
+    # anl.addEnsIncrements()
+    # anl.finalize()
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 19f4dd607b..03abcd806d 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -115,7 +115,7 @@ def _get_app_configs(self):
         if self.do_jedisnowda:
             configs += ['prepsnowobs', 'snowanl']
             if self.do_hybvar:
-                configs += ['esnowrecen']
+                configs += ['esnowanl', 'esnowrecen']

         if self.do_mos:
             configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep',
@@ -172,7 +172,7 @@ def get_task_names(self):
             hybrid_tasks += ['eobs', 'eupd', 'echgres']
             hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg']
             if self.do_jedisnowda:
-                hybrid_tasks += ['esnowrecen']
+                hybrid_tasks += ['esnowanl', 'esnowrecen']
             hybrid_after_eupd_tasks += ['stage_ic', 'ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup']

         # Collect all "gdas" cycle tasks
@@ -303,6 +303,7 @@ def get_task_names(self):
         if self.do_hybvar and 'gfs' in self.eupd_runs:
             enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks
             enkfgfs_tasks.remove("echgres")
+            enkfgfs_tasks.remove("esnowanl")
             enkfgfs_tasks.remove("esnowrecen")
             tasks['enkfgfs'] = enkfgfs_tasks
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 7c56f25583..feeaf2fd8f 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -611,7 +611,7 @@ def snowanl(self):
         task = rocoto.create_task(task_dict)
         return task

-    def esnowrecen(self):
+    def esnowanl(self):
         deps = []
         dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_prepsnowobs'}
         deps.append(rocoto.add_dependency(dep_dict))
@@ -622,6 +622,29 @@ def esnowanl(self):
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)

+        resources = self.get_resource('esnowanl')
+        task_name = f'{self.run}_esnowanl'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': self.envars,
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/esnowanl.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+        return task
+
+    def esnowrecen(self):
+
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'{self.run}_esnowanl'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps)
+
         resources = self.get_resource('esnowrecen')
         task_name = f'{self.run}_esnowrecen'
         task_dict = {'task_name': task_name,
diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index 8a32827377..78cc7435bf 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -20,7 +20,7 @@ class Tasks:
                    'eobs', 'eomg', 'epos', 'esfc', 'eupd',
                    'atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal',
                    'aeroanlinit', 'aeroanlvar', 'aeroanlfinal', 'aeroanlgenb',
-                   'prepsnowobs', 'snowanl', 'esnowrecen',
+                   'prepsnowobs', 'snowanl', 'esnowanl', 'esnowrecen',
                    'fcst',
                    'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp',
                    'atmos_prod', 'ocean_prod', 'ice_prod',

From 00adfeadda43450713e3604d44ec6b17cff82d46 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Tue, 22 Oct 2024 13:03:47 +0000
Subject: [PATCH 060/157] Update GDAS hash to develop

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index e024564f72..93e7ec60bb 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit e024564f72e8b8b617e2a6a1cc06053e6dfb5786
+Subproject commit 93e7ec60bbc354a3db42d174eb59f8ed1a170f48

From 3c98b82b4e6ba05a5cbe9187f15e6d86fd47b961 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Tue, 22 Oct 2024 13:28:45 +0000
Subject: [PATCH 068/157] Update GDAS hash

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 9b2eabd108..136bcc09a4 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 9b2eabd10879357c3354623306b2d516becdab03
+Subproject commit 136bcc09a4994b7b15dd67aa83ae26543c7f9fae

From b12a375fb1f94e1c530145718be353c7c99d5cb7 Mon Sep 17 00:00:00 2001
From: Jiarui Dong
Date: Tue, 22 Oct 2024 20:48:26 -0400
Subject: [PATCH 069/157] Update to use taskvalid for managing the rocoto dependency.

---
 workflow/rocoto/gfs_tasks.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index f2892816a8..90389a979b 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -590,12 +590,19 @@ def prepsnowcover(self):

     def snowanl(self):

         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
-        deps.append(rocoto.add_dependency(dep_dict))
         dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowcover'}
         deps.append(rocoto.add_dependency(dep_dict))
+
+        deps2 = []
+        dep_dict = {'type': 'taskvalid', 'name': f'{self.run}prepsnowcover', 'condition': 'not'}
+        deps2.append(rocoto.add_dependency(dep_dict))
+        dep_dict = {'type': 'task', 'name': f'{self.run}prep'}
+        deps2.append(rocoto.add_dependency(dep_dict))
+        deps.append(rocoto.create_dependency(dep_condition='and', dep=deps2))
+
         dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
+
         resources = self.get_resource('snowanl')
         task_name = f'{self.run}_snowanl'
         task_dict = {'task_name': task_name,

From 679e57edd42fd26f1f4b96c2e3b657eeecc7a4ae Mon Sep 17 00:00:00 2001
From: Jiarui Dong
Date: Tue, 22 Oct 2024 20:53:55 -0400
Subject: [PATCH 070/157] Fix the pynorms error.
--- workflow/rocoto/gfs_tasks.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 90389a979b..148fd9f14c 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -592,17 +592,14 @@ def snowanl(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowcover'} deps.append(rocoto.add_dependency(dep_dict)) - deps2 = [] dep_dict = {'type': 'taskvalid', 'name': f'{self.run}prepsnowcover', 'condition': 'not'} deps2.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}prep'} deps2.append(rocoto.add_dependency(dep_dict)) deps.append(rocoto.create_dependency(dep_condition='and', dep=deps2)) - dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) - resources = self.get_resource('snowanl') task_name = f'{self.run}_snowanl' task_dict = {'task_name': task_name, From d97480a7c90395b71db193a3bdd1f059c98b90b7 Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Wed, 23 Oct 2024 07:15:00 -0400 Subject: [PATCH 071/157] Fix an error. --- workflow/rocoto/gfs_tasks.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 148fd9f14c..12cdd24a6f 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -571,7 +571,7 @@ def prepsnowcover(self): dependencies = rocoto.create_dependency(dep=deps) resources = self.get_resource('prepsnowcover') - task_name = f'{self.run}prepsnowcover' + task_name = f'{self.run}_prepsnowcover' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, @@ -590,14 +590,16 @@ def prepsnowcover(self): def snowanl(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}prepsnowcover'} + dep_dict = {'type': 'task', 'name': f'{self.run}_prepsnowcover'} deps.append(rocoto.add_dependency(dep_dict)) + deps2 = [] - dep_dict = {'type': 'taskvalid', 'name': f'{self.run}prepsnowcover', 'condition': 'not'} + dep_dict = {'type': 'taskvalid', 'name': f'{self.run}_prepsnowcover', 'condition': 'not'} deps2.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{self.run}prep'} + dep_dict = {'type': 'task', 'name': f'{self.run}_prep'} deps2.append(rocoto.add_dependency(dep_dict)) deps.append(rocoto.create_dependency(dep_condition='and', dep=deps2)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) resources = self.get_resource('snowanl') From cc74aa4be96e87041206bc6f703d8278728b99ad Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 15:59:22 +0000 Subject: [PATCH 072/157] Bugfixes --- parm/config/gfs/config.marineanl | 2 +- ush/python/pygfs/task/marine_bmat.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/parm/config/gfs/config.marineanl b/parm/config/gfs/config.marineanl index cf056387ca..273dec9178 100644 --- a/parm/config/gfs/config.marineanl +++ b/parm/config/gfs/config.marineanl @@ -30,7 +30,7 @@ export JCB_ALGO_ENSWEIGHTS="soca_ensweights" export JEDIEXE_GRIDGEN="${EXECgfs}/gdas_soca_gridgen.x" export JEDIEXE_DIAGB="${EXECgfs}/gdas_soca_diagb.x" -export JEDIEXE_SETCORSCALES="${EXECgfs}/soca_setcorscales.x" +export JEDIEXE_SETCORSCALES="${EXECgfs}/gdas_soca_setcorscales.x" export JEDIEXE_PARAMETERS_DIFFUSION_HZ="${EXECgfs}/gdas_soca_error_covariance_toolbox.x" export JEDIEXE_PARAMETERS_DIFFUSION_VT="${EXECgfs}/gdas_soca_error_covariance_toolbox.x" export JEDIEXE_ENSB="${EXECgfs}/gdas_ens_handler.x" diff --git 
a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 748197a02c..69bdbfbedd 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -175,6 +175,11 @@ def initialize(self: Task) -> None: bkg_list = parse_j2yaml(self.task_config.MARINE_DET_STAGE_BKG_YAML_TMPL, self.task_config) FileHandler(bkg_list).sync() + # stage the soca utility yamls (fields and ufo mapping yamls) + logger.info(f"Staging SOCA utility yaml files") + soca_utility_list = parse_j2yaml(self.task_config.MARINE_UTILITY_YAML_TMPL, self.task_config) + FileHandler(soca_utility_list).sync() + # initialize vtscales python script vtscales_config = self.jedi['soca_parameters_diffusion_vt'].render_jcb(self.task_config, 'soca_vtscales') save_as_yaml(vtscales_config, os.path.join(self.task_config.DATA, 'soca_vtscales.yaml')) @@ -183,7 +188,6 @@ def initialize(self: Task) -> None: # initialize JEDI applications self.jedi['gridgen'].initialize(self.task_config) - logger.error('foobar') # Test self.jedi['soca_diagb'].initialize(self.task_config) self.jedi['soca_parameters_diffusion_vt'].initialize(self.task_config) self.jedi['soca_setcorscales'].initialize(self.task_config) From c45544580a069aadb1e332aec037cc52459e62a4 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 15:59:47 +0000 Subject: [PATCH 073/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 136bcc09a4..a69df434d6 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 136bcc09a4994b7b15dd67aa83ae26543c7f9fae +Subproject commit a69df434d678419ca36b813f4f4c2836db5d28d5 From 2dc75c72aed7227d34793a1417aa16c25e07bb94 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 16:06:32 +0000 Subject: [PATCH 074/157] pynorms --- ush/python/pygfs/task/marine_bmat.py | 38 ++++++++++++++-------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 69bdbfbedd..f4490227ec 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -56,11 +56,11 @@ def __init__(self, config): # Create dictionary of Jedi objects self.jedi = AttrDict() - + # gridgen self.jedi['gridgen'] = Jedi(AttrDict( { - 'yaml_name': 'gridgen', + 'yaml_name': 'gridgen', 'rundir': self.task_config.DATA, 'exe_src': self.task_config.JEDIEXE_GRIDGEN, 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, @@ -95,7 +95,7 @@ def __init__(self, config): 'jedi_args': None } )) - + # soca_setcorscales self.jedi['soca_setcorscales'] = Jedi(AttrDict( { @@ -134,7 +134,7 @@ def __init__(self, config): 'jedi_args': None } )) - + # soca_ensweights self.jedi['soca_ensb'] = Jedi(AttrDict( { @@ -175,23 +175,23 @@ def initialize(self: Task) -> None: bkg_list = parse_j2yaml(self.task_config.MARINE_DET_STAGE_BKG_YAML_TMPL, self.task_config) FileHandler(bkg_list).sync() - # stage the soca utility yamls (fields and ufo mapping yamls) + # stage the soca utility yamls (fields and ufo mapping yamls) logger.info(f"Staging SOCA utility yaml files") soca_utility_list = parse_j2yaml(self.task_config.MARINE_UTILITY_YAML_TMPL, self.task_config) FileHandler(soca_utility_list).sync() - + # initialize vtscales python script vtscales_config = self.jedi['soca_parameters_diffusion_vt'].render_jcb(self.task_config, 'soca_vtscales') save_as_yaml(vtscales_config, os.path.join(self.task_config.DATA, 'soca_vtscales.yaml')) 
FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC), os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() - + # initialize JEDI applications self.jedi['gridgen'].initialize(self.task_config) - self.jedi['soca_diagb'].initialize(self.task_config) - self.jedi['soca_parameters_diffusion_vt'].initialize(self.task_config) + self.jedi['soca_diagb'].initialize(self.task_config) + self.jedi['soca_parameters_diffusion_vt'].initialize(self.task_config) self.jedi['soca_setcorscales'].initialize(self.task_config) - self.jedi['soca_parameters_diffusion_hz'].initialize(self.task_config) + self.jedi['soca_parameters_diffusion_hz'].initialize(self.task_config) if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: self.jedi['soca_ensb'].initialize(self.task_config) self.jedi['soca_ensweights'].initialize(self.task_config) @@ -200,7 +200,7 @@ def initialize(self: Task) -> None: if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: logger.debug(f"Stage ensemble members for the hybrid background error") mdau.stage_ens_mem(self.task_config) - + # create the symbolic link to the static B-matrix directory link_target = os.path.join(self.task_config.DATAstaticb) link_name = os.path.join(self.task_config.DATA, 'staticb') @@ -212,28 +212,28 @@ def initialize(self: Task) -> None: def execute_vtscales(self: Task) -> None: """Generate the vertical diffusion coefficients """ - # compute the vertical correlation scales based on the MLD + # compute the vertical correlation scales based on the MLD exec_cmd = Executable("python") exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x') exec_cmd.add_default_arg(exec_name) exec_cmd.add_default_arg('soca_vtscales.yaml') - mdau.run(exec_cmd) - + mdau.run(exec_cmd) + @logit(logger) def execute(self, aprun_cmd: str) -> None: """Generate the full B-matrix This method will generate the full B-matrix according to the configuration. 
""" - self.jedi['gridgen'].execute(aprun_cmd) # TODO: This should be optional in case the geometry file was staged + self.jedi['gridgen'].execute(aprun_cmd) # TODO: This should be optional in case the geometry file was staged self.jedi['soca_diagb'].execute(aprun_cmd) - self.jedi['soca_setcorscales'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales - self.jedi['soca_parameters_diffusion_hz'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi['soca_setcorscales'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi['soca_parameters_diffusion_hz'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales self.execute_vtscales() self.jedi['soca_parameters_diffusion_vt'].execute(aprun_cmd) if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - self.jedi['soca_ensb'].execute(self.task_config) # TODO: refactor this from the old scripts - self.jedi['soca_ensweights'].execute(self.task_config) # TODO: refactor this from the old scripts + self.jedi['soca_ensb'].execute(self.task_config) # TODO: refactor this from the old scripts + self.jedi['soca_ensweights'].execute(self.task_config) # TODO: refactor this from the old scripts @logit(logger) def finalize(self: Task) -> None: From ac3d385cceab8c670c48c5517ff627bf10cc0e59 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 16:18:36 +0000 Subject: [PATCH 075/157] Remove comment --- parm/config/gfs/config.marineanl | 1 - 1 file changed, 1 deletion(-) diff --git a/parm/config/gfs/config.marineanl b/parm/config/gfs/config.marineanl index 273dec9178..fc3719f106 100644 --- a/parm/config/gfs/config.marineanl +++ b/parm/config/gfs/config.marineanl @@ -17,7 +17,6 @@ export MARINE_ENSDA_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/ensda/stage_ens_me export MARINE_DET_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/soca_det_bkg_stage.yaml.j2" export JCB_BASE_YAML="${PARMgfs}/gdas/soca/marine-jcb-base.yaml" -#export MARINE_JCB_GDAS_ALGO="${PARMgfs}/gdas/jcb-gdas/algorithm/marine" export JCB_ALGO_GRIDGEN="soca_gridgen" export JCB_ALGO_DIAGB="soca_diagb" From 4cd585c218dd9af0a6790f33f700e1522e0d5cea Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 17:20:52 +0000 Subject: [PATCH 076/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index a69df434d6..b8ce7e1666 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit a69df434d678419ca36b813f4f4c2836db5d28d5 +Subproject commit b8ce7e16662eef62eec57244322aeac3d7e74d9a From 7e4defade6ed7cdaccb5b674c7a694ddf5717d5f Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 17:24:10 +0000 Subject: [PATCH 077/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index b8ce7e1666..a00b9191ce 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit b8ce7e16662eef62eec57244322aeac3d7e74d9a +Subproject commit a00b9191ce5253e984539024ab21ab3c44ece7f4 From 1c0fcf1fd5b43edaf0e79057298ab46408f897ce Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 17:38:48 +0000 Subject: [PATCH 078/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 93e7ec60bb..f1222ec379 160000 --- 
a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 93e7ec60bbc354a3db42d174eb59f8ed1a170f48
+Subproject commit f1222ec37924d567a8d935f0cad1a6a705045e4e

From 97bf8e83c1693ddd78d53b8102f593958478eee1 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Wed, 23 Oct 2024 18:08:49 +0000
Subject: [PATCH 079/157] Add comment blocks to methods

---
 ush/python/pygfs/task/marine_bmat.py | 62 +++++++++++++++++++++++++++-
 1 file changed, 60 insertions(+), 2 deletions(-)

diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py
index f4490227ec..59515bd15a 100644
--- a/ush/python/pygfs/task/marine_bmat.py
+++ b/ush/python/pygfs/task/marine_bmat.py
@@ -25,7 +25,24 @@ class MarineBMat(Task):
     """
     @logit(logger, name="MarineBMat")
     def __init__(self, config):
+        """Constructor for marine B-matrix task
+
+        This method will construct the marine B-matrix task object
+        This includes:
+        - extending the task_config AttrDict to include parameters required for this task
+        - instantiating the Jedi attribute objects
+
+        Parameters
+        ----------
+        config: Dict
+            dictionary object containing task configuration
+
+        Returns
+        ----------
+        None
+        """
         super().__init__(config)
+
         _home_gdas = os.path.join(self.task_config.HOMEgfs, 'sorc', 'gdas.cd')
         _calc_scale_exec = os.path.join(self.task_config.HOMEgfs, 'ush', 'soca', 'calc_scales.py')
         _window_begin = add_to_datetime(self.task_config.current_cycle,
@@ -158,8 +175,17 @@ def initialize(self: Task) -> None:
         - staging SOCA fix files
         - staging static ensemble members (optional)
         - staging ensemble members (optional)
-        - generating the YAML files for the JEDI and GDASApp executables
+        - initializing the soca_vtscales Python script
+        - initializing the JEDI applications
         - creating output directories
+
+        Parameters
+        ----------
+        None
+
+        Returns
+        ----------
+        None
         """

         # stage fix files
@@ -210,7 +236,20 @@ def initialize(self: Task) -> None:

     @logit(logger)
     def execute_vtscales(self: Task) -> None:
-        """Generate the vertical diffusion coefficients
+        """Execute vertical diffusion coefficients generator
+
+        This method will execute a Python script which generates the vertical diffusion coefficients
+        This includes:
+        - constructing the executable object
+        - running the executable object
+
+        Parameters
+        ----------
+        None
+
+        Returns
+        ----------
+        None
         """
         # compute the vertical correlation scales based on the MLD
         exec_cmd = Executable("python")
@@ -224,7 +263,19 @@ def execute(self, aprun_cmd: str) -> None:
         """Generate the full B-matrix

         This method will generate the full B-matrix according to the configuration.
+        This includes:
+        - running all JEDI applications and Python scripts required to generate the B-matrix
+
+        Parameters
+        ----------
+        aprun_cmd: str
+            String comprising the run command for the JEDI executable.
+
+        Returns
+        ----------
+        None
         """
+
         self.jedi['gridgen'].execute(aprun_cmd)  # TODO: This should be optional in case the geometry file was staged
         self.jedi['soca_diagb'].execute(aprun_cmd)
         self.jedi['soca_setcorscales'].execute(aprun_cmd)  # TODO: Make this optional once we've converged on an acceptable set of scales
         self.jedi['soca_parameters_diffusion_hz'].execute(aprun_cmd)  # TODO: Make this optional once we've converged on an acceptable set of scales
         self.execute_vtscales()
         self.jedi['soca_parameters_diffusion_vt'].execute(aprun_cmd)
         if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2:
             self.jedi['soca_ensb'].execute(self.task_config)  # TODO: refactor this from the old scripts
             self.jedi['soca_ensweights'].execute(self.task_config)  # TODO: refactor this from the old scripts

     @logit(logger)
     def finalize(self: Task) -> None:
         """Finalize the global B-matrix job

         This method will finalize the global B-matrix job.
         This includes:
         - copy the generated static, but cycle dependent background error files to the ROTDIR
         - copy the generated YAML file from initialize to the ROTDIR
         - keep the re-balanced ensemble perturbation files in DATAenspert
         - ...
+        Parameters
+        ----------
+        None
+
+        Returns
+        ----------
+        None
         """
         # Copy the soca grid if it was created
         grid_file = os.path.join(self.task_config.DATA, 'soca_gridspec.nc')

From 66129a297e695bf4a205f6fe662016ea79f54300 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Wed, 23 Oct 2024 18:17:29 +0000
Subject: [PATCH 080/157] pynorms

---
 ush/python/pygfs/task/marine_bmat.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py
index 59515bd15a..ca8730a706 100644
--- a/ush/python/pygfs/task/marine_bmat.py
+++ b/ush/python/pygfs/task/marine_bmat.py
@@ -31,7 +31,7 @@ def __init__(self, config):
         This includes:
         - extending the task_config AttrDict to include parameters required for this task
         - instantiating the Jedi attribute objects
-
+
         Parameters
         ----------
         config: Dict
@@ -42,7 +42,7 @@ def __init__(self, config):
         None
         """
         super().__init__(config)
-
+
         _home_gdas = os.path.join(self.task_config.HOMEgfs, 'sorc', 'gdas.cd')
         _calc_scale_exec = os.path.join(self.task_config.HOMEgfs, 'ush', 'soca', 'calc_scales.py')
         _window_begin = add_to_datetime(self.task_config.current_cycle,

From ed78b8c4601a50472e62ddd45821a60821a02378 Mon Sep 17 00:00:00 2001
From: Jiarui Dong
Date: Wed, 23 Oct 2024 14:50:07 -0400
Subject: [PATCH 081/157] Update the name of dependency from prepsnowcover for ens recentering.

---
 workflow/rocoto/gfs_tasks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 12cdd24a6f..fda625a907 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -621,7 +621,7 @@ def snowanl(self):
     def esnowrecen(self):

         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prepsnowcover'}
+        dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_prepsnowcover'}
         deps.append(rocoto.add_dependency(dep_dict))
         dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_snowanl'}
         deps.append(rocoto.add_dependency(dep_dict))

From 74e5efa27034f01971892ae5297dd4093180a2f7 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Wed, 23 Oct 2024 20:14:07 +0000
Subject: [PATCH 082/157] Make jcb rendering method save copies of jcb_config and task_config

---
 ush/python/pygfs/jedi/jedi.py | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py
index f4692e82d5..f4e9ac67c4 100644
--- a/ush/python/pygfs/jedi/jedi.py
+++ b/ush/python/pygfs/jedi/jedi.py
@@ -56,6 +56,14 @@ def __init__(self, config) -> None:
         # Save a copy of jedi_config
         self._jedi_config = self.jedi_config.deepcopy()

+        # Create a dictionary of dictionaries for saving copies of the jcb_config
+        # associated with each algorithm
+        self._jcb_configs = AttrDict()
+
+        # Create a dictionary of dictionaries for saving copies of the task_config
+        # used to render each JCB template
+        self._task_configs_for_jcb = AttrDict()
+
     @logit(logger)
     def initialize(self, task_config: AttrDict) -> None:
         """Initialize JEDI application
@@ -156,9 +164,13 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) ->
         elif 'algorithm' in jcb_config:
             pass
         else:
-            raise KeyError(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " +
+            raise Exception(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " +
                            "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML")

+        # Save copies of
the task_config and jcb_config used to render this JCB template + self._task_configs_for_jcb[jcb_config['algorithm']] = task_config.deepcopy() + self._jcb_configs[jcb_config['algorithm']] = jcb_config.deepcopy() + # Generate JEDI YAML config by rendering JCB config dictionary jedi_input_config = render(jcb_config) From 371c4c097c04436b6af8a9675cdea8fb86f64e11 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 23 Oct 2024 21:06:47 +0000 Subject: [PATCH 083/157] pynorms --- ush/python/pygfs/jedi/jedi.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index f4e9ac67c4..67b4213305 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -58,11 +58,11 @@ def __init__(self, config) -> None: # Create a dictionary of dictionaries for saving copies of the jcb_config # associated with each algorithm - self._jcb_configs = AttrDict() + self._jcb_config_dict = AttrDict() # Create a dictionary of dictionaries for saving copies of the task_config # used to render each JCB template - self._task_configs_for_jcb = AttrDict() + self._task_config_dict = AttrDict() @logit(logger) def initialize(self, task_config: AttrDict) -> None: @@ -165,15 +165,15 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> pass else: raise Exception(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " + - "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML") + "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML") - # Save copies of the task_config and jcb_config used to render this JCB template - self._task_configs_for_jcb[jcb_config['algorithm']] = task_config.deepcopy() - self._jcb_configs[jcb_config['algorithm']] = jcb_config.deepcopy() - # Generate JEDI YAML config by rendering JCB config dictionary jedi_input_config = render(jcb_config) + # Save copies of the task_config and jcb_config used to render this JCB template + self._task_config_dict[jcb_config['algorithm']] = task_config.deepcopy() + self._jcb_config_dict[jcb_config['algorithm']] = jcb_config.deepcopy() + return jedi_input_config @logit(logger) From f710d0ffbec0be6904c7c7985f4d6a4444b9ef17 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 24 Oct 2024 15:51:48 +0000 Subject: [PATCH 084/157] Fix to make sure bkg_list.yaml isn't loaded directly in marine JCB base --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/marine_analysis.py | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index a00b9191ce..1660f1ae50 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit a00b9191ce5253e984539024ab21ab3c44ece7f4 +Subproject commit 1660f1ae50da47a1f6bc0c31c625bd3d185d93ca diff --git a/ush/python/pygfs/task/marine_analysis.py b/ush/python/pygfs/task/marine_analysis.py index 964ea4a0d3..f9fdbb61bb 100644 --- a/ush/python/pygfs/task/marine_analysis.py +++ b/ush/python/pygfs/task/marine_analysis.py @@ -15,7 +15,7 @@ from wxflow import (AttrDict, FileHandler, add_to_datetime, to_timedelta, to_YMD, - parse_j2yaml, + parse_j2yaml, parse_yaml, logit, Executable, Task, @@ -192,7 +192,7 @@ def _prep_variational_yaml(self: Task) -> None: mdau.gen_bkg_list(bkg_path='./bkg', window_begin=self.task_config.MARINE_WINDOW_BEGIN, yaml_name='bkg_list.yaml') - + # Make a copy of the env config before modifying to avoid breaking something else envconfig_jcb = copy.deepcopy(self.task_config) 
logger.info(f"---------------- Prepare the yaml configuration") @@ -225,7 +225,7 @@ def _prep_variational_yaml(self: Task) -> None: jcb_algo_config = YAMLFile(path=jcb_algo_yaml) jcb_algo_config = Template.substitute_structure(jcb_algo_config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig_jcb.get) jcb_algo_config = Template.substitute_structure(jcb_algo_config, TemplateConstants.DOLLAR_PARENTHESES, envconfig_jcb.get) - + # Override base with the application specific config jcb_config = {**jcb_base_config, **jcb_algo_config} @@ -233,6 +233,9 @@ def _prep_variational_yaml(self: Task) -> None: jcb_config['window_begin'] = self.task_config.MARINE_WINDOW_BEGIN.strftime('%Y-%m-%dT%H:%M:%SZ') jcb_config['window_middle'] = self.task_config.MARINE_WINDOW_MIDDLE.strftime('%Y-%m-%dT%H:%M:%SZ') + # Current hack so that this is not done directly in the JCB base yaml + jcb_config['marine_pseudo_model_states'] = parse_yaml('bkg_list.yaml') + # Render the full JEDI configuration file using JCB jedi_config = render(jcb_config) From 215188aeedc246ecab77e63d5a4ba45cb1971f20 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 24 Oct 2024 15:53:38 +0000 Subject: [PATCH 085/157] pynorms --- ush/python/pygfs/task/marine_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/marine_analysis.py b/ush/python/pygfs/task/marine_analysis.py index f9fdbb61bb..0cffb3fdcc 100644 --- a/ush/python/pygfs/task/marine_analysis.py +++ b/ush/python/pygfs/task/marine_analysis.py @@ -192,7 +192,7 @@ def _prep_variational_yaml(self: Task) -> None: mdau.gen_bkg_list(bkg_path='./bkg', window_begin=self.task_config.MARINE_WINDOW_BEGIN, yaml_name='bkg_list.yaml') - + # Make a copy of the env config before modifying to avoid breaking something else envconfig_jcb = copy.deepcopy(self.task_config) logger.info(f"---------------- Prepare the yaml configuration") From cbf1c6f61fca90c5e5cdb5761b211862df8ef558 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 24 Oct 2024 15:54:31 +0000 Subject: [PATCH 086/157] pynorms --- ush/python/pygfs/task/marine_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/marine_analysis.py b/ush/python/pygfs/task/marine_analysis.py index 0cffb3fdcc..71aa6ea67f 100644 --- a/ush/python/pygfs/task/marine_analysis.py +++ b/ush/python/pygfs/task/marine_analysis.py @@ -225,7 +225,7 @@ def _prep_variational_yaml(self: Task) -> None: jcb_algo_config = YAMLFile(path=jcb_algo_yaml) jcb_algo_config = Template.substitute_structure(jcb_algo_config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig_jcb.get) jcb_algo_config = Template.substitute_structure(jcb_algo_config, TemplateConstants.DOLLAR_PARENTHESES, envconfig_jcb.get) - + # Override base with the application specific config jcb_config = {**jcb_base_config, **jcb_algo_config} From 54223bf620cc4b4a16b33957b74aa0ed207407c2 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 24 Oct 2024 18:18:11 +0000 Subject: [PATCH 087/157] pynorms --- ush/python/pygfs/task/marine_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/marine_analysis.py b/ush/python/pygfs/task/marine_analysis.py index 71aa6ea67f..b311c6874f 100644 --- a/ush/python/pygfs/task/marine_analysis.py +++ b/ush/python/pygfs/task/marine_analysis.py @@ -235,7 +235,7 @@ def _prep_variational_yaml(self: Task) -> None: # Current hack so that this is not done directly in the JCB base yaml jcb_config['marine_pseudo_model_states'] = parse_yaml('bkg_list.yaml') - + # 
Render the full JEDI configuration file using JCB jedi_config = render(jcb_config) From 837c420caf7cdb710dac09c821d9ff8e5ef3a54b Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Thu, 24 Oct 2024 14:47:41 -0400 Subject: [PATCH 088/157] Update snow_analysis.py --- ush/python/pygfs/task/snow_analysis.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index e301959c29..e204932673 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -158,7 +158,7 @@ def initialize_analysis(self) -> None: logger.info("Stage files for static background error") berror_staging_dict = parse_j2yaml(self.task_config.BERROR_STAGING_YAML, self.task_config) FileHandler(berror_staging_dict).sync() - logger.debug(f"Background error files:\n{pformat(berror_staging_dict)}") + logger.debug(f"Background error files:\n{pformat(berror_staging_dict)}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -291,8 +291,6 @@ def execute(self, aprun_cmd: str, jedi_args: Optional[str] = None) -> None: logger.info(f"Executing {self.jedi.exe} {self.jedi.yaml}") self.jedi.execute(self.task_config, aprun_cmd, jedi_args) - #logger.info("Creating analysis from backgrounds and increments") - #self.add_increments(localconf) @logit(logger) def finalize(self) -> None: From abdbe31115b34857970c233d67241a2989bc21b5 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 24 Oct 2024 19:27:01 +0000 Subject: [PATCH 089/157] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 1660f1ae50..12643982fa 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 1660f1ae50da47a1f6bc0c31c625bd3d185d93ca +Subproject commit 12643982fade77a47e573da41a90198457e356be From cbe619ca5f9184243068ca692fd4f5d422df61bb Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 25 Oct 2024 12:50:10 +0000 Subject: [PATCH 090/157] Adding back accidentally deleted line --- parm/config/gfs/config.marineanl | 1 + 1 file changed, 1 insertion(+) diff --git a/parm/config/gfs/config.marineanl b/parm/config/gfs/config.marineanl index fc3719f106..8aacc52135 100644 --- a/parm/config/gfs/config.marineanl +++ b/parm/config/gfs/config.marineanl @@ -15,6 +15,7 @@ export SOCA_FIX_YAML_TMPL="${PARMgfs}/gdas/soca/soca_fix_stage_${OCNRES}.yaml.j2 export MARINE_UTILITY_YAML_TMPL="${PARMgfs}/gdas/soca/soca_utils_stage.yaml.j2" export MARINE_ENSDA_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/ensda/stage_ens_mem.yaml.j2" export MARINE_DET_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/soca_det_bkg_stage.yaml.j2" +export MARINE_JCB_GDAS_ALGO="${PARMgfs}/gdas/jcb-gdas/algorithm/marine" export JCB_BASE_YAML="${PARMgfs}/gdas/soca/marine-jcb-base.yaml" From eac7e1fe64829ba8536444280114f9e1009b6f5e Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Fri, 25 Oct 2024 13:14:56 -0400 Subject: [PATCH 091/157] Update to run gdas_prepsnowcover job on every 00Z cycle. 
---
 workflow/rocoto/gfs_cycled_xml.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py
index a73ff17d40..ddee248e61 100644
--- a/workflow/rocoto/gfs_cycled_xml.py
+++ b/workflow/rocoto/gfs_cycled_xml.py
@@ -32,7 +32,7 @@ def get_cycledefs(self):
             sdate_snocvr_str = sdate_snocvr.replace(hour=0, minute=0, second=0).strftime("%Y%m%d%H%M")
             edate_snocvr_str = edate_snocvr.strftime("%Y%m%d%H%M")
             interval_str = timedelta_to_HMS(interval_snocvr)
-            if sdate_snocvr <= edate_snocvr:
+            if sdate_snocvr.date() <= edate_snocvr.date():
                 strings.append(f'\t{sdate_snocvr_str} {edate_snocvr_str} {interval_str}')

         interval_gfs = self._app_config.interval_gfs

From 96254c46b87a1023efe40a898e03b28014e2ced7 Mon Sep 17 00:00:00 2001
From: Jiarui Dong
Date: Fri, 25 Oct 2024 14:33:12 -0400
Subject: [PATCH 092/157] Made changes as reviewer suggested.

---
 workflow/rocoto/gfs_cycled_xml.py | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py
index ddee248e61..65119df5ba 100644
--- a/workflow/rocoto/gfs_cycled_xml.py
+++ b/workflow/rocoto/gfs_cycled_xml.py
@@ -25,14 +25,21 @@ def get_cycledefs(self):
             strings.append(f'\t{sdate_str} {edate_str} {interval_str}')

         if self._app_config.do_jedisnowda:
-            sdate_snocvr = self._base['SDATE']
-            edate_snocvr = self._base['EDATE']
+            sdate = self._base['SDATE']
+            edate_snocvr = self._base['EDATE'].replace(hour=0, minute=0, second=0)
             interval_snocvr = to_timedelta('24H')
-            sdate_snocvr = sdate_snocvr + interval_snocvr
-            sdate_snocvr_str = sdate_snocvr.replace(hour=0, minute=0, second=0).strftime("%Y%m%d%H%M")
+            assim_freq = to_timedelta(f"{self._base['assim_freq']}H")
+
+            is_warm_start = self._base.get('EXP_WARM_START', False)
+            first_full_cycle = sdate if is_warm_start else sdate + assim_freq
+            sdate_snocvr = (first_full_cycle if first_full_cycle.hour == 0 else
+                (first_full_cycle + interval_snocvr).replace(hour=0, minute=0, second=0))
+
+            sdate_snocvr_str = sdate_snocvr.strftime("%Y%m%d%H%M")
             edate_snocvr_str = edate_snocvr.strftime("%Y%m%d%H%M")
             interval_str = timedelta_to_HMS(interval_snocvr)
-            if sdate_snocvr.date() <= edate_snocvr.date():
+
+            if sdate_snocvr <= edate_snocvr:
                 strings.append(f'\t{sdate_snocvr_str} {edate_snocvr_str} {interval_str}')

         interval_gfs = self._app_config.interval_gfs

From a00f9e8a5f0441de028ee13bdff9b99af8039536 Mon Sep 17 00:00:00 2001
From: Jiarui Dong
Date: Fri, 25 Oct 2024 14:43:04 -0400
Subject: [PATCH 093/157] Fix pynorm error.
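
The norm being fixed is the alignment of the continuation line in the
conditional expression added by the previous commit. Assuming the
pynorms check wraps pycodestyle, the under-indented form is flagged
(E128, continuation line under-indented for visual indent), while
aligning the wrapped branch with its opening parenthesis passes:

    # flagged by the style check
    sdate_snocvr = (first_full_cycle if first_full_cycle.hour == 0 else
        (first_full_cycle + interval_snocvr).replace(hour=0, minute=0, second=0))

    # accepted: continuation aligned with the opening parenthesis
    sdate_snocvr = (first_full_cycle if first_full_cycle.hour == 0 else
                    (first_full_cycle + interval_snocvr).replace(hour=0, minute=0, second=0))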
--- workflow/rocoto/gfs_cycled_xml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py index 65119df5ba..1fb59c81e5 100644 --- a/workflow/rocoto/gfs_cycled_xml.py +++ b/workflow/rocoto/gfs_cycled_xml.py @@ -33,7 +33,7 @@ def get_cycledefs(self): is_warm_start = self._base.get('EXP_WARM_START', False) first_full_cycle = sdate if is_warm_start else sdate + assim_freq sdate_snocvr = (first_full_cycle if first_full_cycle.hour == 0 else - (first_full_cycle + interval_snocvr).replace(hour=0, minute=0, second=0)) + (first_full_cycle + interval_snocvr).replace(hour=0, minute=0, second=0)) sdate_snocvr_str = sdate_snocvr.strftime("%Y%m%d%H%M") edate_snocvr_str = edate_snocvr.strftime("%Y%m%d%H%M") From 272bced697216924d18f078e538a8116df18287d Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 28 Oct 2024 14:10:54 +0000 Subject: [PATCH 094/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 12643982fa..9be49de2a5 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 12643982fade77a47e573da41a90198457e356be +Subproject commit 9be49de2a5833ee6e70ee3469e7aa019649b22d3 From db3a91237ddea5e907fb9acc9cd7818c0a900039 Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Mon, 28 Oct 2024 13:58:19 -0400 Subject: [PATCH 095/157] Update the dependency for the enkfgdas_esnowrecen job. --- workflow/rocoto/gfs_tasks.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index fda625a907..1847c1632c 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -623,11 +623,15 @@ def esnowrecen(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_prepsnowcover'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_snowanl'} - deps.append(rocoto.add_dependency(dep_dict)) + + deps2 = [] + dep_dict = {'type': 'taskvalid', 'name': f'{self.run.replace("enkf","")}_prepsnowcover', 'condition': 'not'} + deps2.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'metatask', 'name': f'{self.run}_epmn', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + deps2.append(rocoto.add_dependency(dep_dict)) + deps.append(rocoto.create_dependency(dep_condition='and', dep=deps2)) + + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) resources = self.get_resource('esnowrecen') task_name = f'{self.run}_esnowrecen' From 3d8c9d33c2e89b6d2f80b2b0002e7e700f81b5c7 Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Mon, 28 Oct 2024 14:33:26 -0400 Subject: [PATCH 096/157] Update the esnowrecen job dependency. 
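
This collapses the dependency back to a single AND of the current
cycle's deterministic snow analysis and the previous cycle's ensemble
post metatask. A minimal sketch of what the change below builds, for
run=enkfgdas with a hypothetical 6-hour gdas interval (the resolved
names follow the f-strings in the diff):

    deps = []
    deps.append(rocoto.add_dependency({'type': 'task', 'name': 'gdas_snowanl'}))
    deps.append(rocoto.add_dependency({'type': 'metatask', 'name': 'enkfgdas_epmn',
                                       'offset': '-06:00:00'}))
    # create_dependency ANDs the two entries, rendering a taskdep on
    # gdas_snowanl and an offset metataskdep on enkfgdas_epmn in the XML
    dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)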
--- workflow/rocoto/gfs_tasks.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 1847c1632c..442a7f2624 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -621,17 +621,11 @@ def snowanl(self): def esnowrecen(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_prepsnowcover'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_snowanl'} deps.append(rocoto.add_dependency(dep_dict)) - - deps2 = [] - dep_dict = {'type': 'taskvalid', 'name': f'{self.run.replace("enkf","")}_prepsnowcover', 'condition': 'not'} - deps2.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'metatask', 'name': f'{self.run}_epmn', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} - deps2.append(rocoto.add_dependency(dep_dict)) - deps.append(rocoto.create_dependency(dep_condition='and', dep=deps2)) - - dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('esnowrecen') task_name = f'{self.run}_esnowrecen' From 7148c4e2aac9ec7a24d1889b5df1f8308d08a43b Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Wed, 30 Oct 2024 07:35:50 -0400 Subject: [PATCH 097/157] Update the gfs workflow for prepsnowcover job. --- scripts/exglobal_prep_snowcover.py | 3 ++- workflow/rocoto/gfs_tasks.py | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/scripts/exglobal_prep_snowcover.py b/scripts/exglobal_prep_snowcover.py index c7be33a2cf..2637546e7c 100755 --- a/scripts/exglobal_prep_snowcover.py +++ b/scripts/exglobal_prep_snowcover.py @@ -20,4 +20,5 @@ # Instantiate the snow prepare task SnowAnl = SnowAnalysis(config) - SnowAnl.prepare_IMS() + if SnowAnl.task_config.cyc == 0: + SnowAnl.prepare_IMS() diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 442a7f2624..78582dc324 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -570,13 +570,17 @@ def prepsnowcover(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) + cycledef = 'gdas_prep_snocvr' + if self.run in ['gfs']: + cycledef = self.run + resources = self.get_resource('prepsnowcover') task_name = f'{self.run}_prepsnowcover' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, - 'cycledef': 'gdas_prep_snocvr', + 'cycledef': cycledef, 'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowcover.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', From 8eea61c0a98257482036f5e01ac3aa04838c50c4 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 31 Oct 2024 15:42:11 +0000 Subject: [PATCH 098/157] Remove esnowrecen job --- env/HERA.env | 6 +-- env/HERCULES.env | 6 +-- env/JET.env | 6 +-- env/ORION.env | 6 +-- env/S4.env | 6 +-- env/WCOSS2.env | 6 +-- jobs/JGDAS_ENKF_SNOW_RECENTER | 59 ----------------------- jobs/rocoto/esnowrecen.sh | 18 ------- parm/config/gfs/config.esnowrecen | 8 +-- parm/config/gfs/config.resources | 4 +- scripts/exgdas_enkf_snow_recenter.py | 30 ------------ ush/python/pygfs/task/snowens_analysis.py | 2 +- workflow/applications/gfs_cycled.py | 5 +- workflow/rocoto/gfs_tasks.py | 27 +---------- workflow/rocoto/tasks.py | 2 +- 15 files changed, 29 insertions(+), 162 deletions(-) delete 
mode 100755 jobs/JGDAS_ENKF_SNOW_RECENTER delete mode 100755 jobs/rocoto/esnowrecen.sh delete mode 100755 scripts/exgdas_enkf_snow_recenter.py diff --git a/env/HERA.env b/env/HERA.env index 259461b1ac..197566ae50 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -121,10 +121,10 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" -elif [[ "${step}" = "esnowrecen" ]]; then +elif [[ "${step}" = "esnowanl" ]]; then - export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export NTHREADS_ESNOWANL=${NTHREADSmax} + export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" diff --git a/env/HERCULES.env b/env/HERCULES.env index bed1d11281..be7f91c0e0 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -120,10 +120,10 @@ case ${step} in export APRUN_APPLY_INCR="${launcher} -n 6" ;; - "esnowrecen") + "esnowanl") - export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export NTHREADS_ESNOWANL=${NTHREADSmax} + export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" ;; diff --git a/env/JET.env b/env/JET.env index dbc249d4d6..24103f033f 100755 --- a/env/JET.env +++ b/env/JET.env @@ -104,10 +104,10 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" -elif [[ "${step}" = "esnowrecen" ]]; then +elif [[ "${step}" = "esnowanl" ]]; then - export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export NTHREADS_ESNOWANL=${NTHREADSmax} + export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" diff --git a/env/ORION.env b/env/ORION.env index 06ae2c1a63..9e98f3d82a 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -111,10 +111,10 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" -elif [[ "${step}" = "esnowrecen" ]]; then +elif [[ "${step}" = "esnowanl" ]]; then - export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export NTHREADS_ESNOWANL=${NTHREADSmax} + export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" diff --git a/env/S4.env b/env/S4.env index 5d5ffd23b1..696a76440e 100755 --- a/env/S4.env +++ b/env/S4.env @@ -104,10 +104,10 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" -elif [[ "${step}" = "esnowrecen" ]]; then +elif [[ "${step}" = "esnowanl" ]]; then - export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export NTHREADS_ESNOWANL=${NTHREADSmax} + export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index c67c16f929..9d67d75c30 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -97,10 +97,10 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" -elif [[ "${step}" = "esnowrecen" ]]; then +elif [[ "${step}" = "esnowanl" ]]; then - export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN_default}" + export NTHREADS_ESNOWANL=${NTHREADSmax} + export APRUN_ESNOWANL="${APRUN_default}" export 
APRUN_APPLY_INCR="${launcher} -n 6" diff --git a/jobs/JGDAS_ENKF_SNOW_RECENTER b/jobs/JGDAS_ENKF_SNOW_RECENTER deleted file mode 100755 index 05d46cffc2..0000000000 --- a/jobs/JGDAS_ENKF_SNOW_RECENTER +++ /dev/null @@ -1,59 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowrecen" -c "base esnowrecen" - -############################################## -# Set variables used in the script -############################################## -# Ignore possible spelling error (nothing is misspelled) -# shellcheck disable=SC2153 -GDUMP="gdas" -export GDUMP - -############################################## -# Begin JOB SPECIFIC work -############################################## -# Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ - COMIN_OBS:COM_OBS_TMPL \ - COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ - COMOUT_CONF:COM_CONF_TMPL -MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ - COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL - -mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" - -for imem in $(seq 1 "${NMEM_ENS}"); do - memchar="mem$(printf %03i "${imem}")" - MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ - COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL - mkdir -p "${COMOUT_SNOW_ANALYSIS}" -done - -############################################################### -# Run relevant script - -EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exgdas_enkf_snow_recenter.py} -${EXSCRIPT} -status=$? -(( status != 0 )) && exit "${status}" - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [[ -e "${pgmout}" ]] ; then - cat "${pgmout}" -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd "${DATAROOT}" || exit 1 -[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" - -exit 0 diff --git a/jobs/rocoto/esnowrecen.sh b/jobs/rocoto/esnowrecen.sh deleted file mode 100755 index f8c4f8f7fc..0000000000 --- a/jobs/rocoto/esnowrecen.sh +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" - -############################################################### -# Source UFSDA workflow modules -. "${HOMEgfs}/ush/load_ufsda_modules.sh" -status=$? -[[ ${status} -ne 0 ]] && exit "${status}" - -export job="esnowrecen" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -"${HOMEgfs}/jobs/JGDAS_ENKF_SNOW_RECENTER" -status=$? -exit "${status}" diff --git a/parm/config/gfs/config.esnowrecen b/parm/config/gfs/config.esnowrecen index adb039559a..5d57c120a0 100644 --- a/parm/config/gfs/config.esnowrecen +++ b/parm/config/gfs/config.esnowrecen @@ -1,12 +1,12 @@ #! 
/usr/bin/env bash -########## config.esnowrecen ########## +########## config.esnowanl ########## # configuration common to snow ensemble analysis tasks -echo "BEGIN: config.esnowrecen" +echo "BEGIN: config.esnowanl" # Get task specific resources -source "${EXPDIR}/config.resources" esnowrecen +source "${EXPDIR}/config.resources" esnowanl export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" @@ -26,4 +26,4 @@ export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${EXECgfs}/gdasapp_land_ensrecenter.x export FREGRID=${EXECgfs}/fregrid.x -echo "END: config.esnowrecen" +echo "END: config.esnowanl" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 14e6f0d7fb..09eb34128d 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -15,7 +15,7 @@ if (( $# != 1 )); then echo "prep prepsnowobs prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlobs atmensanlsol atmensanlletkf atmensanlfv3inc atmensanlfinal" - echo "snowanl esnowrecen" + echo "snowanl esnowanl" echo "prepobsaero aeroanlinit aeroanlvar aeroanlfinal aeroanlgenb" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" @@ -357,7 +357,7 @@ case ${step} in tasks_per_node=$(( max_tasks_per_node / threads_per_task )) ;; - "esnowrecen") + "esnowanl") # below lines are for creating JEDI YAML case ${CASE} in "C768") diff --git a/scripts/exgdas_enkf_snow_recenter.py b/scripts/exgdas_enkf_snow_recenter.py deleted file mode 100755 index fcd501860c..0000000000 --- a/scripts/exgdas_enkf_snow_recenter.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python3 -# exgdas_enkf_snow_recenter.py -# This script creates an SnowEnsAnalysis class -# and will recenter the ensemble mean to the -# deterministic analysis and provide increments -# to create an ensemble of snow analyses -import os - -from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.task.snowens_analysis import SnowEnsAnalysis - -# Initialize root logger -logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) - - -if __name__ == '__main__': - - # Take configuration from environment and cast it as python dictionary - config = cast_strdict_as_dtypedict(os.environ) - - # Instantiate the snow ensemble analysis task - anl = SnowEnsAnalysis(config) - anl.initialize() - anl.genWeights() - anl.genMask() - anl.regridDetBkg() - anl.regridDetInc() - anl.recenterEns() - anl.addEnsIncrements() - anl.finalize() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 982f74130c..923d90b393 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -245,7 +245,7 @@ def recenterEns(self) -> None: Instance of the SnowEnsAnalysis object """ logger.info("Running recentering code") - exec_cmd = Executable(self.task_config.APRUN_ESNOWRECEN) + exec_cmd = Executable(self.task_config.APRUN_ESNOWANL) exec_name = os.path.join(self.task_config.DATA, 'gdasapp_land_ensrecenter.x') exec_cmd.add_default_arg(exec_name) exec_cmd.add_default_arg(self.task_config.jedi_yaml) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 4185dcfb77..204b7d4a19 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -115,7 +115,7 @@ def _get_app_configs(self): if self.do_jedisnowda: configs += ['prepsnowobs', 'snowanl'] if 
self.do_hybvar: - configs += ['esnowanl', 'esnowrecen'] + configs += ['esnowanl'] if self.do_mos: configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', @@ -172,7 +172,7 @@ def get_task_names(self): hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] if self.do_jedisnowda: - hybrid_tasks += ['esnowanl', 'esnowrecen'] + hybrid_tasks += ['esnowanl'] hybrid_after_eupd_tasks += ['stage_ic', 'ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] # Collect all "gdas" cycle tasks @@ -304,7 +304,6 @@ def get_task_names(self): enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks enkfgfs_tasks.remove("echgres") enkfgfs_tasks.remove("esnowanl") - enkfgfs_tasks.remove("esnowrecen") tasks['enkfgfs'] = enkfgfs_tasks return tasks diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 6ae5f2924a..ed3b9b7327 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -615,8 +615,6 @@ def esnowanl(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_prepsnowobs'} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}_snowanl'} - deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'metatask', 'name': f'{self.run}_epmn', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) @@ -637,29 +635,6 @@ def esnowanl(self): task = rocoto.create_task(task_dict) return task - def esnowrecen(self): - - deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}_esnowanl'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - - resources = self.get_resource('esnowrecen') - task_name = f'{self.run}_esnowrecen' - task_dict = {'task_name': task_name, - 'resources': resources, - 'dependency': dependencies, - 'envars': self.envars, - 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/esnowrecen.sh', - 'job_name': f'{self.pslot}_{task_name}_@H', - 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', - 'maxtries': '&MAXTRIES;' - } - - task = rocoto.create_task(task_dict) - return task - def prepoceanobs(self): ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) @@ -2767,7 +2742,7 @@ def esfc(self): dep_dict = {'type': 'task', 'name': f'{self.run}_eupd'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jedisnowda: - dep_dict = {'type': 'task', 'name': f'{self.run}_esnowrecen'} + dep_dict = {'type': 'task', 'name': f'{self.run}_esnowanl'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 3ba63e9ce6..c9592b620d 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal', 'aeroanlgenb', - 'prepsnowobs', 'snowanl', 'esnowanl', 'esnowrecen', + 'prepsnowobs', 'snowanl', 'esnowanl', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', 'atmos_prod', 'ocean_prod', 'ice_prod', From 39915e4250199f3837ccd511162b6291f15ed500 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: 
Thu, 31 Oct 2024 17:39:27 +0000 Subject: [PATCH 099/157] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 9be49de2a5..3d8892c503 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 9be49de2a5833ee6e70ee3469e7aa019649b22d3 +Subproject commit 3d8892c503f0b57abdbf1e6aec750cfd45e90722 From ef1c1e1168677c8eda75456d2ebe87ae75836605 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 31 Oct 2024 20:40:23 +0000 Subject: [PATCH 100/157] Move Jedi object initialization dictionaries into YAML files for AtmAnalysis and AtmEnsAnalysis --- parm/config/gfs/config.atmanl | 10 +-- parm/config/gfs/config.atmensanl | 15 ++--- parm/gdas/atmanl_jedi_config.yaml.j2 | 11 +++ parm/gdas/atmensanl_jedi_config.yaml.j2 | 23 +++++++ .../exglobal_atm_analysis_fv3_increment.py | 2 +- scripts/exglobal_atm_analysis_variational.py | 2 +- .../exglobal_atmens_analysis_fv3_increment.py | 2 +- scripts/exglobal_atmens_analysis_letkf.py | 4 +- scripts/exglobal_atmens_analysis_obs.py | 2 +- scripts/exglobal_atmens_analysis_sol.py | 2 +- ush/python/pygfs/jedi/jedi.py | 30 ++++++++- ush/python/pygfs/task/atm_analysis.py | 38 ++--------- ush/python/pygfs/task/atmens_analysis.py | 67 ++----------------- 13 files changed, 89 insertions(+), 119 deletions(-) create mode 100644 parm/gdas/atmanl_jedi_config.yaml.j2 create mode 100644 parm/gdas/atmensanl_jedi_config.yaml.j2 diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index a2baadde7b..b3474ec1a8 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -5,10 +5,6 @@ echo "BEGIN: config.atmanl" -export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" -export JCB_ALGO_YAML_VAR=@JCB_ALGO_YAML_VAR@ -export JCB_ALGO_FV3INC="fv3jedi_fv3inc_variational" - export STATICB_TYPE=@STATICB_TYPE@ export LOCALIZATION_TYPE="bump" export INTERP_METHOD='barycentric' @@ -22,19 +18,19 @@ else export BERROR_YAML="atmosphere_background_error_static_${STATICB_TYPE}" fi +export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmanl_jedi_config.yaml.j2" export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_bkg.yaml.j2" export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/atm_berror_${STATICB_TYPE}.yaml.j2" export FV3ENS_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_fv3ens.yaml.j2" +export JCB_ALGO_YAML_VAR=@JCB_ALGO_YAML_VAR@ + export layout_x_atmanl=@LAYOUT_X_ATMANL@ export layout_y_atmanl=@LAYOUT_Y_ATMANL@ export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE_VAR="${EXECgfs}/gdas.x" -export JEDIEXE_FV3INC="${EXECgfs}/fv3jedi_fv3inc.x" - echo "END: config.atmanl" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 2c57525834..3dc29441f1 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -5,26 +5,21 @@ echo "BEGIN: config.atmensanl" -export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" - -export JCB_ALGO_YAML_LETKF=@JCB_ALGO_YAML_LETKF@ -export JCB_ALGO_YAML_OBS=@JCB_ALGO_YAML_OBS@ -export JCB_ALGO_YAML_SOL=@JCB_ALGO_YAML_SOL@ -export JCB_ALGO_FV3INC="fv3jedi_fv3inc_lgetkf" - export INTERP_METHOD='barycentric' +export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmensanl_jedi_config.yaml.j2" export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" export 
LGETKF_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_lgetkf_bkg.yaml.j2" +export JCB_ALGO_YAML_LETKF=@JCB_ALGO_YAML_LETKF@ +export JCB_ALGO_YAML_OBS=@JCB_ALGO_YAML_OBS@ +export JCB_ALGO_YAML_SOL=@JCB_ALGO_YAML_SOL@ + export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@ export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@ export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE_LETKF=${EXECgfs}/gdas.x -export JEDIEXE_FV3INC=${EXECgfs}/fv3jedi_fv3inc.x - echo "END: config.atmensanl" diff --git a/parm/gdas/atmanl_jedi_config.yaml.j2 b/parm/gdas/atmanl_jedi_config.yaml.j2 new file mode 100644 index 0000000000..f44db56e57 --- /dev/null +++ b/parm/gdas/atmanl_jedi_config.yaml.j2 @@ -0,0 +1,11 @@ +atmanlvar: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' + jedi_args: ['fv3jedi', 'variational'] +atmanlfv3inc: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/fv3jedi_fv3inc.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo: fv3jedi_fv3inc_variational diff --git a/parm/gdas/atmensanl_jedi_config.yaml.j2 b/parm/gdas/atmensanl_jedi_config.yaml.j2 new file mode 100644 index 0000000000..b274b90abb --- /dev/null +++ b/parm/gdas/atmensanl_jedi_config.yaml.j2 @@ -0,0 +1,23 @@ +atmensanlobs: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_OBS }}' + jedi_args: ['fv3jedi', 'localensembleda'] +atmensanlsol: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_SOL }}' + jedi_args: ['fv3jedi', 'localensembleda'] +atmensanlfv3inc: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/fv3jedi_fv3inc.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo: fv3jedi_fv3inc_lgetkf +atmensanlletkf: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_LETKF }}' + jedi_args: ['fv3jedi', 'localensembleda'] diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index 39a9b4e25a..ba7dcf3cf4 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Initialize and execute FV3 increment converter - AtmAnl.jedi['atmanlfv3inc'].execute(config.APRUN_ATMANLFV3INC) + AtmAnl.jedi_dict['atmanlfv3inc'].execute(config.APRUN_ATMANLFV3INC) diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index 21d99da3a2..a9605c0304 100755 --- a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Execute JEDI variational analysis - AtmAnl.jedi['atmanlvar'].execute(config.APRUN_ATMANLVAR) + AtmAnl.jedi_dict['atmanlvar'].execute(config.APRUN_ATMANLVAR) diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py index 288c043adc..e48a8d4555 100755 --- a/scripts/exglobal_atmens_analysis_fv3_increment.py +++ b/scripts/exglobal_atmens_analysis_fv3_increment.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI FV3 increment converter - AtmEnsAnl.jedi['atmensanlfv3inc'].execute(config.APRUN_ATMENSANLFV3INC) + 
AtmEnsAnl.jedi_dict['atmensanlfv3inc'].execute(config.APRUN_ATMENSANLFV3INC) diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py index c0516003d6..423a8e7fd7 100755 --- a/scripts/exglobal_atmens_analysis_letkf.py +++ b/scripts/exglobal_atmens_analysis_letkf.py @@ -23,7 +23,7 @@ # Initalize JEDI ensemble DA application # Note: This is normally done in AtmEnsAnl.initialize(), but that method now # initializes the split observer-solver. This case is just for testing. - AtmEnsAnl.jedi['atmensanlletkf'].initialize(AtmEnsAnl.task_config) + AtmEnsAnl.jedi_dict['atmensanlletkf'].initialize(AtmEnsAnl.task_config) # Execute the JEDI ensemble DA analysis - AtmEnsAnl.jedi['atmensanlletkf'].execute(config.APRUN_ATMENSANLLETKF) + AtmEnsAnl.jedi_dict['atmensanlletkf'].execute(config.APRUN_ATMENSANLLETKF) diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py index 84b1f28096..d1d18e5837 100755 --- a/scripts/exglobal_atmens_analysis_obs.py +++ b/scripts/exglobal_atmens_analysis_obs.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI ensembler DA analysis in observer mode - AtmEnsAnl.jedi['atmensanlobs'].execute(config.APRUN_ATMENSANLOBS) + AtmEnsAnl.jedi_dict['atmensanlobs'].execute(config.APRUN_ATMENSANLOBS) diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py index e6c112f97f..617e63726f 100755 --- a/scripts/exglobal_atmens_analysis_sol.py +++ b/scripts/exglobal_atmens_analysis_sol.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI ensemble DA analysis in solver mode - AtmEnsAnl.jedi['atmensanlsol'].execute(config.APRUN_ATMENSANLSOL) + AtmEnsAnl.jedi_dict['atmensanlsol'].execute(config.APRUN_ATMENSANLSOL) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 67b4213305..bc319e10ae 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -14,11 +14,13 @@ logger = getLogger(__name__.split('.')[-1]) +jedi_key_list = ['rundir', 'exe_src', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml', 'jedi_args'] class Jedi: """ Class for initializing and executing JEDI applications """ + @logit(logger, name="Jedi") def __init__(self, config) -> None: """Constructor for JEDI objects @@ -38,8 +40,9 @@ def __init__(self, config) -> None: None """ - _key_list = ['yaml_name', 'rundir', 'exe_src', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml', 'jedi_args'] - for key in _key_list: + if 'yaml_name' not in config: + raise KeyError(f"Key 'yaml_name' not found in the nested dictionary") + for key in jedi_key_list: if key not in config: raise KeyError(f"Key '{key}' not found in the nested dictionary") @@ -195,6 +198,29 @@ def link_exe(self) -> None: if not os.path.exists(self.jedi_config.exe): os.symlink(self.jedi_config.exe_src, self.jedi_config.exe) + @staticmethod + @logit(logger) + def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict): + # Initialize dictionary of Jedi objects + jedi_dict = AttrDict() + + # Parse J2-YAML file for dictionary of JEDI configuration dictionaries + jedi_config_dict = parse_j2yaml(jedi_config_yaml, task_config) + + # Loop through dictionary of Jedi configuration dictionaries + for yaml_name in jedi_config_dict: + # Make sure all required keys present or set to None + jedi_config_dict[yaml_name]['yaml_name'] = yaml_name + for key in jedi_key_list: + if key not in jedi_config_dict[yaml_name]: + jedi_config_dict[yaml_name][key] = None + + # Construct 
JEDI object + jedi_dict[yaml_name] = Jedi(jedi_config_dict[yaml_name]) + + # Return dictionary of JEDI objects + return jedi_dict + @staticmethod @logit(logger) def remove_redundant(input_list: List) -> List: diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 5ddb678036..78f430acb3 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -70,34 +70,8 @@ def __init__(self, config: Dict[str, Any]): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) - # Create dictionary of JEDI objects - self.jedi = AttrDict() - - # atmanlvar - self.jedi['atmanlvar'] = Jedi(AttrDict( - { - 'yaml_name': 'atmanlvar', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_VAR, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': None, - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_VAR, - 'jedi_args': ['fv3jedi', 'variational'] - } - )) - - # atmanlfv3inc - self.jedi['atmanlfv3inc'] = Jedi(AttrDict( - { - 'yaml_name': 'atmanlfv3inc', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_FV3INC, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) + # Create dictionary of Jedi objects + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config) @logit(logger) def initialize(self) -> None: @@ -125,21 +99,21 @@ def initialize(self) -> None: # initialize JEDI variational application logger.info(f"Initializing JEDI variational DA application") - self.jedi['atmanlvar'].initialize(self.task_config) + self.jedi_dict['atmanlvar'].initialize(self.task_config) # initialize JEDI FV3 increment conversion application logger.info(f"Initializing JEDI FV3 increment conversion application") - self.jedi['atmanlfv3inc'].initialize(self.task_config) + self.jedi_dict['atmanlfv3inc'].initialize(self.task_config) # stage observations logger.info(f"Staging list of observation files") - obs_dict = self.jedi['atmanlvar'].render_jcb(self.task_config, 'atm_obs_staging') + obs_dict = self.jedi_dict['atmanlvar'].render_jcb(self.task_config, 'atm_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") # stage bias corrections logger.info(f"Staging list of bias correction files") - bias_dict = self.jedi['atmanlvar'].render_jcb(self.task_config, 'atm_bias_staging') + bias_dict = self.jedi_dict['atmanlvar'].render_jcb(self.task_config, 'atm_bias_staging') bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 46072dfe8b..95b04a1aef 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -72,62 +72,7 @@ def __init__(self, config: Dict[str, Any]): self.task_config = AttrDict(**self.task_config, **local_dict) # Create dictionary of JEDI objects - self.jedi = AttrDict() - - # atmensanlobs - self.jedi['atmensanlobs'] = Jedi(AttrDict( - { - 'yaml_name': 'atmensanlobs', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_LETKF, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': None, - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_OBS, - 'jedi_args': ['fv3jedi', 'localensembleda'] - } - )) - - # atmensanlsol - 
self.jedi['atmensanlsol'] = Jedi(AttrDict( - { - 'yaml_name': 'atmensanlsol', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_LETKF, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': None, - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_SOL, - 'jedi_args': ['fv3jedi', 'localensembleda'] - } - )) - - # atmensanlfv3inc - self.jedi['atmensanlfv3inc'] = Jedi(AttrDict( - { - 'yaml_name': 'atmensanlfv3inc', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_FV3INC, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_FV3INC, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) - - # Note: Since we now use the split observer-solvers, the following - # is only for testing. - - # atmensanlletkf - self.jedi['atmensanlletkf'] = Jedi(AttrDict( - { - 'yaml_name': 'atmensanlletkf', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_LETKF, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': None, - 'jcb_algo_yaml': self.task_config.JCB_ALGO_YAML_LETKF, - 'jedi_args': ['fv3jedi', 'localensembleda'] - } - )) + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config) @logit(logger) def initialize(self) -> None: @@ -154,25 +99,25 @@ def initialize(self) -> None: # initialize JEDI LETKF observer application logger.info(f"Initializing JEDI LETKF observer application") - self.jedi['atmensanlobs'].initialize(self.task_config) + self.jedi_dict['atmensanlobs'].initialize(self.task_config) # initialize JEDI LETKF solver application logger.info(f"Initializing JEDI LETKF solver application") - self.jedi['atmensanlsol'].initialize(self.task_config) + self.jedi_dict['atmensanlsol'].initialize(self.task_config) # initialize JEDI FV3 increment conversion application logger.info(f"Initializing JEDI FV3 increment conversion application") - self.jedi['atmensanlfv3inc'].initialize(self.task_config) + self.jedi_dict['atmensanlfv3inc'].initialize(self.task_config) # stage observations logger.info(f"Staging list of observation files") - obs_dict = self.jedi['atmensanlobs'].render_jcb(self.task_config, 'atm_obs_staging') + obs_dict = self.jedi_dict['atmensanlobs'].render_jcb(self.task_config, 'atm_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") # stage bias corrections logger.info(f"Staging list of bias correction files") - bias_dict = self.jedi['atmensanlobs'].render_jcb(self.task_config, 'atm_bias_staging') + bias_dict = self.jedi_dict['atmensanlobs'].render_jcb(self.task_config, 'atm_bias_staging') bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) FileHandler(bias_dict).sync() logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") From 5fa0ed179c48d11efd84bf68a8d4f237ef5f2b80 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 31 Oct 2024 23:04:28 +0000 Subject: [PATCH 101/157] Move marine bmat JEDI config to YAML file --- parm/config/gfs/config.marineanl | 18 +--- parm/gdas/soca_bmat_jedi_config.yaml.j2 | 35 +++++++ ush/python/pygfs/task/marine_bmat.py | 124 ++++-------------------- 3 files changed, 53 insertions(+), 124 deletions(-) create mode 100644 parm/gdas/soca_bmat_jedi_config.yaml.j2 diff --git a/parm/config/gfs/config.marineanl b/parm/config/gfs/config.marineanl index 8aacc52135..c850744fff 100644 --- a/parm/config/gfs/config.marineanl +++ b/parm/config/gfs/config.marineanl @@ -17,23 +17,7 @@ export MARINE_ENSDA_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/ensda/stage_ens_me 
export MARINE_DET_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/soca_det_bkg_stage.yaml.j2" export MARINE_JCB_GDAS_ALGO="${PARMgfs}/gdas/jcb-gdas/algorithm/marine" +export JEDI_CONFIG_YAML="${PARMgfs}/gdas/soca_bmat_jedi_config.yaml.j2" export JCB_BASE_YAML="${PARMgfs}/gdas/soca/marine-jcb-base.yaml" -export JCB_ALGO_GRIDGEN="soca_gridgen" -export JCB_ALGO_DIAGB="soca_diagb" -export JCB_ALGO_SETCORSCALES="soca_setcorscales" -export JCB_ALGO_PARAMETERS_DIFFUSION_HZ="soca_parameters_diffusion_hz" -export JCB_ALGO_VTSCALES="soca_vtscales" -export JCB_ALGO_PARAMETERS_DIFFUSION_VT="soca_parameters_diffusion_vt" -export JCB_ALGO_ENSB="soca_ensb" -export JCB_ALGO_ENSWEIGHTS="soca_ensweights" - -export JEDIEXE_GRIDGEN="${EXECgfs}/gdas_soca_gridgen.x" -export JEDIEXE_DIAGB="${EXECgfs}/gdas_soca_diagb.x" -export JEDIEXE_SETCORSCALES="${EXECgfs}/gdas_soca_setcorscales.x" -export JEDIEXE_PARAMETERS_DIFFUSION_HZ="${EXECgfs}/gdas_soca_error_covariance_toolbox.x" -export JEDIEXE_PARAMETERS_DIFFUSION_VT="${EXECgfs}/gdas_soca_error_covariance_toolbox.x" -export JEDIEXE_ENSB="${EXECgfs}/gdas_ens_handler.x" -export JEDIEXE_ENSWEIGHTS="${EXECgfs}/gdas_socahybridweights.x" - echo "END: config.marineanl" diff --git a/parm/gdas/soca_bmat_jedi_config.yaml.j2 b/parm/gdas/soca_bmat_jedi_config.yaml.j2 new file mode 100644 index 0000000000..0ca8e7062a --- /dev/null +++ b/parm/gdas/soca_bmat_jedi_config.yaml.j2 @@ -0,0 +1,35 @@ +gridgen: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas_soca_gridgen.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_algo: soca_gridgen +soca_diagb: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas_soca_diagb.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_algo: soca_diagb +soca_parameters_diffusion_vt: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas_soca_error_covariance_toolbox.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_algo: soca_parameters_diffusion_vt +soca_setcorscales: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas_soca_setcorscales.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_algo: soca_setcorscales +soca_parameters_diffusion_hz: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas_soca_error_covariance_toolbox.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_algo: soca_parameters_diffusion_hz +soca_ensb: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas_ens_handler.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_algo: soca_ensb +soca_ensweights: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas_socahybridweights.x' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_algo: soca_ensweights diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 3188477040..fd63cfb933 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -63,6 +63,7 @@ def __init__(self, config): 'MARINE_WINDOW_END': _window_end, 'MARINE_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", 'ENSPERT_RELPATH': _enspert_relpath, + 'MOM6_LEVS': mdau.get_mom6_levels(str(self.task_config.OCNRES)), 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z." 
} @@ -72,98 +73,7 @@ def __init__(self, config): self.task_config = AttrDict(**self.task_config, **local_dict) # Create dictionary of Jedi objects - self.jedi = AttrDict() - - # gridgen - self.jedi['gridgen'] = Jedi(AttrDict( - { - 'yaml_name': 'gridgen', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_GRIDGEN, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_GRIDGEN, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) - - # soca_diagb - self.jedi['soca_diagb'] = Jedi(AttrDict( - { - 'yaml_name': 'soca_diagb', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_DIAGB, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_DIAGB, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) - - # soca_parameters_diffusion_vt - self.jedi['soca_parameters_diffusion_vt'] = Jedi(AttrDict( - { - 'yaml_name': 'soca_parameters_diffusion_vt', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_PARAMETERS_DIFFUSION_VT, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_PARAMETERS_DIFFUSION_VT, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) - - # soca_setcorscales - self.jedi['soca_setcorscales'] = Jedi(AttrDict( - { - 'yaml_name': 'soca_setcorscales', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_SETCORSCALES, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_SETCORSCALES, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) - - # soca_parameters_diffusion_hz - self.jedi['soca_parameters_diffusion_hz'] = Jedi(AttrDict( - { - 'yaml_name': 'soca_parameters_diffusion_hz', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_PARAMETERS_DIFFUSION_HZ, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_PARAMETERS_DIFFUSION_HZ, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) - - # soca_ensb - self.jedi['soca_ensb'] = Jedi(AttrDict( - { - 'yaml_name': 'soca_ensb', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_ENSB, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_ENSB, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) - - # soca_ensweights - self.jedi['soca_ensb'] = Jedi(AttrDict( - { - 'yaml_name': 'soca_ensb', - 'rundir': self.task_config.DATA, - 'exe_src': self.task_config.JEDIEXE_ENSWEIGHTS, - 'jcb_base_yaml': self.task_config.JCB_BASE_YAML, - 'jcb_algo': self.task_config.JCB_ALGO_ENSWEIGHTS, - 'jcb_algo_yaml': None, - 'jedi_args': None - } - )) + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config) @logit(logger) def initialize(self: Task) -> None: @@ -207,20 +117,20 @@ def initialize(self: Task) -> None: FileHandler(soca_utility_list).sync() # initialize vtscales python script - vtscales_config = self.jedi['soca_parameters_diffusion_vt'].render_jcb(self.task_config, 'soca_vtscales') + vtscales_config = self.jedi_dict['soca_parameters_diffusion_vt'].render_jcb(self.task_config, 'soca_vtscales') save_as_yaml(vtscales_config, os.path.join(self.task_config.DATA, 'soca_vtscales.yaml')) FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC), os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() # initialize JEDI applications - self.jedi['gridgen'].initialize(self.task_config) - self.jedi['soca_diagb'].initialize(self.task_config) - 
self.jedi['soca_parameters_diffusion_vt'].initialize(self.task_config) - self.jedi['soca_setcorscales'].initialize(self.task_config) - self.jedi['soca_parameters_diffusion_hz'].initialize(self.task_config) + self.jedi_dict['gridgen'].initialize(self.task_config) + self.jedi_dict['soca_diagb'].initialize(self.task_config) + self.jedi_dict['soca_parameters_diffusion_vt'].initialize(self.task_config) + self.jedi_dict['soca_setcorscales'].initialize(self.task_config) + self.jedi_dict['soca_parameters_diffusion_hz'].initialize(self.task_config) if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - self.jedi['soca_ensb'].initialize(self.task_config) - self.jedi['soca_ensweights'].initialize(self.task_config) + self.jedi_dict['soca_ensb'].initialize(self.task_config) + self.jedi_dict['soca_ensweights'].initialize(self.task_config) # stage ensemble members for the hybrid background error if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: @@ -276,15 +186,15 @@ def execute(self, aprun_cmd: str) -> None: None """ - self.jedi['gridgen'].execute(aprun_cmd) # TODO: This should be optional in case the geometry file was staged - self.jedi['soca_diagb'].execute(aprun_cmd) - self.jedi['soca_setcorscales'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales - self.jedi['soca_parameters_diffusion_hz'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi_dict['gridgen'].execute(aprun_cmd) # TODO: This should be optional in case the geometry file was staged + self.jedi_dict['soca_diagb'].execute(aprun_cmd) + self.jedi_dict['soca_setcorscales'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi_dict['soca_parameters_diffusion_hz'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales self.execute_vtscales() - self.jedi['soca_parameters_diffusion_vt'].execute(aprun_cmd) + self.jedi_dict['soca_parameters_diffusion_vt'].execute(aprun_cmd) if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - self.jedi['soca_ensb'].execute(self.task_config) # TODO: refactor this from the old scripts - self.jedi['soca_ensweights'].execute(self.task_config) # TODO: refactor this from the old scripts + self.jedi_dict['soca_ensb'].execute(self.task_config) # TODO: refactor this from the old scripts + self.jedi_dict['soca_ensweights'].execute(self.task_config) # TODO: refactor this from the old scripts @logit(logger) def finalize(self: Task) -> None: From 5e8bd8e2b199c2fee5fdbd633b1372cd210ae0c3 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 31 Oct 2024 23:09:16 +0000 Subject: [PATCH 102/157] pynorms --- ush/python/pygfs/jedi/jedi.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index bc319e10ae..d3aa6dd984 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -16,11 +16,12 @@ jedi_key_list = ['rundir', 'exe_src', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml', 'jedi_args'] + class Jedi: """ Class for initializing and executing JEDI applications """ - + @logit(logger, name="Jedi") def __init__(self, config) -> None: """Constructor for JEDI objects @@ -215,12 +216,12 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict): if key not in jedi_config_dict[yaml_name]: jedi_config_dict[yaml_name][key] = None - # Construct JEDI object + # 
Construct JEDI object jedi_dict[yaml_name] = Jedi(jedi_config_dict[yaml_name]) # Return dictionary of JEDI objects - return jedi_dict - + return jedi_dict + @staticmethod @logit(logger) def remove_redundant(input_list: List) -> List: From 6f311af4a7ad60924635a7b110696fd2d7f04013 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 31 Oct 2024 23:34:51 +0000 Subject: [PATCH 103/157] Minor change to retrigger GW CI --- ush/python/pygfs/task/marine_bmat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index fd63cfb933..223c330f66 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -21,7 +21,7 @@ class MarineBMat(Task): """ - Class for global marine B-matrix tasks + Class for global marine B-matrix tasks. """ @logit(logger, name="MarineBMat") def __init__(self, config): From d7a58ccbff3576307b1085c8c43a086fc86549ea Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 1 Nov 2024 09:04:37 +0000 Subject: [PATCH 104/157] Address Walter's comments --- .../exglobal_atm_analysis_fv3_increment.py | 6 +- scripts/exglobal_atm_analysis_variational.py | 6 +- .../exglobal_atmens_analysis_fv3_increment.py | 6 +- scripts/exglobal_atmens_analysis_letkf.py | 12 ++- scripts/exglobal_atmens_analysis_obs.py | 8 +- scripts/exglobal_atmens_analysis_sol.py | 8 +- scripts/exglobal_marinebmat.py | 2 +- ush/python/pygfs/task/atm_analysis.py | 30 +++++++ ush/python/pygfs/task/atmens_analysis.py | 83 +++++++++++++++++++ ush/python/pygfs/task/marine_bmat.py | 19 ++--- 10 files changed, 145 insertions(+), 35 deletions(-) diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index ba7dcf3cf4..eba7cc1cd1 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atm_analysis_fv3_increment.py # This script creates an AtmAnalysis object -# and runs the execute method of its Jedi -# object attribute +# and runs the execute method which runs the JEDI +# FV3 increment converter import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Initialize and execute FV3 increment converter - AtmAnl.jedi_dict['atmanlfv3inc'].execute(config.APRUN_ATMANLFV3INC) + AtmAnl.execute_fv3inc() diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index a9605c0304..c247a1b0a4 100755 --- a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atm_analysis_variational.py # This script creates an AtmAnalysis object -# and runs the execute method of its Jedi object attribute -# which executes the global atm variational analysis +# and runs the execute method which runs the JEDI +# variational analysis application import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Execute JEDI variational analysis - AtmAnl.jedi_dict['atmanlvar'].execute(config.APRUN_ATMANLVAR) + AtmAnl.execute_var() diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py index e48a8d4555..4179ec3217 100755 --- a/scripts/exglobal_atmens_analysis_fv3_increment.py +++ b/scripts/exglobal_atmens_analysis_fv3_increment.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # 
exglobal_atmens_analysis_fv3_increment.py # This script creates an AtmEnsAnalysis object -# and runs the execute method of its Jedi object attribute -# which convert the JEDI increment into an FV3 increment +# and runs the execute method which runs the JEDI +# FV3 increment converter application import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI FV3 increment converter - AtmEnsAnl.jedi_dict['atmensanlfv3inc'].execute(config.APRUN_ATMENSANLFV3INC) + AtmEnsAnl.execute_fv3inc() diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py index 423a8e7fd7..e5497670c1 100755 --- a/scripts/exglobal_atmens_analysis_letkf.py +++ b/scripts/exglobal_atmens_analysis_letkf.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atmens_analysis_letkf.py # This script creates an AtmEnsAnalysis object -# and runs the execute method of its Jedi object attribute -# which executes the global atm local ensemble analysis +# and initializes and runs the full JEDI LETKF +# application import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -20,10 +20,8 @@ # Instantiate the atmens analysis task AtmEnsAnl = AtmEnsAnalysis(config) - # Initalize JEDI ensemble DA application - # Note: This is normally done in AtmEnsAnl.initialize(), but that method now - # initializes the split observer-solver. This case is just for testing. - AtmEnsAnl.jedi_dict['atmensanlletkf'].initialize(AtmEnsAnl.task_config) + # Initalize JEDI full ensemble DA application + AtmEnsAnl.initialize_letkf() # Execute the JEDI ensemble DA analysis - AtmEnsAnl.jedi_dict['atmensanlletkf'].execute(config.APRUN_ATMENSANLLETKF) + AtmEnsAnl.execute_letkf() diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py index d1d18e5837..8fc3ddaa25 100755 --- a/scripts/exglobal_atmens_analysis_obs.py +++ b/scripts/exglobal_atmens_analysis_obs.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atmens_analysis_obs.py # This script creates an AtmEnsAnalysis object -# and runs the execute method of its Jedi object attribute -# which executes the global atm local ensemble analysis in observer mode +# and runs the execute method which runs the JEDI LETKF +# application in observer mode import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -20,5 +20,5 @@ # Instantiate the atmens analysis task AtmEnsAnl = AtmEnsAnalysis(config) - # Initialize and execute JEDI ensembler DA analysis in observer mode - AtmEnsAnl.jedi_dict['atmensanlobs'].execute(config.APRUN_ATMENSANLOBS) + # Execute JEDI ensembler DA analysis in observer mode + AtmEnsAnl.execute_obs() diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py index 617e63726f..8f47da2255 100755 --- a/scripts/exglobal_atmens_analysis_sol.py +++ b/scripts/exglobal_atmens_analysis_sol.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atmens_analysis_sol.py # This script creates an AtmEnsAnalysis object -# and runs the execute method of its Jedi object attribute -# which executes the global atm local ensemble analysis in solver mode +# and runs the execute method which runs the JEDI LETKF +# application in solver mode import os from wxflow import Logger, cast_strdict_as_dtypedict @@ -20,5 +20,5 @@ # Instantiate the atmens analysis task AtmEnsAnl = AtmEnsAnalysis(config) - # Initialize and execute JEDI ensemble DA analysis in solver mode - 
AtmEnsAnl.jedi_dict['atmensanlsol'].execute(config.APRUN_ATMENSANLSOL) + # Execute JEDI ensemble DA analysis in solver mode + AtmEnsAnl.execute_sol() diff --git a/scripts/exglobal_marinebmat.py b/scripts/exglobal_marinebmat.py index fd8770c18e..e285e646ac 100755 --- a/scripts/exglobal_marinebmat.py +++ b/scripts/exglobal_marinebmat.py @@ -20,5 +20,5 @@ # Create an instance of the MarineBMat task marineBMat = MarineBMat(config) marineBMat.initialize() - marineBMat.execute(config.APRUN_MARINEBMAT) + marineBMat.execute() marineBMat.finalize() diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 78f430acb3..d168767e22 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -163,6 +163,36 @@ def initialize(self) -> None: ] FileHandler({'mkdir': newdirs}).sync() + @logit(logger) + def execute_var(self) -> None: + """Execute JEDI variational analysis application + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + self.jedi_dict['atmanlvar'].execute(self.task_config.APRUN_ATMANLVAR) + + @logit(logger) + def execute_fv3inc(self) -> None: + """Execute JEDI FV3 increment converter application + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + self.jedi_dict['atmanlfv3inc'].execute(self.task_config.APRUN_ATMANLFV3INC) + @logit(logger) def finalize(self) -> None: """Finalize a global atm analysis diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 95b04a1aef..a5857eb377 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -151,6 +151,87 @@ def initialize(self) -> None: ] FileHandler({'mkdir': newdirs}).sync() + @logit(logger) + def initialize_letkf(self) -> None: + """Initialize a global atmens analysis + + Note: This would normally be done in AtmEnsAnalysis.initialize(), but that method + now initializes the split observer-solver. This case is just for testing. + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + self.jedi_dict['atmensanlletkf'].initialize(self.task_config) + + @logit(logger) + def execute_obs(self) -> None: + """Execute JEDI LETKF application in observer mode + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + self.jedi_dict['atmensanlobs'].execute(self.task_config.APRUN_ATMENSANLOBS) + + @logit(logger) + def execute_sol(self) -> None: + """Execute JEDI LETKF application in solver mode + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + self.jedi_dict['atmensanlsol'].execute(self.task_config.APRUN_ATMENSANLSOL) + + @logit(logger) + def execute_fv3inc(self) -> None: + """Execute FV3 increment converter + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + self.jedi_dict['atmensanlfv3inc'].execute(self.task_config.APRUN_ATMENSANLFV3INC) + + @logit(logger) + def execute_letkf(self) -> None: + """Execute full JEDI LETKF application + + Note: This is just for testing. Operationally, we plan to split the LETKF + into observer and solver modes. 
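+
+        A minimal calling sketch (illustrative only; it mirrors the updated
+        scripts/exglobal_atmens_analysis_letkf.py driver in this patch):
+
+            config = cast_strdict_as_dtypedict(os.environ)
+            AtmEnsAnl = AtmEnsAnalysis(config)
+            AtmEnsAnl.initialize_letkf()
+            AtmEnsAnl.execute_letkf()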
+ + Parameters + ---------- + None + + Returns + ---------- + None + """ + + self.jedi_dict['atmensanlletkf'].execute(self.task_config.APRUN_ATMENSANLLETKF) + @logit(logger) def finalize(self) -> None: """Finalize a global atmens analysis @@ -237,5 +318,7 @@ def finalize(self) -> None: } FileHandler(inc_copy).sync() + + def clean(self): super().clean() diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 223c330f66..14687c8604 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -169,7 +169,7 @@ def execute_vtscales(self: Task) -> None: mdau.run(exec_cmd) @logit(logger) - def execute(self, aprun_cmd: str) -> None: + def execute(self) -> None: """Generate the full B-matrix This method will generate the full B-matrix according to the configuration. @@ -178,23 +178,22 @@ def execute(self, aprun_cmd: str) -> None: Parameters ---------- - aprun_cmd: str - String comprising the run command for the JEDI executable. + None Returns ---------- None """ - self.jedi_dict['gridgen'].execute(aprun_cmd) # TODO: This should be optional in case the geometry file was staged - self.jedi_dict['soca_diagb'].execute(aprun_cmd) - self.jedi_dict['soca_setcorscales'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales - self.jedi_dict['soca_parameters_diffusion_hz'].execute(aprun_cmd) # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi_dict['gridgen'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: This should be optional in case the geometry file was staged + self.jedi_dict['soca_diagb'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['soca_setcorscales'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi_dict['soca_parameters_diffusion_hz'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: Make this optional once we've converged on an acceptable set of scales self.execute_vtscales() - self.jedi_dict['soca_parameters_diffusion_vt'].execute(aprun_cmd) + self.jedi_dict['soca_parameters_diffusion_vt'].execute(self.task_config.APRUN_MARINEBMAT) if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - self.jedi_dict['soca_ensb'].execute(self.task_config) # TODO: refactor this from the old scripts - self.jedi_dict['soca_ensweights'].execute(self.task_config) # TODO: refactor this from the old scripts + self.jedi_dict['soca_ensb'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: refactor this from the old scripts + self.jedi_dict['soca_ensweights'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: refactor this from the old scripts @logit(logger) def finalize(self: Task) -> None: From 0b9ca3b8ca8a11f3b2affdc12416b3baec209c87 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 1 Nov 2024 09:11:50 +0000 Subject: [PATCH 105/157] pynorms --- sorc/gfs_utils.fd | 2 +- ush/python/pygfs/task/atm_analysis.py | 8 ++++---- ush/python/pygfs/task/atmens_analysis.py | 8 +++----- ush/python/pygfs/task/marine_bmat.py | 14 +++++++++----- 4 files changed, 17 insertions(+), 15 deletions(-) diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd index 856a42076a..a00cc0949e 160000 --- a/sorc/gfs_utils.fd +++ b/sorc/gfs_utils.fd @@ -1 +1 @@ -Subproject commit 856a42076a65256aaae9b29f4891532cb4a3fbca +Subproject commit a00cc0949e2f901e73b58d54834517743916c69a diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 
d168767e22..dc982229ee 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -175,7 +175,7 @@ def execute_var(self) -> None: ---------- None """ - + self.jedi_dict['atmanlvar'].execute(self.task_config.APRUN_ATMANLVAR) @logit(logger) @@ -190,9 +190,9 @@ def execute_fv3inc(self) -> None: ---------- None """ - - self.jedi_dict['atmanlfv3inc'].execute(self.task_config.APRUN_ATMANLFV3INC) - + + self.jedi_dict['atmanlfv3inc'].execute(self.task_config.APRUN_ATMANLFV3INC) + @logit(logger) def finalize(self) -> None: """Finalize a global atm analysis diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index a5857eb377..c41ff554a4 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -168,7 +168,7 @@ def initialize_letkf(self) -> None: """ self.jedi_dict['atmensanlletkf'].initialize(self.task_config) - + @logit(logger) def execute_obs(self) -> None: """Execute JEDI LETKF application in observer mode @@ -213,7 +213,7 @@ def execute_fv3inc(self) -> None: """ self.jedi_dict['atmensanlfv3inc'].execute(self.task_config.APRUN_ATMENSANLFV3INC) - + @logit(logger) def execute_letkf(self) -> None: """Execute full JEDI LETKF application @@ -231,7 +231,7 @@ def execute_letkf(self) -> None: """ self.jedi_dict['atmensanlletkf'].execute(self.task_config.APRUN_ATMENSANLLETKF) - + @logit(logger) def finalize(self) -> None: """Finalize a global atmens analysis @@ -317,8 +317,6 @@ def finalize(self) -> None: 'copy': [[src, dest]] } FileHandler(inc_copy).sync() - - def clean(self): super().clean() diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 14687c8604..b64de87225 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -185,15 +185,19 @@ def execute(self) -> None: None """ - self.jedi_dict['gridgen'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: This should be optional in case the geometry file was staged + # TODO: This should be optional in case the geometry file was staged + self.jedi_dict['gridgen'].execute(self.task_config.APRUN_MARINEBMAT) self.jedi_dict['soca_diagb'].execute(self.task_config.APRUN_MARINEBMAT) - self.jedi_dict['soca_setcorscales'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: Make this optional once we've converged on an acceptable set of scales - self.jedi_dict['soca_parameters_diffusion_hz'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: Make this optional once we've converged on an acceptable set of scales + # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi_dict['soca_setcorscales'].execute(self.task_config.APRUN_MARINEBMAT) + # TODO: Make this optional once we've converged on an acceptable set of scales + self.jedi_dict['soca_parameters_diffusion_hz'].execute(self.task_config.APRUN_MARINEBMAT) self.execute_vtscales() self.jedi_dict['soca_parameters_diffusion_vt'].execute(self.task_config.APRUN_MARINEBMAT) + # TODO: refactor this from the old scripts if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - self.jedi_dict['soca_ensb'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: refactor this from the old scripts - self.jedi_dict['soca_ensweights'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: refactor this from the old scripts + self.jedi_dict['soca_ensb'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['soca_ensweights'].execute(self.task_config.APRUN_MARINEBMAT) 
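+
+        # Illustrative end-to-end usage (a sketch, not part of this method; it
+        # mirrors scripts/exglobal_marinebmat.py now that the MPI launch
+        # command is read from task_config instead of being passed in):
+        #     config = cast_strdict_as_dtypedict(os.environ)
+        #     marineBMat = MarineBMat(config)
+        #     marineBMat.initialize()
+        #     marineBMat.execute()
+        #     marineBMat.finalize()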
@logit(logger) def finalize(self: Task) -> None: From 1e6a40f013791158800275d9fbbc6cc01489907f Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 1 Nov 2024 09:15:07 +0000 Subject: [PATCH 106/157] pynorms --- ush/python/pygfs/task/atmens_analysis.py | 2 +- ush/python/pygfs/task/marine_bmat.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index c41ff554a4..1b437ae46c 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -317,6 +317,6 @@ def finalize(self) -> None: 'copy': [[src, dest]] } FileHandler(inc_copy).sync() - + def clean(self): super().clean() diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index b64de87225..564e79d80d 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -186,7 +186,7 @@ def execute(self) -> None: """ # TODO: This should be optional in case the geometry file was staged - self.jedi_dict['gridgen'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['gridgen'].execute(self.task_config.APRUN_MARINEBMAT) self.jedi_dict['soca_diagb'].execute(self.task_config.APRUN_MARINEBMAT) # TODO: Make this optional once we've converged on an acceptable set of scales self.jedi_dict['soca_setcorscales'].execute(self.task_config.APRUN_MARINEBMAT) From 0b63f8dd467274d634748ee7aa93defb0274ae5b Mon Sep 17 00:00:00 2001 From: Jiarui Dong Date: Fri, 1 Nov 2024 09:58:36 -0400 Subject: [PATCH 107/157] Move IMS snow obs processing to the snow analysis job. --- env/HERA.env | 6 ++-- env/HERCULES.env | 6 ++-- env/JET.env | 6 ++-- env/ORION.env | 6 ++-- env/S4.env | 6 ++-- env/WCOSS2.env | 6 ++-- jobs/JGLOBAL_PREP_SNOWCOVER | 50 ---------------------------- jobs/rocoto/prepsnowcover.sh | 26 --------------- jobs/rocoto/snowanl.sh | 8 +++++ parm/config/gfs/config.prepsnowcover | 18 ---------- parm/config/gfs/config.resources | 9 +---- parm/config/gfs/config.snowanl | 6 ++++ scripts/exglobal_prep_snowcover.py | 24 ------------- scripts/exglobal_snow_analysis.py | 2 ++ workflow/applications/gfs_cycled.py | 4 +-- workflow/rocoto/gfs_cycled_xml.py | 18 ---------- workflow/rocoto/gfs_tasks.py | 38 +-------------------- workflow/rocoto/tasks.py | 2 +- 18 files changed, 33 insertions(+), 208 deletions(-) delete mode 100755 jobs/JGLOBAL_PREP_SNOWCOVER delete mode 100755 jobs/rocoto/prepsnowcover.sh delete mode 100644 parm/config/gfs/config.prepsnowcover delete mode 100755 scripts/exglobal_prep_snowcover.py diff --git a/env/HERA.env b/env/HERA.env index a7270aaa5e..91f1863def 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -52,10 +52,6 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="HERA" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowcover" ]]; then - - export APRUN_CALCFIMS="${APRUN_default}" - elif [[ "${step}" = "prep_emissions" ]]; then export APRUN="${APRUN_default}" @@ -116,6 +112,8 @@ elif [[ "${step}" = "prepobsaero" ]]; then elif [[ "${step}" = "snowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}" diff --git a/env/HERCULES.env b/env/HERCULES.env index 809b82d1e9..9bc1f65639 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -50,10 +50,6 @@ case ${step} in export sys_tp="HERCULES" export launcher_PREP="srun" ;; - "prepsnowcover") - - export APRUN_CALCFIMS="${APRUN_default}" - 
;; "prep_emissions") export APRUN="${APRUN_default}" @@ -115,6 +111,8 @@ case ${step} in ;; "snowanl") + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}" diff --git a/env/JET.env b/env/JET.env index dc7a568835..6465b69acd 100755 --- a/env/JET.env +++ b/env/JET.env @@ -40,10 +40,6 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="JET" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowcover" ]]; then - - export APRUN_CALCFIMS="${launcher} -n 1" - elif [[ "${step}" = "prep_emissions" ]]; then export APRUN="${launcher} -n 1" @@ -99,6 +95,8 @@ elif [[ "${step}" = "prepobsaero" ]]; then elif [[ "${step}" = "snowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN_default}" diff --git a/env/ORION.env b/env/ORION.env index d5853fe5ad..f246d4bec8 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -47,10 +47,6 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="ORION" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowcover" ]]; then - - export APRUN_CALCFIMS="${launcher} -n 1" - elif [[ "${step}" = "prep_emissions" ]]; then export APRUN="${launcher} -n 1" @@ -106,6 +102,8 @@ elif [[ "${step}" = "prepobsaero" ]]; then elif [[ "${step}" = "snowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}" diff --git a/env/S4.env b/env/S4.env index d17e3bf452..9a5baf29ed 100755 --- a/env/S4.env +++ b/env/S4.env @@ -40,10 +40,6 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="S4" export launcher_PREP="srun" -elif [[ "${step}" = "prepsnowcover" ]]; then - - export APRUN_CALCFIMS="${APRUN_default}" - elif [[ "${step}" = "prep_emissions" ]]; then export APRUN="${APRUN_default}" @@ -99,6 +95,8 @@ elif [[ "${step}" = "prepobsaero" ]]; then elif [[ "${step}" = "snowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN_default}" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index bafcb0b8c8..adbd8e06b7 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -34,10 +34,6 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then export sys_tp="wcoss2" export launcher_PREP="mpiexec" -elif [[ "${step}" = "prepsnowcover" ]]; then - - export APRUN_CALCFIMS="${APRUN_default}" - elif [[ "${step}" = "prep_emissions" ]]; then export APRUN="${APRUN_default}" @@ -92,6 +88,8 @@ elif [[ "${step}" = "prepobsaero" ]]; then elif [[ "${step}" = "snowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_SNOWANL=${NTHREADSmax} export APRUN_SNOWANL="${APRUN_default}" diff --git a/jobs/JGLOBAL_PREP_SNOWCOVER b/jobs/JGLOBAL_PREP_SNOWCOVER deleted file mode 100755 index e208cd0362..0000000000 --- a/jobs/JGLOBAL_PREP_SNOWCOVER +++ /dev/null @@ -1,50 +0,0 @@ -#! 
/usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" -export DATA=${DATA:-${DATAROOT}/${RUN}snowanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "prepsnowcover" -c "base prepsnowcover" - -############################################## -# Set variables used in the script -############################################## -# Ignore possible spelling error (nothing is misspelled) -# shellcheck disable=SC2153 -GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -gcyc=${GDATE:8:2} -GDUMP="gdas" - -############################################## -# Begin JOB SPECIFIC work -############################################## -# Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS - -RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL - -############################################################### -# Run relevant script -EXSCRIPT=${GDASSNOWPREPPY:-${SCRgfs}/exglobal_prep_snowcover.py} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && (echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}") - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [[ -e "${pgmout}" ]] ; then - cat "${pgmout}" -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd "${DATAROOT}" || exit 1 -[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" - -exit 0 diff --git a/jobs/rocoto/prepsnowcover.sh b/jobs/rocoto/prepsnowcover.sh deleted file mode 100755 index 6f4bf51a5f..0000000000 --- a/jobs/rocoto/prepsnowcover.sh +++ /dev/null @@ -1,26 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" - -############################################################### -# Source UFSDA workflow modules -. "${HOMEgfs}/ush/load_ufsda_modules.sh" -status=$? -[[ ${status} -ne 0 ]] && exit "${status}" - -export job="prepsnowcover" -export jobid="${job}.$$" - -############################################################### -# setup python path for ioda utilities -# shellcheck disable=SC2311 -pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/" -gdasappPATH="${HOMEgfs}/sorc/gdas.cd/sorc/iodaconv/src:${pyiodaPATH}" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}:${gdasappPATH}" -export PYTHONPATH - -############################################################### -# Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_PREP_SNOWCOVER" -status=$? -exit "${status}" diff --git a/jobs/rocoto/snowanl.sh b/jobs/rocoto/snowanl.sh index 97df7a46c7..cf1ddd688b 100755 --- a/jobs/rocoto/snowanl.sh +++ b/jobs/rocoto/snowanl.sh @@ -11,6 +11,14 @@ status=$? 
export job="snowanl" export jobid="${job}.$$" +############################################################### +# setup python path for ioda utilities +# shellcheck disable=SC2311 +pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/" +gdasappPATH="${HOMEgfs}/sorc/gdas.cd/sorc/iodaconv/src:${pyiodaPATH}" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}:${gdasappPATH}" +export PYTHONPATH + ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_SNOW_ANALYSIS" diff --git a/parm/config/gfs/config.prepsnowcover b/parm/config/gfs/config.prepsnowcover deleted file mode 100644 index 12b97ca470..0000000000 --- a/parm/config/gfs/config.prepsnowcover +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env bash - -########## config.prepsnowcover ########## -# Snow Obs Prep specific - -echo "BEGIN: config.prepsnowcover" - -# Get task specific resources -. "${EXPDIR}/config.resources" prepsnowcover - -export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" - -export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe" -export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" - -export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py" - -echo "END: config.prepsnowcover" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index e2769b1b28..049c8c7fe5 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -12,7 +12,7 @@ if (( $# != 1 )); then echo "Must specify an input task argument to set resource variables!" echo "argument can be any one of the following:" echo "stage_ic aerosol_init" - echo "prep prepsnowcover prepatmiodaobs" + echo "prep prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlobs atmensanlsol atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl esnowrecen" @@ -152,13 +152,6 @@ case ${step} in memory="${mem_node_max}" ;; - "prepsnowcover") - walltime="00:05:00" - ntasks=1 - threads_per_task=1 - tasks_per_node=1 - ;; - "prepatmiodaobs") walltime="00:30:00" ntasks=1 diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index b1460dfa67..1aeaf58e46 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -19,6 +19,12 @@ export JEDIYAML="${PARMgfs}/gdas/snow/letkfoi/letkfoi.yaml.j2" export SNOWDEPTHVAR="snodl" export BESTDDEV="30." # Background Error Std. Dev. 
for LETKFOI +# Process IMS snowcover into snow depth +export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" +export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe" +export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" +export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py" + # Name of the executable that applies increment to bkg and its namelist template export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" diff --git a/scripts/exglobal_prep_snowcover.py b/scripts/exglobal_prep_snowcover.py deleted file mode 100755 index 2637546e7c..0000000000 --- a/scripts/exglobal_prep_snowcover.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python3 -# exglobal_prep_snowcover.py -# This script creates a SnowAnalysis object -# and runs the prepare_IMS method which perform -# the pre-processing for IMS data -import os - -from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.task.snow_analysis import SnowAnalysis - - -# Initialize root logger -logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) - - -if __name__ == '__main__': - - # Take configuration from environment and cast it as python dictionary - config = cast_strdict_as_dtypedict(os.environ) - - # Instantiate the snow prepare task - SnowAnl = SnowAnalysis(config) - if SnowAnl.task_config.cyc == 0: - SnowAnl.prepare_IMS() diff --git a/scripts/exglobal_snow_analysis.py b/scripts/exglobal_snow_analysis.py index fe050f5af5..dd52b699dc 100755 --- a/scripts/exglobal_snow_analysis.py +++ b/scripts/exglobal_snow_analysis.py @@ -19,6 +19,8 @@ # Instantiate the snow analysis task anl = SnowAnalysis(config) + if anl.task_config.cyc == 0: + anl.prepare_IMS() anl.initialize() anl.execute() anl.finalize() diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index b515bf7eb6..d4db6bfa29 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -113,7 +113,7 @@ def _get_app_configs(self): configs += ['prepobsaero'] if self.do_jedisnowda: - configs += ['prepsnowcover', 'snowanl'] + configs += ['snowanl'] if self.do_hybvar: configs += ['esnowrecen'] @@ -156,7 +156,7 @@ def get_task_names(self): gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] if self.do_jedisnowda: - gdas_gfs_common_tasks_before_fcst += ['prepsnowcover', 'snowanl'] + gdas_gfs_common_tasks_before_fcst += ['snowanl'] wave_prep_tasks = ['waveinit', 'waveprep'] wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py index 1fb59c81e5..eef77ba7fc 100644 --- a/workflow/rocoto/gfs_cycled_xml.py +++ b/workflow/rocoto/gfs_cycled_xml.py @@ -24,24 +24,6 @@ def get_cycledefs(self): sdate_str = sdate.strftime("%Y%m%d%H%M") strings.append(f'\t{sdate_str} {edate_str} {interval_str}') - if self._app_config.do_jedisnowda: - sdate = self._base['SDATE'] - edate_snocvr = self._base['EDATE'].replace(hour=0, minute=0, second=0) - interval_snocvr = to_timedelta('24H') - assim_freq = to_timedelta("{self._base['assim_freq']}H") - - is_warm_start = self._base.get('EXP_WARM_START', False) - first_full_cycle = sdate if is_warm_start else sdate + assim_freq - sdate_snocvr = (first_full_cycle if first_full_cycle.hour == 0 else - (first_full_cycle + interval_snocvr).replace(hour=0, minute=0, second=0)) - - sdate_snocvr_str = sdate_snocvr.strftime("%Y%m%d%H%M") - edate_snocvr_str = edate_snocvr.strftime("%Y%m%d%H%M") - interval_str = 
timedelta_to_HMS(interval_snocvr) - - if sdate_snocvr <= edate_snocvr: - strings.append(f'\t{sdate_snocvr_str} {edate_snocvr_str} {interval_str}') - interval_gfs = self._app_config.interval_gfs if interval_gfs > to_timedelta("0H"): diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 78582dc324..a93c35cb34 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -563,49 +563,13 @@ def aeroanlfinal(self): return task - def prepsnowcover(self): + def snowanl(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.run}_prep'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) - cycledef = 'gdas_prep_snocvr' - if self.run in ['gfs']: - cycledef = self.run - - resources = self.get_resource('prepsnowcover') - task_name = f'{self.run}_prepsnowcover' - task_dict = {'task_name': task_name, - 'resources': resources, - 'dependency': dependencies, - 'envars': self.envars, - 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/jobs/rocoto/prepsnowcover.sh', - 'job_name': f'{self.pslot}_{task_name}_@H', - 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', - 'maxtries': '&MAXTRIES;' - } - - task = rocoto.create_task(task_dict) - - return task - - def snowanl(self): - - deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run}_prepsnowcover'} - deps.append(rocoto.add_dependency(dep_dict)) - - deps2 = [] - dep_dict = {'type': 'taskvalid', 'name': f'{self.run}_prepsnowcover', 'condition': 'not'} - deps2.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{self.run}_prep'} - deps2.append(rocoto.add_dependency(dep_dict)) - deps.append(rocoto.create_dependency(dep_condition='and', dep=deps2)) - - dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) - resources = self.get_resource('snowanl') task_name = f'{self.run}_snowanl' task_dict = {'task_name': task_name, diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 0408b3ea00..ee9e1d3f75 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'aeroanlinit', 'aeroanlvar', 'aeroanlfinal', 'aeroanlgenb', - 'prepsnowcover', 'snowanl', 'esnowrecen', + 'snowanl', 'esnowrecen', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', 'atmos_prod', 'ocean_prod', 'ice_prod', From b60d9f4666da1ec36f41a1cd2ee6270f1102d23b Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 1 Nov 2024 14:47:23 +0000 Subject: [PATCH 108/157] rename jobs for global not just gdas --- ...{JGDAS_ENKF_SNOW_ANALYSIS => JGLOBAL_SNOWENS_ANALYSIS} | 2 +- jobs/rocoto/esnowanl.sh | 2 +- parm/config/gfs/{config.esnowrecen => config.esnowanl} | 0 ...enkf_snow_analysis.py => exglobal_snowens_analysis.py} | 8 +++----- 4 files changed, 5 insertions(+), 7 deletions(-) rename jobs/{JGDAS_ENKF_SNOW_ANALYSIS => JGLOBAL_SNOWENS_ANALYSIS} (96%) rename parm/config/gfs/{config.esnowrecen => config.esnowanl} (100%) rename scripts/{exgdas_enkf_snow_analysis.py => exglobal_snowens_analysis.py} (82%) diff --git a/jobs/JGDAS_ENKF_SNOW_ANALYSIS b/jobs/JGLOBAL_SNOWENS_ANALYSIS similarity index 96% rename from jobs/JGDAS_ENKF_SNOW_ANALYSIS rename to jobs/JGLOBAL_SNOWENS_ANALYSIS index f0d3610bc5..62264368a5 100755 --- a/jobs/JGDAS_ENKF_SNOW_ANALYSIS +++ b/jobs/JGLOBAL_SNOWENS_ANALYSIS @@ -34,7 +34,7 @@ done ############################################################### 
# Run relevant script -EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exgdas_enkf_snow_analysis.py} +EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snowens_analysis.py} ${EXSCRIPT} status=$? (( status != 0 )) && exit "${status}" diff --git a/jobs/rocoto/esnowanl.sh b/jobs/rocoto/esnowanl.sh index 1e488f8618..9fd1eb1b65 100755 --- a/jobs/rocoto/esnowanl.sh +++ b/jobs/rocoto/esnowanl.sh @@ -13,6 +13,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGDAS_ENKF_SNOW_ANALYSIS" +"${HOMEgfs}/jobs/JGLOBAL_SNOWENS_ANALYSIS" status=$? exit "${status}" diff --git a/parm/config/gfs/config.esnowrecen b/parm/config/gfs/config.esnowanl similarity index 100% rename from parm/config/gfs/config.esnowrecen rename to parm/config/gfs/config.esnowanl diff --git a/scripts/exgdas_enkf_snow_analysis.py b/scripts/exglobal_snowens_analysis.py similarity index 82% rename from scripts/exgdas_enkf_snow_analysis.py rename to scripts/exglobal_snowens_analysis.py index 82ce3d3c09..4dcc64a34b 100755 --- a/scripts/exgdas_enkf_snow_analysis.py +++ b/scripts/exglobal_snowens_analysis.py @@ -1,10 +1,8 @@ #!/usr/bin/env python3 -# exgdas_enkf_snow_analysis.py +# exglobal_snowens_analysis.py # This script creates an SnowEnsAnalysis class, # which will compute the ensemble mean of the snow forecast, -# run a 2DVar analysis, -# then will recenter the ensemble mean to the -# deterministic analysis and provide increments +# run a 2DVar analysis, and provide increments # to create an ensemble of snow analyses import os @@ -21,7 +19,7 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the snow ensemble analysis task - SnowEnsAnl = SnowEnsAnalysis(config, 'snowanl') + SnowEnsAnl = SnowEnsAnalysis(config, 'esnowanl') # Initialize JEDI 2DVar snow analysis SnowEnsAnalysis.initialize_jedi() From 874db59d1175c9830c59aece085ccd55e5927b72 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 1 Nov 2024 14:49:06 +0000 Subject: [PATCH 109/157] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 3d8892c503..54e21096bb 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 3d8892c503f0b57abdbf1e6aec750cfd45e90722 +Subproject commit 54e21096bbbea4d2a051149bf9c4073627c79622 From b33270ae3ff475a063e42399cbdb1acc68c699af Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 1 Nov 2024 17:23:48 +0000 Subject: [PATCH 110/157] Save before merge --- parm/gdas/snow_stage_ens_update.yaml.j2 | 36 +++---------------------- 1 file changed, 3 insertions(+), 33 deletions(-) diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index 4ad5499751..cbe431b263 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -10,45 +10,15 @@ # create working directories ###################################### mkdir: -- "{{ DATA }}/bkg/det" -- "{{ DATA }}/bkg/det_ensres" -- "{{ DATA }}/inc/det" -- "{{ DATA }}/inc/det_ensres" -- "{{ DATA }}//inc/ensmean" +- "{{ DATA }}/obs" +- "{{ DATA }}/bkg/ensmean" +- "{{ DATA }}/anl/ensmean" {% for mem in range(1, NMEM_ENS + 1) %} - "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" - "{{ DATA }}/anl/mem{{ '%03d' % mem }}" {% endfor %} copy: ###################################### -# copy deterministic background files -###################################### -# define variables -# Declare a dict of search and replace terms to run on each template -{% set tmpl_dict = {'${ROTDIR}':ROTDIR, - 
'${RUN}':GDUMP,
-                    '${YMD}':previous_cycle | to_YMD,
-                    '${HH}':previous_cycle | strftime("%H"),
-                    '${MEMDIR}':""} %}
-
-{% for tile in range(1, ntiles+1) %}
-- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"]
-{% endfor %}
-######################################
-# copy deterministic increment files
-######################################
-# define variables
-# Declare a dict of search and replace terms to run on each template
-{% set tmpl_dict = {'${ROTDIR}':ROTDIR,
-                    '${RUN}':GDUMP,
-                    '${YMD}':current_cycle | to_YMD,
-                    '${HH}':current_cycle | strftime("%H"),
-                    '${MEMDIR}':""} %}
-
-{% for tile in range(1, ntiles+1) %}
-- ["{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"]
-{% endfor %}
-######################################
 # copy ensemble background files
 ######################################
 {% for mem in range(1, NMEM_ENS + 1) %}

From 711a7a0266dc2e020e9b09442270c31c85454d52 Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Fri, 1 Nov 2024 17:26:40 +0000
Subject: [PATCH 111/157] update gdas hash

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index c44f3b998a..0f54916519 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit c44f3b998acfa37853a1931e579ba54621bbaae9
+Subproject commit 0f5491651997500eb227883aaf70f40c6ccafdf5

From c857319ca8a9bb9f204078683056e2dc3e4eb46a Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Fri, 1 Nov 2024 19:03:14 +0000
Subject: [PATCH 112/157] commit to save

---
 parm/gdas/staging/snow_berror.yaml.j2 | 2 +-
 sorc/gdas.cd                          | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/parm/gdas/staging/snow_berror.yaml.j2 b/parm/gdas/staging/snow_berror.yaml.j2
index 42ed94e138..a25669132e 100644
--- a/parm/gdas/staging/snow_berror.yaml.j2
+++ b/parm/gdas/staging/snow_berror.yaml.j2
@@ -1,4 +1,4 @@
 mkdir:
 - '{{ DATA }}/berror'
 copy:
-- ['{{ HOMEgfs }}/fix/gdas/snow/snow_bump_nicas_300km_fakelevels_nicas.nc', '{{ DATA }}/berror']
+- ['{{ HOMEgfs }}/fix/gdas/snow/snow_bump_nicas_300km_shadowlevels_nicas.nc', '{{ DATA }}/berror']
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 0f54916519..ce29cfb0c3 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 0f5491651997500eb227883aaf70f40c6ccafdf5
+Subproject commit ce29cfb0c30dc05f6ffbd3dc47267b40e8f75209

From e179122a25f5bb7e91ac8060a3cf4db6255ad74c Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Sun, 3 Nov 2024 14:45:42 +0000
Subject: [PATCH 113/157] Update GDAS hash

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 54e21096bb..58e31a644f 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 54e21096bbbea4d2a051149bf9c4073627c79622
+Subproject commit 58e31a644f666ae70353d33d2ca6f41fdd4d6b21

From fbda568118f31086df3d128fa86f9c180c284a71 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Sun, 3 Nov 2024 14:48:17 +0000
Subject: [PATCH 114/157] Update gdas hash and revert accidentally changed gfs_utils hash

---
 sorc/gfs_utils.fd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd
index a00cc0949e..856a42076a 160000
--- a/sorc/gfs_utils.fd
+++ b/sorc/gfs_utils.fd
@@ -1 +1 @@
-Subproject commit a00cc0949e2f901e73b58d54834517743916c69a
+Subproject commit 856a42076a65256aaae9b29f4891532cb4a3fbca

From c18e2f270727930ce057899537f4b32c2fefc192 Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Sun, 3 Nov 2024 15:58:38 +0000
Subject: [PATCH 115/157] Small, mostly aesthetic changes

---
 parm/config/gfs/config.atmanl           |  7 ++++---
 parm/config/gfs/config.atmensanl        | 11 ++++++-----
 parm/config/gfs/config.marineanl        |  6 +++---
 parm/gdas/atmanl_jedi_config.yaml.j2    |  4 ++--
 parm/gdas/atmensanl_jedi_config.yaml.j2 |  8 ++++----
 parm/gdas/soca_bmat_jedi_config.yaml.j2 | 14 +++++++-------
 6 files changed, 26 insertions(+), 24 deletions(-)

diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl
index b3474ec1a8..08fc3762f5 100644
--- a/parm/config/gfs/config.atmanl
+++ b/parm/config/gfs/config.atmanl
@@ -5,6 +5,10 @@
 
 echo "BEGIN: config.atmanl"
 
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML_VAR=@JCB_ALGO_YAML_VAR@
+export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmanl_jedi_config.yaml.j2"
+
 export STATICB_TYPE=@STATICB_TYPE@
 export LOCALIZATION_TYPE="bump"
 export INTERP_METHOD='barycentric'
@@ -18,15 +22,12 @@ else
   export BERROR_YAML="atmosphere_background_error_static_${STATICB_TYPE}"
 fi
 
-export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmanl_jedi_config.yaml.j2"
 export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
 export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
 export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_bkg.yaml.j2"
 export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/atm_berror_${STATICB_TYPE}.yaml.j2"
 export FV3ENS_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_fv3ens.yaml.j2"
 
-export JCB_ALGO_YAML_VAR=@JCB_ALGO_YAML_VAR@
-
 export layout_x_atmanl=@LAYOUT_X_ATMANL@
 export layout_y_atmanl=@LAYOUT_Y_ATMANL@
 
diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl
index 3dc29441f1..b57bac82a9 100644
--- a/parm/config/gfs/config.atmensanl
+++ b/parm/config/gfs/config.atmensanl
@@ -5,17 +5,18 @@
 
 echo "BEGIN: config.atmensanl"
 
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML_LETKF=@JCB_ALGO_YAML_LETKF@
+export JCB_ALGO_YAML_OBS=@JCB_ALGO_YAML_OBS@
+export JCB_ALGO_YAML_SOL=@JCB_ALGO_YAML_SOL@
+export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmensanl_jedi_config.yaml.j2"
+
 export INTERP_METHOD='barycentric'
 
-export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmensanl_jedi_config.yaml.j2"
 export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
 export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
 export LGETKF_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_lgetkf_bkg.yaml.j2"
 
-export JCB_ALGO_YAML_LETKF=@JCB_ALGO_YAML_LETKF@
-export JCB_ALGO_YAML_OBS=@JCB_ALGO_YAML_OBS@
-export JCB_ALGO_YAML_SOL=@JCB_ALGO_YAML_SOL@
-
 export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@
 export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@
 
diff --git a/parm/config/gfs/config.marineanl b/parm/config/gfs/config.marineanl
index c850744fff..9a63580d8b 100644
--- a/parm/config/gfs/config.marineanl
+++ b/parm/config/gfs/config.marineanl
@@ -5,6 +5,9 @@
 
 echo "BEGIN: config.marineanl"
 
+export JCB_BASE_YAML="${PARMgfs}/gdas/soca/marine-jcb-base.yaml"
+export JEDI_CONFIG_YAML="${PARMgfs}/gdas/soca_bmat_jedi_config.yaml.j2"
+
 export MARINE_OBS_YAML_DIR="${PARMgfs}/gdas/soca/obs/config"
 export MARINE_OBS_LIST_YAML=@SOCA_OBS_LIST@
 export SOCA_INPUT_FIX_DIR=@SOCA_INPUT_FIX_DIR@
@@ -17,7 +20,4 @@ export MARINE_ENSDA_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/ensda/stage_ens_me
 export
MARINE_DET_STAGE_BKG_YAML_TMPL="${PARMgfs}/gdas/soca/soca_det_bkg_stage.yaml.j2" export MARINE_JCB_GDAS_ALGO="${PARMgfs}/gdas/jcb-gdas/algorithm/marine" -export JEDI_CONFIG_YAML="${PARMgfs}/gdas/soca_bmat_jedi_config.yaml.j2" -export JCB_BASE_YAML="${PARMgfs}/gdas/soca/marine-jcb-base.yaml" - echo "END: config.marineanl" diff --git a/parm/gdas/atmanl_jedi_config.yaml.j2 b/parm/gdas/atmanl_jedi_config.yaml.j2 index f44db56e57..0b845305ad 100644 --- a/parm/gdas/atmanl_jedi_config.yaml.j2 +++ b/parm/gdas/atmanl_jedi_config.yaml.j2 @@ -1,11 +1,11 @@ atmanlvar: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' jedi_args: ['fv3jedi', 'variational'] atmanlfv3inc: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/fv3jedi_fv3inc.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: fv3jedi_fv3inc_variational diff --git a/parm/gdas/atmensanl_jedi_config.yaml.j2 b/parm/gdas/atmensanl_jedi_config.yaml.j2 index b274b90abb..816395a340 100644 --- a/parm/gdas/atmensanl_jedi_config.yaml.j2 +++ b/parm/gdas/atmensanl_jedi_config.yaml.j2 @@ -1,23 +1,23 @@ atmensanlobs: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo_yaml: '{{ JCB_ALGO_YAML_OBS }}' jedi_args: ['fv3jedi', 'localensembleda'] atmensanlsol: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo_yaml: '{{ JCB_ALGO_YAML_SOL }}' jedi_args: ['fv3jedi', 'localensembleda'] atmensanlfv3inc: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/fv3jedi_fv3inc.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: fv3jedi_fv3inc_lgetkf atmensanlletkf: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo_yaml: '{{ JCB_ALGO_YAML_LETKF }}' jedi_args: ['fv3jedi', 'localensembleda'] diff --git a/parm/gdas/soca_bmat_jedi_config.yaml.j2 b/parm/gdas/soca_bmat_jedi_config.yaml.j2 index 0ca8e7062a..055fd0bc0c 100644 --- a/parm/gdas/soca_bmat_jedi_config.yaml.j2 +++ b/parm/gdas/soca_bmat_jedi_config.yaml.j2 @@ -1,35 +1,35 @@ gridgen: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_gridgen.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: soca_gridgen soca_diagb: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_diagb.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: soca_diagb soca_parameters_diffusion_vt: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_error_covariance_toolbox.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: soca_parameters_diffusion_vt soca_setcorscales: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_setcorscales.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: soca_setcorscales soca_parameters_diffusion_hz: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_error_covariance_toolbox.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: 
soca_parameters_diffusion_hz soca_ensb: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_ens_handler.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: soca_ensb soca_ensweights: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_socahybridweights.x' - jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' + jcb_base_yaml: '{{ JCB_BASE_YAML }}' jcb_algo: soca_ensweights From ffbc827d6899d0a8175d1578aadcb7cf2940fd0e Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 4 Nov 2024 16:27:38 +0000 Subject: [PATCH 116/157] Updates to test for IMS and some ens work --- scripts/exglobal_snow_analysis.py | 8 +- scripts/exglobal_snowens_analysis.py | 8 +- sorc/gdas.cd | 2 +- ush/python/pygfs/task/snow_analysis.py | 10 +- ush/python/pygfs/task/snowens_analysis.py | 132 ++++++++++++++++++++-- 5 files changed, 137 insertions(+), 23 deletions(-) diff --git a/scripts/exglobal_snow_analysis.py b/scripts/exglobal_snow_analysis.py index f1e5ba4b30..0406257a06 100755 --- a/scripts/exglobal_snow_analysis.py +++ b/scripts/exglobal_snow_analysis.py @@ -20,14 +20,14 @@ # Instantiate the snow analysis task SnowAnl = SnowAnalysis(config, 'snowanl') - # Process IMS snow cover (if applicable) - if SnowAnl.task_config.cyc == 0: - anl.prepare_IMS() - # Initialize JEDI 2DVar snow analysis SnowAnl.initialize_jedi() SnowAnl.initialize_analysis() + # Process IMS snow cover (if applicable) + if SnowAnl.task_config.cyc == 0: + SnowAnl.prepare_IMS() + # Execute JEDI snow analysis SnowAnl.execute(config.APRUN_SNOWANL, ['fv3jedi', 'variational']) diff --git a/scripts/exglobal_snowens_analysis.py b/scripts/exglobal_snowens_analysis.py index 4dcc64a34b..a336181e05 100755 --- a/scripts/exglobal_snowens_analysis.py +++ b/scripts/exglobal_snowens_analysis.py @@ -22,11 +22,11 @@ SnowEnsAnl = SnowEnsAnalysis(config, 'esnowanl') # Initialize JEDI 2DVar snow analysis - SnowEnsAnalysis.initialize_jedi() - SnowEnsAnalysis.initialize_analysis() + SnowEnsAnl.initialize_jedi() + SnowEnsAnl.initialize_analysis() - # anl = SnowEnsAnalysis(config) - # anl.initialize() + #anl = SnowEnsAnalysis(config) + #anl.initialize() # anl.genWeights() # anl.genMask() # anl.regridDetBkg() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index ce29cfb0c3..98560ff570 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit ce29cfb0c30dc05f6ffbd3dc47267b40e8f75209 +Subproject commit 98560ff5701fb7a35031bcab36db35d89a53766f diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index e204932673..aeb2c7a776 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -96,7 +96,7 @@ def initialize_jedi(self): None """ - # get JEDI-to-FV3 increment converter config and save to YAML file + # get JEDI config logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") self.jedi.set_config(self.task_config) logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") @@ -194,10 +194,6 @@ def prepare_IMS(self) -> None: for key in keys: localconf[key] = self.task_config[key] - # stage backgrounds - logger.info("Staging backgrounds") - FileHandler(self.get_bkg_dict(localconf)).sync() - # Read and render the IMS_OBS_LIST yaml logger.info(f"Reading {self.task_config.IMS_OBS_LIST}") prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, localconf) @@ -259,12 +255,12 @@ def prepare_IMS(self) -> None: raise WorkflowException(f"An error occured during execution of {exe}") # Ensure the IODA snow depth IMS 
file is produced by the IODA converter - # If so, copy to COM_OBS/ + # If so, copy to DATA/obs/ if not os.path.isfile(f"{os.path.join(localconf.DATA, output_file)}"): logger.exception(f"{self.task_config.IMS2IODACONV} failed to produce {output_file}") raise FileNotFoundError(f"{os.path.join(localconf.DATA, output_file)}") else: - logger.info(f"Copy {output_file} to {self.task_config.COM_OBS}") + logger.info(f"Copy {output_file} to {os.path.join(localconf.DATA, 'obs')}") FileHandler(prep_ims_config.ims2ioda).sync() @logit(logger) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 923d90b393..556f8a7cd8 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -2,7 +2,7 @@ import os from logging import getLogger -from typing import Dict, List, Any +from typing import Dict, List, Optional, Any import netCDF4 as nc import numpy as np @@ -12,27 +12,45 @@ rm_p, chdir, parse_j2yaml, save_as_yaml, Jinja, + Task, logit, Executable, WorkflowException) -from pygfs.task.analysis import Analysis +from pygfs.jedi import Jedi logger = getLogger(__name__.split('.')[-1]) -class SnowEnsAnalysis(Analysis): +class SnowEnsAnalysis(Task): """ - Class for global ensemble snow analysis tasks + Class for JEDI-based global ensemble snow analysis tasks """ @logit(logger, name="SnowEnsAnalysis") - def __init__(self, config): + def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): + """Constructor global ensemble snow analysis task + + This method will construct a global ensemble snow analysis task. + This includes: + - extending the task_config attribute AttrDict to include parameters required for this task + - instantiate the Jedi attribute object + + Parameters + ---------- + config: Dict + dictionary object containing task configuration + yaml_name: str, optional + name of YAML file for JEDI configuration + + Returns + ---------- + None + """ super().__init__(config) _res_det = int(self.task_config['CASE'][1:]) _res_ens = int(self.task_config['CASE_ENS'][1:]) _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) - _recenter_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.land_recenter.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -47,7 +65,9 @@ def __init__(self, config): 'ATM_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", - 'jedi_yaml': _recenter_yaml, + 'GPREFIX': f"enkfgdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'snow_obsdatain_path': f"{self.task_config.DATA}/obs/", + 'snow_obsdataout_path': f"{self.task_config.DATA}/diags/", } ) bkg_time = _window_begin if self.task_config.DOIAU else self.task_config.current_cycle @@ -56,6 +76,104 @@ def __init__(self, config): # task_config is everything that this task should need self.task_config = AttrDict(**self.task_config, **local_dict) + # Create JEDI object + self.jedi = Jedi(self.task_config, yaml_name) + + @logit(logger) + def initialize_jedi(self): + """Initialize JEDI application + + This method will initialize a JEDI application used in the global ensemble snow analysis. 
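+
+        Before the breakdown below, a minimal calling sketch (illustrative; it
+        follows the updated scripts/exglobal_snowens_analysis.py driver):
+
+            SnowEnsAnl = SnowEnsAnalysis(config, 'esnowanl')
+            SnowEnsAnl.initialize_jedi()
+            SnowEnsAnl.initialize_analysis()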
+ This includes: + - generating and saving JEDI YAML config + - linking the JEDI executable + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + # get JEDI config + logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") + self.jedi.set_config(self.task_config) + logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") + + # save JEDI config to YAML file + logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") + save_as_yaml(self.jedi.config, self.jedi.yaml) + + # link JEDI executable + logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") + self.jedi.link_exe(self.task_config) + + @logit(logger) + def initialize_analysis(self) -> None: + """Initialize a global ensemble snow analysis + + This method will initialize a global ensemble snow analysis. + This includes: + - staging model backgrounds + - staging observation files + - staging FV3-JEDI fix files + - staging B error files + - creating output directories + + Parameters + ---------- + None + + Returns + ---------- + None + """ + super().initialize() + + # stage backgrounds + logger.info(f"Staging background files from {self.task_config.SNOW_ENS_STAGE_TMPL}") + bkg_staging_dict = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) + FileHandler(bkg_staging_dict).sync() + logger.debug(f"Background files:\n{pformat(bkg_staging_dict)}") + + # stage orography files + logger.info(f"Staging orography files specified in {self.task_config.SNOW_OROG_STAGE_TMPL}") + snow_orog_stage_list = parse_j2yaml(self.task_config.SNOW_OROG_STAGE_TMPL, self.task_config) + FileHandler(snow_orog_stage_list).sync() + + # stage observations + logger.info(f"Staging list of observation files generated from JEDI config") + obs_dict = self.jedi.get_obs_dict(self.task_config) + FileHandler(obs_dict).sync() + logger.debug(f"Observation files:\n{pformat(obs_dict)}") + + # stage GTS bufr2ioda mapping YAML files + logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_SNOW_STAGE_YAML}") + gts_mapping_list = parse_j2yaml(self.task_config.GTS_SNOW_STAGE_YAML, self.task_config) + FileHandler(gts_mapping_list).sync() + + # stage FV3-JEDI fix files + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_dict = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_dict).sync() + logger.debug(f"JEDI fix files:\n{pformat(jedi_fix_dict)}") + + # staging B error files + logger.info("Stage files for static background error") + berror_staging_dict = parse_j2yaml(self.task_config.BERROR_STAGING_YAML, self.task_config) + FileHandler(berror_staging_dict).sync() + logger.debug(f"Background error files:\n{pformat(berror_staging_dict)}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + @logit(logger) def initialize(self) -> None: """Initialize method for snow ensemble analysis From 3136a98193d0d45b2d62ebcb5f192b220ae97f32 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Mon, 4 Nov 2024 21:03:24 +0000 Subject: [PATCH 117/157] End of day commit --- ush/python/pygfs/task/snowens_analysis.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 556f8a7cd8..567b11a094 100644 --- 
a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -3,6 +3,7 @@ import os from logging import getLogger from typing import Dict, List, Optional, Any +from pprint import pformat import netCDF4 as nc import numpy as np From 10197d9bac403027eb74174619aa74518543ed4b Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Tue, 5 Nov 2024 10:54:19 -0500 Subject: [PATCH 118/157] attempt to fix OCNRES --- ush/python/pygfs/task/snow_analysis.py | 3 +++ ush/python/pygfs/task/snowens_analysis.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index aeb2c7a776..c25d751fc0 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -55,6 +55,9 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): _res = int(self.task_config['CASE'][1:]) _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + # fix ocnres + self.task_config.OCNRES = f"{self.task_config.OCNRES :03d}" + # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 567b11a094..d70016bd7f 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -53,6 +53,9 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): _res_ens = int(self.task_config['CASE_ENS'][1:]) _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + # fix ocnres + self.task_config.OCNRES = f"{self.task_config.OCNRES :03d}" + # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { From 482f2f3bc0742a07688d66e977ca599d41209772 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 04:30:46 +0000 Subject: [PATCH 119/157] Address requested changes --- parm/config/gfs/config.atmanl | 4 +- parm/config/gfs/config.atmensanl | 4 +- parm/config/gfs/config.marineanl | 1 - parm/gdas/atmanl_jedi_config.yaml.j2 | 8 +-- parm/gdas/atmensanl_jedi_config.yaml.j2 | 18 ++++--- parm/gdas/soca_bmat_jedi_config.yaml.j2 | 21 +++++--- .../exglobal_atm_analysis_fv3_increment.py | 2 +- scripts/exglobal_atm_analysis_variational.py | 2 +- .../exglobal_atmens_analysis_fv3_increment.py | 2 +- scripts/exglobal_atmens_analysis_letkf.py | 2 +- scripts/exglobal_atmens_analysis_obs.py | 2 +- scripts/exglobal_atmens_analysis_sol.py | 2 +- ush/python/pygfs/jedi/jedi.py | 51 +++++++++++-------- ush/python/pygfs/task/atm_analysis.py | 27 +++------- ush/python/pygfs/task/atmens_analysis.py | 14 ++--- ush/python/pygfs/task/marine_bmat.py | 18 ++++--- 16 files changed, 96 insertions(+), 82 deletions(-) diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 08fc3762f5..1d700a479c 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -5,9 +5,7 @@ echo "BEGIN: config.atmanl" -export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" export JCB_ALGO_YAML_VAR=@JCB_ALGO_YAML_VAR@ -export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmanl_jedi_config.yaml.j2" export STATICB_TYPE=@STATICB_TYPE@ export LOCALIZATION_TYPE="bump" @@ -24,6 +22,8 @@ fi export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + +export 
JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmanl_jedi_config.yaml.j2" export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_bkg.yaml.j2" export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/atm_berror_${STATICB_TYPE}.yaml.j2" export FV3ENS_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_fv3ens.yaml.j2" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index b57bac82a9..2726f655bd 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -5,16 +5,16 @@ echo "BEGIN: config.atmensanl" -export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" export JCB_ALGO_YAML_LETKF=@JCB_ALGO_YAML_LETKF@ export JCB_ALGO_YAML_OBS=@JCB_ALGO_YAML_OBS@ export JCB_ALGO_YAML_SOL=@JCB_ALGO_YAML_SOL@ -export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmensanl_jedi_config.yaml.j2" export INTERP_METHOD='barycentric' export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + +export JEDI_CONFIG_YAML="${PARMgfs}/gdas/atmensanl_jedi_config.yaml.j2" export LGETKF_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_lgetkf_bkg.yaml.j2" export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@ diff --git a/parm/config/gfs/config.marineanl b/parm/config/gfs/config.marineanl index 9a63580d8b..0b55fa447d 100644 --- a/parm/config/gfs/config.marineanl +++ b/parm/config/gfs/config.marineanl @@ -5,7 +5,6 @@ echo "BEGIN: config.marineanl" -export JCB_BASE_YAML="${PARMgfs}/gdas/soca/marine-jcb-base.yaml" export JEDI_CONFIG_YAML="${PARMgfs}/gdas/soca_bmat_jedi_config.yaml.j2" export MARINE_OBS_YAML_DIR="${PARMgfs}/gdas/soca/obs/config" diff --git a/parm/gdas/atmanl_jedi_config.yaml.j2 b/parm/gdas/atmanl_jedi_config.yaml.j2 index 0b845305ad..52fe9c55a0 100644 --- a/parm/gdas/atmanl_jedi_config.yaml.j2 +++ b/parm/gdas/atmanl_jedi_config.yaml.j2 @@ -1,11 +1,13 @@ atmanlvar: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' - jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' jedi_args: ['fv3jedi', 'variational'] + mpi_cmd: '{{ APRUN_ATMANLVAR }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' atmanlfv3inc: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/fv3jedi_fv3inc.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_ATMANLFV3INC }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' jcb_algo: fv3jedi_fv3inc_variational diff --git a/parm/gdas/atmensanl_jedi_config.yaml.j2 b/parm/gdas/atmensanl_jedi_config.yaml.j2 index 816395a340..73ba96efb8 100644 --- a/parm/gdas/atmensanl_jedi_config.yaml.j2 +++ b/parm/gdas/atmensanl_jedi_config.yaml.j2 @@ -1,23 +1,27 @@ atmensanlobs: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' - jcb_algo_yaml: '{{ JCB_ALGO_YAML_OBS }}' jedi_args: ['fv3jedi', 'localensembleda'] + mpi_cmd: '{{ APRUN_ATMENSANLOBS }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_OBS }}' atmensanlsol: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' - jcb_algo_yaml: '{{ JCB_ALGO_YAML_SOL }}' jedi_args: ['fv3jedi', 'localensembleda'] + mpi_cmd: '{{ APRUN_ATMENSANLSOL }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_SOL }}' atmensanlfv3inc: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/fv3jedi_fv3inc.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_ATMENSANLFV3INC }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' jcb_algo: 
fv3jedi_fv3inc_lgetkf atmensanlletkf: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' - jcb_algo_yaml: '{{ JCB_ALGO_YAML_LETKF }}' jedi_args: ['fv3jedi', 'localensembleda'] + mpi_cmd: '{{ APRUN_ATMENSANLLETKF }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_LETKF }}' diff --git a/parm/gdas/soca_bmat_jedi_config.yaml.j2 b/parm/gdas/soca_bmat_jedi_config.yaml.j2 index 055fd0bc0c..4e476d3117 100644 --- a/parm/gdas/soca_bmat_jedi_config.yaml.j2 +++ b/parm/gdas/soca_bmat_jedi_config.yaml.j2 @@ -1,35 +1,42 @@ gridgen: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_gridgen.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_MARINEBMAT }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' jcb_algo: soca_gridgen soca_diagb: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_diagb.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_MARINEBMAT }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' jcb_algo: soca_diagb soca_parameters_diffusion_vt: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_error_covariance_toolbox.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_MARINEBMAT }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' jcb_algo: soca_parameters_diffusion_vt soca_setcorscales: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_setcorscales.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_MARINEBMAT }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' jcb_algo: soca_setcorscales soca_parameters_diffusion_hz: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_soca_error_covariance_toolbox.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_MARINEBMAT }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' jcb_algo: soca_parameters_diffusion_hz soca_ensb: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_ens_handler.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_MARINEBMAT }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' jcb_algo: soca_ensb soca_ensweights: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas_socahybridweights.x' - jcb_base_yaml: '{{ JCB_BASE_YAML }}' + mpi_cmd: '{{ APRUN_MARINEBMAT }}' + jcb_base_yaml: '{{ PARMgfs }}/gdas/soca/marine-jcb-base.yaml' jcb_algo: soca_ensweights diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index eba7cc1cd1..c5a3e70943 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Initialize and execute FV3 increment converter - AtmAnl.execute_fv3inc() + AtmAnl.execute('atmanlfv3inc') diff --git a/scripts/exglobal_atm_analysis_variational.py b/scripts/exglobal_atm_analysis_variational.py index c247a1b0a4..9ad121f76c 100755 --- a/scripts/exglobal_atm_analysis_variational.py +++ b/scripts/exglobal_atm_analysis_variational.py @@ -21,4 +21,4 @@ AtmAnl = AtmAnalysis(config) # Execute JEDI variational analysis - AtmAnl.execute_var() + AtmAnl.execute('atmanlvar') diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py index 4179ec3217..4506b28033 100755 --- a/scripts/exglobal_atmens_analysis_fv3_increment.py +++ b/scripts/exglobal_atmens_analysis_fv3_increment.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Initialize and execute JEDI FV3 increment 
converter - AtmEnsAnl.execute_fv3inc() + AtmEnsAnl.execute('atmensanlfv3inc') diff --git a/scripts/exglobal_atmens_analysis_letkf.py b/scripts/exglobal_atmens_analysis_letkf.py index e5497670c1..dea9ace5b8 100755 --- a/scripts/exglobal_atmens_analysis_letkf.py +++ b/scripts/exglobal_atmens_analysis_letkf.py @@ -24,4 +24,4 @@ AtmEnsAnl.initialize_letkf() # Execute the JEDI ensemble DA analysis - AtmEnsAnl.execute_letkf() + AtmEnsAnl.execute('atmensanlletkf') diff --git a/scripts/exglobal_atmens_analysis_obs.py b/scripts/exglobal_atmens_analysis_obs.py index 8fc3ddaa25..b09c67703f 100755 --- a/scripts/exglobal_atmens_analysis_obs.py +++ b/scripts/exglobal_atmens_analysis_obs.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Execute JEDI ensembler DA analysis in observer mode - AtmEnsAnl.execute_obs() + AtmEnsAnl.execute('atmensanlobs') diff --git a/scripts/exglobal_atmens_analysis_sol.py b/scripts/exglobal_atmens_analysis_sol.py index 8f47da2255..85dc228a5a 100755 --- a/scripts/exglobal_atmens_analysis_sol.py +++ b/scripts/exglobal_atmens_analysis_sol.py @@ -21,4 +21,4 @@ AtmEnsAnl = AtmEnsAnalysis(config) # Execute JEDI ensemble DA analysis in solver mode - AtmEnsAnl.execute_sol() + AtmEnsAnl.execute('atmensanlsol') diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index d3aa6dd984..c38cca10fc 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -14,7 +14,7 @@ logger = getLogger(__name__.split('.')[-1]) -jedi_key_list = ['rundir', 'exe_src', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml', 'jedi_args'] +jedi_key_list = ['rundir', 'exe_src', 'jedi_args', 'mpi_cmd', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml'] class Jedi: @@ -41,6 +41,7 @@ def __init__(self, config) -> None: None """ + # Make sure input dictionary for Jedi class constructor has the required keys if 'yaml_name' not in config: raise KeyError(f"Key 'yaml_name' not found in the nested dictionary") for key in jedi_key_list: @@ -60,14 +61,6 @@ def __init__(self, config) -> None: # Save a copy of jedi_config self._jedi_config = self.jedi_config.deepcopy() - # Create a dictionary of dictionaries for saving copies of the jcb_config - # associated with each algorithm - self._jcb_config_dict = AttrDict() - - # Create a dictionary of dictionaries for saving copies of the task_config - # used to render each JCB template - self._task_config_dict = AttrDict() - @logit(logger) def initialize(self, task_config: AttrDict) -> None: """Initialize JEDI application @@ -86,8 +79,8 @@ def initialize(self, task_config: AttrDict) -> None: Returns ---------- None - """ - + """ + # Render JEDI config dictionary logger.info(f"Generating JEDI YAML config: {self.jedi_config.yaml}") self.jedi_config.input_config = self.render_jcb(task_config) @@ -102,13 +95,12 @@ def initialize(self, task_config: AttrDict) -> None: self.link_exe() @logit(logger) - def execute(self, aprun_cmd: str) -> None: + def execute(self) -> None: """Execute JEDI application Parameters ---------- - aprun_cmd: str - String comprising the run command for the JEDI executable. 
+ None Returns ---------- @@ -117,7 +109,7 @@ def execute(self, aprun_cmd: str) -> None: chdir(self.jedi_config.rundir) - exec_cmd = Executable(aprun_cmd) + exec_cmd = Executable(self.jedi_config.mpi_cmd) exec_cmd.add_default_arg(self.jedi_config.exe) if self.jedi_config.jedi_args: for arg in self.jedi_config.jedi_args: @@ -174,10 +166,6 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> # Generate JEDI YAML config by rendering JCB config dictionary jedi_input_config = render(jcb_config) - # Save copies of the task_config and jcb_config used to render this JCB template - self._task_config_dict[jcb_config['algorithm']] = task_config.deepcopy() - self._jcb_config_dict[jcb_config['algorithm']] = jcb_config.deepcopy() - return jedi_input_config @logit(logger) @@ -201,7 +189,22 @@ def link_exe(self) -> None: @staticmethod @logit(logger) - def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict): + def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_keys: Optional[list] = None): + """Get dictionary of Jedi objects from YAML specifying their configuration dictionaries + + Parameters + ---------- + jedi_config_yaml : str + path to YAML specifying configuration dictionaries for Jedi objects + task_config : str + attribute-dictionary of all configuration variables associated with a GDAS task + + + Returns + ---------- + None + """ + # Initialize dictionary of Jedi objects jedi_dict = AttrDict() @@ -219,6 +222,14 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict): # Construct JEDI object jedi_dict[yaml_name] = Jedi(jedi_config_dict[yaml_name]) + # Make sure jedi_dict has the keys we expect + if expected_keys: + for jedi_dict_key in expected_keys: + if jedi_dict_key not in jedi_dict: + raise Exception(f"FATAL ERROR: {jedi_dict_key} not present {jedi_config_yaml}") + if len(jedi_dict) > len(expected_keys): + raise Exception(f"FATAL ERROR: {jedi_config_yaml} specifies more Jedi objects than expected.") + # Return dictionary of JEDI objects return jedi_dict diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index dc982229ee..c13f419142 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -71,7 +71,8 @@ def __init__(self, config: Dict[str, Any]): self.task_config = AttrDict(**self.task_config, **local_dict) # Create dictionary of Jedi objects - self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config) + expected_keys = ['atmanlvar', 'atmanlfv3inc'] + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config, expected_keys) @logit(logger) def initialize(self) -> None: @@ -164,34 +165,20 @@ def initialize(self) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def execute_var(self) -> None: - """Execute JEDI variational analysis application + def execute(self, jedi_dict_key: str) -> None: + """Execute JEDI application of atm analysis Parameters ---------- - None - - Returns - ---------- - None - """ - - self.jedi_dict['atmanlvar'].execute(self.task_config.APRUN_ATMANLVAR) - - @logit(logger) - def execute_fv3inc(self) -> None: - """Execute JEDI FV3 increment converter application - - Parameters - ---------- - None + jedi_dict_key + key specifying particular Jedi object in self.jedi_dict Returns ---------- None """ - self.jedi_dict['atmanlfv3inc'].execute(self.task_config.APRUN_ATMANLFV3INC) + self.jedi_dict[jedi_dict_key].execute() @logit(logger) def finalize(self) -> None: 
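(The atm_analysis.py hunk above replaces the dedicated execute_var/execute_fv3inc wrappers with one execute method that dispatches on a key into self.jedi_dict. A minimal, runnable sketch of that dispatch pattern follows; the class and key names are illustrative stand-ins rather than the workflow's actual API, and the MPI commands are invented:

    class JediApp:
        """Stand-in for pygfs.jedi.Jedi: each instance carries its own MPI command."""

        def __init__(self, name: str, mpi_cmd: str):
            self.name = name
            self.mpi_cmd = mpi_cmd

        def execute(self) -> None:
            # The real class builds a wxflow Executable from its mpi_cmd;
            # here we just show the command line that would be launched.
            print(f"{self.mpi_cmd} ./{self.name}.x ./{self.name}.yaml")


    class AnalysisTask:
        def __init__(self):
            # Keys mirror the block names in the *_jedi_config.yaml.j2 files
            self.jedi_dict = {
                'atmanlvar': JediApp('atmanlvar', 'mpiexec -n 192'),
                'atmanlfv3inc': JediApp('atmanlfv3inc', 'mpiexec -n 96'),
            }

        def execute(self, jedi_dict_key: str) -> None:
            # One entry point for every JEDI application; the key selects it
            self.jedi_dict[jedi_dict_key].execute()


    AnalysisTask().execute('atmanlvar')

Because the launch command now lives in each YAML block as mpi_cmd, the calling scripts only need to know the block name, not which APRUN_* variable pairs with which executable.)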
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 1b437ae46c..f1c6ecae7e 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -72,7 +72,8 @@ def __init__(self, config: Dict[str, Any]): self.task_config = AttrDict(**self.task_config, **local_dict) # Create dictionary of JEDI objects - self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config) + expected_keys = ['atmensanlobs', 'atmensanlsol', 'atmensanlfv3inc', 'atmensanlletkf'] + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config, expected_keys) @logit(logger) def initialize(self) -> None: @@ -156,7 +157,7 @@ def initialize_letkf(self) -> None: """Initialize a global atmens analysis Note: This would normally be done in AtmEnsAnalysis.initialize(), but that method - now initializes the split observer-solver. This case is just for testing. + now initializes the split observer-solver. This method is just for testing. Parameters ---------- @@ -170,19 +171,20 @@ def initialize_letkf(self) -> None: self.jedi_dict['atmensanlletkf'].initialize(self.task_config) @logit(logger) - def execute_obs(self) -> None: - """Execute JEDI LETKF application in observer mode + def execute(self, jedi_dict_key: str) -> None: + """Execute JEDI application of atmens analysis Parameters ---------- - None + jedi_dict_key + key specifying a particular Jedi object in self.jedi_dict Returns ---------- None """ - self.jedi_dict['atmensanlobs'].execute(self.task_config.APRUN_ATMENSANLOBS) + self.jedi_dict[jedi_dict_key].execute() @logit(logger) def execute_sol(self) -> None: diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 564e79d80d..c11aa1ae19 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -73,7 +73,9 @@ def __init__(self, config): self.task_config = AttrDict(**self.task_config, **local_dict) # Create dictionary of Jedi objects - self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config) + expected_keys = ['gridgen', 'soca_diagb', 'soca_parameters_diffusion_vt', 'soca_setcorscales', + 'soca_parameters_diffusion_hz', 'soca_ensb', 'soca_ensweights'] + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config, expected_keys) @logit(logger) def initialize(self: Task) -> None: @@ -186,18 +188,18 @@ def execute(self) -> None: """ # TODO: This should be optional in case the geometry file was staged - self.jedi_dict['gridgen'].execute(self.task_config.APRUN_MARINEBMAT) - self.jedi_dict['soca_diagb'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['gridgen'].execute() + self.jedi_dict['soca_diagb'].execute() # TODO: Make this optional once we've converged on an acceptable set of scales - self.jedi_dict['soca_setcorscales'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['soca_setcorscales'].execute() # TODO: Make this optional once we've converged on an acceptable set of scales - self.jedi_dict['soca_parameters_diffusion_hz'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['soca_parameters_diffusion_hz'].execute() self.execute_vtscales() - self.jedi_dict['soca_parameters_diffusion_vt'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['soca_parameters_diffusion_vt'].execute() # TODO: refactor this from the old scripts if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: - 
self.jedi_dict['soca_ensb'].execute(self.task_config.APRUN_MARINEBMAT) - self.jedi_dict['soca_ensweights'].execute(self.task_config.APRUN_MARINEBMAT) + self.jedi_dict['soca_ensb'].execute() + self.jedi_dict['soca_ensweights'].execute() @logit(logger) def finalize(self: Task) -> None: From e59e88360052d6ab3f94897c61a6c272e488331d Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 04:38:09 +0000 Subject: [PATCH 120/157] pynorms --- ush/python/pygfs/jedi/jedi.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index c38cca10fc..e72ae34024 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -79,8 +79,8 @@ def initialize(self, task_config: AttrDict) -> None: Returns ---------- None - """ - + """ + # Render JEDI config dictionary logger.info(f"Generating JEDI YAML config: {self.jedi_config.yaml}") self.jedi_config.input_config = self.render_jcb(task_config) @@ -198,13 +198,13 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_keys: O path to YAML specifying configuration dictionaries for Jedi objects task_config : str attribute-dictionary of all configuration variables associated with a GDAS task - + Returns ---------- None """ - + # Initialize dictionary of Jedi objects jedi_dict = AttrDict() @@ -229,7 +229,7 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_keys: O raise Exception(f"FATAL ERROR: {jedi_dict_key} not present {jedi_config_yaml}") if len(jedi_dict) > len(expected_keys): raise Exception(f"FATAL ERROR: {jedi_config_yaml} specifies more Jedi objects than expected.") - + # Return dictionary of JEDI objects return jedi_dict From d4227c01fb608cbb7a91ab1700b218282cea6519 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 11:42:50 +0000 Subject: [PATCH 121/157] Fix bug Russ found --- ush/python/pygfs/task/atm_analysis.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index c13f419142..18b7d3a153 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -115,9 +115,12 @@ def initialize(self) -> None: # stage bias corrections logger.info(f"Staging list of bias correction files") bias_dict = self.jedi_dict['atmanlvar'].render_jcb(self.task_config, 'atm_bias_staging') - bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) - FileHandler(bias_dict).sync() - logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") + if bias_dict['copy'] is None: + logger.info(f"No bias correction files to stage") + else: + bias_dict['copy'] = Jedi.remove_redundant(bias_dict['copy']) + FileHandler(bias_dict).sync() + logger.debug(f"Bias correction files:\n{pformat(bias_dict)}") # extract bias corrections Jedi.extract_tar_from_filehandler_dict(bias_dict) From 7ac6ccb2bbf88b25fb533185c5d481cd328415ee Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 11:45:46 +0000 Subject: [PATCH 122/157] Forgot a line --- ush/python/pygfs/task/atm_analysis.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 18b7d3a153..5c3aa0f764 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -122,8 +122,8 @@ def initialize(self) -> None: FileHandler(bias_dict).sync() logger.debug(f"Bias correction 
files:\n{pformat(bias_dict)}") - # extract bias corrections - Jedi.extract_tar_from_filehandler_dict(bias_dict) + # extract bias corrections + Jedi.extract_tar_from_filehandler_dict(bias_dict) # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") From 8781c4489b2b1e30d6ad6b8e1ba322bfbea58908 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA <134300700+DavidNew-NOAA@users.noreply.github.com> Date: Wed, 6 Nov 2024 09:33:17 -0500 Subject: [PATCH 123/157] Update ush/python/pygfs/jedi/jedi.py Co-authored-by: Rahul Mahajan --- ush/python/pygfs/jedi/jedi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index e72ae34024..6bc67cdf1b 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -23,7 +23,7 @@ class Jedi: """ @logit(logger, name="Jedi") - def __init__(self, config) -> None: + def __init__(self, config: Dict[str, Any]) -> None: """Constructor for JEDI objects This method will construct a Jedi object. From 227718caf71c188a0075007bab9745b1f3468ba8 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA <134300700+DavidNew-NOAA@users.noreply.github.com> Date: Wed, 6 Nov 2024 09:33:25 -0500 Subject: [PATCH 124/157] Update ush/python/pygfs/jedi/jedi.py Co-authored-by: Rahul Mahajan --- ush/python/pygfs/jedi/jedi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 6bc67cdf1b..a4bfce6ba6 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -29,7 +29,7 @@ def __init__(self, config: Dict[str, Any]) -> None: This method will construct a Jedi object. This includes: - create the jedi_config AttrDict and extend it with additional required entries - - save a coy of jedi_config + - save a copy of jedi_config Parameters ---------- From c7f9d1582aa71490d7304fd2433ab092b1450615 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 15:03:59 +0000 Subject: [PATCH 125/157] Log errors before raising exceptions --- ush/python/pygfs/jedi/jedi.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index a4bfce6ba6..c9419a4cd2 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -43,10 +43,12 @@ def __init__(self, config: Dict[str, Any]) -> None: # Make sure input dictionary for Jedi class constructor has the required keys if 'yaml_name' not in config: - raise KeyError(f"Key 'yaml_name' not found in the nested dictionary") + logger.error(f"FATAL ERROR: Key 'yaml_name' not found in the nested dictionary") + raise KeyError(f"FATAL ERROR: Key 'yaml_name' not found in the nested dictionary") for key in jedi_key_list: if key not in config: - raise KeyError(f"Key '{key}' not found in the nested dictionary") + logger.error(f"FATAL ERROR: Key '{key}' not found in the nested dictionary") + raise KeyError(f"FATAL ERROR: Key '{key}' not found in the nested dictionary") # Create the configuration dictionary for JEDI object local_dict = AttrDict( @@ -120,8 +122,10 @@ def execute(self) -> None: try: exec_cmd() except OSError: + logger.error(f"FATAL ERROR: Failed to execute {exec_cmd}") raise OSError(f"FATAL ERROR: Failed to execute {exec_cmd}") except Exception: + logger.error(f"FATAL ERROR: An error occurred during execution of {exec_cmd}") raise WorkflowException(f"FATAL ERROR: An error occurred during execution of {exec_cmd}") @logit(logger) @@ 
-146,6 +150,7 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> if self.jedi_config.jcb_base_yaml: jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) else: + logger.error(f"FATAL ERROR: JEDI configuration dictionary must contain jcb_base_yaml.") raise KeyError(f"FATAL ERROR: JEDI configuration dictionary must contain jcb_base_yaml.") # Add JCB algorithm YAML, if it exists, to JCB config dictionary @@ -160,6 +165,8 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> elif 'algorithm' in jcb_config: pass else: + logger.error(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " + + "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML") raise Exception(f"FATAL ERROR: JCB algorithm must be specified as input to jedi.render_jcb(), " + "in JEDI configuration dictionary as jcb_algo, or in JCB algorithm YAML") @@ -226,8 +233,10 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_keys: O if expected_keys: for jedi_dict_key in expected_keys: if jedi_dict_key not in jedi_dict: + logger.error(f"FATAL ERROR: {jedi_dict_key} not present {jedi_config_yaml}") raise Exception(f"FATAL ERROR: {jedi_dict_key} not present {jedi_config_yaml}") if len(jedi_dict) > len(expected_keys): + logger.error(f"FATAL ERROR: {jedi_config_yaml} specifies more Jedi objects than expected.") raise Exception(f"FATAL ERROR: {jedi_config_yaml} specifies more Jedi objects than expected.") # Return dictionary of JEDI objects From 3d2b7130ef649e551fc02d6afcd7c09650932efb Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 17:07:35 +0000 Subject: [PATCH 126/157] Make validation of JEDI class configuration dictionary more robust --- ush/python/pygfs/jedi/jedi.py | 76 +++++++++++++++++++++-------------- 1 file changed, 45 insertions(+), 31 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index c9419a4cd2..c0f8cb197a 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -14,8 +14,8 @@ logger = getLogger(__name__.split('.')[-1]) -jedi_key_list = ['rundir', 'exe_src', 'jedi_args', 'mpi_cmd', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml'] - +required_jedi_keys = ['rundir', 'exe_src', 'mpi_cmd'] +optional_jedi_keys = ['jedi_args', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml'] class Jedi: """ @@ -43,12 +43,12 @@ def __init__(self, config: Dict[str, Any]) -> None: # Make sure input dictionary for Jedi class constructor has the required keys if 'yaml_name' not in config: - logger.error(f"FATAL ERROR: Key 'yaml_name' not found in the nested dictionary") - raise KeyError(f"FATAL ERROR: Key 'yaml_name' not found in the nested dictionary") - for key in jedi_key_list: + logger.error(f"FATAL ERROR: Key 'yaml_name' not found in config") + raise KeyError(f"FATAL ERROR: Key 'yaml_name' not found in config") + for key in required_jedi_keys: if key not in config: - logger.error(f"FATAL ERROR: Key '{key}' not found in the nested dictionary") - raise KeyError(f"FATAL ERROR: Key '{key}' not found in the nested dictionary") + logger.error(f"FATAL ERROR: Required key '{key}' not found in config") + raise KeyError(f"FATAL ERROR: Required key '{key}' not found in config") # Create the configuration dictionary for JEDI object local_dict = AttrDict( @@ -60,6 +60,11 @@ def __init__(self, config: Dict[str, Any]) -> None: ) self.jedi_config = AttrDict(**config, **local_dict) + # Set optional keys in jedi_config to None if not already 
present
+            for key in optional_jedi_keys:
+                if key not in self.jedi_config:
+                    self.jedi_config[key] = None
+
         # Save a copy of jedi_config
         self._jedi_config = self.jedi_config.deepcopy()
@@ -113,7 +118,7 @@ def execute(self) -> None:

         exec_cmd = Executable(self.jedi_config.mpi_cmd)
         exec_cmd.add_default_arg(self.jedi_config.exe)
-        if self.jedi_config.jedi_args:
+        if not self.jedi_config.jedi_args is None:
             for arg in self.jedi_config.jedi_args:
                 exec_cmd.add_default_arg(arg)
         exec_cmd.add_default_arg(self.jedi_config.yaml)
@@ -147,20 +152,20 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) ->
         """

         # Fill JCB base YAML template and build JCB config dictionary
-        if self.jedi_config.jcb_base_yaml:
+        if not self.jedi_config.jcb_base_yaml is None:
             jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config)
         else:
-            logger.error(f"FATAL ERROR: JEDI configuration dictionary must contain jcb_base_yaml.")
-            raise KeyError(f"FATAL ERROR: JEDI configuration dictionary must contain jcb_base_yaml.")
+            logger.error(f"FATAL ERROR: JCB base YAML must be specified in order to render YAML using JCB")
+            raise KeyError(f"FATAL ERROR: JCB base YAML must be specified in order to render YAML using JCB")

         # Add JCB algorithm YAML, if it exists, to JCB config dictionary
-        if self.jedi_config.jcb_algo_yaml:
+        if not self.jedi_config.jcb_algo_yaml is None:
             jcb_config.update(parse_j2yaml(self.jedi_config.jcb_algo_yaml, task_config))

         # Set algorithm in JCB config dictionary
-        if algorithm:
+        if not algorithm is None:
             jcb_config['algorithm'] = algorithm
-        elif self.jedi_config.jcb_algo:
+        elif not self.jedi_config.jcb_algo is None:
             jcb_config['algorithm'] = self.jedi_config.jcb_algo
         elif 'algorithm' in jcb_config:
             pass
@@ -196,7 +201,7 @@ def link_exe(self) -> None:

     @staticmethod
     @logit(logger)
-    def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_keys: Optional[list] = None):
+    def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_block_names: Optional[list] = None):
         """Get dictionary of Jedi objects from YAML specifying their configuration dictionaries

         Parameters
         ----------
@@ -205,7 +210,8 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_keys: O
             path to YAML specifying configuration dictionaries for Jedi objects
         task_config : str
             attribute-dictionary of all configuration variables associated with a GDAS task
-
+        expected_block_names (optional) : list
+            list of names of blocks expected to be in jedi_config_yaml YAML file

         Returns
         ----------
@@ -219,23 +225,31 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_keys: O
         jedi_config_dict = parse_j2yaml(jedi_config_yaml, task_config)

         # Loop through dictionary of Jedi configuration dictionaries
-        for yaml_name in jedi_config_dict:
-            # Make sure all required keys present or set to None
-            jedi_config_dict[yaml_name]['yaml_name'] = yaml_name
-            for key in jedi_key_list:
-                if key not in jedi_config_dict[yaml_name]:
-                    jedi_config_dict[yaml_name][key] = None
+        for block_name in jedi_config_dict:
+            # yaml_name key is set to name for this block
+            jedi_config_dict[block_name]['yaml_name'] = block_name
+
+            # Make sure all required keys present
+            for key in required_jedi_keys:
+                if key not in jedi_config_dict[block_name]:
+                    logger.error(f"FATAL ERROR: Required key {key} not found in {jedi_config_yaml} for block {block_name}.")
+                    raise KeyError(f"FATAL ERROR: Required key {key} not found in {jedi_config_yaml} for block {block_name}.")
+
+            # Set optional keys to None
+            for key 
in optional_jedi_keys: + if key not in jedi_config_dict[block_name]: + jedi_config_dict[block_name][key] = None # Construct JEDI object - jedi_dict[yaml_name] = Jedi(jedi_config_dict[yaml_name]) - - # Make sure jedi_dict has the keys we expect - if expected_keys: - for jedi_dict_key in expected_keys: - if jedi_dict_key not in jedi_dict: - logger.error(f"FATAL ERROR: {jedi_dict_key} not present {jedi_config_yaml}") - raise Exception(f"FATAL ERROR: {jedi_dict_key} not present {jedi_config_yaml}") - if len(jedi_dict) > len(expected_keys): + jedi_dict[block_name] = Jedi(jedi_config_dict[block_name]) + + # Make sure jedi_dict has the blocks we expect + if expected_block_names: + for block_name in expected_block_names: + if block_name not in jedi_dict: + logger.error(f"FATAL ERROR: Expected block {block_name} not present {jedi_config_yaml}") + raise Exception(f"FATAL ERROR: Expected block {block_name} not present {jedi_config_yaml}") + if len(jedi_dict) > len(expected_block_names): logger.error(f"FATAL ERROR: {jedi_config_yaml} specifies more Jedi objects than expected.") raise Exception(f"FATAL ERROR: {jedi_config_yaml} specifies more Jedi objects than expected.") From f30d4d93bcc15c18a113403dcc998c066621aa80 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 17:30:52 +0000 Subject: [PATCH 127/157] pynorms+ --- ush/python/pygfs/jedi/jedi.py | 89 ++++------------------------------- 1 file changed, 9 insertions(+), 80 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index c0f8cb197a..b6f9c57f53 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -17,6 +17,7 @@ required_jedi_keys = ['rundir', 'exe_src', 'mpi_cmd'] optional_jedi_keys = ['jedi_args', 'jcb_base_yaml', 'jcb_algo', 'jcb_algo_yaml'] + class Jedi: """ Class for initializing and executing JEDI applications @@ -64,7 +65,7 @@ def __init__(self, config: Dict[str, Any]) -> None: for key in optional_jedi_keys: if key not in self.jedi_config: self.jedi_config[key] = None - + # Save a copy of jedi_config self._jedi_config = self.jedi_config.deepcopy() @@ -118,7 +119,7 @@ def execute(self) -> None: exec_cmd = Executable(self.jedi_config.mpi_cmd) exec_cmd.add_default_arg(self.jedi_config.exe) - if not self.jedi_config.jedi_args is None: + if self.jedi_config.jedi_args is not None: for arg in self.jedi_config.jedi_args: exec_cmd.add_default_arg(arg) exec_cmd.add_default_arg(self.jedi_config.yaml) @@ -152,20 +153,20 @@ def render_jcb(self, task_config: AttrDict, algorithm: Optional[str] = None) -> """ # Fill JCB base YAML template and build JCB config dictionary - if not self.jedi_config.jcb_base_yaml is None: + if self.jedi_config.jcb_base_yaml is not None: jcb_config = parse_j2yaml(self.jedi_config.jcb_base_yaml, task_config) else: logger.error(f"FATAL ERROR: JCB base YAML must be specified in order to render YAML using JCB") raise KeyError(f"FATAL ERROR: JCB base YAML must be specified in order to render YAML using JCB") # Add JCB algorithm YAML, if it exists, to JCB config dictionary - if not self.jedi_config.jcb_algo_yaml is None: + if self.jedi_config.jcb_algo_yaml is not None: jcb_config.update(parse_j2yaml(self.jedi_config.jcb_algo_yaml, task_config)) # Set algorithm in JCB config dictionary - if not algorithm is None: + if algorithm is not None: jcb_config['algorithm'] = algorithm - elif not self.jedi_config.jcb_algo is None: + elif self.jedi_config.jcb_algo is not None: jcb_config['algorithm'] = self.jedi_config.jcb_algo elif 'algorithm' in jcb_config: 
pass @@ -228,7 +229,7 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_block_n for block_name in jedi_config_dict: # yaml_name key is set to name for this block jedi_config_dict[block_name]['yaml_name'] = block_name - + # Make sure all required keys present for key in required_jedi_keys: if key not in jedi_config_dict[block_name]: @@ -237,7 +238,7 @@ def get_jedi_dict(jedi_config_yaml: str, task_config: AttrDict, expected_block_n # Set optional keys to None for key in optional_jedi_keys: - if key not in jedi_config_dict[block_name]: + if key not in jedi_config_dict[block_name]: jedi_config_dict[block_name][key] = None # Construct JEDI object @@ -348,75 +349,3 @@ def extract_tar(tar_file: str) -> None: except tarfile.ExtractError as err: logger.exception(f"FATAL ERROR: unable to extract from {tar_file}") raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}") - - -# TODO: remove since no longer used -@logit(logger) -def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: - """ - Recursively search through a nested dictionary and return the value for the target key. - This returns the first target key it finds. So if a key exists in a subsequent - nested dictionary, it will not be found. - - Parameters - ---------- - nested_dict : Dict - Dictionary to search - target_key : str - Key to search for - - Returns - ------- - Any - Value of the target key - - Raises - ------ - KeyError - If key is not found in dictionary - - TODO: if this gives issues due to landing on an incorrect key in the nested - dictionary, we will have to implement a more concrete method to search for a key - given a more complete address. See resolved conversations in PR 2387 - - # Example usage: - nested_dict = { - 'a': { - 'b': { - 'c': 1, - 'd': { - 'e': 2, - 'f': 3 - } - }, - 'g': 4 - }, - 'h': { - 'i': 5 - }, - 'j': { - 'k': 6 - } - } - - user_key = input("Enter the key to search for: ") - result = find_value_in_nested_dict(nested_dict, user_key) - """ - - if not isinstance(nested_dict, dict): - raise TypeError(f"Input is not of type(dict)") - - result = nested_dict.get(target_key) - if result is not None: - return result - - for value in nested_dict.values(): - if isinstance(value, dict): - try: - result = find_value_in_nested_dict(value, target_key) - if result is not None: - return result - except KeyError: - pass - - raise KeyError(f"Key '{target_key}' not found in the nested dictionary") From eb6744c6e246cdb74da321fba66c3918497c0a2f Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 17:40:17 +0000 Subject: [PATCH 128/157] Fill in missing code for tar archive extraction --- ush/python/pygfs/jedi/jedi.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index b6f9c57f53..623beff4b9 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -342,10 +342,11 @@ def extract_tar(tar_file: str) -> None: raise tarfile.FileExistsError(f"FATAL ERROR: {tar_file} does not exist") except tarfile.ReadError as err: if tarfile.is_tarfile(tar_file): - logger.error(f"FATAL ERROR: {tar_file} could not be read") - raise tarfile.ReadError(f"FATAL ERROR: unable to read {tar_file}") + logger.error(f"FATAL ERROR: tar archive {tar_file} could not be read") + raise tarfile.ReadError(f"FATAL ERROR: tar archive {tar_file} could not be read") else: - logger.info() + logger.error(f"FATAL ERROR: {tar_file} is not a tar archive") + raise 
tarfile.ReadError(f"FATAL ERROR: {tar_file} is not a tar archive") except tarfile.ExtractError as err: logger.exception(f"FATAL ERROR: unable to extract from {tar_file}") raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}") From b35ec83f69c0c87a9a4f851b97e55d1789b93ab4 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 17:47:39 +0000 Subject: [PATCH 129/157] pynorms --- parm/gdas/atmanl_jedi_config.yaml.j2 | 2 +- parm/gdas/atmensanl_jedi_config.yaml.j2 | 6 +++--- ush/python/pygfs/jedi/jedi.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/parm/gdas/atmanl_jedi_config.yaml.j2 b/parm/gdas/atmanl_jedi_config.yaml.j2 index 52fe9c55a0..4046ba0931 100644 --- a/parm/gdas/atmanl_jedi_config.yaml.j2 +++ b/parm/gdas/atmanl_jedi_config.yaml.j2 @@ -1,8 +1,8 @@ atmanlvar: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jedi_args: ['fv3jedi', 'variational'] mpi_cmd: '{{ APRUN_ATMANLVAR }}' + jedi_args: ['fv3jedi', 'variational'] jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' atmanlfv3inc: diff --git a/parm/gdas/atmensanl_jedi_config.yaml.j2 b/parm/gdas/atmensanl_jedi_config.yaml.j2 index 73ba96efb8..9ab2ec6ace 100644 --- a/parm/gdas/atmensanl_jedi_config.yaml.j2 +++ b/parm/gdas/atmensanl_jedi_config.yaml.j2 @@ -1,15 +1,15 @@ atmensanlobs: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jedi_args: ['fv3jedi', 'localensembleda'] mpi_cmd: '{{ APRUN_ATMENSANLOBS }}' + jedi_args: ['fv3jedi', 'localensembleda'] jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' jcb_algo_yaml: '{{ JCB_ALGO_YAML_OBS }}' atmensanlsol: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jedi_args: ['fv3jedi', 'localensembleda'] mpi_cmd: '{{ APRUN_ATMENSANLSOL }}' + jedi_args: ['fv3jedi', 'localensembleda'] jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' jcb_algo_yaml: '{{ JCB_ALGO_YAML_SOL }}' atmensanlfv3inc: @@ -21,7 +21,7 @@ atmensanlfv3inc: atmensanlletkf: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - jedi_args: ['fv3jedi', 'localensembleda'] mpi_cmd: '{{ APRUN_ATMENSANLLETKF }}' + jedi_args: ['fv3jedi', 'localensembleda'] jcb_base_yaml: '{{ PARMgfs }}/gdas/atm/jcb-base.yaml.j2' jcb_algo_yaml: '{{ JCB_ALGO_YAML_LETKF }}' diff --git a/ush/python/pygfs/jedi/jedi.py b/ush/python/pygfs/jedi/jedi.py index 623beff4b9..2806ba4bce 100644 --- a/ush/python/pygfs/jedi/jedi.py +++ b/ush/python/pygfs/jedi/jedi.py @@ -346,7 +346,7 @@ def extract_tar(tar_file: str) -> None: raise tarfile.ReadError(f"FATAL ERROR: tar archive {tar_file} could not be read") else: logger.error(f"FATAL ERROR: {tar_file} is not a tar archive") - raise tarfile.ReadError(f"FATAL ERROR: {tar_file} is not a tar archive") + raise tarfile.ReadError(f"FATAL ERROR: {tar_file} is not a tar archive") except tarfile.ExtractError as err: logger.exception(f"FATAL ERROR: unable to extract from {tar_file}") raise tarfile.ExtractError("FATAL ERROR: unable to extract from {tar_file}") From cfc8b904c31aa75d0028b8dc457213a23b754ba2 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 18:53:50 +0000 Subject: [PATCH 130/157] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 58e31a644f..e9607fc9d2 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 58e31a644f666ae70353d33d2ca6f41fdd4d6b21 +Subproject commit e9607fc9d27a2d5e9dde9c2f66903d5ec06f4280 From b03967d8336fb9146797eb68301176a5a77e9020 Mon Sep 17 00:00:00 
2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 19:31:14 +0000 Subject: [PATCH 131/157] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index e9607fc9d2..2f1638f126 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit e9607fc9d27a2d5e9dde9c2f66903d5ec06f4280 +Subproject commit 2f1638f126d010ee2f34aa4da5013ce14549d436 From 8d2a9138ac41a576b638b758282c1f9cf7176709 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 20:52:16 +0000 Subject: [PATCH 132/157] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 2f1638f126..4c9b1d2231 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 2f1638f126d010ee2f34aa4da5013ce14549d436 +Subproject commit 4c9b1d22314845bd8cf388cd5b9603b2cbf9ccfd From 62cba2533993a3bda4cfa7a8672fc47d05bedf41 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 20:55:04 +0000 Subject: [PATCH 133/157] Forgot to remove old execute methods --- ush/python/pygfs/task/atmens_analysis.py | 48 ------------------------ 1 file changed, 48 deletions(-) diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index f1c6ecae7e..81cae238bb 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -186,54 +186,6 @@ def execute(self, jedi_dict_key: str) -> None: self.jedi_dict[jedi_dict_key].execute() - @logit(logger) - def execute_sol(self) -> None: - """Execute JEDI LETKF application in solver mode - - Parameters - ---------- - None - - Returns - ---------- - None - """ - - self.jedi_dict['atmensanlsol'].execute(self.task_config.APRUN_ATMENSANLSOL) - - @logit(logger) - def execute_fv3inc(self) -> None: - """Execute FV3 increment converter - - Parameters - ---------- - None - - Returns - ---------- - None - """ - - self.jedi_dict['atmensanlfv3inc'].execute(self.task_config.APRUN_ATMENSANLFV3INC) - - @logit(logger) - def execute_letkf(self) -> None: - """Execute full JEDI LETKF application - - Note: This is just for testing. Operationally, we plan to split the LETKF - into observer and solver modes. 
- - Parameters - ---------- - None - - Returns - ---------- - None - """ - - self.jedi_dict['atmensanlletkf'].execute(self.task_config.APRUN_ATMENSANLLETKF) - @logit(logger) def finalize(self) -> None: """Finalize a global atmens analysis From dd6736f8805aa8c270452c06f7992fc9185325d1 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 21:28:48 +0000 Subject: [PATCH 134/157] Get rid of execute_vtscales method --- ush/python/pygfs/task/marine_bmat.py | 43 +++++++++------------------- 1 file changed, 14 insertions(+), 29 deletions(-) diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index c11aa1ae19..2af42aca3e 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -146,30 +146,6 @@ def initialize(self: Task) -> None: os.remove(link_name) os.symlink(link_target, link_name) - @logit(logger) - def execute_vtscales(self: Task) -> None: - """Execute vertical diffusion coefficients generator - - This method will execute a Python script which generatres the vertical diffusion coefficients - This includes: - - constructing the executable object - - running the executable object - - Parameters - ---------- - None - - Returns - ---------- - None - """ - # compute the vertical correlation scales based on the MLD - exec_cmd = Executable("python") - exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x') - exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg('soca_vtscales.yaml') - mdau.run(exec_cmd) - @logit(logger) def execute(self) -> None: """Generate the full B-matrix @@ -187,16 +163,25 @@ def execute(self) -> None: None """ - # TODO: This should be optional in case the geometry file was staged self.jedi_dict['gridgen'].execute() + + # variance partitioning self.jedi_dict['soca_diagb'].execute() - # TODO: Make this optional once we've converged on an acceptable set of scales + + # horizontal diffusion self.jedi_dict['soca_setcorscales'].execute() - # TODO: Make this optional once we've converged on an acceptable set of scales self.jedi_dict['soca_parameters_diffusion_hz'].execute() - self.execute_vtscales() + + # vertical diffusion + exec_cmd = Executable("python") + exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_vtscales.yaml') + mdau.run(exec_cmd) + self.jedi_dict['soca_parameters_diffusion_vt'].execute() - # TODO: refactor this from the old scripts + + # hybrid EnVAR case if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: self.jedi_dict['soca_ensb'].execute() self.jedi_dict['soca_ensweights'].execute() From a6fd65ad7a954c07774029dc6174e91646f8ad92 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 6 Nov 2024 21:31:22 +0000 Subject: [PATCH 135/157] pynorms --- ush/python/pygfs/task/marine_bmat.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 2af42aca3e..a21699227b 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -171,13 +171,13 @@ def execute(self) -> None: # horizontal diffusion self.jedi_dict['soca_setcorscales'].execute() self.jedi_dict['soca_parameters_diffusion_hz'].execute() - + # vertical diffusion exec_cmd = Executable("python") exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x') exec_cmd.add_default_arg(exec_name) exec_cmd.add_default_arg('soca_vtscales.yaml') - mdau.run(exec_cmd) + mdau.run(exec_cmd) 
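        # (aside) wxflow's Executable, as used just above and in jedi.py's
        # execute(): arguments accumulate via add_default_arg() and the command
        # runs when the object is called. A minimal sketch of the same launch,
        # assuming mdau.run() is a thin logging wrapper around that call:
        #
        #     from wxflow import Executable
        #     cmd = Executable("python")
        #     cmd.add_default_arg("calc_scales.x")       # script linked into DATA
        #     cmd.add_default_arg("soca_vtscales.yaml")  # its YAML argument
        #     cmd()  # launches: python calc_scales.x soca_vtscales.yaml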
self.jedi_dict['soca_parameters_diffusion_vt'].execute() From a577040bdac45337f07a45c9bec568ce1ff0bea4 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 7 Nov 2024 20:33:36 +0000 Subject: [PATCH 136/157] incremental save --- parm/config/gfs/config.esnowanl | 2 ++ parm/config/gfs/config.snowanl | 4 +++- parm/gdas/esnowanl_jedi_config.yaml.j2 | 14 ++++++++++++++ parm/gdas/snowanl_jedi_config.yaml.j2 | 7 +++++++ 4 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 parm/gdas/esnowanl_jedi_config.yaml.j2 create mode 100644 parm/gdas/snowanl_jedi_config.yaml.j2 diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index 5d57c120a0..cc1a20b46b 100644 --- a/parm/config/gfs/config.esnowanl +++ b/parm/config/gfs/config.esnowanl @@ -20,6 +20,8 @@ export SNOW_ENS_FINALIZE_TMPL="${PARMgfs}/gdas/snow_finalize_ens_update.yaml.j2" export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/ens_apply_incr_nml.j2" +export JEDI_CONFIG_YAML="${PARMgfs}/gdas/esnowanl_jedi_config.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index 5a3f06bba2..a0863073e3 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -12,7 +12,7 @@ export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2" export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" -export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-prototype_2dvar.yaml.j2" +export JCB_ALGO_YAML_VAR="${PARMgfs}/gdas/snow/jcb-prototype_2dvar.yaml.j2" # Ensemble member properties export SNOWDEPTHVAR="snodl" @@ -32,6 +32,8 @@ export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/snow_var_bkg.yaml.j2" export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/snow_berror.yaml.j2" +export JEDI_CONFIG_YAML="${PARMgfs}/gdas/snowanl_jedi_config.yaml.j2" + export JEDIEXE=${EXECgfs}/gdas.x export io_layout_x=@IO_LAYOUT_X@ diff --git a/parm/gdas/esnowanl_jedi_config.yaml.j2 b/parm/gdas/esnowanl_jedi_config.yaml.j2 new file mode 100644 index 0000000000..119e320b03 --- /dev/null +++ b/parm/gdas/esnowanl_jedi_config.yaml.j2 @@ -0,0 +1,14 @@ +esnowanlensmean: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas.x' + mpi_cmd: '{{ APRUN_SNOWANL }}' + jedi_args: ['fv3jedi', 'ensmean'] + jcb_base_yaml: '{{ PARMgfs }}/gdas/snow/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_ENSMEAN }}' +esnowanlvar: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas.x' + mpi_cmd: '{{ APRUN_SNOWANL }}' + jedi_args: ['fv3jedi', 'variational'] + jcb_base_yaml: '{{ PARMgfs }}/gdas/snow/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' \ No newline at end of file diff --git a/parm/gdas/snowanl_jedi_config.yaml.j2 b/parm/gdas/snowanl_jedi_config.yaml.j2 new file mode 100644 index 0000000000..c599787592 --- /dev/null +++ b/parm/gdas/snowanl_jedi_config.yaml.j2 @@ -0,0 +1,7 @@ +snowanlvar: + rundir: '{{ DATA }}' + exe_src: '{{ EXECgfs }}/gdas.x' + mpi_cmd: '{{ APRUN_SNOWANL }}' + jedi_args: ['fv3jedi', 'variational'] + jcb_base_yaml: '{{ PARMgfs }}/gdas/snow/jcb-base.yaml.j2' + jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' \ No newline at end of file From cf64ff6cea48bd7809409759b07b4b6253fcf630 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 8 Nov 2024 14:49:34 +0000 Subject: [PATCH 137/157] commit before merging 
in DNew code --- scripts/exglobal_snow_analysis.py | 7 +++---- scripts/exglobal_snowens_analysis.py | 31 ++++++++++++++++------------ 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/scripts/exglobal_snow_analysis.py b/scripts/exglobal_snow_analysis.py index 0406257a06..40f35ed2f9 100755 --- a/scripts/exglobal_snow_analysis.py +++ b/scripts/exglobal_snow_analysis.py @@ -18,18 +18,17 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the snow analysis task - SnowAnl = SnowAnalysis(config, 'snowanl') + SnowAnl = SnowAnalysis(config) # Initialize JEDI 2DVar snow analysis - SnowAnl.initialize_jedi() - SnowAnl.initialize_analysis() + SnowAnl.initialize() # Process IMS snow cover (if applicable) if SnowAnl.task_config.cyc == 0: SnowAnl.prepare_IMS() # Execute JEDI snow analysis - SnowAnl.execute(config.APRUN_SNOWANL, ['fv3jedi', 'variational']) + SnowAnl.execute('snowanlvar') # Add increments SnowAnl.add_increments() diff --git a/scripts/exglobal_snowens_analysis.py b/scripts/exglobal_snowens_analysis.py index a336181e05..3834313bac 100755 --- a/scripts/exglobal_snowens_analysis.py +++ b/scripts/exglobal_snowens_analysis.py @@ -19,18 +19,23 @@ config = cast_strdict_as_dtypedict(os.environ) # Instantiate the snow ensemble analysis task - SnowEnsAnl = SnowEnsAnalysis(config, 'esnowanl') + SnowEnsAnl = SnowEnsAnalysis(config) # Initialize JEDI 2DVar snow analysis - SnowEnsAnl.initialize_jedi() - SnowEnsAnl.initialize_analysis() - - #anl = SnowEnsAnalysis(config) - #anl.initialize() - # anl.genWeights() - # anl.genMask() - # anl.regridDetBkg() - # anl.regridDetInc() - # anl.recenterEns() - # anl.addEnsIncrements() - # anl.finalize() + SnowAnl.initialize() + + # Calculate ensemble mean + SnowAnl.execute('esnowanlensmean') + + # Process IMS snow cover (if applicable) + if SnowAnl.task_config.cyc == 0: + SnowAnl.prepare_IMS() + + # Execute JEDI snow analysis + SnowAnl.execute('esnowanlvar') + + # Add increments + SnowAnl.add_increments() + + # Finalize JEDI snow analysis + SnowAnl.finalize() From fe7644d5d763660b516508b26cdeea4bf2a9531e Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 8 Nov 2024 20:19:51 +0000 Subject: [PATCH 138/157] end of day commit --- jobs/JGLOBAL_SNOWENS_ANALYSIS | 4 +- parm/config/gfs/config.esnowanl | 11 +- parm/gdas/esnowanl_jedi_config.yaml.j2 | 4 +- scripts/exglobal_snowens_analysis.py | 16 +- sorc/gdas.cd | 2 +- ush/python/pygfs/task/snow_analysis.py | 64 +-- ush/python/pygfs/task/snowens_analysis.py | 612 +++++++++------------- 7 files changed, 270 insertions(+), 443 deletions(-) diff --git a/jobs/JGLOBAL_SNOWENS_ANALYSIS b/jobs/JGLOBAL_SNOWENS_ANALYSIS index 62264368a5..f04c788166 100755 --- a/jobs/JGLOBAL_SNOWENS_ANALYSIS +++ b/jobs/JGLOBAL_SNOWENS_ANALYSIS @@ -10,13 +10,15 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" # shellcheck disable=SC2153 GDUMP="gdas" export GDUMP +CDUMP=${RUN:4} ############################################## # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates +RUN=${CDUMP} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ - COMIN_OBS:COM_OBS_TMPL \ COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ COMOUT_CONF:COM_CONF_TMPL MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl index cc1a20b46b..bf7dd93f46 100644 --- a/parm/config/gfs/config.esnowanl +++ 
b/parm/config/gfs/config.esnowanl @@ -8,10 +8,14 @@ echo "BEGIN: config.esnowanl" # Get task specific resources source "${EXPDIR}/config.resources" esnowanl +export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" +export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2" + export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" -export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" +export JCB_ALGO_YAML_VAR="${PARMgfs}/gdas/snow/jcb-prototype_2dvar.yaml.j2" -export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" +export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/snow_berror.yaml.j2" export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2" export SNOW_OROG_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_orog.yaml.j2" export SNOW_ENS_FINALIZE_TMPL="${PARMgfs}/gdas/snow_finalize_ens_update.yaml.j2" @@ -25,7 +29,4 @@ export JEDI_CONFIG_YAML="${PARMgfs}/gdas/esnowanl_jedi_config.yaml.j2" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/gdasapp_land_ensrecenter.x -export FREGRID=${EXECgfs}/fregrid.x - echo "END: config.esnowanl" diff --git a/parm/gdas/esnowanl_jedi_config.yaml.j2 b/parm/gdas/esnowanl_jedi_config.yaml.j2 index 119e320b03..50634db6eb 100644 --- a/parm/gdas/esnowanl_jedi_config.yaml.j2 +++ b/parm/gdas/esnowanl_jedi_config.yaml.j2 @@ -4,8 +4,8 @@ esnowanlensmean: mpi_cmd: '{{ APRUN_SNOWANL }}' jedi_args: ['fv3jedi', 'ensmean'] jcb_base_yaml: '{{ PARMgfs }}/gdas/snow/jcb-base.yaml.j2' - jcb_algo_yaml: '{{ JCB_ALGO_YAML_ENSMEAN }}' -esnowanlvar: + jcb_algo: 'fv3jedi_snow_ensmean' +snowanlvar: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' mpi_cmd: '{{ APRUN_SNOWANL }}' diff --git a/scripts/exglobal_snowens_analysis.py b/scripts/exglobal_snowens_analysis.py index 3834313bac..c1a4d1c504 100755 --- a/scripts/exglobal_snowens_analysis.py +++ b/scripts/exglobal_snowens_analysis.py @@ -22,20 +22,22 @@ SnowEnsAnl = SnowEnsAnalysis(config) # Initialize JEDI 2DVar snow analysis - SnowAnl.initialize() + SnowEnsAnl.initialize() # Calculate ensemble mean - SnowAnl.execute('esnowanlensmean') + SnowEnsAnl.execute('esnowanlensmean') + + # stage ensemble mean backgrounds # Process IMS snow cover (if applicable) - if SnowAnl.task_config.cyc == 0: - SnowAnl.prepare_IMS() + if SnowEnsAnl.task_config.cyc == 0: + SnowEnsAnl.prepare_IMS() # Execute JEDI snow analysis - SnowAnl.execute('esnowanlvar') + SnowEnsAnl.execute('snowanlvar') # Add increments - SnowAnl.add_increments() + SnowEnsAnl.add_increments() # Finalize JEDI snow analysis - SnowAnl.finalize() + SnowEnsAnl.finalize() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 98560ff570..4ddf8aa182 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 98560ff5701fb7a35031bcab36db35d89a53766f +Subproject commit 4ddf8aa182ece196db1ca0727bf40294a799f04f diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index c25d751fc0..cf46aeee91 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -31,7 +31,7 @@ class SnowAnalysis(Task): """ @logit(logger, name="SnowAnalysis") - def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): + def __init__(self, config: Dict[str, Any]): """Constructor global snow analysis task This method will construct a global snow analysis task. 
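(The hunks below replace the task's single Jedi attribute with a dictionary of Jedi objects validated against an expected key list, the same guard that PATCH 126 added to Jedi.get_jedi_dict. A self-contained sketch of that guard, using an invented dictionary in place of the parsed YAML blocks:

    from typing import Optional


    def check_expected_blocks(jedi_dict: dict, expected: Optional[list] = None) -> None:
        # Every expected block must be present, and no extra blocks are allowed
        if expected:
            for name in expected:
                if name not in jedi_dict:
                    raise KeyError(f"FATAL ERROR: Expected block {name} not present")
            if len(jedi_dict) > len(expected):
                raise ValueError("FATAL ERROR: more Jedi objects specified than expected")


    # Invented stand-in for the parsed snowanl_jedi_config.yaml.j2
    check_expected_blocks({'snowanlvar': object()}, expected=['snowanlvar'])
)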
@@ -43,8 +43,6 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): ---------- config: Dict dictionary object containing task configuration - yaml_name: str, optional - name of YAML file for JEDI configuration Returns ---------- @@ -78,46 +76,17 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) - # Create JEDI object - self.jedi = Jedi(self.task_config, yaml_name) + # Create JEDI object dictionary + expected_keys = ['snowanlvar'] + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config, expected_keys) @logit(logger) - def initialize_jedi(self): - """Initialize JEDI application - - This method will initialize a JEDI application used in the global snow analysis. - This includes: - - generating and saving JEDI YAML config - - linking the JEDI executable - - Parameters - ---------- - None - - Returns - ---------- - None - """ - - # get JEDI config - logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") - self.jedi.set_config(self.task_config) - logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") - - # save JEDI config to YAML file - logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") - save_as_yaml(self.jedi.config, self.jedi.yaml) - - # link JEDI executable - logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") - self.jedi.link_exe(self.task_config) - - @logit(logger) - def initialize_analysis(self) -> None: + def initialize(self) -> None: """Initialize a global snow analysis This method will initialize a global snow analysis. This includes: + - initialize JEDI application - staging model backgrounds - staging observation files - staging FV3-JEDI fix files @@ -132,7 +101,9 @@ def initialize_analysis(self) -> None: ---------- None """ - super().initialize() + # initialize JEDI variational application + logger.info(f"Initializing JEDI variational DA application") + self.jedi_dict['snowanlvar'].initialize(self.task_config) # stage backgrounds logger.info(f"Staging background files from {self.task_config.VAR_BKG_STAGING_YAML}") @@ -142,7 +113,7 @@ def initialize_analysis(self) -> None: # stage observations logger.info(f"Staging list of observation files generated from JEDI config") - obs_dict = self.jedi.get_obs_dict(self.task_config) + obs_dict = self.jedi_dict['snowanlvar'].render_jcb(self.task_config, 'snow_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") @@ -267,29 +238,22 @@ def prepare_IMS(self) -> None: FileHandler(prep_ims_config.ims2ioda).sync() @logit(logger) - def execute(self, aprun_cmd: str, jedi_args: Optional[str] = None) -> None: + def execute(self, jedi_dict_key: str) -> None: """Run JEDI executable This method will run JEDI executables for the global snow analysis Parameters ---------- - aprun_cmd : str - Run command for JEDI application on HPC system - jedi_args : List - List of additional optional arguments for JEDI application + jedi_dict_key + key specifying particular Jedi object in self.jedi_dict Returns ---------- None """ - if jedi_args: - logger.info(f"Executing {self.jedi.exe} {' '.join(jedi_args)} {self.jedi.yaml}") - else: - logger.info(f"Executing {self.jedi.exe} {self.jedi.yaml}") - - self.jedi.execute(self.task_config, aprun_cmd, jedi_args) + self.jedi_dict[jedi_dict_key].execute() @logit(logger) def finalize(self) -> None: diff --git 
a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index d70016bd7f..93b9a59271 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -4,13 +4,16 @@ from logging import getLogger from typing import Dict, List, Optional, Any from pprint import pformat -import netCDF4 as nc +import glob +import gzip +import tarfile import numpy as np +from netCDF4 import Dataset from wxflow import (AttrDict, FileHandler, - to_fv3time, to_timedelta, add_to_datetime, - rm_p, chdir, + to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime, + rm_p, parse_j2yaml, save_as_yaml, Jinja, Task, @@ -24,14 +27,14 @@ class SnowEnsAnalysis(Task): """ - Class for JEDI-based global ensemble snow analysis tasks + Class for JEDI-based global snow ensemble analysis tasks """ @logit(logger, name="SnowEnsAnalysis") - def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): - """Constructor global ensemble snow analysis task + def __init__(self, config: Dict[str, Any]): + """Constructor global snow ensemble analysis task - This method will construct a global ensemble snow analysis task. + This method will construct a global snow ensemble analysis task. This includes: - extending the task_config attribute AttrDict to include parameters required for this task - instantiate the Jedi attribute object @@ -40,8 +43,6 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): ---------- config: Dict dictionary object containing task configuration - yaml_name: str, optional - name of YAML file for JEDI configuration Returns ---------- @@ -49,8 +50,7 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): """ super().__init__(config) - _res_det = int(self.task_config['CASE'][1:]) - _res_ens = int(self.task_config['CASE_ENS'][1:]) + _res = int(self.task_config['CASE'][1:]) _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) # fix ocnres @@ -59,67 +59,34 @@ def __init__(self, config: Dict[str, Any], yaml_name: Optional[str] = None): # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { - 'npx_ges': _res_ens + 1, - 'npy_ges': _res_ens + 1, + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, 'npz_ges': self.task_config.LEVS - 1, 'npz': self.task_config.LEVS - 1, 'SNOW_WINDOW_BEGIN': _window_begin, 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", - 'ATM_WINDOW_BEGIN': _window_begin, - 'ATM_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", - 'GPREFIX': f"enkfgdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'snow_obsdatain_path': f"{self.task_config.DATA}/obs/", 'snow_obsdataout_path': f"{self.task_config.DATA}/diags/", } ) - bkg_time = _window_begin if self.task_config.DOIAU else self.task_config.current_cycle - local_dict['bkg_time'] = bkg_time - # task_config is everything that this task should need + # Extend task_config with local_dict self.task_config = AttrDict(**self.task_config, **local_dict) - # Create JEDI object - self.jedi = Jedi(self.task_config, yaml_name) + # Create JEDI object dictionary + expected_keys = ['snowanlvar', 'esnowanlensmean'] + self.jedi_dict = Jedi.get_jedi_dict(self.task_config.JEDI_CONFIG_YAML, self.task_config, expected_keys) 
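+
+        # Usage sketch for reference; the assumption here (not confirmed by this
+        # patch) is that Jedi.get_jedi_dict returns one Jedi application object
+        # per key in JEDI_CONFIG_YAML. The call pattern itself appears later in
+        # this class:
+        #
+        #   self.jedi_dict['esnowanlensmean'].initialize(self.task_config)
+        #   self.jedi_dict['esnowanlensmean'].execute()
+        #   obs_dict = self.jedi_dict['snowanlvar'].render_jcb(self.task_config, 'snow_obs_staging')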
@logit(logger) - def initialize_jedi(self): - """Initialize JEDI application - - This method will initialize a JEDI application used in the global ensemble snow analysis. - This includes: - - generating and saving JEDI YAML config - - linking the JEDI executable - - Parameters - ---------- - None - - Returns - ---------- - None - """ - - # get JEDI config - logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") - self.jedi.set_config(self.task_config) - logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") - - # save JEDI config to YAML file - logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") - save_as_yaml(self.jedi.config, self.jedi.yaml) - - # link JEDI executable - logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") - self.jedi.link_exe(self.task_config) - - @logit(logger) - def initialize_analysis(self) -> None: - """Initialize a global ensemble snow analysis + def initialize(self) -> None: + """Initialize a global snow ensemble analysis - This method will initialize a global ensemble snow analysis. + This method will initialize a global snow ensemble analysis. This includes: + - initialize JEDI application - staging model backgrounds - staging observation files - staging FV3-JEDI fix files @@ -134,7 +101,9 @@ def initialize_analysis(self) -> None: ---------- None """ - super().initialize() + # initialize JEDI variational application + logger.info(f"Initializing JEDI variational DA application") + self.jedi_dict['snowanlvar'].initialize(self.task_config) # stage backgrounds logger.info(f"Staging background files from {self.task_config.SNOW_ENS_STAGE_TMPL}") @@ -142,14 +111,9 @@ def initialize_analysis(self) -> None: FileHandler(bkg_staging_dict).sync() logger.debug(f"Background files:\n{pformat(bkg_staging_dict)}") - # stage orography files - logger.info(f"Staging orography files specified in {self.task_config.SNOW_OROG_STAGE_TMPL}") - snow_orog_stage_list = parse_j2yaml(self.task_config.SNOW_OROG_STAGE_TMPL, self.task_config) - FileHandler(snow_orog_stage_list).sync() - # stage observations logger.info(f"Staging list of observation files generated from JEDI config") - obs_dict = self.jedi.get_obs_dict(self.task_config) + obs_dict = self.jedi_dict['snowanlvar'].render_jcb(self.task_config, 'snow_obs_staging') FileHandler(obs_dict).sync() logger.debug(f"Observation files:\n{pformat(obs_dict)}") @@ -179,374 +143,268 @@ def initialize_analysis(self) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def initialize(self) -> None: - """Initialize method for snow ensemble analysis - This method: + def prepare_IMS(self) -> None: + """Prepare the IMS data for a global snow analysis + This method will prepare IMS data for a global snow analysis using JEDI. + This includes: + - staging model backgrounds + - processing raw IMS observation data and prepare for conversion to IODA + - creating IMS snowdepth data in IODA format. 
Parameters ---------- - self : Analysis - Instance of the SnowEnsAnalysis object - """ - - super().initialize() - - # stage background and increment files - logger.info(f"Staging files from {self.task_config.SNOW_ENS_STAGE_TMPL}") - snow_stage_list = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) - FileHandler(snow_stage_list).sync() - - # stage orography files - logger.info(f"Staging orography files specified in {self.task_config.SNOW_OROG_STAGE_TMPL}") - snow_orog_stage_list = parse_j2yaml(self.task_config.SNOW_OROG_STAGE_TMPL, self.task_config) - FileHandler(snow_orog_stage_list).sync() - - # stage fix files for fv3-jedi - logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") - jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) - FileHandler(jedi_fix_list).sync() - - # write land ensemble recentering YAML - save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) - logger.info(f"Wrote recentering YAML to: {self.task_config.jedi_yaml}") + Analysis: parent class for GDAS task - # link recentering executable - # placeholder, currently already done by the analysis parent class - - # copy fregrid executable - fregrid_copy = {'copy': [[os.path.join(self.task_config.EXECgfs, 'fregrid'), os.path.join(self.task_config.DATA, 'fregrid.x')]]} - FileHandler(fregrid_copy).sync() - - @logit(logger) - def genWeights(self) -> None: - """Create a modified land_frac file for use by fregrid - to interpolate the snow background from det to ensres - - Parameters + Returns ---------- - self : Analysis - Instance of the SnowEnsAnalysis object + None """ - chdir(self.task_config.DATA) - - # loop through tiles - for tile in range(1, self.task_config.ntiles + 1): - # open the restart and get the vegetation type - rst = nc.Dataset(f"./bkg/det/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc") - vtype = rst.variables['vtype'][:] - rst.close() - # open the oro data and get the land fraction - oro = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data.tile{tile}.nc") - land_frac = oro.variables['land_frac'][:] - oro.close() - # create an output file - ncfile = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight.tile{tile}.nc", mode='w', format='NETCDF4') - case_int = int(self.task_config.CASE[1:]) - lon = ncfile.createDimension('lon', case_int) - lat = ncfile.createDimension('lat', case_int) - lsm_frac_out = ncfile.createVariable('lsm_frac', np.float32, ('lon', 'lat')) - # set the land fraction to 0 on glaciers to not interpolate that snow - glacier = 15 - land_frac[np.where(vtype[0, ...] 
== glacier)] = 0 - lsm_frac_out[:] = land_frac - # write out and close the file - ncfile.close() + # create a temporary dict of all keys needed in this method + localconf = AttrDict() + keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', + 'OPREFIX', 'CASE', 'OCNRES', 'ntiles', 'FIXgfs'] + for key in keys: + localconf[key] = self.task_config[key] - @logit(logger) - def genMask(self) -> None: - """Create a mask for use by JEDI - to mask out snow increments on non-LSM gridpoints + # Read and render the IMS_OBS_LIST yaml + logger.info(f"Reading {self.task_config.IMS_OBS_LIST}") + prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, localconf) + logger.debug(f"{self.task_config.IMS_OBS_LIST}:\n{pformat(prep_ims_config)}") - Parameters - ---------- - self : Analysis - Instance of the SnowEnsAnalysis object - """ - - chdir(self.task_config.DATA) - - # loop through tiles - for tile in range(1, self.task_config.ntiles + 1): - # open the restart and get the vegetation type - rst = nc.Dataset(f"./bkg/mem001/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc", mode="r+") - vtype = rst.variables['vtype'][:] - slmsk = rst.variables['slmsk'][:] - # slmsk(Time, yaxis_1, xaxis_1) - # set the mask to 3 on glaciers - glacier = 15 - slmsk[np.where(vtype == glacier)] = 3 - # write out and close the file - rst.variables['slmsk'][:] = slmsk - rst.close() + # copy the IMS obs files from COM_OBS to DATA/obs + logger.info("Copying IMS obs for CALCFIMSEXE") + FileHandler(prep_ims_config.calcfims).sync() - @logit(logger) - def regridDetBkg(self) -> None: - """Run fregrid to regrid the deterministic snow background - to the ensemble resolution + logger.info("Create namelist for CALCFIMSEXE") + nml_template = self.task_config.FIMS_NML_TMPL + nml_data = Jinja(nml_template, localconf).render + logger.debug(f"fims.nml:\n{nml_data}") - Parameters - ---------- - self : Analysis - Instance of the SnowEnsAnalysis object - """ + nml_file = os.path.join(localconf.DATA, "fims.nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) - chdir(self.task_config.DATA) - - arg_list = [ - "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", - "--input_dir", f"./bkg/det/", - "--input_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", - "--scalar_field", f"snodl", - "--output_dir", f"./bkg/det_ensres/", - "--output_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", - "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", - "--interp_method", f"conserve_order1", - "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", - "--weight_field", f"lsm_frac", - "--remap_file", f"./remap", - ] - fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') - exec_cmd = Executable(fregrid_exe) + logger.info("Link CALCFIMSEXE into DATA/") + exe_src = self.task_config.CALCFIMSEXE + exe_dest = os.path.join(localconf.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + # execute CALCFIMSEXE to calculate IMS snowdepth + exe = Executable(self.task_config.APRUN_CALCFIMS) + exe.add_default_arg(os.path.join(localconf.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd(*arg_list) + exe() except OSError: - raise OSError(f"Failed to execute {exec_cmd}") + raise OSError(f"Failed to execute {exe}") except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") + 
raise WorkflowException(f"An error occured during execution of {exe}") - @logit(logger) - def regridDetInc(self) -> None: - """Run fregrid to regrid the deterministic snow increment - to the ensemble resolution + # Ensure the snow depth IMS file is produced by the above executable + input_file = f"IMSscf.{to_YMD(localconf.current_cycle)}.{localconf.CASE}_oro_data.nc" + if not os.path.isfile(f"{os.path.join(localconf.DATA, input_file)}"): + logger.exception(f"{self.task_config.CALCFIMSEXE} failed to produce {input_file}") + raise FileNotFoundError(f"{os.path.join(localconf.DATA, input_file)}") - Parameters - ---------- - self : Analysis - Instance of the SnowEnsAnalysis object - """ + # Execute imspy to create the IMS obs data in IODA format + logger.info("Create IMS obs data in IODA format") - chdir(self.task_config.DATA) - - arg_list = [ - "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", - "--input_dir", f"./inc/det/", - "--input_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", - "--scalar_field", f"snodl", - "--output_dir", f"./inc/det_ensres/", - "--output_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", - "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", - "--interp_method", f"conserve_order1", - "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", - "--weight_field", f"lsm_frac", - "--remap_file", f"./remap", - ] - fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') - exec_cmd = Executable(fregrid_exe) + output_file = f"ims_snow_{to_YMDH(localconf.current_cycle)}.nc4" + if os.path.isfile(f"{os.path.join(localconf.DATA, output_file)}"): + rm_p(output_file) + exe = Executable(self.task_config.IMS2IODACONV) + exe.add_default_arg(["-i", f"{os.path.join(localconf.DATA, input_file)}"]) + exe.add_default_arg(["-o", f"{os.path.join(localconf.DATA, output_file)}"]) try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd(*arg_list) + logger.debug(f"Executing {exe}") + exe() except OSError: - raise OSError(f"Failed to execute {exec_cmd}") + raise OSError(f"Failed to execute {exe}") except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") + raise WorkflowException(f"An error occured during execution of {exe}") + + # Ensure the IODA snow depth IMS file is produced by the IODA converter + # If so, copy to DATA/obs/ + if not os.path.isfile(f"{os.path.join(localconf.DATA, output_file)}"): + logger.exception(f"{self.task_config.IMS2IODACONV} failed to produce {output_file}") + raise FileNotFoundError(f"{os.path.join(localconf.DATA, output_file)}") + else: + logger.info(f"Copy {output_file} to {os.path.join(localconf.DATA, 'obs')}") + FileHandler(prep_ims_config.ims2ioda).sync() @logit(logger) - def recenterEns(self) -> None: - """Run recentering code to create an ensemble of snow increments - based on the deterministic increment, and the difference - between the determinstic and ensemble mean forecast + def execute(self, jedi_dict_key: str) -> None: + """Run JEDI executable + + This method will run JEDI executables for the global snow analysis Parameters ---------- - self : Analysis - Instance of the SnowEnsAnalysis object + jedi_dict_key + key specifying particular Jedi object in self.jedi_dict + + Returns + ---------- + None """ - logger.info("Running recentering code") - exec_cmd = Executable(self.task_config.APRUN_ESNOWANL) - exec_name = os.path.join(self.task_config.DATA, 'gdasapp_land_ensrecenter.x') - 
exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.jedi_yaml) - try: - logger.debug(f"Executing {exec_cmd}") - exec_cmd() - except OSError: - raise OSError(f"Failed to execute {exec_cmd}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exec_cmd}") + self.jedi_dict[jedi_dict_key].execute() @logit(logger) def finalize(self) -> None: - """Performs closing actions of the snow ensemble analysis task + """Performs closing actions of the Snow analysis task This method: - - copies the ensemble snow analyses to the proper locations - - copies the ensemble mean increment to COM + - tar and gzip the output diag files and place in COM/ + - copy the generated YAML file from initialize to the COM/ + - copy the analysis files to the COM/ + - copy the increment files to the COM/ Parameters ---------- self : Analysis - Instance of the SnowEnsAnalysis object + Instance of the SnowAnalysis object """ - # save files to COM - logger.info(f"Copying files described in {self.task_config.SNOW_ENS_FINALIZE_TMPL}") - snow_final_list = parse_j2yaml(self.task_config.SNOW_ENS_FINALIZE_TMPL, self.task_config) - FileHandler(snow_final_list).sync() + + # ---- tar up diags + # path of output tar statfile + snowstat = os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc')) + + logger.info(f"Compressing {len(diags)} diag files to {snowstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {snowstat} with {len(diags)} gzipped diag files") + with tarfile.open(snowstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # get list of yamls to copy to ROTDIR + yamls = glob.glob(os.path.join(self.task_config.DATA, '*snow*yaml')) + + # copy full YAML from executable to ROTDIR + for src in yamls: + yaml_base = os.path.splitext(os.path.basename(src))[0] + dest_yaml_name = f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.{yaml_base}.yaml" + dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, dest_yaml_name) + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + logger.info("Copy analysis to COM") + bkgtimes = [] + if self.task_config.DOIAU: + # need both beginning and middle of window + bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) + bkgtimes.append(self.task_config.current_cycle) + anllist = [] + for bkgtime in bkgtimes: + template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', filename) + dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) + anllist.append([src, dest]) + FileHandler({'copy': anllist}).sync() + + logger.info('Copy increments to COM') + template = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', filename) + dest = 
os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() @logit(logger) - def addEnsIncrements(self) -> None: - """Loop through all ensemble members and apply increment to create - a surface analysis for snow + def add_increments(self) -> None: + """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds Parameters ---------- self : Analysis - Instance of the SnowEnsAnalysis object + Instance of the SnowAnalysis object """ - bkg_times = [] - # no matter what, we want to process the center of the window - bkg_times.append(self.task_config.current_cycle) - # if DOIAU, we need to copy the increment to be valid at the center of the window - # and compute the analysis there to restart the model + # need backgrounds to create analysis from increments after LETKF + logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") + bkgtimes = [] + if self.task_config.DOIAU: + # want analysis at beginning and middle of window + bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) + bkgtimes.append(self.task_config.current_cycle) + anllist = [] + for bkgtime in bkgtimes: + template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, filename) + dest = os.path.join(self.task_config.DATA, "anl", filename) + anllist.append([src, dest]) + FileHandler({'copy': anllist}).sync() + if self.task_config.DOIAU: logger.info("Copying increments to beginning of window") - template_in = f'snowinc.{to_fv3time(self.task_config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' - template_out = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + template_in = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + template_out = f'snowinc.{to_fv3time(self.task_config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' inclist = [] - for itile in range(1, 7): + for itile in range(1, self.task_config.ntiles + 1): filename_in = template_in.format(tilenum=itile) filename_out = template_out.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_in) - dest = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_out) + src = os.path.join(self.task_config.DATA, 'anl', filename_in) + dest = os.path.join(self.task_config.DATA, 'anl', filename_out) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() - # if running with IAU, we also need an analysis at the beginning of the window - bkg_times.append(self.task_config.SNOW_WINDOW_BEGIN) - - for bkg_time in bkg_times: - for mem in range(1, self.task_config.NMEM_ENS + 1): - # for now, just looping serially, should parallelize this eventually - logger.info(f"Now applying increment to member mem{mem:03}") - logger.info(f'{os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")}') - memdict = AttrDict( - { - 'HOMEgfs': self.task_config.HOMEgfs, - 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), - 'DATAROOT': self.task_config.DATA, - 'current_cycle': bkg_time, - 'CASE_ENS': self.task_config.CASE_ENS, - 'OCNRES': self.task_config.OCNRES, - 'ntiles': self.task_config.ntiles, - 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, - 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, - 'APRUN_APPLY_INCR': 
self.task_config.APRUN_APPLY_INCR, - 'MYMEM': f"{mem:03}", - } - ) - self.add_increments(memdict) - - @staticmethod - @logit(logger) - def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: - """Compile a dictionary of model background files to copy - - This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) - that are needed for global snow DA and returns said dictionary for use by the FileHandler class. - - Parameters - ---------- - config: Dict - Dictionary of key-value pairs needed in this method - Should contain the following keys: - COMIN_ATMOS_RESTART_PREV - DATA - current_cycle - ntiles - - Returns - ---------- - bkg_dict: Dict - a dictionary containing the list of model background files to copy for FileHandler - """ - - bkg_dict = { - 'mkdir': [], - 'copy': [], - } - return bkg_dict - - @staticmethod - @logit(logger) - def add_increments(config: Dict) -> None: - """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds - - Parameters - ---------- - config: Dict - Dictionary of key-value pairs needed in this method - Should contain the following keys: - HOMEgfs - DATA - DATAROOT - current_cycle - CASE - OCNRES - ntiles - APPLY_INCR_NML_TMPL - APPLY_INCR_EXE - APRUN_APPLY_INCR - - Raises - ------ - OSError - Failure due to OS issues - WorkflowException - All other exceptions - """ - os.chdir(config.DATA) - - logger.info("Create namelist for APPLY_INCR_EXE") - nml_template = config.ENS_APPLY_INCR_NML_TMPL - nml_data = Jinja(nml_template, config).render - logger.debug(f"apply_incr_nml:\n{nml_data}") - - nml_file = os.path.join(config.DATA, "apply_incr_nml") - with open(nml_file, "w") as fho: - fho.write(nml_data) - - logger.info("Link APPLY_INCR_EXE into DATA/") - exe_src = config.APPLY_INCR_EXE - exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - # execute APPLY_INCR_EXE to create analysis files - exe = Executable(config.APRUN_APPLY_INCR) - exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) - logger.info(f"Executing {exe}") - try: - exe() - except OSError: - raise OSError(f"Failed to execute {exe}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exe}") - - def get_obs_dict(self) -> Dict[str, Any]: - obs_dict = { - 'mkdir': [], - 'copy': [], - } - return obs_dict - - def get_bias_dict(self) -> Dict[str, Any]: - bias_dict = { - 'mkdir': [], - 'copy': [], - } - return bias_dict + # loop over times to apply increments + for bkgtime in bkgtimes: + logger.info("Processing analysis valid: {bkgtime}") + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = self.task_config.APPLY_INCR_NML_TMPL + nml_config = { + 'current_cycle': bkgtime, + 'CASE': self.task_config.CASE, + 'DATA': self.task_config.DATA, + 'HOMEgfs': self.task_config.HOMEgfs, + 'OCNRES': self.task_config.OCNRES, + } + nml_data = Jinja(nml_template, nml_config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(self.task_config.DATA, "apply_incr_nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = self.task_config.APPLY_INCR_EXE + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = 
Executable(self.task_config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(self.task_config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exe}") From 4cb746dcd3a2b4f7a76ea5cf75ad41bf706d0bcc Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 13 Nov 2024 21:51:44 +0000 Subject: [PATCH 139/157] end of day commit --- parm/gdas/esnowanl_jedi_config.yaml.j2 | 4 +- parm/gdas/snow_stage_ens_update.yaml.j2 | 2 + parm/gdas/snow_stage_orog.yaml.j2 | 4 - sorc/gdas.cd | 2 +- ush/python/pygfs/task/snow_analysis.py | 1 + ush/python/pygfs/task/snowens_analysis.py | 135 +++++++++++++--------- 6 files changed, 85 insertions(+), 63 deletions(-) diff --git a/parm/gdas/esnowanl_jedi_config.yaml.j2 b/parm/gdas/esnowanl_jedi_config.yaml.j2 index 50634db6eb..ee0909f6db 100644 --- a/parm/gdas/esnowanl_jedi_config.yaml.j2 +++ b/parm/gdas/esnowanl_jedi_config.yaml.j2 @@ -1,14 +1,14 @@ esnowanlensmean: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - mpi_cmd: '{{ APRUN_SNOWANL }}' + mpi_cmd: '{{ APRUN_ESNOWANL }}' jedi_args: ['fv3jedi', 'ensmean'] jcb_base_yaml: '{{ PARMgfs }}/gdas/snow/jcb-base.yaml.j2' jcb_algo: 'fv3jedi_snow_ensmean' snowanlvar: rundir: '{{ DATA }}' exe_src: '{{ EXECgfs }}/gdas.x' - mpi_cmd: '{{ APRUN_SNOWANL }}' + mpi_cmd: '{{ APRUN_ESNOWANL }}' jedi_args: ['fv3jedi', 'variational'] jcb_base_yaml: '{{ PARMgfs }}/gdas/snow/jcb-base.yaml.j2' jcb_algo_yaml: '{{ JCB_ALGO_YAML_VAR }}' \ No newline at end of file diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 index cbe431b263..d8b1d42d00 100644 --- a/parm/gdas/snow_stage_ens_update.yaml.j2 +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -30,6 +30,8 @@ copy: '${HH}':previous_cycle | strftime("%H"), '${MEMDIR}':"mem" + '%03d' % mem} %} + # copy coupler file +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.coupler.res", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.coupler.res"] # we need to copy them to two places, one serves as the basis for the analysis {% for tile in range(1, ntiles+1) %} - ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] diff --git a/parm/gdas/snow_stage_orog.yaml.j2 b/parm/gdas/snow_stage_orog.yaml.j2 index 3cd7d5c327..f915b36d1f 100644 --- a/parm/gdas/snow_stage_orog.yaml.j2 +++ b/parm/gdas/snow_stage_orog.yaml.j2 @@ -1,12 +1,8 @@ mkdir: -- "{{ DATA }}/orog/det" - "{{ DATA }}/orog/ens" copy: -- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_mosaic.nc", "{{ DATA }}/orog/det/{{ CASE }}_mosaic.nc"] - ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_mosaic.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_mosaic.nc"] {% for tile in range(1, ntiles+1) %} -- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}_grid.tile{{ tile }}.nc"] - ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] -- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] - ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA
}}/orog/ens/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] {% endfor %} diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 4ddf8aa182..7293231008 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 4ddf8aa182ece196db1ca0727bf40294a799f04f +Subproject commit 7293231008bd8e12d1d4cdd35a1c54d80d8c92c0 diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index cf46aeee91..d9841c9224 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -70,6 +70,7 @@ def __init__(self, config: Dict[str, Any]): 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'snow_obsdatain_path': f"{self.task_config.DATA}/obs/", 'snow_obsdataout_path': f"{self.task_config.DATA}/diags/", + 'snow_bkg_path': './bkg', } ) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 93b9a59271..7afbc63f24 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -50,12 +50,16 @@ def __init__(self, config: Dict[str, Any]): """ super().__init__(config) - _res = int(self.task_config['CASE'][1:]) + _res = int(self.task_config['CASE_ENS'][1:]) + self.task_config['CASE'] = self.task_config['CASE_ENS'] _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) # fix ocnres self.task_config.OCNRES = f"{self.task_config.OCNRES :03d}" + # we do not have enkf obs + oprefix = self.task_config.RUN.replace("enkf", "") + # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { @@ -65,11 +69,12 @@ def __init__(self, config: Dict[str, Any]): 'npz': self.task_config.LEVS - 1, 'SNOW_WINDOW_BEGIN': _window_begin, 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", - 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'OPREFIX': f"{oprefix}.t{self.task_config.cyc:02d}z.", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'snow_obsdatain_path': f"{self.task_config.DATA}/obs/", 'snow_obsdataout_path': f"{self.task_config.DATA}/diags/", + 'snow_bkg_path': './bkg/ensmean', } ) @@ -86,7 +91,7 @@ def initialize(self) -> None: This method will initialize a global snow ensemble analysis. 
This includes: - - initialize JEDI application + - initialize JEDI applications - staging model backgrounds - staging observation files - staging FV3-JEDI fix files @@ -105,12 +110,33 @@ def initialize(self) -> None: logger.info(f"Initializing JEDI variational DA application") self.jedi_dict['snowanlvar'].initialize(self.task_config) + # initialize ensemble mean computation + logger.info(f"Initializing JEDI ensemble mean application") + self.jedi_dict['esnowanlensmean'].initialize(self.task_config) + # stage backgrounds logger.info(f"Staging background files from {self.task_config.SNOW_ENS_STAGE_TMPL}") bkg_staging_dict = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) FileHandler(bkg_staging_dict).sync() logger.debug(f"Background files:\n{pformat(bkg_staging_dict)}") + # stage orography + logger.info(f"Staging orography files from {self.task_config.SNOW_OROG_STAGE_TMPL}") + orog_staging_dict = parse_j2yaml(self.task_config.SNOW_OROG_STAGE_TMPL, self.task_config) + FileHandler(orog_staging_dict).sync() + logger.debug(f"Orography files:\n{pformat(orog_staging_dict)}") + # note JEDI will try to read the orog files for each member, let's just symlink + logger.info("Linking orography files for each member") + oro_files = glob.glob(os.path.join(self.task_config.DATA, 'orog', 'ens', '*')) + for mem in range(1, self.task_config.NMEM_ENS+1): + dest = os.path.join(self.task_config.DATA, 'bkg', f"mem{mem:03}") + for oro_file in oro_files: + os.symlink(oro_file, os.path.join(dest, os.path.basename(oro_file))) + # need to symlink orography files for the ensmean too + dest = os.path.join(self.task_config.DATA, 'bkg', 'ensmean') + for oro_file in oro_files: + os.symlink(oro_file, os.path.join(dest, os.path.basename(oro_file))) + # stage observations logger.info(f"Staging list of observation files generated from JEDI config") obs_dict = self.jedi_dict['snowanlvar'].render_jcb(self.task_config, 'snow_obs_staging') @@ -342,23 +368,6 @@ def add_increments(self) -> None: Instance of the SnowAnalysis object """ - # need backgrounds to create analysis from increments after LETKF - logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") - bkgtimes = [] - if self.task_config.DOIAU: - # want analysis at beginning and middle of window - bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) - bkgtimes.append(self.task_config.current_cycle) - anllist = [] - for bkgtime in bkgtimes: - template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' - for itile in range(1, self.task_config.ntiles + 1): - filename = template.format(tilenum=itile) - src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, filename) - dest = os.path.join(self.task_config.DATA, "anl", filename) - anllist.append([src, dest]) - FileHandler({'copy': anllist}).sync() - if self.task_config.DOIAU: logger.info("Copying increments to beginning of window") template_in = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' @@ -371,40 +380,54 @@ def add_increments(self) -> None: dest = os.path.join(self.task_config.DATA, 'anl', filename_out) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() + + bkgtimes = [] + if self.task_config.DOIAU: + # need both beginning and middle of window + bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) + bkgtimes.append(self.task_config.current_cycle) - # loop over times to apply increments - for bkgtime in bkgtimes: - logger.info("Processing analysis valid: {bkgtime}") - logger.info("Create namelist for 
APPLY_INCR_EXE") - nml_template = self.task_config.APPLY_INCR_NML_TMPL - nml_config = { - 'current_cycle': bkgtime, - 'CASE': self.task_config.CASE, - 'DATA': self.task_config.DATA, - 'HOMEgfs': self.task_config.HOMEgfs, - 'OCNRES': self.task_config.OCNRES, - } - nml_data = Jinja(nml_template, nml_config).render - logger.debug(f"apply_incr_nml:\n{nml_data}") - - nml_file = os.path.join(self.task_config.DATA, "apply_incr_nml") - with open(nml_file, "w") as fho: - fho.write(nml_data) - - logger.info("Link APPLY_INCR_EXE into DATA/") - exe_src = self.task_config.APPLY_INCR_EXE - exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - - # execute APPLY_INCR_EXE to create analysis files - exe = Executable(self.task_config.APRUN_APPLY_INCR) - exe.add_default_arg(os.path.join(self.task_config.DATA, os.path.basename(exe_src))) - logger.info(f"Executing {exe}") - try: - exe() - except OSError: - raise OSError(f"Failed to execute {exe}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exe}") + # loop over members + # TODO, make this better, or rewrite code to run in parallel + for mem in range(1, self.task_config.NMEM_ENS+1): + logger.info(f"Processing member mem{mem:03d}") + # loop over times to apply increments + for bkgtime in bkgtimes: + logger.info("Processing analysis valid: {bkgtime}") + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = self.task_config.ENS_APPLY_INCR_NML_TMPL + nml_config = { + 'current_cycle': bkgtime, + 'CASE': self.task_config.CASE, + 'DATA': self.task_config.DATA, + 'HOMEgfs': self.task_config.HOMEgfs, + 'OCNRES': self.task_config.OCNRES, + 'MYMEM': f"{mem:03d}", + 'CASE_ENS': self.task_config.CASE_ENS, + } + nml_data = Jinja(nml_template, nml_config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(self.task_config.DATA, "apply_incr_nml") + if os.path.exists(nml_file): + rm_p(nml_file) + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = self.task_config.APPLY_INCR_EXE + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = Executable(self.task_config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(self.task_config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exe}") From 479d23b92f9056e36da7b18298a2281e8371d56b Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 13 Nov 2024 21:53:54 +0000 Subject: [PATCH 140/157] fix pynorms --- ush/python/pygfs/task/snowens_analysis.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 7afbc63f24..d8fe0d8264 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -128,7 +128,7 @@ def initialize(self) -> None: # note JEDI will try to read the orog files for each member, let's just symlink logger.info("Linking orography files for each member") oro_files = glob.glob(os.path.join(self.task_config.DATA, 'orog', 'ens', '*')) - for mem in range(1, self.task_config.NMEM_ENS+1): + 
for mem in range(1, self.task_config.NMEM_ENS + 1): dest = os.path.join(self.task_config.DATA, 'bkg', f"mem{mem:03}") for oro_file in oro_files: os.symlink(oro_file, os.path.join(dest, os.path.basename(oro_file))) @@ -136,7 +136,7 @@ def initialize(self) -> None: dest = os.path.join(self.task_config.DATA, 'bkg', 'ensmean') for oro_file in oro_files: os.symlink(oro_file, os.path.join(dest, os.path.basename(oro_file))) - + # stage observations logger.info(f"Staging list of observation files generated from JEDI config") obs_dict = self.jedi_dict['snowanlvar'].render_jcb(self.task_config, 'snow_obs_staging') @@ -380,7 +380,7 @@ def add_increments(self) -> None: dest = os.path.join(self.task_config.DATA, 'anl', filename_out) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() - + bkgtimes = [] if self.task_config.DOIAU: # need both beginning and middle of window @@ -389,7 +389,7 @@ def add_increments(self) -> None: # loop over members # TODO, make this better, or rewrite code to run in parallel - for mem in range(1, self.task_config.NMEM_ENS+1): + for mem in range(1, self.task_config.NMEM_ENS + 1): logger.info(f"Processing member mem{mem:03d}") # loop over times to apply increments for bkgtime in bkgtimes: From 5ddce364839437f5f101f0b9a9d45faa38e7432a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 14 Nov 2024 16:10:37 +0000 Subject: [PATCH 141/157] enssnowanl looks like it is working, need to do a fresh test --- jobs/JGLOBAL_SNOWENS_ANALYSIS | 9 +++++---- sorc/gdas.cd | 2 +- ush/python/pygfs/task/snowens_analysis.py | 24 ++++++++++++----------- 3 files changed, 19 insertions(+), 16 deletions(-) diff --git a/jobs/JGLOBAL_SNOWENS_ANALYSIS b/jobs/JGLOBAL_SNOWENS_ANALYSIS index f04c788166..78951688ce 100755 --- a/jobs/JGLOBAL_SNOWENS_ANALYSIS +++ b/jobs/JGLOBAL_SNOWENS_ANALYSIS @@ -21,10 +21,6 @@ RUN=${CDUMP} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ COMOUT_CONF:COM_CONF_TMPL -MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ - COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL - -mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" for imem in $(seq 1 "${NMEM_ENS}"); do memchar="mem$(printf %03i "${imem}")" @@ -33,6 +29,11 @@ for imem in $(seq 1 "${NMEM_ENS}"); do mkdir -p "${COMOUT_SNOW_ANALYSIS}" done +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -x\ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + +mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" + ############################################################### # Run relevant script diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 7293231008..69cefc3114 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 7293231008bd8e12d1d4cdd35a1c54d80d8c92c0 +Subproject commit 69cefc3114563eebbd8d109cbdbf221dd0b89449 diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index d8fe0d8264..66af234ee6 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -298,7 +298,7 @@ def finalize(self) -> None: # ---- tar up diags # path of output tar statfile - snowstat = os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat") + snowstat = os.path.join(self.task_config.COMOUT_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat") # get list of diag files to put in tarball diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc')) @@ -325,7 +325,7 @@ def finalize(self) -> None: 
for src in yamls: yaml_base = os.path.splitext(os.path.basename(src))[0] dest_yaml_name = f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.{yaml_base}.yaml" - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, dest_yaml_name) + dest = os.path.join(self.task_config.COMOUT_SNOW_ANALYSIS, dest_yaml_name) logger.debug(f"Copying {src} to {dest}") yaml_copy = { 'copy': [[src, dest]] @@ -339,13 +339,15 @@ def finalize(self) -> None: bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) bkgtimes.append(self.task_config.current_cycle) anllist = [] - for bkgtime in bkgtimes: - template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' - for itile in range(1, self.task_config.ntiles + 1): - filename = template.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'anl', filename) - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) - anllist.append([src, dest]) + for mem in range(1, self.task_config.NMEM_ENS + 1): + for bkgtime in bkgtimes: + template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', f"mem{mem:03d}", filename) + COMOUT_SNOW_ANALYSIS = self.task_config.COMOUT_SNOW_ANALYSIS.replace('ensstat', f"mem{mem:03d}") + dest = os.path.join(COMOUT_SNOW_ANALYSIS, filename) + anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() logger.info('Copy increments to COM') @@ -354,7 +356,7 @@ def finalize(self) -> None: for itile in range(1, self.task_config.ntiles + 1): filename = template.format(tilenum=itile) src = os.path.join(self.task_config.DATA, 'anl', filename) - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) + dest = os.path.join(self.task_config.COMOUT_SNOW_ANALYSIS, filename) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() @@ -393,7 +395,7 @@ def add_increments(self) -> None: logger.info(f"Processing member mem{mem:03d}") # loop over times to apply increments for bkgtime in bkgtimes: - logger.info("Processing analysis valid: {bkgtime}") + logger.info(f"Processing analysis valid: {bkgtime}") logger.info("Create namelist for APPLY_INCR_EXE") nml_template = self.task_config.ENS_APPLY_INCR_NML_TMPL nml_config = { From d6b696401617e5a50af71a0f7e3d2e27b49604ad Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 14 Nov 2024 22:05:19 +0000 Subject: [PATCH 142/157] end of day commit --- jobs/JGLOBAL_SNOW_ANALYSIS | 9 ++++++--- parm/gdas/staging/snow_var_bkg.yaml.j2 | 4 ++-- ush/python/pygfs/task/snow_analysis.py | 14 +++++++------- workflow/rocoto/gfs_tasks.py | 2 ++ 4 files changed, 17 insertions(+), 12 deletions(-) diff --git a/jobs/JGLOBAL_SNOW_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS index 9ed7369611..1642042b89 100755 --- a/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ANALYSIS @@ -17,12 +17,15 @@ GDUMP="gdas" # Begin JOB SPECIFIC work ############################################## # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_SNOW_ANALYSIS COM_CONF +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL \ + COMOUT_CONF:COM_CONF_TMPL RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL -mkdir -m 775 -p "${COM_SNOW_ANALYSIS}" "${COM_CONF}" +mkdir -m 775 -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" 
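+# Sketch of the templating convention, as an assumption inferred from usage in
+# this file rather than from the declare_from_tmpl implementation: each
+# NAME:TMPL pair expands the COM path template with the exported cycle context
+# (YMD, HH, RUN) and exports the result, roughly:
+#   YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL
+#   # COMOUT_SNOW_ANALYSIS now holds the resolved snow-analysis COM directory for this cycle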
############################################################### # Run relevant script diff --git a/parm/gdas/staging/snow_var_bkg.yaml.j2 b/parm/gdas/staging/snow_var_bkg.yaml.j2 index d629d9f23d..164fb3945e 100644 --- a/parm/gdas/staging/snow_var_bkg.yaml.j2 +++ b/parm/gdas/staging/snow_var_bkg.yaml.j2 @@ -1,8 +1,8 @@ mkdir: - '{{ DATA }}/bkg' copy: -- ['{{ COM_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/'] +- ['{{ COMIN_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/'] {% for tile in range(1, ntiles+1) %} -- ['{{ COM_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc', '{{ DATA }}/bkg/'] +- ['{{ COMIN_ATMOS_RESTART_PREV }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc', '{{ DATA }}/bkg/'] - ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] {% endfor %} \ No newline at end of file diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index d9841c9224..d2469200d8 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -164,7 +164,7 @@ def prepare_IMS(self) -> None: # create a temporary dict of all keys needed in this method localconf = AttrDict() - keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', + keys = ['DATA', 'current_cycle', 'COMIN_OBS', 'COMIN_ATMOS_RESTART_PREV', 'OPREFIX', 'CASE', 'OCNRES', 'ntiles', 'FIXgfs'] for key in keys: localconf[key] = self.task_config[key] @@ -174,7 +174,7 @@ def prepare_IMS(self) -> None: prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, localconf) logger.debug(f"{self.task_config.IMS_OBS_LIST}:\n{pformat(prep_ims_config)}") - # copy the IMS obs files from COM_OBS to DATA/obs + # copy the IMS obs files from COMIN_OBS to DATA/obs logger.info("Copying IMS obs for CALCFIMSEXE") FileHandler(prep_ims_config.calcfims).sync() @@ -273,7 +273,7 @@ def finalize(self) -> None: # ---- tar up diags # path of output tar statfile - snowstat = os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat") + snowstat = os.path.join(self.task_config.COMOUT_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat") # get list of diag files to put in tarball diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc')) @@ -300,7 +300,7 @@ def finalize(self) -> None: for src in yamls: yaml_base = os.path.splitext(os.path.basename(src))[0] dest_yaml_name = f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.{yaml_base}.yaml" - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, dest_yaml_name) + dest = os.path.join(self.task_config.COMOUT_SNOW_ANALYSIS, dest_yaml_name) logger.debug(f"Copying {src} to {dest}") yaml_copy = { 'copy': [[src, dest]] @@ -319,7 +319,7 @@ def finalize(self) -> None: for itile in range(1, self.task_config.ntiles + 1): filename = template.format(tilenum=itile) src = os.path.join(self.task_config.DATA, 'anl', filename) - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) + dest = os.path.join(self.task_config.COMOUT_SNOW_ANALYSIS, filename) anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() @@ -329,7 +329,7 @@ def finalize(self) -> None: for itile in range(1, self.task_config.ntiles + 1): filename = template.format(tilenum=itile) src = os.path.join(self.task_config.DATA, 'anl', filename) - dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, 
filename) + dest = os.path.join(self.task_config.COMOUT_SNOW_ANALYSIS, filename) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() @@ -355,7 +355,7 @@ def add_increments(self) -> None: template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' for itile in range(1, self.task_config.ntiles + 1): filename = template.format(tilenum=itile) - src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, filename) + src = os.path.join(self.task_config.COMIN_ATMOS_RESTART_PREV, filename) dest = os.path.join(self.task_config.DATA, "anl", filename) anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 2e6bc338f3..968f3ecb27 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -591,6 +591,8 @@ def esnowanl(self): deps = [] dep_dict = {'type': 'metatask', 'name': f'{self.run}_epmn', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.run.replace('enkf', '')}_prep'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('esnowanl') From 0c2e80ad33f9e01c18b93dd572e84b6484ef1aaf Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 14 Nov 2024 22:13:14 +0000 Subject: [PATCH 143/157] try to fix something --- workflow/rocoto/gfs_tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 968f3ecb27..0947b08e65 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -591,7 +591,7 @@ def esnowanl(self): deps = [] dep_dict = {'type': 'metatask', 'name': f'{self.run}_epmn', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"} deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{self.run.replace('enkf', '')}_prep'} + dep_dict = {'type': 'task', 'name': f"{self.run.replace('enkf', '')}_prep"} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) From 4002bf1bacfa8efc40a3b5e179ce474781298f7d Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Fri, 15 Nov 2024 21:29:35 +0000 Subject: [PATCH 144/157] end of day commit --- env/HERA.env | 2 ++ env/HERCULES.env | 2 ++ env/JET.env | 2 ++ env/ORION.env | 2 ++ env/S4.env | 2 ++ env/WCOSS2.env | 2 ++ parm/archive/gdas.yaml.j2 | 2 +- parm/archive/gfs_arcdir.yaml.j2 | 4 ++-- sorc/gdas.cd | 2 +- ush/python/pygfs/task/snowens_analysis.py | 4 ++-- 10 files changed, 18 insertions(+), 6 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index 88450a59f7..bd1c3cc051 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -121,6 +121,8 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_ESNOWANL=${NTHREADSmax} export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" diff --git a/env/HERCULES.env b/env/HERCULES.env index 2df7e955cd..8f5bf37d33 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -120,6 +120,8 @@ case ${step} in ;; "esnowanl") + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_ESNOWANL=${NTHREADSmax} export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" diff --git a/env/JET.env b/env/JET.env index 226caf31a5..7bfd912062 100755 --- a/env/JET.env +++ b/env/JET.env @@ -104,6 +104,8 @@ elif 
[[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_ESNOWANL=${NTHREADSmax} export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" diff --git a/env/ORION.env b/env/ORION.env index a004527ac4..fbe00c153c 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -111,6 +111,8 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_ESNOWANL=${NTHREADSmax} export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" diff --git a/env/S4.env b/env/S4.env index b72520c2c6..39d24e19ec 100755 --- a/env/S4.env +++ b/env/S4.env @@ -104,6 +104,8 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_ESNOWANL=${NTHREADSmax} export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 47a9d3711f..a27b265eb5 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -97,6 +97,8 @@ elif [[ "${step}" = "snowanl" ]]; then elif [[ "${step}" = "esnowanl" ]]; then + export APRUN_CALCFIMS="${launcher} -n 1" + export NTHREADS_ESNOWANL=${NTHREADSmax} export APRUN_ESNOWANL="${APRUN_default}" diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 index 1e9597ba1c..4fda29109e 100644 --- a/parm/archive/gdas.yaml.j2 +++ b/parm/archive/gdas.yaml.j2 @@ -75,7 +75,7 @@ gdas: - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" {% endif %} {% if DO_JEDISNOWDA %} - - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat.tgz" + - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat" {% endif %} # Ozone verification diff --git a/parm/archive/gfs_arcdir.yaml.j2 b/parm/archive/gfs_arcdir.yaml.j2 index 57dbc78885..7ff556037d 100644 --- a/parm/archive/gfs_arcdir.yaml.j2 +++ b/parm/archive/gfs_arcdir.yaml.j2 @@ -46,8 +46,8 @@ {% endif %} {% if DO_JEDISNOWDA == True %} - {% do det_anl_files.append([COMIN_SNOW_ANALYSIS ~ "/" ~ head ~ "snowstat.tgz", - ARCDIR ~ "/snowstat." ~ RUN ~ "." ~ cycle_YMDH ~ ".tgz"]) %} + {% do det_anl_files.append([COMIN_SNOW_ANALYSIS ~ "/" ~ head ~ "snowstat", + ARCDIR ~ "/snowstat." ~ RUN ~ "." 
From c0b53f3960d7e3c69e4e88a513172aea1bd7c872 Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Tue, 26 Nov 2024 19:46:40 +0000
Subject: [PATCH 145/157] update

---
 sorc/gdas.cd                              | 2 +-
 ush/python/pygfs/task/snow_analysis.py    | 2 ++
 ush/python/pygfs/task/snowens_analysis.py | 1 +
 3 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index fc1cc39064..aee0cc051a 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit fc1cc3906420a3ce64c93b3ee3392c984a2a2ad2
+Subproject commit aee0cc051a9fe50059e70ff2205d3f765cf9694c
diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py
index d2469200d8..b2659de753 100644
--- a/ush/python/pygfs/task/snow_analysis.py
+++ b/ush/python/pygfs/task/snow_analysis.py
@@ -169,6 +169,8 @@ def prepare_IMS(self) -> None:
         for key in keys:
             localconf[key] = self.task_config[key]
 
+        localconf['ims_fcst_path'] = './bkg/'
+
         # Read and render the IMS_OBS_LIST yaml
         logger.info(f"Reading {self.task_config.IMS_OBS_LIST}")
         prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, localconf)
diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py
index aaff196444..8b42ce64bc 100644
--- a/ush/python/pygfs/task/snowens_analysis.py
+++ b/ush/python/pygfs/task/snowens_analysis.py
@@ -194,6 +194,7 @@ def prepare_IMS(self) -> None:
         for key in keys:
             localconf[key] = self.task_config[key]
 
+        localconf['ims_fcst_path'] = './bkg/ensmean/'
         # Read and render the IMS_OBS_LIST yaml
         logger.info(f"Reading {self.task_config.IMS_OBS_LIST}")
         prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, localconf)
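
Both prepare_IMS changes above inject an extra key, ims_fcst_path, into the local context used to render the Jinja-templated IMS_OBS_LIST YAML, pointing the deterministic task at ./bkg/ and the ensemble task at the ensemble-mean backgrounds. A minimal sketch of that render-then-parse pattern, using plain jinja2 and PyYAML in place of the workflow's parse_j2yaml helper; the template fragment and paths are illustrative, not the real prep_ims.yaml.j2:

    import textwrap

    import jinja2
    import yaml

    # Illustrative stand-in for a fragment of prep_ims.yaml.j2 (not the real file).
    template_text = textwrap.dedent("""\
    calcfims:
      copy:
      - ["{{ COMIN_OBS }}/ims_snowcover.nc", "{{ DATA }}/obs/ims_snowcover.nc"]
      - ["{{ ims_fcst_path }}sfc_data.tile1.nc", "{{ DATA }}/obs/sfc_data.tile1.nc"]
    """)

    # localconf plays the role of the AttrDict built in prepare_IMS.
    localconf = {
        'COMIN_OBS': '/com/obs',            # hypothetical path
        'DATA': '/rundir',                  # hypothetical path
        'ims_fcst_path': './bkg/ensmean/',  # the key added by this commit
    }

    # Render the Jinja template with the context, then parse the result as YAML.
    rendered = jinja2.Template(template_text).render(localconf)
    prep_ims_config = yaml.safe_load(rendered)
    assert prep_ims_config['calcfims']['copy'][1][0] == './bkg/ensmean/sfc_data.tile1.nc'
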
export job="esnowanl" export jobid="${job}.$$" +############################################################### +# setup python path for ioda utilities +# shellcheck disable=SC2311 +pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/" +gdasappPATH="${HOMEgfs}/sorc/gdas.cd/sorc/iodaconv/src:${pyiodaPATH}" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}:${gdasappPATH}" +export PYTHONPATH + ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_SNOWENS_ANALYSIS" From 7aab2581bd296638181101aad9a650aa073bf36b Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Tue, 10 Dec 2024 20:51:25 +0000 Subject: [PATCH 147/157] new fix files --- parm/gdas/staging/snow_berror.yaml.j2 | 2 +- sorc/gdas.cd | 2 +- sorc/link_workflow.sh | 2 +- versions/fix.ver | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/parm/gdas/staging/snow_berror.yaml.j2 b/parm/gdas/staging/snow_berror.yaml.j2 index a25669132e..e230217300 100644 --- a/parm/gdas/staging/snow_berror.yaml.j2 +++ b/parm/gdas/staging/snow_berror.yaml.j2 @@ -1,4 +1,4 @@ mkdir: - '{{ DATA }}/berror' copy: -- ['{{ HOMEgfs }}/fix/gdas/snow/snow_bump_nicas_300km_shadowlevels_nicas.nc', '{{ DATA }}/berror'] +- ['{{ HOMEgfs }}/fix/gdas/snow/snow_bump_nicas_250km_shadowlevels_nicas.nc', '{{ DATA }}/berror'] diff --git a/sorc/gdas.cd b/sorc/gdas.cd index aee0cc051a..56edba44ab 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit aee0cc051a9fe50059e70ff2205d3f765cf9694c +Subproject commit 56edba44ab61d9853c1c1946e685c55884e4e41c diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index b35b7ff35a..a1a77f08f0 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -214,7 +214,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then cd "${HOMEgfs}/fix" || exit 1 [[ ! 
-d gdas ]] && mkdir -p gdas cd gdas || exit 1 - for gdas_sub in fv3jedi gsibec obs soca aero; do + for gdas_sub in fv3jedi gsibec obs soca aero snow; do if [[ -d "${gdas_sub}" ]]; then rm -rf "${gdas_sub}" fi diff --git a/versions/fix.ver b/versions/fix.ver index 4739ce778a..991e0ce13a 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -13,6 +13,7 @@ export gdas_soca_ver=20240802 export gdas_gsibec_ver=20240416 export gdas_obs_ver=20240213 export gdas_aero_ver=20240806 +export gdas_snow_ver=20241210 export glwu_ver=20220805 export gsi_ver=20240208 export lut_ver=20220805 From 43a7c70218ec40d7a4cb94d862b98c9770ff4bb6 Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Wed, 11 Dec 2024 15:28:09 +0000 Subject: [PATCH 148/157] update restart archive yaml --- parm/archive/gdas_restarta.yaml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2 index fc5ce9478d..f52cae085c 100644 --- a/parm/archive/gdas_restarta.yaml.j2 +++ b/parm/archive/gdas_restarta.yaml.j2 @@ -47,7 +47,7 @@ gdas_restarta: # Snow configuration yaml {% if DO_JEDISNOWDA %} - - "{{ COMIN_CONF | relpath(ROTDIR) }}/{{ head }}letkfoi.yaml" + - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowanl.yaml" {% endif %} # Input BUFR files From 31fb42e3dc50075ce21f9eb2c0e8e28aff4f47af Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Thu, 12 Dec 2024 09:47:52 -0500 Subject: [PATCH 149/157] Update gfs_cycled.py --- workflow/applications/gfs_cycled.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index b453abc923..543d7a9d8c 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -316,7 +316,7 @@ def get_task_names(self): task_names[run] += ['eobs', 'eupd'] task_names[run].append('echgres') if 'gdas' in run else 0 task_names[run] += ['ediag'] if options['lobsdiag_forenkf'] else ['eomg'] - task_names[run].append('esnowrecen') if options['do_jedisnowda'] and 'gdas' in run else 0 + task_names[run].append('esnowanl') if options['do_jedisnowda'] and 'gdas' in run else 0 task_names[run] += ['stage_ic', 'ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] From bc9af916fa646589322232d173ed929a35cfa99a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 12 Dec 2024 22:05:47 +0000 Subject: [PATCH 150/157] update hera env --- env/HERA.env | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index bd1c3cc051..4ee12f0218 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -115,7 +115,7 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" export NTHREADS_SNOWANL=${NTHREADSmax} - export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}" + export APRUN_SNOWANL="${APRUN_default} --mem=0 --cpus-per-task=${NTHREADS_SNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" @@ -124,7 +124,7 @@ elif [[ "${step}" = "esnowanl" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" export NTHREADS_ESNOWANL=${NTHREADSmax} - export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}" + export APRUN_ESNOWANL="${APRUN_default} --mem=0 --cpus-per-task=${NTHREADS_SNOWANL}f" export APRUN_APPLY_INCR="${launcher} -n 6" From 723decf96342b46dc5d251b6939ca50ed0a98d0a Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 12 Dec 2024 22:16:31 +0000 Subject: [PATCH 151/157] update esnowanl walltime --- parm/config/gfs/config.resources | 2 +- 1 file 
From bc9af916fa646589322232d173ed929a35cfa99a Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Thu, 12 Dec 2024 22:05:47 +0000
Subject: [PATCH 150/157] update hera env

---
 env/HERA.env | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/env/HERA.env b/env/HERA.env
index bd1c3cc051..4ee12f0218 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -115,7 +115,7 @@ elif [[ "${step}" = "snowanl" ]]; then
     export APRUN_CALCFIMS="${launcher} -n 1"
 
     export NTHREADS_SNOWANL=${NTHREADSmax}
-    export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}"
+    export APRUN_SNOWANL="${APRUN_default} --mem=0 --cpus-per-task=${NTHREADS_SNOWANL}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 
@@ -124,7 +124,7 @@ elif [[ "${step}" = "esnowanl" ]]; then
     export APRUN_CALCFIMS="${launcher} -n 1"
 
     export NTHREADS_ESNOWANL=${NTHREADSmax}
-    export APRUN_ESNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWANL}"
+    export APRUN_ESNOWANL="${APRUN_default} --mem=0 --cpus-per-task=${NTHREADS_SNOWANL}f"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 

From 723decf96342b46dc5d251b6939ca50ed0a98d0a Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Thu, 12 Dec 2024 22:16:31 +0000
Subject: [PATCH 151/157] update esnowanl walltime

---
 parm/config/gfs/config.resources | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 50473682c9..f236f64017 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -373,7 +373,7 @@ case ${step} in
     export layout_x
     export layout_y
 
-    walltime="00:15:00"
+    walltime="00:30:00"
     ntasks=$(( layout_x * layout_y * 6 ))
     threads_per_task=1
     tasks_per_node=$(( max_tasks_per_node / threads_per_task ))

From d8012c0a0cd1e6c3168faf42e4ed91b135704078 Mon Sep 17 00:00:00 2001
From: Cory Martin
Date: Fri, 13 Dec 2024 08:36:14 -0500
Subject: [PATCH 152/157] Update env/HERA.env

Co-authored-by: Jiarui Dong
---
 env/HERA.env | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/env/HERA.env b/env/HERA.env
index 4ee12f0218..051287004b 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -124,7 +124,7 @@ elif [[ "${step}" = "esnowanl" ]]; then
     export APRUN_CALCFIMS="${launcher} -n 1"
 
     export NTHREADS_ESNOWANL=${NTHREADSmax}
-    export APRUN_ESNOWANL="${APRUN_default} --mem=0 --cpus-per-task=${NTHREADS_SNOWANL}f"
+    export APRUN_ESNOWANL="${APRUN_default} --mem=0 --cpus-per-task=${NTHREADS_ESNOWANL}"
 
     export APRUN_APPLY_INCR="${launcher} -n 6"
 
From 95cd47ae0f833630995bd30521cd0495fca0a9da Mon Sep 17 00:00:00 2001
From: Cory Martin
Date: Fri, 13 Dec 2024 15:12:39 +0000
Subject: [PATCH 153/157] rename paths

---
 parm/config/gfs/config.esnowanl | 2 +-
 parm/config/gfs/config.snowanl  | 6 +-----
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl
index bf7dd93f46..b35eed0633 100644
--- a/parm/config/gfs/config.esnowanl
+++ b/parm/config/gfs/config.esnowanl
@@ -22,7 +22,7 @@ export SNOW_ENS_FINALIZE_TMPL="${PARMgfs}/gdas/snow_finalize_ens_update.yaml.j2"
 
 # Name of the executable that applies increment to bkg and its namelist template
 export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe"
-export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/ens_apply_incr_nml.j2"
+export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/ens_apply_incr_nml.j2"
 
 export JEDI_CONFIG_YAML="${PARMgfs}/gdas/esnowanl_jedi_config.yaml.j2"
 
diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl
index a0863073e3..67a4fc012f 100644
--- a/parm/config/gfs/config.snowanl
+++ b/parm/config/gfs/config.snowanl
@@ -14,10 +14,6 @@ export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.ya
 export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2"
 export JCB_ALGO_YAML_VAR="${PARMgfs}/gdas/snow/jcb-prototype_2dvar.yaml.j2"
 
-# Ensemble member properties
-export SNOWDEPTHVAR="snodl"
-export BESTDDEV="30."  # Background Error Std. Dev. for LETKFOI
-
 # Process IMS snowcover into snow depth
 export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2"
 export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe"
@@ -26,7 +22,7 @@ export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py"
 
 # Name of the executable that applies increment to bkg and its namelist template
 export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe"
-export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2"
+export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/apply_incr_nml.j2"
 
 export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2"
 export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/snow_var_bkg.yaml.j2"

From 02f0a30d8c7af0c9eddec95fd70c37353d25c009 Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Fri, 13 Dec 2024 16:21:46 +0000
Subject: [PATCH 154/157] update hash for testing

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 56edba44ab..fc1108b147 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 56edba44ab61d9853c1c1946e685c55884e4e41c
+Subproject commit fc1108b147dafae447a091bbbd31cbd0dc1a142a

From d4dd92ed0b6497f93ba1489884155f5de44d557c Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Fri, 13 Dec 2024 21:04:02 +0000
Subject: [PATCH 155/157] fix missing config.esnowanl lines

---
 parm/config/gfs/config.esnowanl | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/parm/config/gfs/config.esnowanl b/parm/config/gfs/config.esnowanl
index b35eed0633..90b1a51136 100644
--- a/parm/config/gfs/config.esnowanl
+++ b/parm/config/gfs/config.esnowanl
@@ -14,6 +14,11 @@ export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.ya
 export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2"
 export JCB_ALGO_YAML_VAR="${PARMgfs}/gdas/snow/jcb-prototype_2dvar.yaml.j2"
 
+# Process IMS snowcover into snow depth
+export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2"
+export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe"
+export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2"
+export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py"
 export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2"
 export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/snow_berror.yaml.j2"
 export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2"

From 27df4b7afff95b46864a7af69a5e94cc0a2ade21 Mon Sep 17 00:00:00 2001
From: Cory Martin
Date: Fri, 13 Dec 2024 21:17:25 +0000
Subject: [PATCH 156/157] Update parm/archive/gdas_restarta.yaml.j2

Co-authored-by: Jiarui Dong
---
 parm/archive/gdas_restarta.yaml.j2 | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2
index f52cae085c..3456fc1569 100644
--- a/parm/archive/gdas_restarta.yaml.j2
+++ b/parm/archive/gdas_restarta.yaml.j2
@@ -47,7 +47,7 @@ gdas_restarta:
 
   # Snow configuration yaml
   {% if DO_JEDISNOWDA %}
-  - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowanl.yaml"
+  - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowanlvar.yaml"
   {% endif %}
 
   # Input BUFR files

From a46328e6d1d7dde1801d1ba054f28a172fe6bd8d Mon Sep 17 00:00:00 2001
From: CoryMartin-NOAA
Date: Fri, 13 Dec 2024 21:18:09 +0000
Subject: [PATCH 157/157] update gdas hash

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index fc1108b147..d9a1af65f3 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit fc1108b147dafae447a091bbbd31cbd0dc1a142a
+Subproject commit d9a1af65f34ce410603e1880b33caf905adfba86