From dc02f69fc44c80f382769d88dbc961fe45bb68d9 Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Thu, 10 Oct 2024 20:12:23 -0400 Subject: [PATCH 01/27] Fixes task names in gw-ci tests (#1320) This fixes the renaming of g-w tasks in the gw-ci tests. Tested on Hera - some tests fail, but as they did before and evidently not because of this renaming. Resolves https://github.com/NOAA-EMC/GDASApp/issues/1319 --- test/gw-ci/CMakeLists.txt | 46 +++++++++++++++++++-------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/test/gw-ci/CMakeLists.txt b/test/gw-ci/CMakeLists.txt index 1dba4dc0d..6c9faedf1 100644 --- a/test/gw-ci/CMakeLists.txt +++ b/test/gw-ci/CMakeLists.txt @@ -20,17 +20,17 @@ function(add_cycling_tests pslot YAML_PATH HOMEgfs RUNTESTS PROJECT_SOURCE_DIR T # stage IC's message(STATUS "staging the 1/2 cycle IC's for ${test_name} ctest") - add_test(NAME ${test_name}_gdasstage_ic_${HALF_CYCLE} - COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} gdasstage_ic ${HALF_CYCLE}" + add_test(NAME ${test_name}_gdas_stage_ic_${HALF_CYCLE} + COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} gdas_stage_ic ${HALF_CYCLE}" WORKING_DIRECTORY ${RUNTESTS}) - set_tests_properties(${test_name}_gdasstage_ic_${HALF_CYCLE} PROPERTIES LABELS "manual") + set_tests_properties(${test_name}_gdas_stage_ic_${HALF_CYCLE} PROPERTIES LABELS "manual") - # 1/2 cycle gdasfcst - message(STATUS "preparing 1/2 cycle gdasfcst for ${pslot} ctest") - add_test(NAME ${test_name}_gdasfcst_${HALF_CYCLE} - COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} gdasfcst_seg0 ${HALF_CYCLE}" + # 1/2 cycle gdas_fcst + message(STATUS "preparing 1/2 cycle gdas_fcst for ${pslot} ctest") + add_test(NAME ${test_name}_gdas_fcst_${HALF_CYCLE} + COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} gdas_fcst_seg0 ${HALF_CYCLE}" WORKING_DIRECTORY 
${RUNTESTS}) - set_tests_properties(${test_name}_gdasfcst_${HALF_CYCLE} PROPERTIES LABELS "manual") + set_tests_properties(${test_name}_gdas_fcst_${HALF_CYCLE} PROPERTIES LABELS "manual") # Select the list of tasks to run for the full cycle message(STATUS "Tasks ${TASK_LIST}") @@ -54,12 +54,12 @@ if (WORKFLOW_TESTS) set(pslot "WCDA-3DVAR-C48mx500") set(YAML_PATH ${HOMEgfs}/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml) set(TASK_LIST - "gdasprepoceanobs" - "gdasmarinebmat" - "gdasmarineanlinit" - "gdasmarineanlvar" - "gdasmarineanlchkpt" - "gdasmarineanlfinal" + "gdas_prepoceanobs" + "gdas_marinebmat" + "gdas_marineanlinit" + "gdas_marineanlvar" + "gdas_marineanlchkpt" + "gdas_marineanlfinal" ) add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") endif() @@ -85,15 +85,15 @@ if (RUN_GW_CI) set(pslot "GFSv17-3DVAR-C384mx025") set(YAML_PATH ${HOMEgfs}/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml) set(TASK_LIST - "gdasprepoceanobs" - "gdasmarinebmat" - "gdasmarineanlinit" - "gdasmarineanlvar" - "gdasmarineanlchkpt" - "gdasmarineanlfinal" - "gdasocnanalvrfy" - "gdasprep" - "gdasanal" + "gdas_prepoceanobs" + "gdas_marinebmat" + "gdas_marineanlinit" + "gdas_marineanlvar" + "gdas_marineanlchkpt" + "gdas_marineanlfinal" + "gdas_ocnanalvrfy" + "gdas_prep" + "gdas_anal" ) add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") endif() From 496b624e9f142be8e1ff5aff21f90308eef6b074 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 11 Oct 2024 13:35:02 +0000 Subject: [PATCH 02/27] Update to build on WCOSS (#1321) This updates the hashes of OOPS and SABER to incorporate changes needed for two ternary operators to instead be if-else statements to allow Intel 19 on WCOSS to compile JEDI. 
--- sorc/oops | 2 +- sorc/saber | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/oops b/sorc/oops index 78a7a1ac3..0d2c235d7 160000 --- a/sorc/oops +++ b/sorc/oops @@ -1 +1 @@ -Subproject commit 78a7a1ac378db5b7950a597e7fe119f4ce684514 +Subproject commit 0d2c235d791e1ba0023ce300103174dddf71aed7 diff --git a/sorc/saber b/sorc/saber index bfab007ac..1f23a3665 160000 --- a/sorc/saber +++ b/sorc/saber @@ -1 +1 @@ -Subproject commit bfab007ac003bec1d6adddee5517f3abb701fdd3 +Subproject commit 1f23a36657f6d10b770348de0f5454e01d377105 From 6fb0a655ffe61c6dd4f2acaa4c4490121ba980fb Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 11 Oct 2024 15:10:24 +0000 Subject: [PATCH 03/27] Add modulefile/compile support for Gaea C6 (#1325) Closes #1323 This PR adds support for compiling GDASApp on Gaea's C6 nodes with spack-stack 1.6.0 Co-authored-by: Cory Martin --- build.sh | 2 +- modulefiles/GDAS/gaeac6.intel.lua | 95 +++++++++++++++++++++++++++++++ ush/detect_machine.sh | 3 + 3 files changed, 99 insertions(+), 1 deletion(-) create mode 100644 modulefiles/GDAS/gaeac6.intel.lua diff --git a/build.sh b/build.sh index 48931b553..42ec34284 100755 --- a/build.sh +++ b/build.sh @@ -71,7 +71,7 @@ while getopts "p:t:c:hvdfa" opt; do done case ${BUILD_TARGET} in - hera | orion | hercules | wcoss2 | noaacloud | gaea) + hera | orion | hercules | wcoss2 | noaacloud | gaea | gaeac6 ) echo "Building GDASApp on $BUILD_TARGET" source $dir_root/ush/module-setup.sh module use $dir_root/modulefiles diff --git a/modulefiles/GDAS/gaeac6.intel.lua b/modulefiles/GDAS/gaeac6.intel.lua new file mode 100644 index 000000000..9069da3b8 --- /dev/null +++ b/modulefiles/GDAS/gaeac6.intel.lua @@ -0,0 +1,95 @@ +help([[ +Load environment for running the GDAS application with Intel compilers and MPI. 
+]]) + +local pkgName = myModuleName() +local pkgVersion = myModuleVersion() +local pkgNameVer = myModuleFullName() + +prepend_path("MODULEPATH", '/ncrc/proj/epic/spack-stack/c6/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core') +prepend_path("MODULEPATH", '/ncrc/proj/epic/rocoto/modulefiles') + +-- below two lines get us access to the spack-stack modules +load("stack-intel/2023.2.0") +load("stack-cray-mpich/8.1.29") +-- JCSDA has 'jedi-fv3-env/unified-dev', but we should load these manually as needed +load("cmake/3.23.1") +load("gettext/0.20.2") +--load("libunistring/1.1") +--load("libidn2/2.3.4") +load("pcre2/10.42") +load("curl/8.4.0") +load("zlib/1.2.13") +load("git/2.42.0") +load("pkg-config/0.29.2") +load("hdf5/1.14.0") +load("parallel-netcdf/1.12.2") +load("netcdf-c/4.9.2") +load("nccmp/1.9.0.1") +load("netcdf-fortran/4.6.1") +load("nco/5.0.6") +load("parallelio/2.5.10") +load("wget/1.21.3") +load("boost/1.83.0") +load("bufr/12.0.1") +load("git-lfs/2.11.0") +load("ecbuild/3.7.2") +load("openjpeg/2.3.1") +load("eccodes/2.32.0") +load("eigen/3.4.0") +load("openblas/0.3.24") +load("eckit/1.24.5") +load("fftw/3.3.10") +load("fckit/0.11.0") +load("fiat/1.2.0") +load("ectrans/1.2.0") +load("fms/2023.04") +load("esmf/8.6.0") +load("atlas/0.35.1") +load("sp/2.5.0") +load("gsl-lite/0.37.0") +load("libjpeg/2.1.0") +load("krb5/1.20.1") +load("libtirpc/1.3.3") +load("hdf/4.2.15") +load("jedi-cmake/1.4.0") +load("libpng/1.6.37") +--load("libxt/1.1.5") +--load("libxmu/1.1.4") +--load("libxpm/4.11.0") +load("libxaw/1.10.13") +load("udunits/2.2.28") +load("ncview/2.1.9") +load("netcdf-cxx4/4.3.1") +load("json/3.10.5") +load("crtm/2.4.0.1") +load("rocoto/1.3.6") +load("prod_util/2.1.1") + +load("py-jinja2/3.0.3") +load("py-netcdf4/1.5.8") +load("py-pybind11/2.11.0") +load("py-pycodestyle/2.11.0") +load("py-pyyaml/6.0") +load("py-scipy/1.11.3") +load("py-xarray/2023.7.0") +load("py-f90nml/1.4.3") +load("py-pip/23.1.2") + +setenv("CC","cc") +setenv("CXX","CC") 
+setenv("FC","ftn") + +local mpiexec = '/usr/bin/srun' +local mpinproc = '-n' +setenv('MPIEXEC_EXEC', mpiexec) +setenv('MPIEXEC_NPROC', mpinproc) + +setenv("CRTM_FIX","/gpfs/f6/ira-sti/world-shared/GDASApp/fix/crtm/2.4.0") +setenv("GDASAPP_TESTDATA","/gpfs/f6/ira-sti/world-shared/GDASApp/testdata") +setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/gpfs/f6/ira-sti/world-shared/GDASApp/unittestdata") + +whatis("Name: ".. "pkgName") +whatis("Version: ".. "pkgVersion") +whatis("Category: GDASApp") +whatis("Description: Load all libraries needed for GDASApp") diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 683ee0db7..997c394fa 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -24,6 +24,9 @@ case $(hostname -f) in gaea5[1-8]) MACHINE_ID=gaea ;; ### gaea51-58 gaea5[1-8].ncrc.gov) MACHINE_ID=gaea ;; ### gaea51-58 + gaea6[1-8]) MACHINE_ID=gaeac6 ;; ### gaea61-68 + gaea6[1-8].ncrc.gov) MACHINE_ID=gaeac6 ;; ### gaea61-68 + hfe0[1-9]) MACHINE_ID=hera ;; ### hera01-09 hfe1[0-2]) MACHINE_ID=hera ;; ### hera10-12 hecflow01) MACHINE_ID=hera ;; ### heraecflow01 From e024564f72e8b8b617e2a6a1cc06053e6dfb5786 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA <134300700+DavidNew-NOAA@users.noreply.github.com> Date: Tue, 15 Oct 2024 09:08:09 -0400 Subject: [PATCH 04/27] JCB-based obs+bias staging (#1312) This PR is a companion to Global-Workflow PR [#2992](https://github.com/NOAA-EMC/global-workflow/pull/2992) and JCB-GDAS PR [#31](https://github.com/NOAA-EMC/jcb-gdas/pull/31). It adds two parameters required for the above Global-Workflow PR to the JCB base YAML for the atmosphere. 
--- parm/atm/jcb-base.yaml.j2 | 14 ++++++++++++++ parm/jcb-gdas | 2 +- test/atm/global-workflow/config.yaml | 9 ++------- 3 files changed, 17 insertions(+), 8 deletions(-) diff --git a/parm/atm/jcb-base.yaml.j2 b/parm/atm/jcb-base.yaml.j2 index 6387fffbb..d7e1e5907 100644 --- a/parm/atm/jcb-base.yaml.j2 +++ b/parm/atm/jcb-base.yaml.j2 @@ -71,6 +71,8 @@ observations: all_observations crtm_coefficient_path: "{{ DATA }}/crtm/" # Naming conventions for observational files +atmosphere_obsdataroot_path: "{{COM_OBS}}" + atmosphere_obsdatain_path: "{{atm_obsdatain_path}}" atmosphere_obsdatain_prefix: "{{OPREFIX}}" atmosphere_obsdatain_suffix: ".tm00.nc" @@ -80,6 +82,8 @@ atmosphere_obsdataout_prefix: diag_ atmosphere_obsdataout_suffix: "_{{ current_cycle | to_YMDH }}.nc" # Naming conventions for bias correction files +atmosphere_obsbiasroot_path: "{{COM_ATMOS_ANALYSIS_PREV}}" + atmosphere_obsbiasin_path: "{{DATA}}/obs/" atmosphere_obsbiasin_prefix: "{{GPREFIX}}" atmosphere_obsbiasin_suffix: ".satbias.nc" @@ -94,6 +98,16 @@ atmosphere_obsbiasout_suffix: ".satbias.nc" atmosphere_obsbiascovout_prefix: "{{APREFIX}}" atmosphere_obsbiascovout_suffix: ".satbias_cov.nc" +bias_files: + atms_n20: rad_varbc_params.tar + atms_npp: rad_varbc_params.tar + mtiasi_metop-a: rad_varbc_params.tar + mtiasi_metop-b: rad_varbc_params.tar + amsua_n19: rad_varbc_params.tar + ssmis_f17: rad_varbc_params.tar + ssmis_f18: rad_varbc_params.tar + cris-fsr_n20: rad_varbc_params.tar + cris-fsr_npp: rad_varbc_params.tar # Local Ensemble DA (LETKF) # ------------------------- diff --git a/parm/jcb-gdas b/parm/jcb-gdas index 2b46aa19b..7717c0e74 160000 --- a/parm/jcb-gdas +++ b/parm/jcb-gdas @@ -1 +1 @@ -Subproject commit 2b46aa19b22df22f33ef0be5030fb719a8fa6298 +Subproject commit 7717c0e7401e344a6bce37a4f8ecc11399256936 diff --git a/test/atm/global-workflow/config.yaml b/test/atm/global-workflow/config.yaml index c06019628..d8bf2e4c2 100644 --- a/test/atm/global-workflow/config.yaml +++ 
b/test/atm/global-workflow/config.yaml @@ -10,7 +10,7 @@ base: PTMP: "@bindir@/test/atm/global-workflow/testrun" atmanl: - JCB_ALGO_YAML: "@srcdir@/test/atm/global-workflow/jcb-prototype_3dvar.yaml.j2" + JCB_ALGO_YAML_VAR: "@srcdir@/test/atm/global-workflow/jcb-prototype_3dvar.yaml.j2" STATICB_TYPE: "identity" ATMRES_ANL: "C48" LAYOUT_X_ATMANL: 1 @@ -19,11 +19,6 @@ atmanl: atmensanl: JCB_ALGO_YAML_LETKF: "@srcdir@/test/atm/global-workflow/jcb-prototype_lgetkf.yaml.j2" JCB_ALGO_YAML_OBS: "@srcdir@/test/atm/global-workflow/jcb-prototype_lgetkf_observer.yaml.j2" + JCB_ALGO_YAML_SOL: "@srcdir@/test/atm/global-workflow/jcb-prototype_lgetkf_solver.yaml.j2" LAYOUT_X_ATMENSANL: 1 LAYOUT_Y_ATMENSANL: 1 - -atmensanlobs: - JCB_ALGO_YAML: "@srcdir@/test/atm/global-workflow/jcb-prototype_lgetkf_observer.yaml.j2" - -atmensanlsol: - JCB_ALGO_YAML: "@srcdir@/test/atm/global-workflow/jcb-prototype_lgetkf_solver.yaml.j2" From e44d21e7f21f0173fcc6da743a8280e8277cf4f8 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Tue, 15 Oct 2024 13:14:53 +0000 Subject: [PATCH 05/27] Force older version of ubuntu in runners (#1332) Temporary-ish fix to get the Github actions working again --- .github/workflows/hera.yaml | 2 +- .github/workflows/norms.yaml | 2 +- .github/workflows/orion.yaml | 2 +- .github/workflows/unittests.yaml | 2 +- .github/workflows/unittests_g-w.yaml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/hera.yaml b/.github/workflows/hera.yaml index 3685ea4bd..3b4ef6245 100644 --- a/.github/workflows/hera.yaml +++ b/.github/workflows/hera.yaml @@ -5,7 +5,7 @@ on: jobs: test: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 name: Passes on Hera steps: diff --git a/.github/workflows/norms.yaml b/.github/workflows/norms.yaml index 2e721e7af..36b56d554 100644 --- a/.github/workflows/norms.yaml +++ b/.github/workflows/norms.yaml @@ -3,7 +3,7 @@ on: [push] jobs: check_pynorms: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 name: Check coding norms 
with pycodestyle and cpplint steps: diff --git a/.github/workflows/orion.yaml b/.github/workflows/orion.yaml index f75de79c4..a0b841fe4 100644 --- a/.github/workflows/orion.yaml +++ b/.github/workflows/orion.yaml @@ -5,7 +5,7 @@ on: jobs: test: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 name: Passes on Orion steps: diff --git a/.github/workflows/unittests.yaml b/.github/workflows/unittests.yaml index c103aee50..e716b0e89 100644 --- a/.github/workflows/unittests.yaml +++ b/.github/workflows/unittests.yaml @@ -3,7 +3,7 @@ on: [push, pull_request] jobs: ctests: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 name: Run Unit Tests with ctest steps: diff --git a/.github/workflows/unittests_g-w.yaml b/.github/workflows/unittests_g-w.yaml index 9abaa046a..2d6dc9602 100644 --- a/.github/workflows/unittests_g-w.yaml +++ b/.github/workflows/unittests_g-w.yaml @@ -3,7 +3,7 @@ on: [push, pull_request] jobs: ctests: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 name: Run Unit Tests inside global-workflow with ctest steps: From a1cb2ee3ef2c9f645ced641b30d8dd0c5965140d Mon Sep 17 00:00:00 2001 From: Ed Givelberg Date: Wed, 16 Oct 2024 12:05:50 -0400 Subject: [PATCH 06/27] Feature/b2i tropical (#1311) Two converters added to the previous 6 bufr to ioda converters: drifters and tropical moorings. Both are taken from the dbuoy bufr input file. There is also a small bug correction in the cmake file. 
--- test/marine/CMakeLists.txt | 23 ++- ...insitu_profile_tropical_2019010700.yaml.in | 13 ++ ..._insitu_surface_drifter_2019010700.yaml.in | 13 ++ ...oda_insitu_profile_tropical_2019010700.ref | 25 ++++ ...ioda_insitu_surface_drifter_2019010700.ref | 20 +++ .../b2i/bufr2ioda_insitu_profile_tropical.py | 40 ++++++ .../b2i/bufr2ioda_insitu_surface_drifter.py | 38 +++++ .../marine/b2i/drifter_ioda_variables.py | 132 ++++++++++++++++++ .../marine/b2i/trkob_ioda_variables.py | 1 - .../marine/b2i/tropical_ioda_variables.py | 80 +++++++++++ 10 files changed, 378 insertions(+), 7 deletions(-) create mode 100644 test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in create mode 100644 test/marine/testinput/bufr2ioda_insitu_surface_drifter_2019010700.yaml.in create mode 100644 test/marine/testref/bufr2ioda_insitu_profile_tropical_2019010700.ref create mode 100644 test/marine/testref/bufr2ioda_insitu_surface_drifter_2019010700.ref create mode 100755 ush/ioda/bufr2ioda/marine/b2i/bufr2ioda_insitu_profile_tropical.py create mode 100755 ush/ioda/bufr2ioda/marine/b2i/bufr2ioda_insitu_surface_drifter.py create mode 100644 ush/ioda/bufr2ioda/marine/b2i/drifter_ioda_variables.py create mode 100644 ush/ioda/bufr2ioda/marine/b2i/tropical_ioda_variables.py diff --git a/test/marine/CMakeLists.txt b/test/marine/CMakeLists.txt index 4dda602ca..92c880a58 100644 --- a/test/marine/CMakeLists.txt +++ b/test/marine/CMakeLists.txt @@ -88,7 +88,7 @@ CHECK_AND_SET_PATH( BUFR_TEST_DIR ) if (NOT BUFR_TEST_DIR_EXISTS) - message(STATUS "BUFR test file directory not found -- bufr to ioda tests not generted.") + message(WARNING "BUFR test file directory not found -- bufr to ioda tests not generated.") set(GENERATE_BUFR2IODA_TESTS FALSE) else() # message(STATUS "Found bufr test directory: ${BUFR_TEST_DIR}") @@ -105,7 +105,7 @@ else() OCEAN_BASIN_FILE ) if (NOT OCEAN_BASIN_FILE_EXISTS) - message("Ocean basin data file not found -- bufr to ioda tests not generated.") + message(WARNING 
"Ocean basin data file not found -- bufr to ioda tests not generated.") set(GENERATE_BUFR2IODA_TESTS FALSE) endif() # message(STATUS "Found ocean basin data in ${OCEAN_BASIN_FILE}") @@ -116,17 +116,26 @@ endif() function(ADD_INSITU_TEST testname testbufr) # set(CONFIG_TYPE "json") set(CONFIG_TYPE "yaml") - set(DATE "2021063006") + + if (testbufr STREQUAL "dbuoy") + set(DATE "2019010700") + set(CYCLE "00") + else() + set(DATE "2021063006") + set(CYCLE "06") + endif() + set(TEST "bufr2ioda_insitu_${testname}") set(TESTREF_FILE "${TEST}_${DATE}.ref") # stage the input file to directory ${BUFR_INPUT_DIR} set(BUFR_INPUT_DIR ${TEST_WORKING_DIR}) - set(BUFR_TEST_FILE "${DATE}-gdas.t06z.${testbufr}.tm00.bufr_d") + set(BUFR_TEST_FILE "${DATE}-gdas.t${CYCLE}z.${testbufr}.tm00.bufr_d") set(BUFR_FILE "${BUFR_TEST_DIR}/${BUFR_TEST_FILE}") if (NOT EXISTS ${BUFR_FILE}) - message(FATAL_ERROR "BUFR file ${BUFR_FILE} not found") + message(WARNING "BUFR file ${BUFR_FILE} not found, test not generated") + return() endif() file(COPY ${BUFR_FILE} DESTINATION ${BUFR_INPUT_DIR}) @@ -144,7 +153,7 @@ function(ADD_INSITU_TEST testname testbufr) ) add_test( - NAME test_${TEST} + NAME test_gdasapp_${TEST} COMMAND ${MARINE_BUFR2IODA_DIR}/${TEST}.py -c ${CONFIG_FILE} -t ${TESTREF_DIR}/${TESTREF_FILE} WORKING_DIRECTORY ${TEST_WORKING_DIR} ) @@ -156,6 +165,8 @@ if (GENERATE_BUFR2IODA_TESTS) ADD_INSITU_TEST("profile_bathy" "bathy") ADD_INSITU_TEST("profile_glider" "subpfl") ADD_INSITU_TEST("profile_tesac" "tesac") + ADD_INSITU_TEST("profile_tropical" "dbuoy") ADD_INSITU_TEST("profile_xbtctd" "xbtctd") + ADD_INSITU_TEST("surface_drifter" "dbuoy") ADD_INSITU_TEST("surface_trkob" "trkob") endif() diff --git a/test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in b/test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in new file mode 100644 index 000000000..1fa5a1856 --- /dev/null +++ b/test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in @@ 
-0,0 +1,13 @@ +--- +data_format: dbuoy +subsets: dbuoy +source: NCEP data tank +data_type: drifter +cycle_type: gdas +cycle_datetime: '2019010700' +dump_directory: __BUFRINPUTDIR__ +ioda_directory: __IODAOUTPUTDIR__ +ocean_basin: __OCEANBASIN__ +data_description: 6-hrly in situ drifter profiles +data_provider: U.S. NOAA + diff --git a/test/marine/testinput/bufr2ioda_insitu_surface_drifter_2019010700.yaml.in b/test/marine/testinput/bufr2ioda_insitu_surface_drifter_2019010700.yaml.in new file mode 100644 index 000000000..1fa5a1856 --- /dev/null +++ b/test/marine/testinput/bufr2ioda_insitu_surface_drifter_2019010700.yaml.in @@ -0,0 +1,13 @@ +--- +data_format: dbuoy +subsets: dbuoy +source: NCEP data tank +data_type: drifter +cycle_type: gdas +cycle_datetime: '2019010700' +dump_directory: __BUFRINPUTDIR__ +ioda_directory: __IODAOUTPUTDIR__ +ocean_basin: __OCEANBASIN__ +data_description: 6-hrly in situ drifter profiles +data_provider: U.S. NOAA + diff --git a/test/marine/testref/bufr2ioda_insitu_profile_tropical_2019010700.ref b/test/marine/testref/bufr2ioda_insitu_profile_tropical_2019010700.ref new file mode 100644 index 000000000..915d09db3 --- /dev/null +++ b/test/marine/testref/bufr2ioda_insitu_profile_tropical_2019010700.ref @@ -0,0 +1,25 @@ +dateTime: 570, int64 min, max = 1546808400, 1546830000 +dateTime hash = cda368f6c8f06e883db7100f29073084ee92dc0a0021d995a6fe7ce477004da6 +rcptdateTime: 570, int64 min, max = -1, 1546871640 +rcptdateTime hash = d9545d679013eb8fcbc8fda146d184b83378faccf55c710416f4ad72d2819f68 +lon: 570, float32 min, max = -179.89999389648438, 165.10000610351562 +lon hash = c4a1e37e42a02279ec33a69b950b10327d2185adf99d5c0c6f5ffbe1f5c66292 +lat: 570, float32 min, max = -8.300000190734863, 8.0 +lat hash = d748eb8c0e55e7a2c684ee35d73a849ef945944967f8cb73ab31cf91bafdbcb2 +depth: 570, float32 min, max = 0.0, 500.0 +depth hash = 5b20e83513245f3f37e6c43a46cacd2c71d504da1ce175fbfe2eeffab487b20b +stationID: 570, Date: Wed, 16 Oct 2024 13:23:28 -0400 
Subject: [PATCH 07/27] Update hercules module file to spack-stack 1.7 (#1330) In case somebody feels like testing this ... Draft because it's untested and still building. - fixes #1329 --- modulefiles/GDAS/hercules.intel.lua | 30 ++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/modulefiles/GDAS/hercules.intel.lua b/modulefiles/GDAS/hercules.intel.lua index 105efddee..83feccb2c 100644 --- a/modulefiles/GDAS/hercules.intel.lua +++ b/modulefiles/GDAS/hercules.intel.lua @@ -6,7 +6,7 @@ local pkgName = myModuleName() local pkgVersion = myModuleVersion() local pkgNameVer = myModuleFullName() -prepend_path("MODULEPATH", '/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core') +prepend_path("MODULEPATH", '/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.7.0/envs/ue-intel/install/modulefiles/Core') prepend_path("MODULEPATH", '/work2/noaa/da/python/opt/modulefiles/stack') -- below two lines get us access to the spack-stack modules @@ -14,35 +14,36 @@ load("stack-intel/2021.9.0") load("stack-intel-oneapi-mpi/2021.9.0") --load("stack-python/3.10.8") -- JCSDA has 'jedi-fv3-env/unified-dev', but we should load these manually as needed + load("cmake/3.23.1") load("curl/8.4.0") load("zlib/1.2.13") load("git/2.31.1") --load("pkg-config/0.27.1") -load("hdf5/1.14.0") -load("parallel-netcdf/1.12.2") +load("hdf5/1.14.3") +load("parallel-netcdf/1.12.3") load("netcdf-c/4.9.2") load("nccmp/1.9.0.1") load("netcdf-fortran/4.6.1") -load("nco/5.0.6") -load("parallelio/2.5.10") +load("nco/5.1.6") +load("parallelio/2.6.2") load("wget/1.21.1") -load("boost/1.83.0") +load("boost/1.84.0") load("bufr/12.0.1") load("git-lfs/3.1.2") load("ecbuild/3.7.2") -load("openjpeg/2.3.1") -load("eccodes/2.32.0") +load("openjpeg/2.4.0") +load("eccodes/2.33.0") load("eigen/3.4.0") -load("openblas/0.3.24") +load("openblas/0.3.27") load("eckit/1.24.5") load("fftw/3.3.10") load("fckit/0.11.0") 
load("fiat/1.2.0") load("ectrans/1.2.0") load("fms/2023.04") -load("esmf/8.6.0") -load("atlas/0.35.1") +load("esmf/8.6.1") +load("atlas/0.36.0") load("sp/2.5.0") load("gsl-lite/0.37.0") load("libjpeg/2.1.0") @@ -51,10 +52,10 @@ load("libtirpc/1.3.3") load("hdf/4.2.15") load("jedi-cmake/1.4.0") load("libpng/1.6.37") -load("libxt/1.1.5") +load("libxt/1.3.0") load("libxmu/1.1.4") -load("libxpm/4.11.0") -load("libxaw/1.0.13") +load("libxpm/3.5.17") +load("libxaw/1.0.15") load("udunits/2.2.28") load("ncview/2.1.9") load("netcdf-cxx4/4.3.1") @@ -69,7 +70,6 @@ unload("python/3.10.13") unload("py-numpy/1.22.3") load("miniconda3/4.6.14") load("gdasapp/1.0.0") - -- below is a hack because of cmake finding the wrong python... setenv("CONDA_PREFIX", "/work2/noaa/da/python/opt/core/miniconda3/4.6.14/envs/gdasapp/") From 29a80d6e4f1d932dd998b5c62ee387ce66909889 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Thu, 17 Oct 2024 11:39:07 -0400 Subject: [PATCH 08/27] add ensemble mean capability (#1335) Add ensmeanandvariance to gdas.x --- mains/gdas.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mains/gdas.cc b/mains/gdas.cc index 4a894b365..f47b82802 100755 --- a/mains/gdas.cc +++ b/mains/gdas.cc @@ -16,6 +16,7 @@ #include "oops/runs/ConvertToStructuredGrid.h" #include "oops/runs/ConvertState.h" +#include "oops/runs/EnsMeanAndVariance.h" #include "oops/runs/HofX4D.h" #include "oops/runs/LocalEnsembleDA.h" #include "oops/runs/Run.h" @@ -59,6 +60,9 @@ int runApp(int argc, char** argv, const std::string traits, const std::string ap apps["convertstate"] = []() { return std::make_unique>(); }; + apps["ensmean"] = []() { + return std::make_unique>(); + }; apps["hofx4d"] = []() { return std::make_unique>(); }; From 352b372c11e563dc80c61ead4255a29aa6ab8975 Mon Sep 17 00:00:00 2001 From: Mindo Choi <141867620+apchoiCMD@users.noreply.github.com> Date: Fri, 18 Oct 2024 10:47:38 -0400 Subject: [PATCH 09/27] Add ABI sea-ice product to ioda converter (#1280) #### Description - Task 
for adding a new ioda converter for new `ABI` type sea-ice concentration - For sea-ice obs, AMSR2, MIRS, JPSSRR and ABI products will be used - For the part of conversion of metadata, the reference is [here](https://www.goes-r.gov/products/docs/PUG-L2+-vol5.pdf) Close #1182 --------- Co-authored-by: Guillaume Vernieres --- utils/obsproc/IcecAbi2Ioda.h | 177 +++++ .../applications/gdas_obsprovider2ioda.h | 4 + utils/obsproc/util.h | 65 ++ utils/test/CMakeLists.txt | 9 + utils/test/prepdata.sh | 14 + utils/test/testdata/icec_abi_g16_1.cdl | 664 ++++++++++++++++++ utils/test/testdata/icec_abi_g16_2.cdl | 653 +++++++++++++++++ utils/test/testinput/gdas_icecabi2ioda.yaml | 13 + utils/test/testref/icecabi2ioda.test | 26 + 9 files changed, 1625 insertions(+) create mode 100644 utils/obsproc/IcecAbi2Ioda.h create mode 100644 utils/test/testdata/icec_abi_g16_1.cdl create mode 100644 utils/test/testdata/icec_abi_g16_2.cdl create mode 100644 utils/test/testinput/gdas_icecabi2ioda.yaml create mode 100644 utils/test/testref/icecabi2ioda.test diff --git a/utils/obsproc/IcecAbi2Ioda.h b/utils/obsproc/IcecAbi2Ioda.h new file mode 100644 index 000000000..54515d50f --- /dev/null +++ b/utils/obsproc/IcecAbi2Ioda.h @@ -0,0 +1,177 @@ +#pragma once + +#include +#include +#include +#include +#include // NOLINT (using C API) +#include +#include + +#include "eckit/config/LocalConfiguration.h" + +#include // NOLINT + +#include "ioda/../../../../core/IodaUtils.h" // TODO(All): Use a better way in all converters +#include "ioda/Group.h" +#include "ioda/ObsGroup.h" + +#include "oops/util/dateFunctions.h" + +#include "NetCDFToIodaConverter.h" + +namespace gdasapp { + + class IcecAbi2Ioda : public NetCDFToIodaConverter { + public: + explicit IcecAbi2Ioda(const eckit::Configuration & fullConfig, const eckit::mpi::Comm & comm) + : NetCDFToIodaConverter(fullConfig, comm) { + variable_ = "seaIceFraction"; + } + + // Read netcdf file and populate iodaVars + gdasapp::obsproc::iodavars::IodaVars 
providerToIodaVars(const std::string fileName) final { + oops::Log::info() << "Processing files provided by the ABI" << std::endl; + + // Open the NetCDF file in read-only mode + netCDF::NcFile ncFile(fileName, netCDF::NcFile::read); + oops::Log::info() << "Reading... " << fileName << std::endl; + + // Get the number of obs in the file + int dimxSize = ncFile.getDim("x").getSize(); + int dimySize = ncFile.getDim("y").getSize(); + int nobs = dimxSize * dimySize; + + // Set the int metadata names + std::vector intMetadataNames = {"oceanBasin"}; + + // Set the float metadata name + std::vector floatMetadataNames = {}; + + // Create instance of iodaVars object + gdasapp::obsproc::iodavars::IodaVars iodaVars(nobs, floatMetadataNames, intMetadataNames); + + oops::Log::debug() << "--- iodaVars.location_: " << iodaVars.location_ << std::endl; + + // Read in GOES ABI fixed grid projection variables and constants + std::vector y_coordinate_1d(dimySize); + ncFile.getVar("y").getVar(y_coordinate_1d.data()); + float yOffSet; + ncFile.getVar("y").getAtt("add_offset").getValues(&yOffSet); + float yScaleFactor; + ncFile.getVar("y").getAtt("scale_factor").getValues(&yScaleFactor); + // Apply the scale factor and add offset to the raw data + for (auto& yval : y_coordinate_1d) { + yval = yval * yScaleFactor + yOffSet; // N-S elevation angle in radians + } + + std::vector x_coordinate_1d(dimxSize); + ncFile.getVar("x").getVar(x_coordinate_1d.data()); + float xOffSet; + ncFile.getVar("x").getAtt("add_offset").getValues(&xOffSet); + float xScaleFactor; + ncFile.getVar("x").getAtt("scale_factor").getValues(&xScaleFactor); + // Apply the scale factor and add offset to the raw data + for (auto& xval : x_coordinate_1d) { + xval = xval * xScaleFactor + xOffSet; // E-W scanning angle in radians + } + + // Create 2D arrays (meshgrid equivalent) + std::vector> x_coordinate_2d(dimySize, std::vector(dimxSize)); + std::vector> y_coordinate_2d(dimySize, std::vector(dimxSize)); + std::vector> 
abi_lon; + std::vector> abi_lat; + + // Create 2D coordinate matrices from 1D coordinate vectors + for (int i = 0; i < dimySize; ++i) { + for (int j = 0; j < dimxSize; ++j) { + x_coordinate_2d[i][j] = x_coordinate_1d[j]; + y_coordinate_2d[i][j] = y_coordinate_1d[i]; + } + } + + // Retrieve the attributes + double lon_origin; + ncFile.getVar("goes_imager_projection").getAtt("longitude_of_projection_origin") + .getValues(&lon_origin); + double perspective_point_height; + ncFile.getVar("goes_imager_projection").getAtt("perspective_point_height") + .getValues(&perspective_point_height); + double r_eq; + ncFile.getVar("goes_imager_projection").getAtt("semi_major_axis").getValues(&r_eq); + double r_pol; + ncFile.getVar("goes_imager_projection").getAtt("semi_minor_axis").getValues(&r_pol); + + // Calculate H = Satellite height from center of earth(m) + double H = perspective_point_height + r_eq; + + // Calculate Latitude and Longitude from GOES Imager Projection + // for details of calculations in util.h + gdasapp::obsproc::utils::abiToGeoLoc( + x_coordinate_2d, + y_coordinate_2d, + lon_origin, + H, + r_eq, + r_pol, + abi_lat, + abi_lon); + + // Store real number of lat and lon into eigen arrays + int loc(0); + for (int i = 0; i < dimySize; i++) { + for (int j = 0; j < dimxSize; j++) { + iodaVars.longitude_(loc) = std::real(abi_lon[i][j]); + iodaVars.latitude_(loc) = std::real(abi_lat[i][j]); + loc += 1; + } + } + + // Read Quality Flags as a preQc + std::vector fullQcFlagsVar(iodaVars.location_); + ncFile.getVar("DQF").getVar(fullQcFlagsVar.data()); + + // Get Ice_Concentration obs values + std::vector IcecObsVal(iodaVars.location_); + ncFile.getVar("IceConc").getVar(IcecObsVal.data()); + float IcecOffSet; + ncFile.getVar("IceConc").getAtt("add_offset").getValues(&IcecOffSet); + float IcecScaleFactor; + ncFile.getVar("IceConc").getAtt("scale_factor").getValues(&IcecScaleFactor); + + // TODO(All): Think how we will be able to use Temp later + // Get Ice_Temp obs values + 
std::vector IcecTempObsVal(iodaVars.location_); + ncFile.getVar("Temp").getVar(IcecTempObsVal.data()); // Kelvin + float IcecTempOffSet; + ncFile.getVar("Temp").getAtt("add_offset").getValues(&IcecTempOffSet); + float IcecTempScaleFactor; + ncFile.getVar("Temp").getAtt("scale_factor").getValues(&IcecTempScaleFactor); + + // Read the dateTime + double TimeVal; + ncFile.getVar("t").getVar(&TimeVal); + + iodaVars.referenceDate_ = "seconds since 2000-01-01T12:00:00Z"; // 12Z + + // Update Eigen arrays + for (int i = 0; i < iodaVars.location_; i++) { + iodaVars.obsVal_(i) + = static_cast((IcecObsVal[i] * IcecScaleFactor + IcecOffSet)*0.01); + iodaVars.obsError_(i) = 0.1; // Do something for obs error + iodaVars.preQc_(i) = fullQcFlagsVar[i]; + // Store optional metadata, set ocean basins to -999 for now + iodaVars.intMetadata_.row(i) << -999; + iodaVars.datetime_(i) = TimeVal; + } + + // basic test for iodaVars.trim + Eigen::Array mask = + ((iodaVars.obsVal_ >= 0.0 && iodaVars.obsVal_ <= 1.0) && + (iodaVars.latitude_ <= -40.0 || iodaVars.latitude_ >= 40.0)); + iodaVars.trim(mask); + + return iodaVars; + }; + }; // class IcecAbi2Ioda +} // namespace gdasapp diff --git a/utils/obsproc/applications/gdas_obsprovider2ioda.h b/utils/obsproc/applications/gdas_obsprovider2ioda.h index a1a6fe6e3..fd924e4c6 100644 --- a/utils/obsproc/applications/gdas_obsprovider2ioda.h +++ b/utils/obsproc/applications/gdas_obsprovider2ioda.h @@ -7,6 +7,7 @@ #include "oops/runs/Application.h" #include "../Ghrsst2Ioda.h" +#include "../IcecAbi2Ioda.h" #include "../IcecAmsr2Ioda.h" #include "../IcecJpssrr2Ioda.h" #include "../IcecMirs2Ioda.h" @@ -49,6 +50,9 @@ namespace gdasapp { } else if (provider == "SMOS") { Smos2Ioda conv2ioda(fullConfig, this->getComm()); conv2ioda.writeToIoda(); + } else if (provider == "ABI") { + IcecAbi2Ioda conv2ioda(fullConfig, this->getComm()); + conv2ioda.writeToIoda(); } else if (provider == "AMSR2") { IcecAmsr2Ioda conv2ioda(fullConfig, this->getComm()); 
conv2ioda.writeToIoda(); diff --git a/utils/obsproc/util.h b/utils/obsproc/util.h index 7b2421dbb..c699f7f43 100644 --- a/utils/obsproc/util.h +++ b/utils/obsproc/util.h @@ -1,6 +1,7 @@ #pragma once #include +#include #include #include // NOLINT (using C API) #include @@ -254,5 +255,69 @@ namespace gdasapp { } }; } // namespace iodavars + + // TODO(Mindo): To move below as a private method to the iceabi2ioda class + namespace utils { + + // Calculate latitude and longitude from GOES ABI fixed grid projection data + // GOES ABI fixed grid projection is a map projection relative to the GOES satellite + // Units: latitude in °N (°S < 0), longitude in °E (°W < 0) + // See GOES-R Product User Guide (PUG) Volume 5 (L2 products) Section 4.2.8 (p58) + void abiToGeoLoc( + const std::vector>& x_coordinate_2d, + const std::vector>& y_coordinate_2d, + double lon_origin, + double H, + double r_eq, + double r_pol, + std::vector>& abi_lat, + std::vector>& abi_lon + ) { + int sizeX = x_coordinate_2d[0].size(); + int sizeY = x_coordinate_2d.size(); + + double lambda_0 = (lon_origin * M_PI) / 180.0; + + abi_lat.resize(sizeY, std::vector(sizeX)); + abi_lon.resize(sizeY, std::vector(sizeX)); + + for (int i = 0; i < sizeY; ++i) { + for (int j = 0; j < sizeX; ++j) { + double x = x_coordinate_2d[i][j]; + double y = y_coordinate_2d[i][j]; + + // Cache sin(x), cos(x), sin(y), and cos(y) + double sin_x = std::sin(x); + double cos_x = std::cos(x); + double sin_y = std::sin(y); + double cos_y = std::cos(y); + + double a_var = std::pow(sin_x, 2.0) + + std::pow(cos_x, 2.0) * (std::pow(cos_y, 2.0) + + ((r_eq * r_eq) / (r_pol * r_pol)) * std::pow(sin_y, 2.0)); + double b_var = -2.0 * H * cos_x * cos_y; + double c_var = (H * H) - (r_eq * r_eq); + double discriminant = (b_var * b_var) - (4.0 * a_var * c_var); + + // Check if discriminant is strictly positive + if (discriminant > 0) { + double r_s = (-b_var - std::sqrt(discriminant)) / (2.0 * a_var); + double s_x = r_s * cos_x * cos_y; + double s_y 
= -r_s * sin_x; + double s_z = r_s * cos_x * sin_y; + + abi_lat[i][j] = (180.0 / M_PI) * (std::atan(((r_eq * r_eq) / (r_pol * r_pol)) + * (s_z / std::sqrt(((H - s_x) * (H - s_x)) + (s_y * s_y))))); + abi_lon[i][j] = (lambda_0 - std::atan(s_y / (H - s_x))) * (180.0 / M_PI); + } else { + // Handle invalid values + // Set latitude and longitude to NaN if discriminant <= 0) + abi_lat[i][j] = std::numeric_limits::quiet_NaN(); + abi_lon[i][j] = std::numeric_limits::quiet_NaN(); + } + } + } + } // void + } // namespace utils } // namespace obsproc }; // namespace gdasapp diff --git a/utils/test/CMakeLists.txt b/utils/test/CMakeLists.txt index 3be6cd1c4..00adbad26 100644 --- a/utils/test/CMakeLists.txt +++ b/utils/test/CMakeLists.txt @@ -7,6 +7,7 @@ list( APPEND utils_test_input testinput/gdas_rtofssal.yaml testinput/gdas_smap2ioda.yaml testinput/gdas_smos2ioda.yaml + testinput/gdas_icecabi2ioda.yaml testinput/gdas_icecamsr2ioda.yaml testinput/gdas_icecmirs2ioda.yaml testinput/gdas_icecjpssrr2ioda.yaml @@ -20,6 +21,7 @@ set( gdas_utils_test_ref testref/rads2ioda.test testref/smap2ioda.test testref/smos2ioda.test + testref/icecabi2ioda.test testref/icecamsr2ioda.test testref/icecmirs2ioda.test testref/icecjpssrr2ioda.test @@ -143,6 +145,13 @@ ecbuild_add_test( TARGET test_gdasapp_util_viirsaod2ioda LIBS gdas-utils WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/obsproc) +# Test the ABI to IODA converter +ecbuild_add_test( TARGET test_gdasapp_util_icecabi2ioda + COMMAND ${CMAKE_BINARY_DIR}/bin/gdas_obsprovider2ioda.x + ARGS "../testinput/gdas_icecabi2ioda.yaml" + LIBS gdas-utils + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/obsproc) + # Test the AMSR2 to IODA converter ecbuild_add_test( TARGET test_gdasapp_util_icecamsr2ioda COMMAND ${CMAKE_BINARY_DIR}/bin/gdas_obsprovider2ioda.x diff --git a/utils/test/prepdata.sh b/utils/test/prepdata.sh index 4cb5b31a9..7ce39ce71 100755 --- a/utils/test/prepdata.sh +++ b/utils/test/prepdata.sh @@ -2,6 +2,18 @@ # called for 
test_gdasapp_util_prepdata, and by # test/soca/gw/setup_obsproc.sh for test_gdasapp_soca_setup_obsproc +# TODO: It needs to point to experimental obs instead of prepdata.sh +# Get the machine hostname +MACHINE_NAME=$(hostname) + +# Check if the machine name is "hera" +if [[ "$MACHINE_NAME" =~ ^hfe0[1-9]$ || "$MACHINE_NAME" =~ ^hfe1[01]$ ]]; then + echo "Running on hera, loading anaconda modules." + module use -a /contrib/anaconda/modulefiles + module load anaconda/latest +else + echo "Not running on hera, skipping anaconda module loading." +fi set -e @@ -19,6 +31,8 @@ project_source_dir=$1 cdl2nc4 rads_adt_3a_2021181.nc4 ${project_source_dir}/testdata/rads_adt_3a_2021181.cdl cdl2nc4 rads_adt_3b_2021181.nc4 ${project_source_dir}/testdata/rads_adt_3b_2021181.cdl +cdl2nc4 icec_abi_g16_1.nc4 ${project_source_dir}/testdata/icec_abi_g16_1.cdl +cdl2nc4 icec_abi_g16_2.nc4 ${project_source_dir}/testdata/icec_abi_g16_2.cdl cdl2nc4 icec_amsr2_north_1.nc4 ${project_source_dir}/testdata/icec_amsr2_north_1.cdl cdl2nc4 icec_amsr2_north_2.nc4 ${project_source_dir}/testdata/icec_amsr2_north_2.cdl cdl2nc4 icec_amsr2_south_1.nc4 ${project_source_dir}/testdata/icec_amsr2_south_1.cdl diff --git a/utils/test/testdata/icec_abi_g16_1.cdl b/utils/test/testdata/icec_abi_g16_1.cdl new file mode 100644 index 000000000..1270f5365 --- /dev/null +++ b/utils/test/testdata/icec_abi_g16_1.cdl @@ -0,0 +1,664 @@ +netcdf icec_abi_g16_1 { +dimensions: + y = 21 ; + x = 21 ; + number_of_LZA_bounds = 2 ; + number_of_SZA_bounds = 2 ; + number_of_time_bounds = 2 ; + number_of_image_bounds = 2 ; +variables: + ushort DQF(y, x) ; + DQF:_FillValue = 65535US ; + DQF:long_name = "ABI L2 Cryosphere Ice Concentration Data Quality Flags" ; + DQF:standard_name = "status_flag" ; + DQF:valid_range = 0US, 3US ; + DQF:units = "1" ; + DQF:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle quantitative_solar_zenith_angle t y x" ; + DQF:grid_mapping = 
"goes_imager_projection" ; + DQF:cell_methods = "retrieval_local_zenith_angle: point quantitative_local_zenith_angle: point retrieval_solar_zenith_angle: point quantitative_solar_zenith_angle: t: point area: point" ; + DQF:flag_values = 0US, 1US, 2US, 3US ; + DQF:flag_meanings = "normal nonretrievable uncertain bad_data" ; + DQF:number_of_qf_values = 4US ; + DQF:potentially_geo_pixel_count_used_as_percent_denominator = 23046372 ; + ushort IceConc(y, x) ; + IceConc:_FillValue = 65535US ; + IceConc:long_name = "ABI L2 Cryosphere Ice Concentration" ; + IceConc:standard_name = "ice_concentration" ; + IceConc:valid_range = 0US, 65530US ; + IceConc:scale_factor = 0.00152602f ; + IceConc:add_offset = 0.f ; + IceConc:units = "percent" ; + IceConc:resolution = "y: 0.000056 rad x: 0.000056 rad" ; + IceConc:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle quantitative_solar_zenith_angle t y x" ; + IceConc:grid_mapping = "goes_imager_projection" ; + IceConc:cell_methods = "retrieval_local_zenith_angle: point (good or degraded quality pixel produced) quantitative_local_zenith_angle: point (good quality pixel produced) retrieval_solar_zenith_angle: point (good or degraded quality pixel produced) quantitative_solar_zenith_angle: point (good quality pixel produced) t: point area: point" ; + IceConc:ancillary_variables = "DQF" ; + byte Mask(y, x) ; + Mask:_FillValue = -99b ; + Mask:long_name = "ABI L2 Cryosphere Ice Mask" ; + Mask:standard_name = "ice_mask" ; + Mask:valid_range = -128b, 127b ; + Mask:units = "1" ; + Mask:resolution = "y: 0.000056 rad x: 0.000056 rad" ; + Mask:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle t y x" ; + Mask:grid_mapping = "goes_imager_projection" ; + Mask:cell_methods = "retrieval_local_zenith_angle: point (good or degraded quality pixel produced) quantitative_local_zenith_angle: point (good quality pixel produced) 
retrieval_solar_zenith_angle: point (good quality pixel produced) t: point area: point" ; + Mask:flag_values = -3b, -2b, -1b, 0b, 1b, 2b ; + Mask:flag_meanings = "non_retrieval water land cloud day_ice night_ice" ; + Mask:ancillary_variables = "DQF" ; + Mask:clear_pixel_definition = "no cloud detected and failed a test for high values of spatial heterogeneity" ; + Mask:probably_clear_pixel_definition = "no cloud detected but passed a test for high values of spatial heterogeneity and one or more neighboring pixels identified as cloudy. pixel is possibly cloud-contaminated" ; + Mask:probably_cloudy_pixel_definition = "cloud detected but likely contains a cloud edge, since one or more neighboring pixels are clear. pixel is probably cloud-contaminated" ; + Mask:cloudy_pixel_definition = "cloud detected and failed a test for cloud edges" ; + uint PQI(y, x) ; + PQI:_FillValue = 0U ; + PQI:long_name = "ABI L2 Cryosphere Ice Concentration product quality indicator" ; + PQI:units = "1" ; + PQI:grid_mapping = "goes_imager_projection" ; + PQI:coordinates = "y x" ; + PQI:flag_meanings = "normal nonretrievable uncertain bad_data cloud_mask_clear cloud_mask_probably_clear cloud_mask_probably_cloudy cloud_mask_cloudy day_night_qf sunglint_qf cloud_shadow_qf off_earth_qf solar_zenith_angle_qf satellite_zenith_angle_qf reflectance_band_2_qf reflectance_band_3_qf reflectance_band_5_qf brightness_temp_band_14_qf brightness_temp_band_15_qf Unused_Bit_15 surface_in-land_water surface_land surface_sea_water surface_other reflectance_test_ice_cover_detection_qf NDSI_test_ice_cover_detection_qf skin_temp_test_ice_cover_detection_qf visable_band_tie-pont_qf Unused_Bit_23 read_input_qf Unused_Bit_25 Unused_Bit_26 Unused_Bit_27 Unused_Bit_28 Unused_Bit_29 Unused_Bit_30 Unused_Bit_31" ; + PQI:number_of_qf_values = 37U ; + ushort Temp(y, x) ; + Temp:_FillValue = 65535US ; + Temp:long_name = "ABI L2 Cryosphere Ice Surface Temperature" ; + Temp:standard_name = "ice_temperature" ; + 
Temp:valid_range = 0US, 65530US ; + Temp:scale_factor = 0.00267053f ; + Temp:add_offset = 100.f ; + Temp:units = "kelvin" ; + Temp:resolution = "y: 0.000056 rad x: 0.000056 rad" ; + Temp:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle quantitative_solar_zenith_angle t y x" ; + Temp:grid_mapping = "goes_imager_projection" ; + Temp:cell_methods = "retrieval_local_zenith_angle: point (good or degraded quality pixel produced) quantitative_local_zenith_angle: point (good quality pixel produced) retrieval_solar_zenith_angle: point (good or degraded quality pixel produced) quantitative_solar_zenith_angle: point (good quality pixel produced) t: point area: point" ; + Temp:ancillary_variables = "DQF" ; + int algorithm_disabled_due_to_mitigation ; + algorithm_disabled_due_to_mitigation:long_name = "Status flag indicating if the algorithm was disabled due to upstream degradation" ; + algorithm_disabled_due_to_mitigation:_FillValue = -1 ; + algorithm_disabled_due_to_mitigation:flag_value = 0, 1 ; + algorithm_disabled_due_to_mitigation:flag_meanings = "unset set" ; + algorithm_disabled_due_to_mitigation:valid_range = 0, 1 ; + algorithm_disabled_due_to_mitigation:units = "1" ; + int algorithm_dynamic_input_data_container ; + algorithm_dynamic_input_data_container:long_name = "container for filenames of dynamic algorithm input data" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_solar_zenith_angle_data = "OR_I_ABI-L2-AUXF-M6_G16_s20241691800214_e20241691809522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_local_zenith_angle_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_land_mask_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_lat_lon_position_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_1_2km_data = "null" ; + 
algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_2_2km_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_3_2km_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_5_2km_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_brightness_temperature_band_14_2km_data = "OR_ABI-L2-CMIPF-M6C14_G16_s2024-06-17T18:00:21.4Z_e2024-06-17T18:09:52.2Z_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_brightness_temperature_band_15_2km_data = "OR_ABI-L2-CMIPF-M6C15_G16_s2024-06-17T18:00:21.4Z_e2024-06-17T18:09:52.2Z_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_cloud_mask_data_information_flag_data = "OR_I_ABI-L2-ACMDIFF-M6_G16_s20241691800214_e20241691809522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_4_level_cloud_mask_data = "OR_ABI-L2-ACMF-M6_G16_s20241691800214_e20241691809522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_cloud_mask_granule_level_quality_flag_data = "OR_ABI-L2-ACMF-M6_G16_s20241691800214_e20241691809522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_cloud_top_cloud_shadow_flag_data = "OR_I_ABI-L2-ACHF-M6_G16_s20241691800214_e20241691809522_c*.nc" ; + int algorithm_product_version_container ; + algorithm_product_version_container:long_name = "container for algorithm package filename and product version" ; + algorithm_product_version_container:algorithm_version = "OR_ABI-L2-ALG-AICE_v02r00.zip" ; + algorithm_product_version_container:product_version = "v02r00" ; + float geospatial_lat_lon_extent ; + geospatial_lat_lon_extent:long_name = "geospatial latitude and longitude references" ; + geospatial_lat_lon_extent:geospatial_westbound_longitude = -156.2995f ; + geospatial_lat_lon_extent:geospatial_northbound_latitude = 81.3282f ; + 
geospatial_lat_lon_extent:geospatial_eastbound_longitude = 6.2995f ; + geospatial_lat_lon_extent:geospatial_southbound_latitude = -81.3282f ; + geospatial_lat_lon_extent:geospatial_lat_center = 0.f ; + geospatial_lat_lon_extent:geospatial_lon_center = -75.f ; + geospatial_lat_lon_extent:geospatial_lat_nadir = 0.f ; + geospatial_lat_lon_extent:geospatial_lon_nadir = -75.f ; + geospatial_lat_lon_extent:geospatial_lat_units = "degrees_north" ; + geospatial_lat_lon_extent:geospatial_lon_units = "degrees_east" ; + int goes_imager_projection ; + goes_imager_projection:long_name = "GOES-R ABI fixed grid projection" ; + goes_imager_projection:grid_mapping_name = "geostationary" ; + goes_imager_projection:perspective_point_height = 35786023. ; + goes_imager_projection:semi_major_axis = 6378137. ; + goes_imager_projection:semi_minor_axis = 6356752.31414 ; + goes_imager_projection:inverse_flattening = 298.2572221 ; + goes_imager_projection:latitude_of_projection_origin = 0. ; + goes_imager_projection:longitude_of_projection_origin = -75. 
; + goes_imager_projection:sweep_angle_axis = "x" ; + int64 granule_level_quality_flag ; + granule_level_quality_flag:long_name = "Cloud Mask Granule Level Degradation Quality Flag" ; + granule_level_quality_flag:flag_masks = 0LL, 1LL, 63LL ; + granule_level_quality_flag:flag_meanings = "valid_channels channel_missing algorithm_failure" ; + granule_level_quality_flag:_FillValue = -999LL ; + granule_level_quality_flag:valid_range = 0LL, 63LL ; + granule_level_quality_flag:units = "1" ; + float maximum_ice_retrieval ; + maximum_ice_retrieval:long_name = "maximum ice concentration retrieval" ; + maximum_ice_retrieval:standard_name = "ice_concentration_retrieval" ; + maximum_ice_retrieval:_FillValue = -999.f ; + maximum_ice_retrieval:valid_range = 0.f, 20000.f ; + maximum_ice_retrieval:units = "m" ; + maximum_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + maximum_ice_retrieval:grid_mapping = "goes_imager_projection" ; + maximum_ice_retrieval:cell_methods = "local_zenith_angle: sum solar_zenith_angle: sum t: sum area: maximum (interval: variable[@name=\'x\']/values rad comment: good quality pixels only) where ice retrieval" ; + float mean_ice_retrieval ; + mean_ice_retrieval:long_name = "mean ice concentration retrieval" ; + mean_ice_retrieval:standard_name = "ice_concentration_retrieval" ; + mean_ice_retrieval:_FillValue = -999.f ; + mean_ice_retrieval:valid_range = 0.f, 20000.f ; + mean_ice_retrieval:units = "m" ; + mean_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + mean_ice_retrieval:grid_mapping = "goes_imager_projection" ; + mean_ice_retrieval:cell_methods = "local_zenith_angle: sum solar_zenith_angle: sum t: sum area: mean (interval: variable[@name=\'x\']/values rad comment: good quality pixels only) where ice retrieval" ; + float minimum_ice_retrieval ; + minimum_ice_retrieval:long_name = "minimum ice concentration retrieval" ; + minimum_ice_retrieval:standard_name = 
"ice_concentration_retrieval" ; + minimum_ice_retrieval:_FillValue = -999.f ; + minimum_ice_retrieval:valid_range = 0.f, 20000.f ; + minimum_ice_retrieval:units = "m" ; + minimum_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + minimum_ice_retrieval:grid_mapping = "goes_imager_projection" ; + minimum_ice_retrieval:cell_methods = "local_zenith_angle: sum solar_zenith_angle: sum t: sum area: minimum (interval: variable[@name=\'x\']/values rad comment: good quality pixels only) where ice retrieval" ; + float nominal_satellite_height ; + nominal_satellite_height:long_name = "nominal satellite height above GRS 80 ellipsoid (platform altitude)" ; + nominal_satellite_height:standard_name = "height_above_reference_ellipsoid" ; + nominal_satellite_height:_FillValue = -999.f ; + nominal_satellite_height:units = "km" ; + float nominal_satellite_subpoint_lat ; + nominal_satellite_subpoint_lat:long_name = "nominal satellite subpoint latitude (platform latitude)" ; + nominal_satellite_subpoint_lat:standard_name = "latitude" ; + nominal_satellite_subpoint_lat:_FillValue = -999.f ; + nominal_satellite_subpoint_lat:units = "degrees_north" ; + float nominal_satellite_subpoint_lon ; + nominal_satellite_subpoint_lon:long_name = "nominal satellite subpoint longitude (platform longitude)" ; + nominal_satellite_subpoint_lon:standard_name = "longitude" ; + nominal_satellite_subpoint_lon:_FillValue = -999.f ; + nominal_satellite_subpoint_lon:units = "degrees_east" ; + int number_of_bad_data_pixels ; + number_of_bad_data_pixels:long_name = "number of bad data pixels that do not exceed local zenith angle threshold" ; + number_of_bad_data_pixels:_FillValue = -1 ; + number_of_bad_data_pixels:units = "count" ; + number_of_bad_data_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_bad_data_pixels:grid_mapping = "goes_imager_projection" ; + number_of_bad_data_pixels:cell_methods = 
"quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where bad data" ; + int number_of_day_pixels ; + number_of_day_pixels:long_name = "number of day pixels that do not exceed local zenith angle threshold" ; + number_of_day_pixels:_FillValue = -1 ; + number_of_day_pixels:units = "count" ; + number_of_day_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_day_pixels:grid_mapping = "goes_imager_projection" ; + number_of_day_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where day" ; + int number_of_ice_retrievals ; + number_of_ice_retrievals:long_name = "number of valid ice cover and retrieval pixels that do not exceed local zenith angle threshold" ; + number_of_ice_retrievals:_FillValue = -1 ; + number_of_ice_retrievals:units = "count" ; + number_of_ice_retrievals:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_ice_retrievals:grid_mapping = "goes_imager_projection" ; + number_of_ice_retrievals:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where valid ice cover and retrieval" ; + int number_of_night_pixels ; + number_of_night_pixels:long_name = "number of night pixels that do not exceed local zenith angle threshold" ; + number_of_night_pixels:_FillValue = -1 ; + number_of_night_pixels:units = "count" ; + number_of_night_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_night_pixels:grid_mapping = "goes_imager_projection" ; + number_of_night_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum 
(interval: 0.000056 rad comment: good quality pixels only) where night" ; + int number_of_nonretrievable_pixels ; + number_of_nonretrievable_pixels:long_name = "number of nonretrievable pixels that do not exceed local zenith angle threshold" ; + number_of_nonretrievable_pixels:_FillValue = -1 ; + number_of_nonretrievable_pixels:units = "count" ; + number_of_nonretrievable_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_nonretrievable_pixels:grid_mapping = "goes_imager_projection" ; + number_of_nonretrievable_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where nonretrievable" ; + int number_of_normal_pixels ; + number_of_normal_pixels:long_name = "number of normal pixels that do not exceed local zenith angle threshold" ; + number_of_normal_pixels:_FillValue = -1 ; + number_of_normal_pixels:units = "count" ; + number_of_normal_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_normal_pixels:grid_mapping = "goes_imager_projection" ; + number_of_normal_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where normal" ; + int number_of_terminator_pixels ; + number_of_terminator_pixels:long_name = "number of terminator pixels that do not exceed local zenith angle threshold" ; + number_of_terminator_pixels:_FillValue = -1 ; + number_of_terminator_pixels:units = "count" ; + number_of_terminator_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_terminator_pixels:grid_mapping = "goes_imager_projection" ; + number_of_terminator_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum 
(interval: 0.000056 rad comment: good quality pixels only) where terminator" ; + int number_of_uncertain_pixels ; + number_of_uncertain_pixels:long_name = "number of uncertain pixels that do not exceed local zenith angle threshold" ; + number_of_uncertain_pixels:_FillValue = -1 ; + number_of_uncertain_pixels:units = "count" ; + number_of_uncertain_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_uncertain_pixels:grid_mapping = "goes_imager_projection" ; + number_of_uncertain_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where uncertain" ; + int number_of_water_pixels ; + number_of_water_pixels:long_name = "number of water pixels that do not exceed local zenith angle threshold" ; + number_of_water_pixels:_FillValue = -1 ; + number_of_water_pixels:units = "count" ; + number_of_water_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_water_pixels:grid_mapping = "goes_imager_projection" ; + number_of_water_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where water" ; + float percent_ice_retrieval_pixels ; + percent_ice_retrieval_pixels:long_name = "percent of ice retrieval pixels that do not exceed local zenith angle threshold" ; + percent_ice_retrieval_pixels:standard_name = "clear_sky_area_fraction" ; + percent_ice_retrieval_pixels:_FillValue = -999.f ; + percent_ice_retrieval_pixels:valid_range = 0.f, 1.f ; + percent_ice_retrieval_pixels:units = "percent" ; + percent_ice_retrieval_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + percent_ice_retrieval_pixels:grid_mapping = "goes_imager_projection" ; + 
percent_ice_retrieval_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where ice retrieval" ; + float percent_terminator_pixels ; + percent_terminator_pixels:long_name = "percent of terminator pixels that do not exceed local zenith angle threshold" ; + percent_terminator_pixels:standard_name = "clear_sky_area_fraction" ; + percent_terminator_pixels:_FillValue = -999.f ; + percent_terminator_pixels:valid_range = 0.f, 1.f ; + percent_terminator_pixels:units = "percent" ; + percent_terminator_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + percent_terminator_pixels:grid_mapping = "goes_imager_projection" ; + percent_terminator_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where terminator" ; + float percent_uncorrectable_GRB_errors ; + percent_uncorrectable_GRB_errors:long_name = "percent data lost due to uncorrectable GRB errors" ; + percent_uncorrectable_GRB_errors:_FillValue = -999.f ; + percent_uncorrectable_GRB_errors:valid_range = 0.f, 1.f ; + percent_uncorrectable_GRB_errors:units = "percent" ; + percent_uncorrectable_GRB_errors:coordinates = "t y_image x_image" ; + percent_uncorrectable_GRB_errors:grid_mapping = "goes_imager_projection" ; + percent_uncorrectable_GRB_errors:cell_methods = "t: sum area: sum (uncorrectable GRB errors only)" ; + float percent_uncorrectable_L0_errors ; + percent_uncorrectable_L0_errors:long_name = "percent data lost due to uncorrectable L0 errors" ; + percent_uncorrectable_L0_errors:_FillValue = -999.f ; + percent_uncorrectable_L0_errors:valid_range = 0.f, 1.f ; + percent_uncorrectable_L0_errors:units = "percent" ; + percent_uncorrectable_L0_errors:coordinates = "t y_image x_image" ; + percent_uncorrectable_L0_errors:grid_mapping = 
"goes_imager_projection" ; + percent_uncorrectable_L0_errors:cell_methods = "t: sum area: sum (uncorrectable L0 errors only)" ; + int processing_parm_version_container ; + processing_parm_version_container:long_name = "container for processing parameter filenames" ; + processing_parm_version_container:L2_processing_parm_version = "OR_ABI-L2-PARM-AICE_v02r00.zip, OR_ANC-L2-PARM-SEMISTATIC_v01r00.zip, OR_ABI-L2-PARM-AUXILIARY_v01r00.zip" ; + float quantitative_local_zenith_angle ; + quantitative_local_zenith_angle:long_name = "threshold angle between the line of sight to the satellite and the local zenith at the observation target for good quality ice concentration and extent data production" ; + quantitative_local_zenith_angle:standard_name = "platform_zenith_angle" ; + quantitative_local_zenith_angle:units = "degree" ; + quantitative_local_zenith_angle:bounds = "quantitative_local_zenith_angle_bounds" ; + float quantitative_local_zenith_angle_bounds(number_of_LZA_bounds) ; + quantitative_local_zenith_angle_bounds:long_name = "local zenith angle degree range where good quality ice concentration and extent data is produced" ; + float quantitative_solar_zenith_angle ; + quantitative_solar_zenith_angle:long_name = "threshold angle between the line of sight to the sun and the local zenith at the observation target for good quality ice concentration and extent data production" ; + quantitative_solar_zenith_angle:standard_name = "solar_zenith_angle" ; + quantitative_solar_zenith_angle:units = "degree" ; + quantitative_solar_zenith_angle:bounds = "quantitative_solar_zenith_angle_bounds" ; + float quantitative_solar_zenith_angle_bounds(number_of_SZA_bounds) ; + quantitative_solar_zenith_angle_bounds:long_name = "solar zenith angle degree range where good quality ice concentration and extent data is produced" ; + float retrieval_local_zenith_angle ; + retrieval_local_zenith_angle:long_name = "threshold angle between the line of sight to the satellite and the local zenith at 
the observation target for good or degraded quality ice concentration and extent data production" ; + retrieval_local_zenith_angle:standard_name = "platform_zenith_angle" ; + retrieval_local_zenith_angle:units = "degree" ; + retrieval_local_zenith_angle:bounds = "retrieval_local_zenith_angle_bounds" ; + float retrieval_local_zenith_angle_bounds(number_of_LZA_bounds) ; + retrieval_local_zenith_angle_bounds:long_name = "local zenith angle degree range where good quality ice concentration and extent data is produced" ; + float retrieval_solar_zenith_angle ; + retrieval_solar_zenith_angle:long_name = "threshold angle between the line of sight to the sun and the local zenith at the observation target for good or degraded quality ice concentration and extent data production" ; + retrieval_solar_zenith_angle:standard_name = "solar_zenith_angle" ; + retrieval_solar_zenith_angle:units = "degree" ; + retrieval_solar_zenith_angle:bounds = "retrieval_solar_zenith_angle_bounds" ; + float retrieval_solar_zenith_angle_bounds(number_of_SZA_bounds) ; + retrieval_solar_zenith_angle_bounds:long_name = "solar zenith angle degree range where good or degraded quality ice concentration and extent data is produced" ; + int size_searchwindow ; + size_searchwindow:long_name = "size of search window pixels that do not exceed local zenith angle threshold" ; + size_searchwindow:_FillValue = -1 ; + size_searchwindow:units = "count" ; + size_searchwindow:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + size_searchwindow:grid_mapping = "goes_imager_projection" ; + size_searchwindow:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where search window size" ; + float std_dev_ice_retrieval ; + std_dev_ice_retrieval:long_name = "standard deviation of ice concentration retrieval values" ; + std_dev_ice_retrieval:standard_name = 
"ice_concentration_retrieval" ; + std_dev_ice_retrieval:_FillValue = -999.f ; + std_dev_ice_retrieval:units = "m" ; + std_dev_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + std_dev_ice_retrieval:grid_mapping = "goes_imager_projection" ; + std_dev_ice_retrieval:cell_methods = "local_zenith_angle: sum solar_zenith_angle: sum t: sum area: standard_deviation (interval: variable[@name=\'x\']/@value rad comment: good quality pixels only) where ice retrieval" ; + double t ; + t:long_name = "J2000 epoch mid-point between the start and end image scan in seconds" ; + t:standard_name = "time" ; + t:units = "seconds since 2000-01-01 12:00:00" ; + t:axis = "T" ; + t:bounds = "time_bounds" ; + double time_bounds(number_of_time_bounds) ; + time_bounds:long_name = "Scan start and end times in seconds since epoch (2000-01-01 12:00:00)" ; + short x(x) ; + x:scale_factor = 5.6e-05f ; + x:add_offset = -0.151844f ; + x:units = "rad" ; + x:axis = "X" ; + x:long_name = "GOES fixed grid projection x-coordinate" ; + x:standard_name = "projection_x_coordinate" ; + float x_image ; + x_image:long_name = "GOES-R fixed grid projection x-coordinate center of image" ; + x_image:standard_name = "projection_x_coordinate" ; + x_image:units = "rad" ; + x_image:axis = "X" ; + float x_image_bounds(number_of_image_bounds) ; + x_image_bounds:long_name = "GOES-R fixed grid projection x-coordinate west/east extent of image" ; + x_image_bounds:units = "rad" ; + short y(y) ; + y:scale_factor = -5.6e-05f ; + y:add_offset = 0.151844f ; + y:units = "rad" ; + y:axis = "Y" ; + y:long_name = "GOES fixed grid projection y-coordinate" ; + y:standard_name = "projection_y_coordinate" ; + float y_image ; + y_image:long_name = "GOES-R fixed grid projection y-coordinate center of image" ; + y_image:standard_name = "projection_y_coordinate" ; + y_image:units = "rad" ; + y_image:axis = "Y" ; + float y_image_bounds(number_of_image_bounds) ; + y_image_bounds:long_name = "GOES-R 
fixed grid projection y-coordinate north/south extent of image" ; + y_image_bounds:units = "rad" ; + +// global attributes: + :naming_authority = "gov.nesdis.noaa" ; + :Conventions = "CF-1.7" ; + :Metadata_Conventions = "Unidata Dataset Discovery v1.0" ; + :standard_name_vocabulary = "CF Standard Name Table (v35, 20 July 2016)" ; + :institution = "DOC/NOAA/NESDIS > U.S. Department of Commerce, National Oceanic and Atmospheric Administration, National Environmental Satellite, Data, and Information Services" ; + :project = "GOES" ; + :production_site = "NSOF" ; + :production_environment = "OE" ; + :spatial_resolution = "2.0km at nadir" ; + :orbital_slot = "GOES-East" ; + :platform_ID = "G16" ; + :instrument_type = "GOES-R Series Advanced Baseline Imager (ABI)" ; + :scene_id = "Full Disk" ; + :instrument_ID = "FM1" ; + :dataset_name = "OR_ABI-L2-AICEF-M6_G16_s20241691800214_e20241691809522_c20241691814251.nc" ; + :iso_series_metadata_id = "e7ce8b20-b00a-11e1-afa6-0800200c9a66" ; + :title = "ABI L2 Cryosphere Ice Concentration" ; + :summary = "GOES Cryosphere Ice Concentration" ; + :keywords = "CRYOSPHERE > ICE CONCENTRATION AND EXTENT > ICE CONCENTRATION" ; + :keywords_vocabulary = "NASA Global Change Master Directory (GCMD) Earth Science Keywords, Version 7.0.0.0.0" ; + :license = "Unclassified data. Access is restricted to approved users only." 
; + :processing_level = "National Aeronautics and Space Administration (NASA) L2" ; + :cdm_data_type = "Image" ; + :date_created = "2024-06-17T18:14:25.1Z" ; + :time_coverage_start = "2024-06-17T18:00:21.4Z" ; + :time_coverage_end = "2024-06-17T18:09:52.2Z" ; + :timeline_id = "ABI Mode 6" ; + :production_data_source = "Realtime" ; + :id = "077a4f5e-de4a-4bff-86f6-8ddd0ff02f61" ; + :history = "Tue Sep 17 20:20:14 2024: ncks -d x,2620,2640 -d y,80,100 OR_ABI-L2-AICEF-M6_G16_s20241691800214_e20241691809522_c20241691814251.nc icec_abi_g16_1.nc" ; + :NCO = "netCDF Operators version 5.0.6 (Homepage = http://nco.sf.net, Code = http://github.com/nco/nco)" ; +data: + + DQF = + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 ; + + IceConc = + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, 58144, 57830, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + 65530, 65530, 59683, 57534, 64501, 65530, 65530, 65530, 65530, _, _, 65530, + 65530, 65530, 65530, 64031, 56789, _, _, _, _, + 65530, 65530, 65530, 65530, 65280, 65530, 65530, 65530, 64277, 57892, + 45031, 47109, 61463, 65530, 65530, 65530, 62981, 58667, 57831, 63586, + 65530, + 45147, 43501, 57901, 55554, 57166, 65530, 63760, 56221, 52192, 55591, + 50035, 48650, 48057, 45683, 51947, 57478, 62688, 65530, 65530, 65530, + 65530, + 56977, 56455, 52283, 59212, 54508, 51997, 61822, 59940, 65530, 65530, + 65530, 64347, 65530, 65530, 65530, 65530, 65530, 65530, 65530, 65530, + 65530, + 52793, 50723, 51159, 37244, 63617, 65530, 65530, 65530, 65530, 64308, + 63279, 65530, 65530, 61313, 65530, 65530, 65530, 65530, 65530, 65530, + 63967, + _, _, _, _, 64304, 63829, 60868, 62014, 51838, 52195, 46471, 51122, 59087, + 62312, 65530, 65530, 65530, 65530, 65530, 65530, 58765, + _, _, _, _, _, 50061, 50576, 57647, 53324, 63633, 65530, 65530, 65530, + 65530, 65530, 63681, 63003, 65530, 65530, 65385, 62437, + _, _, _, _, _, _, _, 56932, 54853, 63490, 62990, 63962, 65530, 61397, + 64865, 65530, 65179, 65506, 65530, 65530, 64915, + _, _, _, _, _, _, _, _, 62413, 59618, 58176, 64427, 61133, 65330, 64892, + 65530, 65530, 65530, 65530, 65132, 65530, + _, _, _, _, _, _, _, _, _, _, _, _, _, 63567, 65530, 65530, 65530, 65530, + 65530, 65437, 63195, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + 
_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; + + Mask = + -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2, -2, 0, -2, -2, 0, 0, 0, 0, -1, + -2, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2, -2, -2, -2, -2, -2, 0, 0, 0, 0, 0, + -2, -2, 0, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -3, -1, -1, -1, -1, -1, + -1, -1, + -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, 0, -1, -1, -1, -1, + -1, 0, + -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, 0, 0, 0, -2, 0, 0, -2, + -2, -2, -2, -2, 1, 1, -2, -3, -2, -2, -2, -2, -2, -2, -2, 0, 0, 0, 0, 0, 0, + -2, -2, -2, -2, -2, -2, -2, -2, 0, -2, 0, -2, -2, -2, -2, -2, -2, -2, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, -2, -2, 1, 1, 1, 1, 1, 1, -2, -2, -2, -3, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + -2, -2, -2, -2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + -2, -2, -2, -2, -2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 0, -2, -2, -2, -2, -2, -2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 0, -2, -2, -2, -2, -2, -2, -2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 0, 0, 0, 0, 0, -2, -2, 0, -2, -2, -2, -2, -2, 1, 1, 1, 1, 1, 1, 1, 1, + 0, 0, -2, -2, -2, 0, -2, -2, -2, -2, -2, 0, -2, -2, -2, -2, -2, -2, -2, -2, -2, + 0, 0, 0, 0, 0, 0, 0, 0, -2, -2, 0, -2, -2, -2, 0, -2, -2, -2, -2, -2, -2, + 0, 0, 0, 0, -2, -2, 0, 0, 0, 0, -2, 0, 0, 0, 0, 0, -2, -2, -2, -2, -2, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2, -2, -2, -2 ; + + PQI = + 8192162, 8192230, 8192230, 8192230, 8192238, 8192238, 8192238, 8192230, + 8192238, 8192238, 8192238, 6881506, 6881514, 8192230, 6881506, 6881506, + 8192230, 8192230, 8192238, 8192230, 8257774, + 6881514, 8192230, 8192230, 8192230, 8192230, 8192230, 8192238, 8192238, + 8192230, 8192230, 6881514, 6881506, 
6881506, 6881506, 6881506, 6881514, + 8192230, 8192238, 8192238, 8192238, 8192238, + 6881514, 6881514, 8192230, 6881506, 6881514, 6881514, 6881514, 6881514, + 6881506, 6881514, 6881514, 6881506, 6881506, 8192162, 8257770, 8257766, + 8257766, 8257774, 8257774, 8257774, 8257774, + 6881514, 6881514, 6881514, 6881506, 6881506, 6881506, 6881506, 6881514, + 6881506, 6881506, 6881514, 6881514, 6881514, 6881514, 8192238, 8257774, + 8257774, 8257774, 8257774, 8257774, 8192238, + 6881514, 6881506, 6881506, 6881506, 6881506, 6881506, 6881506, 6881514, + 6881506, 6881514, 6881506, 6881514, 6881514, 6881514, 8192230, 8192238, + 8192238, 6881514, 8192230, 8192230, 6881514, + 6881506, 6881506, 6881506, 6881506, 4260064, 4260064, 6881506, 8192162, + 6881506, 6881514, 6881506, 6881514, 6881514, 6881514, 6881514, 8192230, + 8192230, 8192230, 8192230, 8192238, 8192230, + 6881506, 6881506, 6881506, 6881514, 6881506, 6881506, 6881514, 6881514, + 8192230, 6881514, 8192230, 6881514, 6881514, 6881506, 6881506, 6881514, + 6881514, 6881514, 8192230, 8192238, 8192238, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 6881506, 6881506, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 6881506, 6881506, 6881506, 8192162, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 
4260064, 4260064, 4260064, 4260064, + 6881514, 6881514, 6881514, 6881514, 4260073, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 6881514, 6881514, 6881514, 6881514, 6881514, 4260073, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 8192230, 6881514, 6881514, 6881514, 6881506, 6881506, 6881506, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 8192230, 6881514, 6881514, 6881514, 6881514, 6881514, 6881514, 6881506, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 8192230, 8192230, 8192230, 8192230, 8192230, 6881514, 6881514, 8192230, + 6881514, 6881514, 6881514, 6881514, 6881514, 4260073, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, + 8192238, 8192238, 6881514, 6881514, 6881514, 8192238, 6881514, 6881514, + 6881514, 6881514, 6881514, 8192230, 6881514, 6881514, 6881514, 6881514, + 6881514, 6881514, 6881514, 6881514, 6881514, + 8192230, 8192230, 8192238, 8192238, 8192238, 8192230, 8192230, 8192238, + 6881514, 6881514, 8192230, 6881514, 6881514, 6881514, 8192230, 6881514, + 6881514, 6881514, 6881514, 6881514, 6881514, + 8192230, 8192230, 8192230, 8192230, 6881514, 6881514, 8192238, 8192238, + 8192238, 8192238, 6881514, 8192238, 8192238, 8192230, 8192230, 8192230, + 6881514, 6881514, 6881514, 6881514, 6881514, + 8192230, 8192230, 8192230, 8192230, 8192230, 8192230, 8192230, 8192230, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 6881514, 6881514, 6881514, 6881514 ; + + Temp = + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + 
_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, 63221, 63150, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + 63796, 63428, 63226, 63253, 63369, 63295, 63223, 63210, 63150, _, _, 63239, + 63351, 63396, 63266, 63148, 63060, _, _, _, _, + 63709, 63707, 63764, 63707, 63616, 63750, 63766, 63691, 63647, 63648, + 63599, 63572, 63586, 63704, 63677, 63591, 63589, 63605, 63514, 63588, + 63511, + 63284, 63314, 63502, 63548, 63502, 63600, 63618, 63518, 63518, 63546, + 63370, 63223, 63210, 63313, 63383, 63513, 63658, 63804, 63894, 63832, + 63816, + 63430, 63383, 63386, 63460, 63414, 63399, 63578, 63457, 63677, 63674, + 63602, 63620, 63631, 63688, 63761, 63818, 63848, 63832, 63832, 63848, + 63834, + 63430, 63370, 63282, 63105, 63620, 63734, 63691, 63661, 63718, 63647, + 63559, 63647, 63632, 63632, 63761, 63850, 63848, 63864, 63864, 63820, + 63809, + _, _, _, _, 63524, 63629, 63695, 63616, 63502, 63473, 63386, 63462, 63502, + 63652, 63779, 63851, 63836, 63851, 63804, 63821, 63647, + _, _, _, _, _, 63393, 63383, 63457, 63460, 63634, 63779, 63807, 63747, + 63732, 63748, 63706, 63734, 63821, 63821, 63775, 63780, + _, _, _, _, _, _, _, 63466, 63500, 63616, 63645, 63734, 63732, 63616, + 63720, 63763, 63747, 63789, 63850, 63834, 63850, + _, _, _, _, _, _, _, _, 63599, 63530, 63502, 63620, 63570, 63720, 63672, + 63775, 63851, 63850, 63848, 63788, 63820, + _, _, _, _, _, _, _, _, _, _, _, _, _, 63554, 63583, 63640, 63669, 63713, + 63697, 63685, 63624, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; + + algorithm_disabled_due_to_mitigation = 0 ; + + algorithm_dynamic_input_data_container = _ ; + + 
algorithm_product_version_container = _ ; + + geospatial_lat_lon_extent = _ ; + + goes_imager_projection = _ ; + + granule_level_quality_flag = 0 ; + + maximum_ice_retrieval = 100 ; + + mean_ice_retrieval = 90.95 ; + + minimum_ice_retrieval = 0.1708984 ; + + nominal_satellite_height = 35786.02 ; + + nominal_satellite_subpoint_lat = 0 ; + + nominal_satellite_subpoint_lon = -75.2 ; + + number_of_bad_data_pixels = 1132863 ; + + number_of_day_pixels = 6732 ; + + number_of_ice_retrievals = 36882 ; + + number_of_night_pixels = 30150 ; + + number_of_nonretrievable_pixels = 21017 ; + + number_of_normal_pixels = 36882 ; + + number_of_terminator_pixels = 1153880 ; + + number_of_uncertain_pixels = 21855610 ; + + number_of_water_pixels = 4979711 ; + + percent_ice_retrieval_pixels = 0.5086401 ; + + percent_terminator_pixels = 6.229427 ; + + percent_uncorrectable_GRB_errors = 0 ; + + percent_uncorrectable_L0_errors = 0 ; + + processing_parm_version_container = _ ; + + quantitative_local_zenith_angle = 80 ; + + quantitative_local_zenith_angle_bounds = 0, 80 ; + + quantitative_solar_zenith_angle = 85 ; + + quantitative_solar_zenith_angle_bounds = 0, 85 ; + + retrieval_local_zenith_angle = 80 ; + + retrieval_local_zenith_angle_bounds = 0, 80 ; + + retrieval_solar_zenith_angle = 85 ; + + retrieval_solar_zenith_angle_bounds = 0, 85 ; + + size_searchwindow = 50 ; + + std_dev_ice_retrieval = 18.40504 ; + + t = 771919506.820459 ; + + time_bounds = 771919221.423053, 771919792.217865 ; + + x = 2620, 2621, 2622, 2623, 2624, 2625, 2626, 2627, 2628, 2629, 2630, 2631, + 2632, 2633, 2634, 2635, 2636, 2637, 2638, 2639, 2640 ; + + x_image = 0 ; + + x_image_bounds = -0.151872, 0.151872 ; + + y = 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, + 98, 99, 100 ; + + y_image = 0 ; + + y_image_bounds = 0.151872, -0.151872 ; +} diff --git a/utils/test/testdata/icec_abi_g16_2.cdl b/utils/test/testdata/icec_abi_g16_2.cdl new file mode 100644 index 000000000..a1fcac644 --- /dev/null 
+++ b/utils/test/testdata/icec_abi_g16_2.cdl @@ -0,0 +1,653 @@ +netcdf icec_abi_g16_2 { +dimensions: + y = 21 ; + x = 21 ; + number_of_LZA_bounds = 2 ; + number_of_SZA_bounds = 2 ; + number_of_time_bounds = 2 ; + number_of_image_bounds = 2 ; +variables: + ushort DQF(y, x) ; + DQF:_FillValue = 65535US ; + DQF:long_name = "ABI L2 Cryosphere Ice Concentration Data Quality Flags" ; + DQF:standard_name = "status_flag" ; + DQF:valid_range = 0US, 3US ; + DQF:units = "1" ; + DQF:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle quantitative_solar_zenith_angle t y x" ; + DQF:grid_mapping = "goes_imager_projection" ; + DQF:cell_methods = "retrieval_local_zenith_angle: point quantitative_local_zenith_angle: point retrieval_solar_zenith_angle: point quantitative_solar_zenith_angle: t: point area: point" ; + DQF:flag_values = 0US, 1US, 2US, 3US ; + DQF:flag_meanings = "normal nonretrievable uncertain bad_data" ; + DQF:number_of_qf_values = 4US ; + DQF:potentially_geo_pixel_count_used_as_percent_denominator = 23046372 ; + ushort IceConc(y, x) ; + IceConc:_FillValue = 65535US ; + IceConc:long_name = "ABI L2 Cryosphere Ice Concentration" ; + IceConc:standard_name = "ice_concentration" ; + IceConc:valid_range = 0US, 65530US ; + IceConc:scale_factor = 0.00152602f ; + IceConc:add_offset = 0.f ; + IceConc:units = "percent" ; + IceConc:resolution = "y: 0.000056 rad x: 0.000056 rad" ; + IceConc:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle quantitative_solar_zenith_angle t y x" ; + IceConc:grid_mapping = "goes_imager_projection" ; + IceConc:cell_methods = "retrieval_local_zenith_angle: point (good or degraded quality pixel produced) quantitative_local_zenith_angle: point (good quality pixel produced) retrieval_solar_zenith_angle: point (good or degraded quality pixel produced) quantitative_solar_zenith_angle: point (good quality pixel produced) t: point area: point" ; + 
IceConc:ancillary_variables = "DQF" ; + byte Mask(y, x) ; + Mask:_FillValue = -99b ; + Mask:long_name = "ABI L2 Cryosphere Ice Mask" ; + Mask:standard_name = "ice_mask" ; + Mask:valid_range = -128b, 127b ; + Mask:units = "1" ; + Mask:resolution = "y: 0.000056 rad x: 0.000056 rad" ; + Mask:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle t y x" ; + Mask:grid_mapping = "goes_imager_projection" ; + Mask:cell_methods = "retrieval_local_zenith_angle: point (good or degraded quality pixel produced) quantitative_local_zenith_angle: point (good quality pixel produced) retrieval_solar_zenith_angle: point (good quality pixel produced) t: point area: point" ; + Mask:flag_values = -3b, -2b, -1b, 0b, 1b, 2b ; + Mask:flag_meanings = "non_retrieval water land cloud day_ice night_ice" ; + Mask:ancillary_variables = "DQF" ; + Mask:clear_pixel_definition = "no cloud detected and failed a test for high values of spatial heterogeneity" ; + Mask:probably_clear_pixel_definition = "no cloud detected but passed a test for high values of spatial heterogeneity and one or more neighboring pixels identified as cloudy. pixel is possibly cloud-contaminated" ; + Mask:probably_cloudy_pixel_definition = "cloud detected but likely contains a cloud edge, since one or more neighboring pixels are clear. 
pixel is probably cloud-contaminated" ; + Mask:cloudy_pixel_definition = "cloud detected and failed a test for cloud edges" ; + uint PQI(y, x) ; + PQI:_FillValue = 0U ; + PQI:long_name = "ABI L2 Cryosphere Ice Concentration product quality indicator" ; + PQI:units = "1" ; + PQI:grid_mapping = "goes_imager_projection" ; + PQI:coordinates = "y x" ; + PQI:flag_meanings = "normal nonretrievable uncertain bad_data cloud_mask_clear cloud_mask_probably_clear cloud_mask_probably_cloudy cloud_mask_cloudy day_night_qf sunglint_qf cloud_shadow_qf off_earth_qf solar_zenith_angle_qf satellite_zenith_angle_qf reflectance_band_2_qf reflectance_band_3_qf reflectance_band_5_qf brightness_temp_band_14_qf brightness_temp_band_15_qf Unused_Bit_15 surface_in-land_water surface_land surface_sea_water surface_other reflectance_test_ice_cover_detection_qf NDSI_test_ice_cover_detection_qf skin_temp_test_ice_cover_detection_qf visable_band_tie-pont_qf Unused_Bit_23 read_input_qf Unused_Bit_25 Unused_Bit_26 Unused_Bit_27 Unused_Bit_28 Unused_Bit_29 Unused_Bit_30 Unused_Bit_31" ; + PQI:number_of_qf_values = 37U ; + ushort Temp(y, x) ; + Temp:_FillValue = 65535US ; + Temp:long_name = "ABI L2 Cryosphere Ice Surface Temperature" ; + Temp:standard_name = "ice_temperature" ; + Temp:valid_range = 0US, 65530US ; + Temp:scale_factor = 0.00267053f ; + Temp:add_offset = 100.f ; + Temp:units = "kelvin" ; + Temp:resolution = "y: 0.000056 rad x: 0.000056 rad" ; + Temp:coordinates = "retrieval_local_zenith_angle quantitative_local_zenith_angle retrieval_solar_zenith_angle quantitative_solar_zenith_angle t y x" ; + Temp:grid_mapping = "goes_imager_projection" ; + Temp:cell_methods = "retrieval_local_zenith_angle: point (good or degraded quality pixel produced) quantitative_local_zenith_angle: point (good quality pixel produced) retrieval_solar_zenith_angle: point (good or degraded quality pixel produced) quantitative_solar_zenith_angle: point (good quality pixel produced) t: point area: point" ; + 
Temp:ancillary_variables = "DQF" ; + int algorithm_disabled_due_to_mitigation ; + algorithm_disabled_due_to_mitigation:long_name = "Status flag indicating if the algorithm was disabled due to upstream degradation" ; + algorithm_disabled_due_to_mitigation:_FillValue = -1 ; + algorithm_disabled_due_to_mitigation:flag_value = 0, 1 ; + algorithm_disabled_due_to_mitigation:flag_meanings = "unset set" ; + algorithm_disabled_due_to_mitigation:valid_range = 0, 1 ; + algorithm_disabled_due_to_mitigation:units = "1" ; + int algorithm_dynamic_input_data_container ; + algorithm_dynamic_input_data_container:long_name = "container for filenames of dynamic algorithm input data" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_solar_zenith_angle_data = "OR_I_ABI-L2-AUXF-M6_G16_s20241692100214_e20241692109522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_local_zenith_angle_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_land_mask_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_auxiliary_lat_lon_position_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_1_2km_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_2_2km_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_3_2km_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_reflectance_band_5_2km_data = "null" ; + algorithm_dynamic_input_data_container:input_ABI_L2_brightness_temperature_band_14_2km_data = "OR_ABI-L2-CMIPF-M6C14_G16_s2024-06-17T21:00:21.4Z_e2024-06-17T21:09:52.2Z_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_brightness_temperature_band_15_2km_data = "OR_ABI-L2-CMIPF-M6C15_G16_s2024-06-17T21:00:21.4Z_e2024-06-17T21:09:52.2Z_c*.nc" ; + 
algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_cloud_mask_data_information_flag_data = "OR_I_ABI-L2-ACMDIFF-M6_G16_s20241692100214_e20241692109522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_4_level_cloud_mask_data = "OR_ABI-L2-ACMF-M6_G16_s20241692100214_e20241692109522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_cloud_mask_granule_level_quality_flag_data = "OR_ABI-L2-ACMF-M6_G16_s20241692100214_e20241692109522_c*.nc" ; + algorithm_dynamic_input_data_container:input_ABI_L2_intermediate_product_cloud_top_cloud_shadow_flag_data = "OR_I_ABI-L2-ACHF-M6_G16_s20241692100214_e20241692109522_c*.nc" ; + int algorithm_product_version_container ; + algorithm_product_version_container:long_name = "container for algorithm package filename and product version" ; + algorithm_product_version_container:algorithm_version = "OR_ABI-L2-ALG-AICE_v02r00.zip" ; + algorithm_product_version_container:product_version = "v02r00" ; + float geospatial_lat_lon_extent ; + geospatial_lat_lon_extent:long_name = "geospatial latitude and longitude references" ; + geospatial_lat_lon_extent:geospatial_westbound_longitude = -156.2995f ; + geospatial_lat_lon_extent:geospatial_northbound_latitude = 81.3282f ; + geospatial_lat_lon_extent:geospatial_eastbound_longitude = 6.2995f ; + geospatial_lat_lon_extent:geospatial_southbound_latitude = -81.3282f ; + geospatial_lat_lon_extent:geospatial_lat_center = 0.f ; + geospatial_lat_lon_extent:geospatial_lon_center = -75.f ; + geospatial_lat_lon_extent:geospatial_lat_nadir = 0.f ; + geospatial_lat_lon_extent:geospatial_lon_nadir = -75.f ; + geospatial_lat_lon_extent:geospatial_lat_units = "degrees_north" ; + geospatial_lat_lon_extent:geospatial_lon_units = "degrees_east" ; + int goes_imager_projection ; + goes_imager_projection:long_name = "GOES-R ABI fixed grid projection" ; + goes_imager_projection:grid_mapping_name = "geostationary" ; + goes_imager_projection:perspective_point_height = 35786023. 
; + goes_imager_projection:semi_major_axis = 6378137. ; + goes_imager_projection:semi_minor_axis = 6356752.31414 ; + goes_imager_projection:inverse_flattening = 298.2572221 ; + goes_imager_projection:latitude_of_projection_origin = 0. ; + goes_imager_projection:longitude_of_projection_origin = -75. ; + goes_imager_projection:sweep_angle_axis = "x" ; + int64 granule_level_quality_flag ; + granule_level_quality_flag:long_name = "Cloud Mask Granule Level Degradation Quality Flag" ; + granule_level_quality_flag:flag_masks = 0LL, 1LL, 63LL ; + granule_level_quality_flag:flag_meanings = "valid_channels channel_missing algorithm_failure" ; + granule_level_quality_flag:_FillValue = -999LL ; + granule_level_quality_flag:valid_range = 0LL, 63LL ; + granule_level_quality_flag:units = "1" ; + float maximum_ice_retrieval ; + maximum_ice_retrieval:long_name = "maximum ice concentration retrieval" ; + maximum_ice_retrieval:standard_name = "ice_concentration_retrieval" ; + maximum_ice_retrieval:_FillValue = -999.f ; + maximum_ice_retrieval:valid_range = 0.f, 20000.f ; + maximum_ice_retrieval:units = "m" ; + maximum_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + maximum_ice_retrieval:grid_mapping = "goes_imager_projection" ; + maximum_ice_retrieval:cell_methods = "local_zenith_angle: sum solar_zenith_angle: sum t: sum area: maximum (interval: variable[@name=\'x\']/values rad comment: good quality pixels only) where ice retrieval" ; + float mean_ice_retrieval ; + mean_ice_retrieval:long_name = "mean ice concentration retrieval" ; + mean_ice_retrieval:standard_name = "ice_concentration_retrieval" ; + mean_ice_retrieval:_FillValue = -999.f ; + mean_ice_retrieval:valid_range = 0.f, 20000.f ; + mean_ice_retrieval:units = "m" ; + mean_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + mean_ice_retrieval:grid_mapping = "goes_imager_projection" ; + mean_ice_retrieval:cell_methods = "local_zenith_angle: sum 
solar_zenith_angle: sum t: sum area: mean (interval: variable[@name=\'x\']/values rad comment: good quality pixels only) where ice retrieval" ; + float minimum_ice_retrieval ; + minimum_ice_retrieval:long_name = "minimum ice concentration retrieval" ; + minimum_ice_retrieval:standard_name = "ice_concentration_retrieval" ; + minimum_ice_retrieval:_FillValue = -999.f ; + minimum_ice_retrieval:valid_range = 0.f, 20000.f ; + minimum_ice_retrieval:units = "m" ; + minimum_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + minimum_ice_retrieval:grid_mapping = "goes_imager_projection" ; + minimum_ice_retrieval:cell_methods = "local_zenith_angle: sum solar_zenith_angle: sum t: sum area: minimum (interval: variable[@name=\'x\']/values rad comment: good quality pixels only) where ice retrieval" ; + float nominal_satellite_height ; + nominal_satellite_height:long_name = "nominal satellite height above GRS 80 ellipsoid (platform altitude)" ; + nominal_satellite_height:standard_name = "height_above_reference_ellipsoid" ; + nominal_satellite_height:_FillValue = -999.f ; + nominal_satellite_height:units = "km" ; + float nominal_satellite_subpoint_lat ; + nominal_satellite_subpoint_lat:long_name = "nominal satellite subpoint latitude (platform latitude)" ; + nominal_satellite_subpoint_lat:standard_name = "latitude" ; + nominal_satellite_subpoint_lat:_FillValue = -999.f ; + nominal_satellite_subpoint_lat:units = "degrees_north" ; + float nominal_satellite_subpoint_lon ; + nominal_satellite_subpoint_lon:long_name = "nominal satellite subpoint longitude (platform longitude)" ; + nominal_satellite_subpoint_lon:standard_name = "longitude" ; + nominal_satellite_subpoint_lon:_FillValue = -999.f ; + nominal_satellite_subpoint_lon:units = "degrees_east" ; + int number_of_bad_data_pixels ; + number_of_bad_data_pixels:long_name = "number of bad data pixels that do not exceed local zenith angle threshold" ; + number_of_bad_data_pixels:_FillValue = -1 ; + 
number_of_bad_data_pixels:units = "count" ; + number_of_bad_data_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_bad_data_pixels:grid_mapping = "goes_imager_projection" ; + number_of_bad_data_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where bad data" ; + int number_of_day_pixels ; + number_of_day_pixels:long_name = "number of day pixels that do not exceed local zenith angle threshold" ; + number_of_day_pixels:_FillValue = -1 ; + number_of_day_pixels:units = "count" ; + number_of_day_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_day_pixels:grid_mapping = "goes_imager_projection" ; + number_of_day_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where day" ; + int number_of_ice_retrievals ; + number_of_ice_retrievals:long_name = "number of valid ice cover and retrieval pixels that do not exceed local zenith angle threshold" ; + number_of_ice_retrievals:_FillValue = -1 ; + number_of_ice_retrievals:units = "count" ; + number_of_ice_retrievals:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_ice_retrievals:grid_mapping = "goes_imager_projection" ; + number_of_ice_retrievals:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where valid ice cover and retrieval" ; + int number_of_night_pixels ; + number_of_night_pixels:long_name = "number of night pixels that do not exceed local zenith angle threshold" ; + number_of_night_pixels:_FillValue = -1 ; + number_of_night_pixels:units = "count" ; + number_of_night_pixels:coordinates 
= "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_night_pixels:grid_mapping = "goes_imager_projection" ; + number_of_night_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where night" ; + int number_of_nonretrievable_pixels ; + number_of_nonretrievable_pixels:long_name = "number of nonretrievable pixels that do not exceed local zenith angle threshold" ; + number_of_nonretrievable_pixels:_FillValue = -1 ; + number_of_nonretrievable_pixels:units = "count" ; + number_of_nonretrievable_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_nonretrievable_pixels:grid_mapping = "goes_imager_projection" ; + number_of_nonretrievable_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where nonretrievable" ; + int number_of_normal_pixels ; + number_of_normal_pixels:long_name = "number of normal pixels that do not exceed local zenith angle threshold" ; + number_of_normal_pixels:_FillValue = -1 ; + number_of_normal_pixels:units = "count" ; + number_of_normal_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_normal_pixels:grid_mapping = "goes_imager_projection" ; + number_of_normal_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where normal" ; + int number_of_terminator_pixels ; + number_of_terminator_pixels:long_name = "number of terminator pixels that do not exceed local zenith angle threshold" ; + number_of_terminator_pixels:_FillValue = -1 ; + number_of_terminator_pixels:units = "count" ; + number_of_terminator_pixels:coordinates = 
"quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_terminator_pixels:grid_mapping = "goes_imager_projection" ; + number_of_terminator_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where terminator" ; + int number_of_uncertain_pixels ; + number_of_uncertain_pixels:long_name = "number of uncertain pixels that do not exceed local zenith angle threshold" ; + number_of_uncertain_pixels:_FillValue = -1 ; + number_of_uncertain_pixels:units = "count" ; + number_of_uncertain_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_uncertain_pixels:grid_mapping = "goes_imager_projection" ; + number_of_uncertain_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where uncertain" ; + int number_of_water_pixels ; + number_of_water_pixels:long_name = "number of water pixels that do not exceed local zenith angle threshold" ; + number_of_water_pixels:_FillValue = -1 ; + number_of_water_pixels:units = "count" ; + number_of_water_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + number_of_water_pixels:grid_mapping = "goes_imager_projection" ; + number_of_water_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where water" ; + float percent_ice_retrieval_pixels ; + percent_ice_retrieval_pixels:long_name = "percent of ice retrieval pixels that do not exceed local zenith angle threshold" ; + percent_ice_retrieval_pixels:standard_name = "clear_sky_area_fraction" ; + percent_ice_retrieval_pixels:_FillValue = -999.f ; + percent_ice_retrieval_pixels:valid_range = 0.f, 1.f ; + 
percent_ice_retrieval_pixels:units = "percent" ; + percent_ice_retrieval_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + percent_ice_retrieval_pixels:grid_mapping = "goes_imager_projection" ; + percent_ice_retrieval_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where ice retrieval" ; + float percent_terminator_pixels ; + percent_terminator_pixels:long_name = "percent of terminator pixels that do not exceed local zenith angle threshold" ; + percent_terminator_pixels:standard_name = "clear_sky_area_fraction" ; + percent_terminator_pixels:_FillValue = -999.f ; + percent_terminator_pixels:valid_range = 0.f, 1.f ; + percent_terminator_pixels:units = "percent" ; + percent_terminator_pixels:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + percent_terminator_pixels:grid_mapping = "goes_imager_projection" ; + percent_terminator_pixels:cell_methods = "quantitative_local_zenith_angle: sum retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where terminator" ; + float percent_uncorrectable_GRB_errors ; + percent_uncorrectable_GRB_errors:long_name = "percent data lost due to uncorrectable GRB errors" ; + percent_uncorrectable_GRB_errors:_FillValue = -999.f ; + percent_uncorrectable_GRB_errors:valid_range = 0.f, 1.f ; + percent_uncorrectable_GRB_errors:units = "percent" ; + percent_uncorrectable_GRB_errors:coordinates = "t y_image x_image" ; + percent_uncorrectable_GRB_errors:grid_mapping = "goes_imager_projection" ; + percent_uncorrectable_GRB_errors:cell_methods = "t: sum area: sum (uncorrectable GRB errors only)" ; + float percent_uncorrectable_L0_errors ; + percent_uncorrectable_L0_errors:long_name = "percent data lost due to uncorrectable L0 errors" ; + 
percent_uncorrectable_L0_errors:_FillValue = -999.f ; + percent_uncorrectable_L0_errors:valid_range = 0.f, 1.f ; + percent_uncorrectable_L0_errors:units = "percent" ; + percent_uncorrectable_L0_errors:coordinates = "t y_image x_image" ; + percent_uncorrectable_L0_errors:grid_mapping = "goes_imager_projection" ; + percent_uncorrectable_L0_errors:cell_methods = "t: sum area: sum (uncorrectable L0 errors only)" ; + int processing_parm_version_container ; + processing_parm_version_container:long_name = "container for processing parameter filenames" ; + processing_parm_version_container:L2_processing_parm_version = "OR_ABI-L2-PARM-AICE_v02r00.zip, OR_ANC-L2-PARM-SEMISTATIC_v01r00.zip, OR_ABI-L2-PARM-AUXILIARY_v01r00.zip" ; + float quantitative_local_zenith_angle ; + quantitative_local_zenith_angle:long_name = "threshold angle between the line of sight to the satellite and the local zenith at the observation target for good quality ice concentration and extent data production" ; + quantitative_local_zenith_angle:standard_name = "platform_zenith_angle" ; + quantitative_local_zenith_angle:units = "degree" ; + quantitative_local_zenith_angle:bounds = "quantitative_local_zenith_angle_bounds" ; + float quantitative_local_zenith_angle_bounds(number_of_LZA_bounds) ; + quantitative_local_zenith_angle_bounds:long_name = "local zenith angle degree range where good quality ice concentration and extent data is produced" ; + float quantitative_solar_zenith_angle ; + quantitative_solar_zenith_angle:long_name = "threshold angle between the line of sight to the sun and the local zenith at the observation target for good quality ice concentration and extent data production" ; + quantitative_solar_zenith_angle:standard_name = "solar_zenith_angle" ; + quantitative_solar_zenith_angle:units = "degree" ; + quantitative_solar_zenith_angle:bounds = "quantitative_solar_zenith_angle_bounds" ; + float quantitative_solar_zenith_angle_bounds(number_of_SZA_bounds) ; + 
quantitative_solar_zenith_angle_bounds:long_name = "solar zenith angle degree range where good quality ice concentration and extent data is produced" ; + float retrieval_local_zenith_angle ; + retrieval_local_zenith_angle:long_name = "threshold angle between the line of sight to the satellite and the local zenith at the observation target for good or degraded quality ice concentration and extent data production" ; + retrieval_local_zenith_angle:standard_name = "platform_zenith_angle" ; + retrieval_local_zenith_angle:units = "degree" ; + retrieval_local_zenith_angle:bounds = "retrieval_local_zenith_angle_bounds" ; + float retrieval_local_zenith_angle_bounds(number_of_LZA_bounds) ; + retrieval_local_zenith_angle_bounds:long_name = "local zenith angle degree range where good quality ice concentration and extent data is produced" ; + float retrieval_solar_zenith_angle ; + retrieval_solar_zenith_angle:long_name = "threshold angle between the line of sight to the sun and the local zenith at the observation target for good or degraded quality ice concentration and extent data production" ; + retrieval_solar_zenith_angle:standard_name = "solar_zenith_angle" ; + retrieval_solar_zenith_angle:units = "degree" ; + retrieval_solar_zenith_angle:bounds = "retrieval_solar_zenith_angle_bounds" ; + float retrieval_solar_zenith_angle_bounds(number_of_SZA_bounds) ; + retrieval_solar_zenith_angle_bounds:long_name = "solar zenith angle degree range where good or degraded quality ice concentration and extent data is produced" ; + int size_searchwindow ; + size_searchwindow:long_name = "size of search window pixels that do not exceed local zenith angle threshold" ; + size_searchwindow:_FillValue = -1 ; + size_searchwindow:units = "count" ; + size_searchwindow:coordinates = "quantitative_local_zenith_angle retrieval_solar_zenith_angle t y_image x_image" ; + size_searchwindow:grid_mapping = "goes_imager_projection" ; + size_searchwindow:cell_methods = "quantitative_local_zenith_angle: sum 
retrieval_solar_zenith_angle: sum t: sum area: sum (interval: 0.000056 rad comment: good quality pixels only) where search window size" ; + float std_dev_ice_retrieval ; + std_dev_ice_retrieval:long_name = "standard deviation of ice concentration retrieval values" ; + std_dev_ice_retrieval:standard_name = "ice_concentration_retrieval" ; + std_dev_ice_retrieval:_FillValue = -999.f ; + std_dev_ice_retrieval:units = "m" ; + std_dev_ice_retrieval:coordinates = "local_zenith_angle solar_zenith_angle t y_image x_image" ; + std_dev_ice_retrieval:grid_mapping = "goes_imager_projection" ; + std_dev_ice_retrieval:cell_methods = "local_zenith_angle: sum solar_zenith_angle: sum t: sum area: standard_deviation (interval: variable[@name=\'x\']/@value rad comment: good quality pixels only) where ice retrieval" ; + double t ; + t:long_name = "J2000 epoch mid-point between the start and end image scan in seconds" ; + t:standard_name = "time" ; + t:units = "seconds since 2000-01-01 12:00:00" ; + t:axis = "T" ; + t:bounds = "time_bounds" ; + double time_bounds(number_of_time_bounds) ; + time_bounds:long_name = "Scan start and end times in seconds since epoch (2000-01-01 12:00:00)" ; + short x(x) ; + x:scale_factor = 5.6e-05f ; + x:add_offset = -0.151844f ; + x:units = "rad" ; + x:axis = "X" ; + x:long_name = "GOES fixed grid projection x-coordinate" ; + x:standard_name = "projection_x_coordinate" ; + float x_image ; + x_image:long_name = "GOES-R fixed grid projection x-coordinate center of image" ; + x_image:standard_name = "projection_x_coordinate" ; + x_image:units = "rad" ; + x_image:axis = "X" ; + float x_image_bounds(number_of_image_bounds) ; + x_image_bounds:long_name = "GOES-R fixed grid projection x-coordinate west/east extent of image" ; + x_image_bounds:units = "rad" ; + short y(y) ; + y:scale_factor = -5.6e-05f ; + y:add_offset = 0.151844f ; + y:units = "rad" ; + y:axis = "Y" ; + y:long_name = "GOES fixed grid projection y-coordinate" ; + y:standard_name = 
"projection_y_coordinate" ; + float y_image ; + y_image:long_name = "GOES-R fixed grid projection y-coordinate center of image" ; + y_image:standard_name = "projection_y_coordinate" ; + y_image:units = "rad" ; + y_image:axis = "Y" ; + float y_image_bounds(number_of_image_bounds) ; + y_image_bounds:long_name = "GOES-R fixed grid projection y-coordinate north/south extent of image" ; + y_image_bounds:units = "rad" ; + +// global attributes: + :naming_authority = "gov.nesdis.noaa" ; + :Conventions = "CF-1.7" ; + :Metadata_Conventions = "Unidata Dataset Discovery v1.0" ; + :standard_name_vocabulary = "CF Standard Name Table (v35, 20 July 2016)" ; + :institution = "DOC/NOAA/NESDIS > U.S. Department of Commerce, National Oceanic and Atmospheric Administration, National Environmental Satellite, Data, and Information Services" ; + :project = "GOES" ; + :production_site = "NSOF" ; + :production_environment = "OE" ; + :spatial_resolution = "2.0km at nadir" ; + :orbital_slot = "GOES-East" ; + :platform_ID = "G16" ; + :instrument_type = "GOES-R Series Advanced Baseline Imager (ABI)" ; + :scene_id = "Full Disk" ; + :instrument_ID = "FM1" ; + :dataset_name = "OR_ABI-L2-AICEF-M6_G16_s20241692100214_e20241692109522_c20241692114339.nc" ; + :iso_series_metadata_id = "e7ce8b20-b00a-11e1-afa6-0800200c9a66" ; + :title = "ABI L2 Cryosphere Ice Concentration" ; + :summary = "GOES Cryosphere Ice Concentration" ; + :keywords = "CRYOSPHERE > ICE CONCENTRATION AND EXTENT > ICE CONCENTRATION" ; + :keywords_vocabulary = "NASA Global Change Master Directory (GCMD) Earth Science Keywords, Version 7.0.0.0.0" ; + :license = "Unclassified data. Access is restricted to approved users only." 
; + :processing_level = "National Aeronautics and Space Administration (NASA) L2" ; + :cdm_data_type = "Image" ; + :date_created = "2024-06-17T21:14:33.9Z" ; + :time_coverage_start = "2024-06-17T21:00:21.4Z" ; + :time_coverage_end = "2024-06-17T21:09:52.2Z" ; + :timeline_id = "ABI Mode 6" ; + :production_data_source = "Realtime" ; + :id = "46003997-f244-4032-9fcd-90d65356d239" ; + :history = "Tue Sep 17 20:20:53 2024: ncks -d x,2940,2960 -d y,60,80 OR_ABI-L2-AICEF-M6_G16_s20241692100214_e20241692109522_c20241692114339.nc icec_abi_g16_2.nc" ; + :NCO = "netCDF Operators version 5.0.6 (Homepage = http://nco.sf.net, Code = http://github.com/nco/nco)" ; +data: + + DQF = + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 ; + + IceConc = + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + 64980, 60355, 58994, 64417, 65530, 65530, 65530, 60998, _, _, _, _, _, _, + _, _, _, _, _, _, _, + 65077, 65530, 65385, 58081, 65530, 65530, 61043, 53357, 65530, 65530, + 65530, 60489, 53651, 62895, 59205, _, _, _, _, _, _, + 65530, 65530, 65530, 60727, 60484, 64039, 65530, 65530, 65530, 65530, + 60138, 65530, 59879, 46424, 56419, _, _, _, _, _, _, + 60769, 65530, 65530, 65530, 65530, 65530, 65530, 61779, 65530, 65530, + 65530, 65530, 65530, 60387, 65530, _, _, _, _, _, _, + _, _, _, 65530, 65530, 65530, 65530, 65530, 65530, 65530, 65530, 65225, + 65530, 59482, 65530, _, _, _, _, _, _, + _, _, _, _, _, _, 64718, 65530, 65530, 65530, 65128, 63451, 62976, 60843, + 65530, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, 63894, 65530, 65530, 65530, 64013, 60501, 65530, _, + _, _, _, _, _, + _, _, _, _, _, _, _, _, _, 62898, 64994, 61547, 64815, 63020, 64341, _, _, + _, _, _, _, + _, _, _, _, _, _, _, _, _, _, 60139, 61896, 65530, 65530, 65530, _, _, _, + _, _, _, + _, _, _, _, _, _, _, _, _, _, _, 60841, 65530, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; + + Mask = + -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, + -3, -3, + 1, 1, 1, 1, 1, 1, 1, 1, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, + 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -3, -3, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, + 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -3, -3, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -3, -3, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -3, -3, -3, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, -3, -3, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, -3, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, -3, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ; + + PQI = + 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, + 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, + 7405794, 7405794, 7405794, 7405794, 7405794, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, 7405794, + 7405794, 7405794, 7405794, 7405794, 7405794, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 6357218, + 6357218, 6357218, 6357218, 7405794, 7405794, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 6357218, + 6357218, 
6357218, 8192230, 6357218, 6357218, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 8192238, + 8192238, 8192238, 8192238, 6357218, 6357218, + 8192238, 8192238, 8192238, 4260064, 4260064, 4260064, 4260064, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 6357218, + 6357218, 8192162, 8192162, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 4260073, 4260064, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 6357218, + 6357218, 8192162, 8192162, 8192230, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 6357218, + 6357218, 6357218, 8192162, 8192162, 8192162, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 4260064, 4260064, 4260064, 4260064, 4260064, 4260064, 6357218, + 6357218, 8192162, 8192162, 8192230, 8192230, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192230, 4260064, 4260064, 4260064, 4260064, 4260064, 6357218, + 6357218, 8192162, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 4260073, 4260064, 8192238, 8192230, 8192230, + 6357218, 8192162, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 
8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, 8192238, + 8192238, 8192238, 8192238, 8192238, 8192238 ; + + Temp = + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + 63797, 63724, 63710, 63825, 63956, 63970, 63895, 63763, _, _, _, _, _, _, + _, _, _, _, _, _, _, + 63786, 63756, 63726, 63640, 63769, 63825, 63739, 63604, 63911, 63967, + 63969, 63748, 63676, 63748, 63736, 63518, 63376, 63528, 63583, _, _, + 63713, 63787, 63699, 63507, 63536, 63713, 63710, 63707, 63837, 63748, + 63588, 63633, 63544, 63181, 62840, 62278, 61662, 61777, _, 63399, 63280, + 62097, 62699, 62965, 63397, 63289, 63340, 63364, 63418, 63532, 63427, + 63044, 63030, 63129, 62906, 62089, _, _, _, _, 62111, 62222, + _, _, _, 62321, 62459, 62400, 62748, 
63090, 63124, 63297, 63226, 62857, + 62842, 62991, 62877, 62567, 62520, _, _, _, _, + _, _, _, _, _, _, 62315, 63240, 63232, 63409, 63468, 63215, 63173, 63011, + 63491, 63605, 63383, _, _, _, _, + _, _, _, _, _, _, _, _, 62719, 63372, 63544, 63587, 63424, 63314, 63510, + 63458, 63498, 63497, _, _, _, + _, _, _, _, _, _, _, _, _, 62876, 63431, 63455, 63514, 63597, 63659, 63468, + 63438, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, 61708, 62059, 62695, 62995, 62392, 61234, + 61151, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, 60245, 60776, _, _, _, 61299, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, + _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; + + algorithm_disabled_due_to_mitigation = 0 ; + + algorithm_dynamic_input_data_container = _ ; + + algorithm_product_version_container = _ ; + + geospatial_lat_lon_extent = _ ; + + goes_imager_projection = _ ; + + granule_level_quality_flag = 0 ; + + maximum_ice_retrieval = 100 ; + + mean_ice_retrieval = 90.79719 ; + + minimum_ice_retrieval = 0.001220703 ; + + nominal_satellite_height = 35786.02 ; + + nominal_satellite_subpoint_lat = 0 ; + + nominal_satellite_subpoint_lon = -75.2 ; + + number_of_bad_data_pixels = 5317100 ; + + number_of_day_pixels = 12507 ; + + number_of_ice_retrievals = 37443 ; + + number_of_night_pixels = 24936 ; + + number_of_nonretrievable_pixels = 13714 ; + + number_of_normal_pixels = 37443 ; + + number_of_terminator_pixels = 5330814 ; + + 
number_of_uncertain_pixels = 17678115 ; + + number_of_water_pixels = 4883805 ; + + percent_ice_retrieval_pixels = 0.5045139 ; + + percent_terminator_pixels = 8.059138 ; + + percent_uncorrectable_GRB_errors = 0 ; + + percent_uncorrectable_L0_errors = 0 ; + + processing_parm_version_container = _ ; + + quantitative_local_zenith_angle = 80 ; + + quantitative_local_zenith_angle_bounds = 0, 80 ; + + quantitative_solar_zenith_angle = 85 ; + + quantitative_solar_zenith_angle_bounds = 0, 85 ; + + retrieval_local_zenith_angle = 80 ; + + retrieval_local_zenith_angle_bounds = 0, 80 ; + + retrieval_solar_zenith_angle = 85 ; + + retrieval_solar_zenith_angle_bounds = 0, 85 ; + + size_searchwindow = 50 ; + + std_dev_ice_retrieval = 18.02047 ; + + t = 771930306.85195 ; + + time_bounds = 771930021.449838, 771930592.254062 ; + + x = 2940, 2941, 2942, 2943, 2944, 2945, 2946, 2947, 2948, 2949, 2950, 2951, + 2952, 2953, 2954, 2955, 2956, 2957, 2958, 2959, 2960 ; + + x_image = 0 ; + + x_image_bounds = -0.151872, 0.151872 ; + + y = 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, + 78, 79, 80 ; + + y_image = 0 ; + + y_image_bounds = 0.151872, -0.151872 ; +} diff --git a/utils/test/testinput/gdas_icecabi2ioda.yaml b/utils/test/testinput/gdas_icecabi2ioda.yaml new file mode 100644 index 000000000..39a893702 --- /dev/null +++ b/utils/test/testinput/gdas_icecabi2ioda.yaml @@ -0,0 +1,13 @@ +provider: ABI +window begin: 2024-06-18T15:00:00Z +window end: 2024-06-19T21:00:00Z +output file: icec_abi.ioda.nc +#ocean basin: RECCAP2_region_masks_all_v20221025.nc +input files: +- icec_abi_g16_1.nc4 +- icec_abi_g16_2.nc4 + +test: + reference filename: testref/icecabi2ioda.test + test output filename: testoutput/icecabi2ioda.test + float relative tolerance: 1e-6 diff --git a/utils/test/testref/icecabi2ioda.test b/utils/test/testref/icecabi2ioda.test new file mode 100644 index 000000000..640701827 --- /dev/null +++ b/utils/test/testref/icecabi2ioda.test @@ -0,0 +1,26 @@ +Reading: 
[icec_abi_g16_1.nc4,icec_abi_g16_2.nc4] +seconds since 2000-01-01T12:00:00Z +obsVal: + Min: 0.568351 + Max: 0.999635 + Sum: 122.002 +obsError: + Min: 0.1 + Max: 0.1 + Sum: 13.5 +preQc: + Min: 0 + Max: 1 + Sum: 5 +longitude: + Min: -79.8731 + Max: -59.9008 + Sum: -9898.07 +latitude: + Min: 67.1378 + Max: 70.7751 + Sum: 9242.36 +datetime: + Min: 771919506 + Max: 771930306 + Sum: 104209597710 From 936e6faad7d347df5ea52b4ee38a8ad67385b106 Mon Sep 17 00:00:00 2001 From: Ed Givelberg Date: Fri, 18 Oct 2024 13:33:23 -0400 Subject: [PATCH 10/27] fixing the yaml (#1338) corrected the yaml for tropical moorings --- .../bufr2ioda_insitu_profile_tropical_2019010700.yaml.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in b/test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in index 1fa5a1856..5657fe263 100644 --- a/test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in +++ b/test/marine/testinput/bufr2ioda_insitu_profile_tropical_2019010700.yaml.in @@ -2,12 +2,12 @@ data_format: dbuoy subsets: dbuoy source: NCEP data tank -data_type: drifter +data_type: tropical cycle_type: gdas cycle_datetime: '2019010700' dump_directory: __BUFRINPUTDIR__ ioda_directory: __IODAOUTPUTDIR__ ocean_basin: __OCEANBASIN__ -data_description: 6-hrly in situ drifter profiles +data_description: 6-hrly in situ tropical mooring profiles data_provider: U.S. NOAA From 93e7ec60bbc354a3db42d174eb59f8ed1a170f48 Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Fri, 18 Oct 2024 16:53:12 -0400 Subject: [PATCH 11/27] Addition of a gnu lua file for hercules (#1341) - fixes #1340 I had to comment out `da-utils` in the bundle, but other than that the system builds. 
--- modulefiles/GDAS/hercules.gnu.lua | 93 +++++++++++++++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 modulefiles/GDAS/hercules.gnu.lua diff --git a/modulefiles/GDAS/hercules.gnu.lua b/modulefiles/GDAS/hercules.gnu.lua new file mode 100644 index 000000000..217538f6c --- /dev/null +++ b/modulefiles/GDAS/hercules.gnu.lua @@ -0,0 +1,93 @@ +help([[ +Load environment for running the GDAS application with gnu compilers and MPI. +]]) + +local pkgName = myModuleName() +local pkgVersion = myModuleVersion() +local pkgNameVer = myModuleFullName() + +prepend_path("MODULEPATH", '/work/noaa/epic/role-epic/spack-stack/hercules/modulefiles') +prepend_path("MODULEPATH", '/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.7.0/envs/ue-gcc/install/modulefiles/Core') +prepend_path("MODULEPATH", '/work2/noaa/da/python/opt/modulefiles/stack') + + +---- below two lines get us access to the spack-stack modules +load("stack-gcc/12.2.0") +load("stack-openmpi/4.1.6") + +load("cmake/3.23.1") +load("curl/8.4.0") +load("zlib/1.2.13") +load("git/2.31.1") +--load("pkg-config/0.27.1") +load("hdf5/1.14.3") +load("parallel-netcdf/1.12.3") +load("netcdf-c/4.9.2") +load("nccmp/1.9.0.1") +load("netcdf-fortran/4.6.1") +load("nco/5.1.6") +load("parallelio/2.6.2") +load("wget/1.21.1") +load("boost/1.84.0") +load("bufr/12.0.1") +load("git-lfs/3.1.2") +load("ecbuild/3.7.2") +load("openjpeg/2.3.1") +load("eccodes/2.33.0") +load("eigen/3.4.0") +load("openblas/0.3.24") +load("eckit/1.24.5") +load("fftw/3.3.10") +load("fckit/0.11.0") +load("fiat/1.2.0") +load("ectrans/1.2.0") +load("fms/2023.04") +load("esmf/8.6.1") +load("atlas/0.36.0") +load("sp/2.5.0") +load("gsl-lite/0.37.0") +load("libjpeg/2.1.0") +load("krb5/1.20.1") +load("libtirpc/1.3.3") +load("hdf/4.2.15") +load("jedi-cmake/1.4.0") +load("libpng/1.6.37") +load("libxt/1.3.0") +load("libxmu/1.1.4") +load("libxpm/3.5.17") +load("libxaw/1.0.15") +load("udunits/2.2.28") +load("ncview/2.1.9") +load("netcdf-cxx4/4.3.1") 
+load("py-pybind11/2.11.0") +--load("crtm/v2.4_jedi") +load("contrib/0.1") +load("noaatools/3.1") +load("rocoto/1.3.7") + +load("hpc/1.2.0") +unload("python/3.10.13") +unload("py-numpy/1.22.3") +load("miniconda3/4.6.14") +load("gdasapp/1.0.0") +-- below is a hack because of cmake finding the wrong python... +setenv("CONDA_PREFIX", "/work2/noaa/da/python/opt/core/miniconda3/4.6.14/envs/gdasapp/") + +setenv("CC","mpicc") +setenv("FC","mpifort") +setenv("CXX","mpicxx") +local mpiexec = '/opt/slurm/bin/srun' +local mpinproc = '-n' +setenv('MPIEXEC_EXEC', mpiexec) +setenv('MPIEXEC_NPROC', mpinproc) + +setenv("CRTM_FIX","/work2/noaa/da/role-da/GDASApp/fix/crtm/2.4.0") +setenv("GDASAPP_TESTDATA","/work2/noaa/da/role-da/GDASApp/testdata") +setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/work2/noaa/da/role-da/GDASApp/unittestdata") + +execute{cmd="ulimit -s unlimited",modeA={"load"}} + +whatis("Name: ".. pkgName) +whatis("Version: ".. pkgVersion) +whatis("Category: GDASApp") +whatis("Description: Load all libraries needed for GDASApp") From f1222ec37924d567a8d935f0cad1a6a705045e4e Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Wed, 23 Oct 2024 09:47:09 -0400 Subject: [PATCH 12/27] fix CI for aerosol DA (#1344) @RussTreadon-NOAA @KateFriedman-NOAA I think including this in the GDASApp hash in G-W should fix the aero CI (hopefully) --- parm/aero/obs/config/viirs_n20_aod.yaml.j2 | 2 +- parm/aero/obs/config/viirs_n21_aod.yaml.j2 | 2 +- parm/aero/obs/config/viirs_npp_aod.yaml.j2 | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/aero/obs/config/viirs_n20_aod.yaml.j2 b/parm/aero/obs/config/viirs_n20_aod.yaml.j2 index 7806a4cd0..43474ef6c 100644 --- a/parm/aero/obs/config/viirs_n20_aod.yaml.j2 +++ b/parm/aero/obs/config/viirs_n20_aod.yaml.j2 @@ -3,7 +3,7 @@ obsdatain: engine: type: H5File - obsfile: "{{ DATA }}/obs/{{ OPREFIX }}viirs_n20.{{ current_cycle | to_YMDH }}.nc4" + obsfile: "{{ DATA }}/obs/{{ OPREFIX }}viirs_n20_aod.tm00.nc" obsdataout: engine: type: 
H5File diff --git a/parm/aero/obs/config/viirs_n21_aod.yaml.j2 b/parm/aero/obs/config/viirs_n21_aod.yaml.j2 index 6450ad9ec..46d6d0d32 100644 --- a/parm/aero/obs/config/viirs_n21_aod.yaml.j2 +++ b/parm/aero/obs/config/viirs_n21_aod.yaml.j2 @@ -3,7 +3,7 @@ obsdatain: engine: type: H5File - obsfile: "{{ DATA }}/obs/{{ OPREFIX }}viirs_n21.{{ current_cycle | to_YMDH }}.nc4" + obsfile: "{{ DATA }}/obs/{{ OPREFIX }}viirs_n21_aod.tm00.nc" obsdataout: engine: type: H5File diff --git a/parm/aero/obs/config/viirs_npp_aod.yaml.j2 b/parm/aero/obs/config/viirs_npp_aod.yaml.j2 index 72efb740d..be6149b80 100644 --- a/parm/aero/obs/config/viirs_npp_aod.yaml.j2 +++ b/parm/aero/obs/config/viirs_npp_aod.yaml.j2 @@ -3,7 +3,7 @@ obsdatain: engine: type: H5File - obsfile: "{{ DATA }}/obs/{{ OPREFIX }}viirs_npp.{{ current_cycle | to_YMDH }}.nc4" + obsfile: "{{ DATA }}/obs/{{ OPREFIX }}viirs_npp_aod.tm00.nc" obsdataout: engine: type: H5File From 34f894ffc8ff15b1a43d537a0a1043670231e5a3 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA <134300700+DavidNew-NOAA@users.noreply.github.com> Date: Mon, 28 Oct 2024 09:33:18 -0400 Subject: [PATCH 13/27] Fix jjob testing in related to change if setup_expt.py (#1348) Resolves GDASApp issue [#1347](https://github.com/NOAA-EMC/GDASApp/issues/1347) jjob tests pass with this change now, but you have to revert ```test/atm/global-workflow/config.yaml``` to https://github.com/NOAA-EMC/GDASApp/commit/6fb0a655ffe61c6dd4f2acaa4c4490121ba980fb since jjob testing will not work without modification until Global Workflow PR [#2992](https://github.com/NOAA-EMC/global-workflow/pull/2992) is merged. 
--- test/atm/global-workflow/setup_workflow_exp.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/atm/global-workflow/setup_workflow_exp.sh b/test/atm/global-workflow/setup_workflow_exp.sh index d793733ae..47bb0d82e 100755 --- a/test/atm/global-workflow/setup_workflow_exp.sh +++ b/test/atm/global-workflow/setup_workflow_exp.sh @@ -10,7 +10,7 @@ idate=2021032312 edate=2021032318 app=ATM starttype='warm' -gfscyc='4' +interval='6' resdetatmos='48' resensatmos='48' nens=3 @@ -37,7 +37,7 @@ $srcdir/../../workflow/setup_expt.py gfs cycled --idate $idate \ --edate $edate \ --app $app \ --start $starttype \ - --gfs_cyc $gfscyc \ + --interval $interval \ --resdetatmos $resdetatmos \ --resensatmos $resensatmos \ --nens $nens \ From 45f22e906f51e4cfe65a3d9d4ecc8c41fdc33fc4 Mon Sep 17 00:00:00 2001 From: Azadeh Gholoubi <51101867+azadeh-gh@users.noreply.github.com> Date: Thu, 31 Oct 2024 12:45:59 -0400 Subject: [PATCH 14/27] Add ABI bufr2IODA python converter and JSON file. 
(#1356) [#115](https://github.com/NOAA-EMC/JEDI-T2O/issues/115) **New files include:** - `parm/ioda/bufr2ioda/bufr2ioda_gsrcsr.json` : JSON containing data format, sensor/bufr2ioda_gsrcsr.json, and satellite information - `ush/ioda/bufr2ioda/bufr2ioda_gsrcsr.py` : bufr2ioda python code for converting ABI GOES-16, GOES-17, and GOES-18 from BUFR to IODA - Also added _abi_ to the observation list in `jcb-prototype_3dvar.yaml.j2` Co-authored-by: Azadeh Gholoubi --- parm/atm/jcb-prototype_3dvar.yaml.j2 | 1 + parm/ioda/bufr2ioda/bufr2ioda_gsrcsr.json | 16 + ush/ioda/bufr2ioda/bufr2ioda_gsrcsr.py | 555 ++++++++++++++++++++++ 3 files changed, 572 insertions(+) create mode 100644 parm/ioda/bufr2ioda/bufr2ioda_gsrcsr.json create mode 100755 ush/ioda/bufr2ioda/bufr2ioda_gsrcsr.py diff --git a/parm/atm/jcb-prototype_3dvar.yaml.j2 b/parm/atm/jcb-prototype_3dvar.yaml.j2 index 7b6c80011..152683110 100644 --- a/parm/atm/jcb-prototype_3dvar.yaml.j2 +++ b/parm/atm/jcb-prototype_3dvar.yaml.j2 @@ -29,3 +29,4 @@ observations: # - satwnd.viirs_npp # - scatwind_ascat_metop-a # - snowcvr + - abi_g16 diff --git a/parm/ioda/bufr2ioda/bufr2ioda_gsrcsr.json b/parm/ioda/bufr2ioda/bufr2ioda_gsrcsr.json new file mode 100644 index 000000000..036f7bd4b --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_gsrcsr.json @@ -0,0 +1,16 @@ +{ + "data_format" : "bufr_d", + "data_type" : "gsrcsr", + "cycle_type" : "{{ RUN }}", + "cycle_datetime" : "{{ current_cycle | to_YMDH }}", + "dump_directory" : "{{ DMPDIR }}", + "ioda_directory" : "{{ COM_OBS }}", + "subsets" : ['NC021046'], + "data_description" : "NC021046 ABI, GOES-16; NC021046 ABI, GOES-17, NC021046 ABI, GOES-18", + "data_provider" : "U.S. 
NOAA/NESDIS", + "sensor_info" : { "sensor_name": "ABI", "sensor_full_name": "Advanced Baseline Imager", "sensor_id": 617 }, + "satellite_info" : [ + { "satellite_name": "g16", "satellite_full_name": "GOES-16", "satellite_id": 270, "launch time": "20171119" }, + { "satellite_name": "g17", "satellite_full_name": "GOES-17", "satellite_id": 271, "launch time": "20180301" }, + { "satellite_name": "g18", "satellite_full_name": "GOES-18", "satellite_id": 272, "launch time": "20220301" } ] +} diff --git a/ush/ioda/bufr2ioda/bufr2ioda_gsrcsr.py b/ush/ioda/bufr2ioda/bufr2ioda_gsrcsr.py new file mode 100755 index 000000000..dc4fc20b0 --- /dev/null +++ b/ush/ioda/bufr2ioda/bufr2ioda_gsrcsr.py @@ -0,0 +1,555 @@ +#!/usr/bin/env python3 +import argparse +import calendar +import datetime +import json +import math +import os +import time +from datetime import datetime + +import numpy as np +import numpy.ma as ma +from wxflow import Logger + +from pyioda import ioda_obs_space as ioda_ospace +from pyiodaconv import bufr + +# Define and initialize global variables +global float32_fill_value +global int32_fill_value +global int64_fill_value + +float32_fill_value = np.float32(0) +int32_fill_value = np.int32(0) +int64_fill_value = np.int64(0) + + +def bufr_to_ioda(config, logger): + subsets = config["subsets"] + logger.debug(f"Checking subsets = {subsets}") + + # Get parameters from configuration + subsets = config["subsets"] + data_format = config["data_format"] + data_type = config["data_type"] + data_description = config["data_description"] + data_provider = config["data_provider"] + cycle_type = config["cycle_type"] + dump_dir = config["dump_directory"] + ioda_dir = config["ioda_directory"] + cycle = config["cycle_datetime"] + yyyymmdd = cycle[0:8] + hh = cycle[8:10] + + satellite_info_array = config["satellite_info"] + sensor_name = config["sensor_info"]["sensor_name"] + sensor_full_name = config["sensor_info"]["sensor_full_name"] + sensor_id = config["sensor_info"]["sensor_id"] + + 
# Get derived parameters + yyyymmdd = cycle[0:8] + hh = cycle[8:10] + reference_time = datetime.strptime(cycle, "%Y%m%d%H") + reference_time = reference_time.strftime("%Y-%m-%dT%H:%M:%SZ") + + # General informaton + converter = "BUFR to IODA Converter" + process_level = "Level-2" + platform_description = "NOAA Series of Geostationary Operational Environmental Satellites - 3rd generation since 2016" + sensor_description = "Spinning Enhanced Visible and InfraRed Imager;12 channels, 1 narrow-bandwidth, 1 high-resolution broad-bandwidth VIS" + + logger.info(f"sensor_name = {sensor_name}") + logger.info(f"sensor_full_name = {sensor_full_name}") + logger.info(f"sensor_id = {sensor_id}") + logger.info(f"reference_time = {reference_time}") + + bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}" + DATA_PATH = os.path.join( + dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh), "atmos", bufrfile + ) + if not os.path.isfile(DATA_PATH): + logger.info(f"The DATA_PATH is: {DATA_PATH}") + + # ============================================ + # Make the QuerySet for all the data we want + # ============================================ + start_time = time.time() + + logger.info("Making QuerySet") + q = bufr.QuerySet(subsets) + + # MetaData + q.add("latitude", "*/CLATH") + q.add("longitude", "*/CLONH") + q.add("satelliteId", "*/SAID") + q.add("year", "*/YEAR") + q.add("month", "*/MNTH") + q.add("day", "*/DAYS") + q.add("hour", "*/HOUR") + q.add("minute", "*/MINU") + q.add("second", "*/SECO") + q.add("sensorId", "*/SIID[1]") + q.add("sensorZenithAngle", "*/SAZA") + q.add("sensorCentralFrequency", "*/CSRADSEQ/SCCF") + q.add("solarZenithAngle", "*/SOZA") + q.add("cloudFree", "*/CLFRASEQ{2}/NCLDMNT") + q.add("brightnessTemperature", "*/CSRADSEQ/TMBRST") + q.add("ClearSkyStdDev", "*/SDRADSQ/SDTB") + q.add("solarAzimuthAngle", "*/SOLAZI") + q.add("sensorAzimuthAngle", "*/BEARAZ") + + end_time = time.time() + running_time = end_time - start_time + logger.debug(f"Processing time for 
making QuerySet : {running_time} seconds") + + # ============================================================== + # Open the BUFR file and execute the QuerySet to get ResultSet + # Use the ResultSet returned to get numpy arrays of the data + # ============================================================== + start_time = time.time() + + logger.info("Executing QuerySet to get ResultSet") + with bufr.File(DATA_PATH) as f: + try: + r = f.execute(q) + except Exception as err: + logger.info(f'Return with {err}') + return + # MetaData + satid = r.get("satelliteId") + instid = r.get("sensorId") + year = r.get("year") + month = r.get("month") + day = r.get("day") + hour = r.get("hour") + minute = r.get("minute") + second = r.get("second") + lat = r.get("latitude") + lon = r.get("longitude") + satzenang = r.get("sensorZenithAngle") + chanfreq = r.get("sensorCentralFrequency", type="float") + BT = r.get("brightnessTemperature") + clrStdDev = r.get("ClearSkyStdDev") + cldFree = r.get("cloudFree", type="float") + solzenang = r.get("solarZenithAngle") + solaziang = r.get("solarAzimuthAngle") + sataziang = r.get("sensorAzimuthAngle") + # DateTime: seconds since Epoch time + # IODA has no support for numpy datetime arrays dtype=datetime64[s] + timestamp = r.get_datetime( + "year", "month", "day", "hour", "minute", "second" + ).astype(np.int64) + + # Global variables declaration + # Set global fill values + float32_fill_value = satzenang.fill_value + int32_fill_value = satid.fill_value + int64_fill_value = timestamp.fill_value.astype(np.int64) + + end_time = time.time() + running_time = end_time - start_time + logger.info( + f"Processing time for executing QuerySet to get ResultSet : {running_time} seconds" + ) + + # ========================= + # Create derived variables + # ========================= + start_time = time.time() + rounded_values = np.where(satzenang % 1 > 0.5, np.ceil(satzenang), np.floor(satzenang)) + # Convert to integer and add 1 + scanpos = 
rounded_values.astype(np.int32) + 1 + cloudAmount = 100. - cldFree + # Define the conversion factor from degrees to radians + deg2rad = math.pi/180.0 + sataziang = sataziang*deg2rad + viewang = np.full_like(solzenang, float32_fill_value, dtype=np.float32) + # Define Channel dimension for channels 4 to 11 since the other channel values are missing + channel_start = 7 + channel_end = 16 + channum = np.arange(channel_start, channel_end + 1) + # Define wavenumbers for each satellite ID + wavenum_values_dict = { + 270: np.array( + [ + 257037.4, + 162052.9, + 144355.4, + 136322.8, + 118422, + 104089.1, + 96800.1, + 89400.06, + 81529.43, + 75378.98, + ], + dtype=np.float32, + ), + 271: np.array( + [ + 256550.4, + 159490.2, + 136128.6, + 114870.3, + 103420.4, + 92938.72, + 83886.68, + 75122.19, + 83886.68, + 75122.19, + ], + dtype=np.float32, + ), + } + wavenum_fill_value = float32_fill_value + + logger.info("Creating derived variables") + + end_time = time.time() + running_time = end_time - start_time + logger.info( + f"Processing time for creating derived variables : {running_time} seconds" + ) + + # ===================================== + # Split output based on satellite id + # Create IODA ObsSpace + # Write IODA output + # ===================================== + logger.info("Create IODA ObsSpace and Write IODA output based on satellite ID") + + # Find nique satellite identifiers in data to process + unique_satids = np.unique(satid) + logger.info(f"Number of Unique satellite identifiers: {len(unique_satids)}") + logger.info(f"Unique satellite identifiers: {unique_satids}") + logger.debug(f"Loop through unique satellite identifier {unique_satids}") + total_ob_processed = 0 + for sat in unique_satids.tolist(): + start_time = time.time() + + matched = False + for satellite_info in satellite_info_array: + if satellite_info["satellite_id"] == sat: + matched = True + satellite_id = satellite_info["satellite_id"] + satellite_name = satellite_info["satellite_name"] + satinst = 
sensor_name.lower() + "_" + satellite_name.lower() + logger.debug(f"Split data for {satinst} satid = {sat}") + + if matched: + if satellite_id in wavenum_values_dict: + # Extract the wavenum values for the current satellite ID + Wavenum = wavenum_values_dict[satellite_id] + else: + # If the satellite ID is not in the dictionary + logger.debug(f"satellite ID is not in the dictionary {satellite_id}") + + # Define a boolean mask to subset data from the original data object + satelite_mask = satid == sat + # Define a boolean mask based on the condition 0 < satzenang2 < 80 + satzenang_mask = np.logical_and(0 < satzenang, satzenang < 80) + combined_mask = satzenang_mask & satelite_mask + # MetaData + lon2 = lon[combined_mask] + lat2 = lat[combined_mask] + timestamp2 = timestamp[combined_mask] + satid2 = satid[combined_mask] + instid2 = instid[combined_mask] + satzenang2 = satzenang[combined_mask] + chanfreq2 = chanfreq[6:16] + scanpos2 = scanpos[combined_mask] + solzenang2 = solzenang[combined_mask] + cldFree2 = cldFree[combined_mask] + cloudAmount2 = cloudAmount[combined_mask] + BT2 = BT[combined_mask] + clrStdDev2 = clrStdDev[combined_mask] + viewang2 = viewang.flatten()[combined_mask] + sataziang2 = sataziang.flatten()[combined_mask] + solaziang2 = solaziang.flatten()[combined_mask] + + # Timestamp Range + timestamp2_min = datetime.fromtimestamp(timestamp2.min()) + timestamp2_max = datetime.fromtimestamp(timestamp2.max()) + + # Check unique observation time + unique_timestamp2 = np.unique(timestamp2) + logger.debug(f"Processing output for satid {sat}") + logger.info(f"number of unique_timestamp2 {len(unique_timestamp2)}") + logger.info(f"unique_timestamp2 {unique_timestamp2}") + + # Create the dimensions + dims = { + "Location": np.arange(0, BT2.shape[0]), + "Channel": np.arange(channel_start, channel_end + 1), + } + + # Create IODA ObsSpace + iodafile = f"{cycle_type}.t{hh}z.{satinst}.tm00.nc" + OUTPUT_PATH = os.path.join(ioda_dir, iodafile) + logger.info(f"Create 
output file : {OUTPUT_PATH}") + obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode="w", dim_dict=dims) + + # Create Global attributes + logger.debug("Write global attributes") + obsspace.write_attr("Converter", converter) + obsspace.write_attr("sourceFiles", bufrfile) + obsspace.write_attr("description", data_description) + obsspace.write_attr("datetimeReference", reference_time) + obsspace.write_attr( + "datetimeRange", [str(timestamp2_min), str(timestamp2_max)] + ) + obsspace.write_attr("sensor", sensor_id) + obsspace.write_attr("platform", satellite_id) + obsspace.write_attr("platformCommonName", satellite_name) + obsspace.write_attr("sensorCommonName", sensor_name) + obsspace.write_attr("processingLevel", process_level) + obsspace.write_attr("platformLongDescription", platform_description) + obsspace.write_attr("sensorLongDescription", sensor_description) + + # Create IODA variables + logger.debug("Write variables: name, type, units, and attributes") + + # Sensor Channel Number + obsspace.create_var( + "MetaData/sensorChannelNumber", + dim_list=["Channel"], + dtype=np.int32, + fillval=int32_fill_value, + ).write_attr("long_name", "Sensor Channel Number").write_data(channum) + + # Sensor Central Frequency + obsspace.create_var( + "MetaData/sensorCentralFrequency", + dim_list=["Channel"], + dtype=chanfreq2.dtype, + fillval=chanfreq2.fill_value, + ).write_attr("units", "Hz").write_attr( + "long_name", "Satellite Channel Center Frequency" + ).write_data( + chanfreq2 + ) + + # Sensor Central Wavenumber + obsspace.create_var( + "MetaData/sensorCentralWavenumber", + dim_list=["Channel"], + dtype=Wavenum.dtype, + fillval=wavenum_fill_value, + ).write_attr("units", "m-1").write_attr( + "long_name", "Sensor Central Wavenumber" + ).write_data( + Wavenum + ) + + if np.any(combined_mask): + # Longitude + obsspace.create_var( + "MetaData/longitude", dtype=lon2.dtype, fillval=lon2.fill_value + ).write_attr("units", "degrees_east").write_attr( + "valid_range", np.array([-180, 
180], dtype=np.float32) + ).write_attr( + "long_name", "Longitude" + ).write_data( + lon2 + ) + + # Latitude + obsspace.create_var( + "MetaData/latitude", dtype=lat2.dtype, fillval=lat2.fill_value + ).write_attr("units", "degrees_north").write_attr( + "valid_range", np.array([-90, 90], dtype=np.float32) + ).write_attr( + "long_name", "Latitude" + ).write_data( + lat2 + ) + + # Datetime + obsspace.create_var( + "MetaData/dateTime", dtype=np.int64, fillval=int64_fill_value + ).write_attr("units", "seconds since 1970-01-01T00:00:00Z").write_attr( + "long_name", "Datetime" + ).write_data( + timestamp2 + ) + + # Satellite Identifier + obsspace.create_var( + "MetaData/satelliteIdentifier", + dtype=satid2.dtype, + fillval=satid2.fill_value, + ).write_attr("long_name", "Satellite Identifier").write_data(satid2) + + # Instrument Identifier + obsspace.create_var( + "MetaData/instrumentIdentifier", + dtype=instid2.dtype, + fillval=instid2.fill_value, + ).write_attr("long_name", "Satellite Instrument Identifier").write_data( + instid2 + ) + + # Scan Position (derived variable, need to specified fill value explicitly) + obsspace.create_var( + "MetaData/sensorScanPosition", + dtype=scanpos2.astype(np.int32).dtype, + fillval=int32_fill_value, + ).write_attr("long_name", "Sensor Scan Position").write_data(scanpos2) + + # Sensor Zenith Angle + obsspace.create_var( + "MetaData/sensorZenithAngle", + dtype=satzenang2.dtype, + fillval=satzenang2.fill_value, + ).write_attr("units", "degree").write_attr( + "valid_range", np.array([0, 90], dtype=np.float32) + ).write_attr( + "long_name", "Sensor Zenith Angle" + ).write_data( + satzenang2 + ) + + # Sensor Azimuth Angle + obsspace.create_var( + "MetaData/sensorAzimuthAngle", + dtype=np.float32, + fillval=sataziang2.fill_value, + ).write_attr("units", "degree").write_attr( + "valid_range", np.array([0, 360], dtype=np.float32) + ).write_attr( + "long_name", "Sensor Azimuth Angle" + ).write_data( + sataziang2 + ) + + # Solar Azimuth Angle + 
obsspace.create_var( + "MetaData/solarAzimuthAngle", + dtype=np.float32, + fillval=solaziang2.fill_value, + ).write_attr("units", "degree").write_attr( + "valid_range", np.array([0, 360], dtype=np.float32) + ).write_attr( + "long_name", "Solar Azimuth Angle" + ).write_data( + solaziang2 + ) + + # Sensor View Angle + obsspace.create_var( + "MetaData/sensorViewAngle", + dtype=np.float32, + fillval=viewang2.fill_value, + ).write_attr("units", "degree").write_attr( + "long_name", "Sensor View Angle" + ).write_data( + viewang2 + ) + + # Solar Zenith Angle + obsspace.create_var( + "MetaData/solarZenithAngle", + dtype=solzenang2.dtype, + fillval=solzenang2.fill_value, + ).write_attr("units", "degree").write_attr( + "valid_range", np.array([0, 180], dtype=np.float32) + ).write_attr( + "long_name", "Solar Zenith Angle" + ).write_data( + solzenang2 + ) + + # Cloud free + obsspace.create_var( + "MetaData/cloudFree", + dtype=cldFree2.dtype, fillval=int32_fill_value + ).write_attr("units", "1").write_attr( + "valid_range", np.array([0, 100], dtype=np.int32) + ).write_attr( + "long_name", "Amount Segment Cloud Free" + ).write_data( + cldFree2 + ) + + # Cloud amount based on computation + obsspace.create_var( + "MetaData/cloudAmount", + dtype=cloudAmount2.dtype, + fillval=cloudAmount2.fill_value, + ).write_attr("units", "1").write_attr( + "valid_range", np.array([0, 100], dtype=np.float32) + ).write_attr( + "long_name", "Amount of cloud coverage in layer" + ).write_data( + cloudAmount2 + ) + + # ObsType based on computation method/spectral band + obsspace.create_var( + "ObsValue/brightnessTemperature", + dim_list=["Location", "Channel"], + dtype=np.float32, + fillval=BT2.fill_value, + ).write_attr("units", "k").write_attr( + "long_name", "Brightness Temperature" + ).write_data( + BT2 + ) + + obsspace.create_var( + "ClearSkyStdDev/brightnessTemperature", + dim_list=["Location", "Channel"], + dtype=np.float32, + fillval=clrStdDev2.fill_value, + ).write_attr( + "long_name", 
"Standard Deviation Brightness Temperature" + ).write_data( + clrStdDev2 + ) + + else: + logger.debug( + "No valid values (0 Date: Fri, 1 Nov 2024 15:11:38 -0400 Subject: [PATCH 15/27] load esmf/8.5.0 in gaea.intel.lua, use staged CRTM_FIX on all machines (#1327) --- build.sh | 14 ++++++-------- modulefiles/GDAS/gaea.intel.lua | 13 +++++++------ 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/build.sh b/build.sh index 42ec34284..4ebb3666e 100755 --- a/build.sh +++ b/build.sh @@ -103,14 +103,12 @@ WORKFLOW_BUILD=${WORKFLOW_BUILD:-"OFF"} CMAKE_OPTS+=" -DWORKFLOW_TESTS=${WORKFLOW_BUILD}" # JCSDA changed test data things, need to make a dummy CRTM directory -if [[ $BUILD_TARGET == 'hera' ]]; then - if [ -d "$dir_root/bundle/fix/test-data-release/" ]; then rm -rf $dir_root/bundle/fix/test-data-release/; fi - if [ -d "$dir_root/bundle/test-data-release/" ]; then rm -rf $dir_root/bundle/test-data-release/; fi - mkdir -p $dir_root/bundle/fix/test-data-release/ - mkdir -p $dir_root/bundle/test-data-release/ - ln -sf $GDASAPP_TESTDATA/crtm $dir_root/bundle/fix/test-data-release/crtm - ln -sf $GDASAPP_TESTDATA/crtm $dir_root/bundle/test-data-release/crtm -fi +if [ -d "$dir_root/bundle/fix/test-data-release/" ]; then rm -rf $dir_root/bundle/fix/test-data-release/; fi +if [ -d "$dir_root/bundle/test-data-release/" ]; then rm -rf $dir_root/bundle/test-data-release/; fi +mkdir -p $dir_root/bundle/fix/test-data-release/ +mkdir -p $dir_root/bundle/test-data-release/ +ln -sf $GDASAPP_TESTDATA/crtm $dir_root/bundle/fix/test-data-release/crtm +ln -sf $GDASAPP_TESTDATA/crtm $dir_root/bundle/test-data-release/crtm # Configure echo "Configuring ..." 
diff --git a/modulefiles/GDAS/gaea.intel.lua b/modulefiles/GDAS/gaea.intel.lua index 85d779669..d1aa1df6c 100644 --- a/modulefiles/GDAS/gaea.intel.lua +++ b/modulefiles/GDAS/gaea.intel.lua @@ -10,8 +10,8 @@ prepend_path("MODULEPATH", '/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/u prepend_path("MODULEPATH", '/ncrc/proj/epic/rocoto/modulefiles') -- below two lines get us access to the spack-stack modules -load("stack-intel/2023.1.0") -load("stack-cray-mpich/8.1.25") +load("stack-intel/2023.2.0") +load("stack-cray-mpich/8.1.28") -- JCSDA has 'jedi-fv3-env/unified-dev', but we should load these manually as needed load("cmake/3.23.1") load("gettext/0.20.2") @@ -44,11 +44,12 @@ load("fckit/0.11.0") load("fiat/1.2.0") load("ectrans/1.2.0") load("fms/2023.04") +load("esmf/8.5.0") load("atlas/0.35.1") load("sp/2.5.0") load("gsl-lite/0.37.0") load("libjpeg/2.1.0") -load("krb5/1.16.3") +load("krb5/1.20.1") load("libtirpc/1.3.3") load("hdf/4.2.15") load("jedi-cmake/1.4.0") @@ -84,9 +85,9 @@ local mpinproc = '-n' setenv('MPIEXEC_EXEC', mpiexec) setenv('MPIEXEC_NPROC', mpinproc) -setenv("CRTM_FIX","/gpfs/f5/ufs-ard/world-shared/GDASApp/fix/crtm/2.4.0") -setenv("GDASAPP_TESTDATA","/gpfs/f5/ufs-ard/world-shared/GDASApp/CI/data") -setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/gpfs/f5/ufs-ard/world-shared/GDASApp/CI/data/test") +setenv("CRTM_FIX","/gpfs/f5/nggps_emc/world-shared/GDASApp/fix/crtm/2.4.0") +setenv("GDASAPP_TESTDATA","/gpfs/f5/nggps_emc/world-shared/GDASApp/testdata") +setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/gpfs/f5/nggps_emc/world-shared/GDASApp/unittestdata") whatis("Name: ".. "pkgName") whatis("Version: ".. 
"pkgVersion") From e9607fc9d27a2d5e9dde9c2f66903d5ec06f4280 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA <134300700+DavidNew-NOAA@users.noreply.github.com> Date: Wed, 6 Nov 2024 08:53:15 -0500 Subject: [PATCH 16/27] Marine B-matrix refactor companion PR (#1346) This PR is a companion to Global Workflow PR [#2992](https://github.com/NOAA-EMC/global-workflow/pull/2992) and JCB-GDAS PR [#36](https://github.com/NOAA-EMC/jcb-gdas/pull/36). It makes a few necessary changes to the marine JCB base YAML required to have JCB working in the B-matrix task in the Global Workflow. --- parm/jcb-gdas | 2 +- parm/soca/berror/soca_diagb.yaml.j2 | 37 ------- parm/soca/berror/soca_ensb.yaml.j2 | 98 ------------------- parm/soca/berror/soca_ensweights.yaml.j2 | 37 ------- .../soca_parameters_diffusion_hz.yaml.j2 | 37 ------- .../soca_parameters_diffusion_vt.yaml.j2 | 33 ------- parm/soca/berror/soca_setcorscales.yaml | 23 ----- parm/soca/berror/soca_vtscales.yaml.j2 | 13 --- parm/soca/gridgen/gridgen.yaml | 5 - parm/soca/marine-jcb-base.yaml | 9 +- parm/soca/soca_fix_stage_500.yaml.j2 | 2 - parm/soca/soca_utils_stage.yaml.j2 | 1 - 12 files changed, 7 insertions(+), 290 deletions(-) delete mode 100644 parm/soca/berror/soca_diagb.yaml.j2 delete mode 100644 parm/soca/berror/soca_ensb.yaml.j2 delete mode 100644 parm/soca/berror/soca_ensweights.yaml.j2 delete mode 100644 parm/soca/berror/soca_parameters_diffusion_hz.yaml.j2 delete mode 100644 parm/soca/berror/soca_parameters_diffusion_vt.yaml.j2 delete mode 100644 parm/soca/berror/soca_setcorscales.yaml delete mode 100644 parm/soca/berror/soca_vtscales.yaml.j2 delete mode 100644 parm/soca/gridgen/gridgen.yaml diff --git a/parm/jcb-gdas b/parm/jcb-gdas index 7717c0e74..b8e995a4c 160000 --- a/parm/jcb-gdas +++ b/parm/jcb-gdas @@ -1 +1 @@ -Subproject commit 7717c0e7401e344a6bce37a4f8ecc11399256936 +Subproject commit b8e995a4cbf01fa4a662c3da3e7d818f8457ec4e diff --git a/parm/soca/berror/soca_diagb.yaml.j2 b/parm/soca/berror/soca_diagb.yaml.j2 
deleted file mode 100644 index c0597d358..000000000 --- a/parm/soca/berror/soca_diagb.yaml.j2 +++ /dev/null @@ -1,37 +0,0 @@ -geometry: - mom6_input_nml: mom_input.nml - fields metadata: ./fields_metadata.yaml - -date: '{{ MARINE_WINDOW_END | to_isotime }}' - -background: - date: '{{ MARINE_WINDOW_END | to_isotime }}' - basename: ./bkg/ - ocn_filename: 'ocean.bkg.f009.nc' - ice_filename: 'ice.bkg.f009.nc' - read_from_file: 1 - -background error: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_MIDDLE | to_isotime }}' - exp: bkgerr_stddev - type: incr - -variables: - name: [tocn, socn, uocn, vocn, hocn, ssh, cicen, hicen, hsnon, mom6_mld] - -rescale: 2.0 # rescales the filtered std. dev. by "rescale" -min sst: 0.0 # Added to sst bkg. err. -max ssh: 0.0 # Limits the amplitude of the unbalanced bkg err -min depth: 500.0 # zero out the bkg. error. at less than min depth -number of halo points: 4 -number of neighbors: 16 - -simple smoothing: - horizontal iterations: 10 - vertical iterations: 1 - -# TODO(G): Too slow for the below scale -#diffusion: -# horizontal: 500.0e3 -# vertical: 3.0 diff --git a/parm/soca/berror/soca_ensb.yaml.j2 b/parm/soca/berror/soca_ensb.yaml.j2 deleted file mode 100644 index 4033f41ba..000000000 --- a/parm/soca/berror/soca_ensb.yaml.j2 +++ /dev/null @@ -1,98 +0,0 @@ -# Configuration for the recentering and re-balancing of the ensemble members -geometry: - mom6_input_nml: mom_input.nml - fields metadata: ./fields_metadata.yaml - -date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - -layers variable: [hocn] - -increment variables: [tocn, socn, uocn, vocn, ssh, hocn, cicen, hicen, hsnon] - -set increment variables to zero: [ssh] - -vertical geometry: - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - basename: ./INPUT/ - ocn_filename: MOM.res.nc - read_from_file: 3 - -add recentering increment: false - -soca increments: # Could also be states, but they are read as increments - number of increments: {{ NMEM_ENS }} - pattern: '%mem%' - template: - date: 
'{{ MARINE_WINDOW_BEGIN | to_isotime }}' - basename: '{{ ENSPERT_RELPATH }}/ens/' - ocn_filename: 'ocean.%mem%.nc' - ice_filename: 'ice.%mem%.nc' - read_from_file: 3 - -steric height: - linear variable changes: - - linear variable change name: BalanceSOCA # Only the steric balance is applied - -ensemble mean output: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: ens_mean - type: incr - -ssh output: - unbalanced: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: ssh_unbal_stddev - type: incr - - steric: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: ssh_steric_stddev - type: incr - - total: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: ssh_total_stddev - type: incr - - explained variance: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: steric_explained_variance - type: incr - - recentering error: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: ssh_recentering_error - type: incr - -background error output: - datadir: ./staticb/ - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: bkgerr_stddev - type: incr - -linear variable change: - linear variable changes: - - linear variable change name: BalanceSOCA - -trajectory: - state variables: [tocn, socn, uocn, vocn, ssh, hocn, layer_depth, mld, cicen, hicen, hsnon] - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - basename: ./INPUT/ - ocn_filename: MOM.res.nc - ice_filename: cice.res.nc - read_from_file: 1 - -output increment: - # TODO: Revert this when fms can take more than 128 charactres file names - datadir: '{{ ENSPERT_RELPATH }}/enspert/' - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - exp: trash - type: incr - output file: 'ocn.pert.steric.%mem%.nc' - pattern: '%mem%' diff --git a/parm/soca/berror/soca_ensweights.yaml.j2 b/parm/soca/berror/soca_ensweights.yaml.j2 deleted file mode 100644 index f677d2a8d..000000000 --- 
a/parm/soca/berror/soca_ensweights.yaml.j2 +++ /dev/null @@ -1,37 +0,0 @@ -geometry: - mom6_input_nml: mom_input.nml - fields metadata: ./fields_metadata.yaml - -date: '{{ MARINE_WINDOW_MIDDLE | to_isotime }}' - -variables: - ice: [cicen, hicen, hsnon] - ocean: [tocn, socn, uocn, vocn, ssh] - -background: - date: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' - basename: ./INPUT/ - ocn_filename: MOM.res.nc - ice_filename: cice.res.nc - read_from_file: 1 - -weights: - # Need to provide weights^2 when reading from file - ice: 0.0025 # 5% of original variance - ocean: 0.0625 # 25% " " - # Apply localized weights to the ocean ens. B - ocean local weights: - - lon: -172.0 - lat: 11.0 - amplitude: -1.0 - length scale: 700.0 - - lon: -160.0 - lat: 12.0 - amplitude: -1.0 - length scale: 700.0 - -output: - datadir: ./ - date: '{{ MARINE_WINDOW_MIDDLE | to_isotime }}' - exp: ens_weights - type: incr diff --git a/parm/soca/berror/soca_parameters_diffusion_hz.yaml.j2 b/parm/soca/berror/soca_parameters_diffusion_hz.yaml.j2 deleted file mode 100644 index 7d3a78cfb..000000000 --- a/parm/soca/berror/soca_parameters_diffusion_hz.yaml.j2 +++ /dev/null @@ -1,37 +0,0 @@ -geometry: &geom - mom6_input_nml: mom_input.nml - fields metadata: ./fields_metadata.yaml - -background: - read_from_file: 1 - basename: ./INPUT/ - ocn_filename: MOM.res.nc - ice_filename: cice.res.nc - date: '{{ MARINE_WINDOW_END | to_isotime }}' - state variables: [ssh] - -background error: - covariance model: SABER - saber central block: - saber block name: diffusion - geometry: *geom - calibration: - normalization: - method: randomization - iterations: 10000 - - groups: - - horizontal: - model file: - date: '{{ MARINE_WINDOW_END | to_isotime }}' - basename: ./ - ocn_filename: ocn.cor_rh.incr.0001-01-01T00:00:00Z.nc - model variable: ssh - write: - filepath: ./staticb/hz_ocean - - - horizontal: - as gaussian: true - fixed value: 50000.0 - write: - filepath: ./staticb/hz_ice diff --git 
a/parm/soca/berror/soca_parameters_diffusion_vt.yaml.j2 b/parm/soca/berror/soca_parameters_diffusion_vt.yaml.j2 deleted file mode 100644 index 76ab67e94..000000000 --- a/parm/soca/berror/soca_parameters_diffusion_vt.yaml.j2 +++ /dev/null @@ -1,33 +0,0 @@ -geometry: &geom - mom6_input_nml: mom_input.nml - fields metadata: ./fields_metadata.yaml - -background: - read_from_file: 1 - basename: ./INPUT/ - ocn_filename: MOM.res.nc - ice_filename: cice.res.nc - date: '{{ MARINE_WINDOW_MIDDLE | to_isotime }}' - state variables: [tocn] - -background error: - covariance model: SABER - saber central block: - saber block name: diffusion - geometry: *geom - calibration: - normalization: - # NOTE, not actually used here, since the normalization spec is only used for hz - method: randomization #< other option is "brute force" - iterations: 1000 #< in the real world you'll want to use 1e4 or so - - groups: - - vertical: - as gaussian: true - model file: - date: '{{ MARINE_WINDOW_MIDDLE | to_isotime }}' - basename: ./ - ocn_filename: vt_scales.nc - model variable: tocn - write: - filepath: ./staticb/vt_ocean diff --git a/parm/soca/berror/soca_setcorscales.yaml b/parm/soca/berror/soca_setcorscales.yaml deleted file mode 100644 index 0a91777a1..000000000 --- a/parm/soca/berror/soca_setcorscales.yaml +++ /dev/null @@ -1,23 +0,0 @@ -resolution: - mom6_input_nml: mom_input.nml - fields metadata: ./fields_metadata.yaml - -date: 0001-01-01T00:00:00Z - -corr variables: [ssh] - -scales: - vert layers: 5 # in units of layer - ssh: - rossby mult: 1.00 - min grid mult: 2.0 - -rh output: - datadir: ./ - exp: cor_rh - type: incr - -rv output: - datadir: ./ - exp: cor_rv - type: incr diff --git a/parm/soca/berror/soca_vtscales.yaml.j2 b/parm/soca/berror/soca_vtscales.yaml.j2 deleted file mode 100644 index 8f68b1517..000000000 --- a/parm/soca/berror/soca_vtscales.yaml.j2 +++ /dev/null @@ -1,13 +0,0 @@ -gridspec_filename: soca_gridspec.nc -restart_filename: ./INPUT/MOM.res.nc -mld_filename: 
'./staticb/ocn.bkgerr_stddev.incr.{{ MARINE_WINDOW_END | to_isotime }}.nc' -output_filename: ./vt_scales.nc -output_variable_vt: Temp -output_variable_hz: ave_ssh - -VT_MIN: 5 -VT_MAX: 15 - -HZ_ROSSBY_MULT: 1.0 -HZ_MAX: 200e3 -HZ_MIN_GRID_MULT: 2.0 diff --git a/parm/soca/gridgen/gridgen.yaml b/parm/soca/gridgen/gridgen.yaml deleted file mode 100644 index 34fbdeca6..000000000 --- a/parm/soca/gridgen/gridgen.yaml +++ /dev/null @@ -1,5 +0,0 @@ -geometry: - geom_grid_file: soca_gridspec.nc - mom6_input_nml: mom_input.nml - fields metadata: fields_metadata.yaml - rossby file: rossrad.nc diff --git a/parm/soca/marine-jcb-base.yaml b/parm/soca/marine-jcb-base.yaml index cb5230f1c..d07edcd8c 100644 --- a/parm/soca/marine-jcb-base.yaml +++ b/parm/soca/marine-jcb-base.yaml @@ -17,7 +17,7 @@ final_increment_file: marine_final_increment # Assimilation standard things (not prepended with model choice) # ---------------------------- -window_begin: '{{MARINE_WINDOW_BEGIN}}' +window_begin: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' window_length: '{{MARINE_WINDOW_LENGTH}}' bound_to_include: begin minimizer: RPCG @@ -31,6 +31,10 @@ analysis_variables: [cicen, hicen, hsnon, socn, tocn, uocn, vocn, ssh] # ------------ marine_window_begin: '{{MARINE_WINDOW_BEGIN}}' marine_window_middle: '{{MARINE_WINDOW_MIDDLE}}' +marine_window_begin_iso: '{{ MARINE_WINDOW_BEGIN | to_isotime }}' +marine_window_middle_iso: '{{ MARINE_WINDOW_MIDDLE | to_isotime }}' +marine_window_end_iso: '{{ MARINE_WINDOW_END | to_isotime }}' +enspert_relpath: '{{ ENSPERT_RELPATH }}' # Geometry marine_soca_files_path: . 
@@ -42,11 +46,10 @@ marine_background_time: '{{MARINE_WINDOW_BEGIN_ISO}}' # Pseudo model marine_forecast_timestep: PT3H -marine_pseudo_model_states: !INC 'bkg_list.yaml' # Background error model background_error_file: '{{berror_model}}' -marine_number_ensemble_members: '{{nmem_ens}}' +marine_number_ensemble_members: '{{NMEM_ENS}}' marine_stddev_time: '{{MARINE_WINDOW_MIDDLE}}' # Observations diff --git a/parm/soca/soca_fix_stage_500.yaml.j2 b/parm/soca/soca_fix_stage_500.yaml.j2 index 0b25073ba..9f030a81f 100644 --- a/parm/soca/soca_fix_stage_500.yaml.j2 +++ b/parm/soca/soca_fix_stage_500.yaml.j2 @@ -9,8 +9,6 @@ copy: - ["{{ SOCA_INPUT_FIX_DIR }}/field_table", "{{ DATA }}/field_table"] - ["{{ SOCA_INPUT_FIX_DIR }}/diag_table", "{{ DATA }}/diag_table"] - ["{{ SOCA_INPUT_FIX_DIR }}/MOM_input", "{{ DATA }}/MOM_input"] -- ["{{ SOCA_INPUT_FIX_DIR }}/fields_metadata.yaml", "{{ DATA }}/fields_metadata.yaml"] -- ["{{ SOCA_INPUT_FIX_DIR }}/obsop_name_map.yaml", "{{ DATA }}/obsop_name_map.yaml"] - ["{{ SOCA_INPUT_FIX_DIR }}/INPUT/grid_spec.nc", "{{ DATA }}/INPUT/grid_spec.nc"] - ["{{ SOCA_INPUT_FIX_DIR }}/INPUT/hycom1_25.nc", "{{ DATA }}/INPUT/hycom1_25.nc"] - ["{{ SOCA_INPUT_FIX_DIR }}/INPUT/layer_coord25.nc", "{{ DATA }}/INPUT/layer_coord25.nc"] diff --git a/parm/soca/soca_utils_stage.yaml.j2 b/parm/soca/soca_utils_stage.yaml.j2 index 26570abe3..462cfccd0 100644 --- a/parm/soca/soca_utils_stage.yaml.j2 +++ b/parm/soca/soca_utils_stage.yaml.j2 @@ -4,4 +4,3 @@ copy: - ["{{ HOMEgfs }}/parm/gdas/soca/fields_metadata.yaml", "{{ DATA }}/fields_metadata.yaml"] - ["{{ HOMEgfs }}/parm/gdas/soca/obsop_name_map.yaml", "{{ DATA }}/obsop_name_map.yaml"] -- ["{{ HOMEgfs }}/parm/gdas/soca/gridgen/gridgen.yaml", "{{ DATA }}/gridgen.yaml"] From 2f1638f126d010ee2f34aa4da5013ce14549d436 Mon Sep 17 00:00:00 2001 From: DavidBurrows-NCO <82525974+DavidBurrows-NCO@users.noreply.github.com> Date: Wed, 6 Nov 2024 14:05:14 -0500 Subject: [PATCH 17/27] Update build scripts on Gaea-C5 to conform with 
ufs-wx-model and global-workflow (#1361) After the recent Gaea-C5 OS upgrade, GDASApp fails to build. This issue corrects Gaea-C5 build and updates the build scripts to conform to ufs-wx-model (following ufs-wx-model https://github.com/ufs-community/ufs-weather-model/pull/2448) and eventual global-workflow updates. Refs NOAA-EMC/global-workflow 3011 https://github.com/NOAA-EMC/global-workflow/issues/3011 Refs NOAA-EMC/global-workflow 3032 https://github.com/NOAA-EMC/global-workflow/pull/3032 Resolves #1360 --- build.sh | 2 +- .../GDAS/{gaea.intel.lua => gaeac5.intel.lua} | 0 ush/detect_machine.sh | 13 ++++++++----- 3 files changed, 9 insertions(+), 6 deletions(-) rename modulefiles/GDAS/{gaea.intel.lua => gaeac5.intel.lua} (100%) diff --git a/build.sh b/build.sh index 4ebb3666e..6445f80cf 100755 --- a/build.sh +++ b/build.sh @@ -71,7 +71,7 @@ while getopts "p:t:c:hvdfa" opt; do done case ${BUILD_TARGET} in - hera | orion | hercules | wcoss2 | noaacloud | gaea | gaeac6 ) + hera | orion | hercules | wcoss2 | noaacloud | gaeac5 | gaeac6 ) echo "Building GDASApp on $BUILD_TARGET" source $dir_root/ush/module-setup.sh module use $dir_root/modulefiles diff --git a/modulefiles/GDAS/gaea.intel.lua b/modulefiles/GDAS/gaeac5.intel.lua similarity index 100% rename from modulefiles/GDAS/gaea.intel.lua rename to modulefiles/GDAS/gaeac5.intel.lua diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 997c394fa..ab039aebf 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -21,8 +21,8 @@ case $(hostname -f) in dlogin0[1-9].dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood01-9 dlogin10.dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood10 - gaea5[1-8]) MACHINE_ID=gaea ;; ### gaea51-58 - gaea5[1-8].ncrc.gov) MACHINE_ID=gaea ;; ### gaea51-58 + gaea5[1-8]) MACHINE_ID=gaeac5 ;; ### gaea51-58 + gaea5[1-8].ncrc.gov) MACHINE_ID=gaeac5 ;; ### gaea51-58 gaea6[1-8]) MACHINE_ID=gaeac6 ;; ### gaea61-68 gaea6[1-8].ncrc.gov) MACHINE_ID=gaeac6 ;; 
### gaea61-68 @@ -84,9 +84,12 @@ elif [[ -d /work ]]; then else MACHINE_ID=orion fi -elif [[ -d /gpfs && -d /ncrc ]]; then - # We are on GAEA. - MACHINE_ID=gaea +elif [[ -d /gpfs/f5 ]]; then + # We are on GAEAC5. + MACHINE_ID=gaeac5 +elif [[ -d /gpfs/f6 ]]; then + # We are on GAEAC6. + MACHINE_ID=gaeac6 elif [[ -d /data/prod ]]; then # We are on SSEC's S4 MACHINE_ID=s4 From e5d6563dcc44caa713e75f3626dadac76f3a955a Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Wed, 6 Nov 2024 14:46:55 -0500 Subject: [PATCH 18/27] Use JCB for aerosol DA (#1343) What the title says. Depends on: https://github.com/NOAA-EMC/jcb-gdas/pull/33 https://github.com/NOAA-EMC/jcb-algorithms/pull/5 --- parm/aero/jcb-base.yaml.j2 | 136 ++++++++++++++++++++++++++ parm/aero/jcb-prototype_3dvar.yaml.j2 | 10 ++ parm/jcb-algorithms | 2 +- 3 files changed, 147 insertions(+), 1 deletion(-) create mode 100644 parm/aero/jcb-base.yaml.j2 create mode 100644 parm/aero/jcb-prototype_3dvar.yaml.j2 diff --git a/parm/aero/jcb-base.yaml.j2 b/parm/aero/jcb-base.yaml.j2 new file mode 100644 index 000000000..86988206d --- /dev/null +++ b/parm/aero/jcb-base.yaml.j2 @@ -0,0 +1,136 @@ +# Search path for model and obs for JCB +# ------------------------------------- +algorithm_path: "{{PARMgfs}}/gdas/jcb-algorithms" +app_path_algorithm: "{{PARMgfs}}/gdas/jcb-gdas/algorithm/aero" +app_path_model: "{{PARMgfs}}/gdas/jcb-gdas/model/aero" +app_path_observations: "{{PARMgfs}}/gdas/jcb-gdas/observations/aero" +app_path_observation_chronicle: "{{PARMgfs}}/gdas/jcb-gdas/observation_chronicle/aero" + + +# Places where we deviate from the generic file name of a yaml +# ------------------------------------------------------------ +final_increment_to_latlon_file: aero_final_increment_gaussian +final_increment_file: aero_final_increment_cubed_sphere +model_file: aero_model_pseudo +initial_condition_file: aero_background # Initial conditions for 4D apps is background +background_error_file: "{{BERROR_YAML}}" + +# Assimilation 
standard things (not prepended with model choice) +# ---------------------------- +window_begin: "{{ AERO_WINDOW_BEGIN | to_isotime }}" +window_length: "{{ AERO_WINDOW_LENGTH }}" +bound_to_include: begin +minimizer: DRPCG +final_diagnostics_departures: anlmob +final_prints_frequency: PT3H +number_of_outer_loops: 2 +analysis_variables: [mass_fraction_of_sulfate_in_air, + mass_fraction_of_hydrophobic_black_carbon_in_air, + mass_fraction_of_hydrophilic_black_carbon_in_air, + mass_fraction_of_hydrophobic_organic_carbon_in_air, + mass_fraction_of_hydrophilic_organic_carbon_in_air, + mass_fraction_of_dust001_in_air, mass_fraction_of_dust002_in_air, + mass_fraction_of_dust003_in_air, mass_fraction_of_dust004_in_air, + mass_fraction_of_dust005_in_air, mass_fraction_of_sea_salt001_in_air, + mass_fraction_of_sea_salt002_in_air, mass_fraction_of_sea_salt003_in_air, + mass_fraction_of_sea_salt004_in_air] + +# Model things +# ------------ +# Geometry +aero_layout_x: {{ layout_x | default(1, true) }} +aero_layout_y: {{ layout_y | default(1, true) }} +aero_npx_ges: {{ npx_ges | default(49, true) }} +aero_npy_ges: {{ npy_ges | default(49, true) }} +aero_npz_ges: {{ npz_ges | default(127, true) }} +aero_npx_anl: {{ npx_anl | default(49, true) }} +aero_npy_anl: {{ npy_anl | default(49, true) }} +aero_npz_anl: {{ npz_anl | default(127, true) }} +aero_npx_clim_b: {{ npx_clim_b | default(49, true) }} +aero_npy_clim_b: {{ npy_clim_b | default(49, true) }} +aero_npz_clim_b: {{ npz_anl | default(127, true) }} + +aero_fv3jedi_files_path: ./fv3jedi # Ideally this would be {{DATA}}/fv3jedi but FMS + +# Background +aero_background_path: ./bkg +aero_background_ensemble_path: ./ens/mem%mem% + +# Default background time is for 3D applications +{% if DOIAU == True %} +aero_background_time_iso: "{{ AERO_WINDOW_BEGIN | to_isotime }}" +{% else %} +aero_background_time_iso: "{{ current_cycle | to_isotime }}" +{% endif %} +aero_cycle_time_iso: "{{ current_cycle | to_isotime }}" +aero_cycle_time_fv3: 
"{{ current_cycle | to_fv3time }}" + +# time for background error calculation for next cycle +{% set offset_td = "+6H" | to_timedelta %} +{% set background_time = current_cycle | add_to_datetime(offset_td) %} +aero_background_error_time_iso: "{{ background_time | to_isotime }}" +aero_background_error_time_fv3: "{{ background_time | to_fv3time }}" + +# Background error +aero_berror_data_directory: "{{ DATA }}/berror" +aero_berror_diffusion_directory: "{{ DATA }}/diffusion" +aero_standard_devation_path: ./stddev +aero_climatological_b_path: ./clm_stddev +aero_diagb_weight: {{ aero_diagb_weight | default(1.0, true) }} +aero_diagb_static_rescale_factor: {{aero_staticb_rescaling_factor | default(1.0, true) }} +aero_diagb_rescale_factor: {{aero_diagb_rescale | default(1.0, true) }} +aero_diagb_n_halo: {{ aero_diagb_n_halo | default(1, true) }} +aero_diagb_n_neighbors: {{ aero_diagb_n_neighbors | default(1, true) }} +aero_diagb_smooth_horiz_iter: {{ aero_diagb_smooth_horiz_iter | default(1, true) }} +aero_diagb_smooth_vert_iter: {{ aero_diagb_smooth_vert_iter | default(1, true) }} +aero_diffusion_iter: {{ aero_diffusion_iter | default(1, true) }} +aero_diffusion_horiz_len: {{ aero_diffusion_horiz_len | default(1.0, true)}} +aero_diffusion_fixed_val: {{ aero_diffusion_fixed_val | default(1.0, true)}} + +# Forecasting +aero_forecast_timestep: "{{ BKG_TSTEP }}" + +# Observation things +# ------------------ +observations: all_observations + +crtm_coefficient_path: "{{ DATA }}/crtm/" + +# Naming conventions for observational files +aero_obsdataroot_path: "{{COM_OBS}}" + +aero_obsdatain_path: "{{aero_obsdatain_path}}" +aero_obsdatain_prefix: "{{OPREFIX}}" +aero_obsdatain_suffix: ".tm00.nc" + +aero_obsdataout_path: "{{aero_obsdataout_path}}" +aero_obsdataout_prefix: diag_ +aero_obsdataout_suffix: "_{{ current_cycle | to_YMDH }}.nc" + +# Naming conventions for bias correction files +aero_obsbiasroot_path: "{{COM_CHEM_ANALYSIS_PREV}}" + +aero_obsbiasin_path: "{{DATA}}/obs/" 
+aero_obsbiasin_prefix: "{{GPREFIX}}" +aero_obsbiasin_suffix: ".satbias.nc" +aero_obstlapsein_prefix: "{{GPREFIX}}" +aero_obstlapsein_suffix: ".tlapse.txt" +aero_obsbiascovin_prefix: "{{GPREFIX}}" +aero_obsbiascovin_suffix: ".satbias_cov.nc" + +aero_obsbiasout_path: "{{DATA}}/bc/" +aero_obsbiasout_prefix: "{{APREFIX}}" +aero_obsbiasout_suffix: ".satbias.nc" +aero_obsbiascovout_prefix: "{{APREFIX}}" +aero_obsbiascovout_suffix: ".satbias_cov.nc" + +bias_files: + atms_n20: rad_varbc_params.tar + atms_npp: rad_varbc_params.tar + mtiasi_metop-a: rad_varbc_params.tar + mtiasi_metop-b: rad_varbc_params.tar + amsua_n19: rad_varbc_params.tar + ssmis_f17: rad_varbc_params.tar + ssmis_f18: rad_varbc_params.tar + cris-fsr_n20: rad_varbc_params.tar + cris-fsr_npp: rad_varbc_params.tar diff --git a/parm/aero/jcb-prototype_3dvar.yaml.j2 b/parm/aero/jcb-prototype_3dvar.yaml.j2 new file mode 100644 index 000000000..da4739bca --- /dev/null +++ b/parm/aero/jcb-prototype_3dvar.yaml.j2 @@ -0,0 +1,10 @@ +# Algorithm +# --------- +algorithm: 3dfgat + +# Observation things +# ------------------ +observations: +- viirs_n20_aod +- viirs_npp_aod +# - viirs_n21_aod diff --git a/parm/jcb-algorithms b/parm/jcb-algorithms index a6822d8c1..43d8ff6ba 160000 --- a/parm/jcb-algorithms +++ b/parm/jcb-algorithms @@ -1 +1 @@ -Subproject commit a6822d8c1e72f6b1bf951e378b153cb6df1faee5 +Subproject commit 43d8ff6ba14baf9402ee58d0f3351a143c21211b From 5bb63e7428280e9e876b44e14889c4c08b7b3338 Mon Sep 17 00:00:00 2001 From: emilyhcliu <36091766+emilyhcliu@users.noreply.github.com> Date: Wed, 6 Nov 2024 15:22:16 -0500 Subject: [PATCH 19/27] Rename mtiasi to iasi (#1292) 1. Rename mtiasi to iasi in GDASApp 2. 
There is a companion [PR #29](https://github.com/NOAA-EMC/jcb-gdas/pull/29) in jcb-gdas - rename mtiasi to iasi - add `reduce obs space` - remove obsolete QC-flag related variables in filters This partly resolves Issue #1291 Co-authored-by: Emily Liu Co-authored-by: Cory Martin --- parm/atm/jcb-prototype_3dvar.yaml.j2 | 4 ++-- parm/atm/jcb-prototype_lgetkf.yaml.j2 | 4 ++-- parm/atm/jcb-prototype_lgetkf_observer.yaml.j2 | 4 ++-- parm/atm/jcb-prototype_lgetkf_solver.yaml.j2 | 4 ++-- .../bufr2ioda/{bufr2ioda_mtiasi.yaml => bufr2ioda_iasi.yaml} | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) rename parm/ioda/bufr2ioda/{bufr2ioda_mtiasi.yaml => bufr2ioda_iasi.yaml} (98%) diff --git a/parm/atm/jcb-prototype_3dvar.yaml.j2 b/parm/atm/jcb-prototype_3dvar.yaml.j2 index 152683110..1975c1f05 100644 --- a/parm/atm/jcb-prototype_3dvar.yaml.j2 +++ b/parm/atm/jcb-prototype_3dvar.yaml.j2 @@ -16,8 +16,8 @@ observations: - conventional_ps - gnssro # - gpsro -# - mtiasi_metop-a -# - mtiasi_metop-b +# - iasi_metop-a +# - iasi_metop-b # - ompsnp_n20 - ompsnp_npp # - ompstc_n20 diff --git a/parm/atm/jcb-prototype_lgetkf.yaml.j2 b/parm/atm/jcb-prototype_lgetkf.yaml.j2 index a1b099bb6..2ed04df3b 100644 --- a/parm/atm/jcb-prototype_lgetkf.yaml.j2 +++ b/parm/atm/jcb-prototype_lgetkf.yaml.j2 @@ -25,8 +25,8 @@ observations: - conventional_ps - gnssro # - gpsro -# - mtiasi_metop-a -# - mtiasi_metop-b +# - iasi_metop-a +# - iasi_metop-b # - ompsnp_n20 - ompsnp_npp # - ompstc_n20 diff --git a/parm/atm/jcb-prototype_lgetkf_observer.yaml.j2 b/parm/atm/jcb-prototype_lgetkf_observer.yaml.j2 index 26654b175..4b800ac8f 100644 --- a/parm/atm/jcb-prototype_lgetkf_observer.yaml.j2 +++ b/parm/atm/jcb-prototype_lgetkf_observer.yaml.j2 @@ -25,8 +25,8 @@ observations: - conventional_ps - gnssro # - gpsro -# - mtiasi_metop-a -# - mtiasi_metop-b +# - iasi_metop-a +# - iasi_metop-b # - ompsnp_n20 - ompsnp_npp # - ompstc_n20 diff --git a/parm/atm/jcb-prototype_lgetkf_solver.yaml.j2 
b/parm/atm/jcb-prototype_lgetkf_solver.yaml.j2 index 677934158..b5123dde9 100644 --- a/parm/atm/jcb-prototype_lgetkf_solver.yaml.j2 +++ b/parm/atm/jcb-prototype_lgetkf_solver.yaml.j2 @@ -25,8 +25,8 @@ observations: - conventional_ps - gnssro # - gpsro -# - mtiasi_metop-a -# - mtiasi_metop-b +# - iasi_metop-a +# - iasi_metop-b # - ompsnp_n20 - ompsnp_npp # - ompstc_n20 diff --git a/parm/ioda/bufr2ioda/bufr2ioda_mtiasi.yaml b/parm/ioda/bufr2ioda/bufr2ioda_iasi.yaml similarity index 98% rename from parm/ioda/bufr2ioda/bufr2ioda_mtiasi.yaml rename to parm/ioda/bufr2ioda/bufr2ioda_iasi.yaml index c15df2b62..c11e147c6 100755 --- a/parm/ioda/bufr2ioda/bufr2ioda_mtiasi.yaml +++ b/parm/ioda/bufr2ioda/bufr2ioda_iasi.yaml @@ -94,7 +94,7 @@ observations: ioda: backend: netcdf - obsdataout: "{{ COM_OBS }}/{{ RUN }}.t{{ cyc }}z.mtiasi_$(splitvar).tm00.nc" + obsdataout: "{{ COM_OBS }}/{{ RUN }}.t{{ cyc }}z.iasi_$(splitvar).tm00.nc" dimensions: - name: Channel From 4c9b1d22314845bd8cf388cd5b9603b2cbf9ccfd Mon Sep 17 00:00:00 2001 From: Ed Givelberg Date: Wed, 6 Nov 2024 15:26:03 -0500 Subject: [PATCH 20/27] fixed a bug for empty files (#1351) Added testing for empty files and logging the number of obs. The code exits if the filtering results in no data, so no output file is created. 
--------- Co-authored-by: Guillaume Vernieres Co-authored-by: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> --- test/marine/CMakeLists.txt | 22 ++++--- .../b2i/b2iconverter/bufr2ioda_converter.py | 14 +++++ .../marine/b2i/b2iconverter/ioda_variables.py | 23 +++++++ .../marine/b2i/b2iconverter/ocean.py | 61 ------------------- 4 files changed, 51 insertions(+), 69 deletions(-) diff --git a/test/marine/CMakeLists.txt b/test/marine/CMakeLists.txt index 92c880a58..c75a5664b 100644 --- a/test/marine/CMakeLists.txt +++ b/test/marine/CMakeLists.txt @@ -28,6 +28,14 @@ install(FILES ${test_input} # bufr to ioda tests: ########################################################################### +set(TEST_WORKING_DIR ${PROJECT_BINARY_DIR}/test/marine) +set(MARINE_BUFR2IODA_DIR ${PROJECT_SOURCE_DIR}/ush/ioda/bufr2ioda/marine) +set(MARINE_BUFR2IODA_DIR ${MARINE_BUFR2IODA_DIR}/b2i) +set(CONFIG_DIR ${PROJECT_SOURCE_DIR}/test/marine/testinput) +set(TESTREF_DIR ${PROJECT_SOURCE_DIR}/test/marine/testref) +set(PYIODACONV_DIR "${PROJECT_SOURCE_DIR}/build/lib/python3.10/") + + # prepare a test.yaml file from test.yaml.in by replacing # placeholder patterns __BUFRINPUTDIR__ and __IODAOUTPUTDIR__ and __OCEANBASIN__ # with actual directory paths @@ -46,14 +54,6 @@ function(CREATE_CONFIG_FILE endfunction() -set(TEST_WORKING_DIR ${PROJECT_BINARY_DIR}/test/marine) - -set(MARINE_BUFR2IODA_DIR ${PROJECT_SOURCE_DIR}/ush/ioda/bufr2ioda/marine) -set(MARINE_BUFR2IODA_DIR ${MARINE_BUFR2IODA_DIR}/b2i) -set(CONFIG_DIR ${PROJECT_SOURCE_DIR}/test/marine/testinput) -set(TESTREF_DIR ${PROJECT_SOURCE_DIR}/test/marine/testref) - - function(CHECK_AND_SET_PATH PATH1 PATH2 RESULT_VAR) # Check if PATH1 exists if(EXISTS ${PATH1}) @@ -157,9 +157,15 @@ function(ADD_INSITU_TEST testname testbufr) COMMAND ${MARINE_BUFR2IODA_DIR}/${TEST}.py -c ${CONFIG_FILE} -t ${TESTREF_DIR}/${TESTREF_FILE} WORKING_DIRECTORY ${TEST_WORKING_DIR} ) + set_property( + TEST test_gdasapp_${TEST} + APPEND PROPERTY + 
ENVIRONMENT "PYTHONPATH=${PYIODACONV_DIR}:$ENV{PYTHONPATH}" + ) endfunction() + if (GENERATE_BUFR2IODA_TESTS) ADD_INSITU_TEST("profile_argo" "subpfl") ADD_INSITU_TEST("profile_bathy" "bathy") diff --git a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/bufr2ioda_converter.py b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/bufr2ioda_converter.py index f65d58523..e104ecce0 100755 --- a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/bufr2ioda_converter.py +++ b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/bufr2ioda_converter.py @@ -64,8 +64,22 @@ def run(self): # process query results and set ioda variables self.ioda_vars.set_from_query_result(r) + n_obs = self.ioda_vars.number_of_obs() + self.logger.debug(f"Query result has {n_obs} obs") + if (n_obs == 0): + self.logger.warning(f"No obs! Quitting.") + sys.exit(0) + self.ioda_vars.filter() + n_obs = self.ioda_vars.number_of_obs() + self.logger.debug(f"Filtered result has {n_obs} obs") + if (n_obs == 0): + self.logger.warning(f"No obs! Quitting.") + sys.exit(0) + self.logger.debug(f"Number of temperature obs = {self.ioda_vars.number_of_temp_obs()}") + self.logger.debug(f"Number of salinity obs = {self.ioda_vars.number_of_saln_obs()}") + # set seqNum, PreQC, ObsError, OceanBasin self.ioda_vars.additional_vars.construct() diff --git a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py index 789fe060a..5319e0ff0 100644 --- a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py +++ b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py @@ -44,6 +44,29 @@ def set_salinity_range(self, smin, smax): self.S_min = smin self.S_max = smax + def number_of_temp_obs(self): + try: + if isinstance(self.temp, np.ma.MaskedArray): + return self.temp.count() + else: + return 0 + # except NameError: + except AttributeError: + return 0 + + def number_of_saln_obs(self): + try: + if isinstance(self.saln, np.ma.MaskedArray): + return self.saln.count() + else: + return 0 
+ # except NameError: + except AttributeError: + return 0 + + def number_of_obs(self): + return max(self.number_of_temp_obs(), self.number_of_saln_obs()) + def build_query(self): q = bufr.QuerySet() q.add('year', '*/YEAR') diff --git a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ocean.py b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ocean.py index 9e848c5f5..d2063ddcd 100755 --- a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ocean.py +++ b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ocean.py @@ -5,9 +5,6 @@ import numpy as np import numpy.ma as ma import math -import matplotlib.pyplot as plt -import cartopy.crs as ccrs -import cartopy.feature as cfeature import netCDF4 as nc import xarray as xr @@ -20,8 +17,6 @@ # the main method is get_station_basin which returns the ocean basin # for a list of station coordinates -# there are methods for plotting and printing the ocean basin data -# as well as printing and plotting station basin data class OceanBasin: @@ -54,32 +49,6 @@ def read_nc_file(self): print(f"An IOError occurred: {e}") sys.exit(1) - def print_basin(self): - for i in range(n1): - for j in range(n2): - print(i, j, self.__basin_array[i][j]) - - def plot_basin(self): - # Create a figure and axes with Cartopy projection - fig = plt.figure(figsize=(10, 6)) - ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree()) - - # Plot the ocean basins using a colormap with 6 colors - # cmap = plt.cm.get_cmap('rainbow', 6) # Choose a colormap with 6 colors - cmap = plt.get_cmap('viridis', 6) # Create a colormap with 6 discrete colors - im = ax.pcolormesh(self.__longitudes, self.__latitudes, self.__basin_array, cmap='viridis', shading='auto', transform=ccrs.PlateCarree()) - - # Add colorbar - cbar = fig.colorbar(im, ax=ax, orientation='vertical', pad=0.05, ticks=np.arange(0, 6)) - cbar.set_label('Ocean Basin', fontsize=12) - # Add title and gridlines - ax.set_title('Ocean Basin Map', fontsize=16) - ax.coastlines() - ax.gridlines(draw_labels=True) - # Show the plot - 
plt.show() - plt.savefig('ocean_basin.png', dpi=300) - # input: 2 vectors of station coordinates # output: a vector of station ocean basin values def get_station_basin(self, lat, lon): @@ -99,33 +68,3 @@ def get_station_basin(self, lat, lon): i2 = round((lon[i] - lon0) / dlon) ocean_basin.append(self.__basin_array[i1][i2]) return ocean_basin - - def print_station_basin(self, lon, lat, file_path): - ocean_basin = self.get_station_basin(lat, lon) - with open(file_path, 'w') as file: - # Iterate over lon, lat, and ocean_basin arrays simultaneously - for lat_val, lon_val, basin_val in zip(lat, lon, ocean_basin): - file.write(f"{lat_val} {lon_val} {basin_val}\n") - - def plot_stations(self, lon, lat, png_file): - ocean_basin = self.get_station_basin(lon, lat) - - # Initialize the plot - plt.figure(figsize=(12, 8)) - # Create a Cartopy map with PlateCarree projection (latitude/longitude) - ax = plt.axes(projection=ccrs.PlateCarree()) - # Add coastlines and borders - ax.coastlines() - ax.add_feature(cartopy.feature.BORDERS, linestyle=':', linewidth=0.5) - - # Scatter plot with colored dots for each basin type - colors = ['blue', 'green', 'red', 'cyan', 'magenta', 'yellow'] - for basin_type in range(6): - indices = np.where(ocean_basin == basin_type)[0] - ax.scatter(lon[indices], lat[indices], color=colors[basin_type], label=f'Basin {basin_type}', alpha=0.7) - - # Add a legend - plt.legend(loc='lower left') - # Add title and show plot - plt.title('Ocean Basins Plot using Cartopy') - plt.savefig(png_file, dpi=300) From 1df1ad2c8770bbd2018daddede9da8ffebd043b9 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA <134300700+DavidNew-NOAA@users.noreply.github.com> Date: Thu, 7 Nov 2024 10:50:44 -0500 Subject: [PATCH 21/27] Quick indentation bugfix (#1363) Title says it all --- parm/atm/jcb-prototype_3dvar.yaml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parm/atm/jcb-prototype_3dvar.yaml.j2 b/parm/atm/jcb-prototype_3dvar.yaml.j2 index 1975c1f05..4330a87bd 
100644 --- a/parm/atm/jcb-prototype_3dvar.yaml.j2 +++ b/parm/atm/jcb-prototype_3dvar.yaml.j2 @@ -29,4 +29,4 @@ observations: # - satwnd.viirs_npp # - scatwind_ascat_metop-a # - snowcvr - - abi_g16 +# - abi_g16 From dfc871f38fa47c525953af891eafbd237c3b33a8 Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Thu, 7 Nov 2024 10:55:44 -0500 Subject: [PATCH 22/27] Completes LETKF task, adds WCDA CI test (#1287) Completes LETKF task, adds WCDA CI test Mutually dependent on https://github.com/NOAA-EMC/global-workflow/pull/2944 --------- Co-authored-by: Cory Martin Co-authored-by: Guillaume Vernieres --- mains/gdas.cc | 2 +- parm/soca/ensda/stage_ens_mem.yaml.j2 | 7 ++- parm/soca/letkf/letkf.yaml.j2 | 29 ++++++------ parm/soca/letkf/letkf_save.yaml.j2 | 20 +++++++++ parm/soca/letkf/letkf_stage.yaml.j2 | 29 +++++++++--- parm/soca/obs/config/adt_rads_all.yaml | 7 +++ parm/soca/obs/config/icec_amsr2_north.yaml | 7 +++ parm/soca/obs/config/icec_amsr2_south.yaml | 7 +++ parm/soca/obs/config/insitu_profile_argo.yaml | 7 +++ parm/soca/obs/config/sst_abi_g16_l3c.yaml | 7 +++ parm/soca/obs/config/sst_abi_g17_l3c.yaml | 7 +++ parm/soca/obs/config/sst_ahi_h08_l3c.yaml | 7 +++ parm/soca/obs/config/sst_avhrr_ma_l3u.yaml | 7 +++ parm/soca/obs/config/sst_avhrr_mb_l3u.yaml | 7 +++ parm/soca/obs/config/sst_avhrr_mc_l3u.yaml | 7 +++ parm/soca/obs/config/sst_viirs_n20_l3u.yaml | 7 +++ parm/soca/obs/config/sst_viirs_npp_l3u.yaml | 7 +++ parm/soca/soca_ens_bkg_stage.yaml.j2 | 6 +-- parm/soca/soca_fix_stage_025.yaml.j2 | 1 - parm/soca/soca_fix_stage_100.yaml.j2 | 1 - parm/soca/soca_fix_stage_500.yaml.j2 | 2 +- test/gw-ci/CMakeLists.txt | 45 +++++++++++++++++-- ush/soca/marine_recenter.py | 2 - ush/soca/prep_ocean_obs.py | 4 +- 24 files changed, 195 insertions(+), 37 deletions(-) create mode 100644 parm/soca/letkf/letkf_save.yaml.j2 diff --git a/mains/gdas.cc b/mains/gdas.cc index f47b82802..05afd244c 100755 --- a/mains/gdas.cc +++ 
b/mains/gdas.cc @@ -67,7 +67,7 @@ int runApp(int argc, char** argv, const std::string traits, const std::string ap return std::make_unique>(); }; apps["localensembleda"] = []() { - return std::make_unique>(); + return std::make_unique>(); }; apps["variational"] = []() { return std::make_unique>(); diff --git a/parm/soca/ensda/stage_ens_mem.yaml.j2 b/parm/soca/ensda/stage_ens_mem.yaml.j2 index e30a337e7..d0dca6e18 100644 --- a/parm/soca/ensda/stage_ens_mem.yaml.j2 +++ b/parm/soca/ensda/stage_ens_mem.yaml.j2 @@ -9,8 +9,7 @@ # create working directories ###################################### mkdir: -- "{{ DATAenspert }}/ens" - + - "{{ ENSPERT_RELPATH }}/ens" ###################################### # copy ensemble background files ###################################### @@ -22,6 +21,6 @@ copy: '${YMD}':gPDY, '${HH}':gcyc, '${MEMDIR}':"mem" + '%03d' % mem} %} - - ["{{ COM_OCEAN_HISTORY_TMPL | replace_tmpl(tmpl_dict) }}/{{ GDUMP_ENS }}.ocean.t{{ gcyc }}z.inst.f006.nc", "{{ DATAenspert }}/ens/ocean.{{ mem }}.nc"] - - ["{{ COM_ICE_HISTORY_TMPL | replace_tmpl(tmpl_dict) }}/{{ GDUMP_ENS }}.ice.t{{ gcyc }}z.inst.f006.nc", "{{ DATAenspert }}/ens/ice.{{ mem }}.nc"] + - ["{{ COM_OCEAN_HISTORY_TMPL | replace_tmpl(tmpl_dict) }}/{{ GDUMP_ENS }}.ocean.t{{ gcyc }}z.inst.f006.nc", "{{ ENSPERT_RELPATH }}/ens/ocean.{{ mem }}.nc"] + - ["{{ COM_ICE_HISTORY_TMPL | replace_tmpl(tmpl_dict) }}/{{ GDUMP_ENS }}.ice.t{{ gcyc }}z.inst.f006.nc", "{{ ENSPERT_RELPATH }}/ens/ice.{{ mem }}.nc"] {% endfor %} diff --git a/parm/soca/letkf/letkf.yaml.j2 b/parm/soca/letkf/letkf.yaml.j2 index f4c4d4875..1d5e93d7f 100644 --- a/parm/soca/letkf/letkf.yaml.j2 +++ b/parm/soca/letkf/letkf.yaml.j2 @@ -1,3 +1,5 @@ +{% set gcyc = previous_cycle | strftime("%H") %} + geometry: geom_grid_file: soca_gridspec.nc mom6_input_nml: mom_input.nml @@ -11,11 +13,11 @@ background: members from template: template: date: '{{ WINDOW_MIDDLE | to_isotime }}' - ocn_filename: "{{ RUN }}.ocean.t{{ gcyc }}z.inst.f006.nc" - ice_filename: 
"{{ RUN }}.ice.t{{ gcyc }}z.inst.f006.nc" + ocn_filename: "ocean.%mem%.nc" + ice_filename: "ice.%mem%.nc" read_from_file: 1 - basename: ./ens/mem%mem% - state variables: [socn, tocn, ssh, uocn, vocn, cicen] + basename: {{ ENSPERT_RELPATH }}/ens/ + state variables: [socn, tocn, ssh, hocn, uocn, vocn, cicen] pattern: '%mem%' nmembers: {{ NMEM_ENS }} @@ -38,32 +40,31 @@ local ensemble DA: mult: 1.1 output: - datadir: data_output/ + datadir: letkf_output/ date: *date exp: letkf type: ens output mean prior: - datadir: data_output/ + datadir: letkf_output/ date: *date - exp: letkf + exp: letkf.mean_prior type: fc output variance prior: - datadir: data_output/ + datadir: letkf_output/ date: *date - exp: letkf + exp: letkf.var_prior type: fc output variance posterior: - datadir: data_output/ + datadir: letkf_output/ date: *date - exp: letkf + exp: letkf.var_post type: an output increment: - datadir: data_output/ + datadir: letkf_output/ date: *date exp: letkf.inc - type: an - + type: ens diff --git a/parm/soca/letkf/letkf_save.yaml.j2 b/parm/soca/letkf/letkf_save.yaml.j2 new file mode 100644 index 000000000..194cf222e --- /dev/null +++ b/parm/soca/letkf/letkf_save.yaml.j2 @@ -0,0 +1,20 @@ +{% set PDY = current_cycle | to_YMD %} +{% set cyc = current_cycle | strftime("%H") %} +{% set timestr = WINDOW_BEGIN | to_isotime %} +###################################### +# save letkf analysis to comout +###################################### + +copy: +{% for mem in range(1, NMEM_ENS + 1) %} + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}': GDUMP_ENS, + '${YMD}':PDY, + '${HH}':cyc, + '${MEMDIR}':"mem" + '%03d' % mem} %} + {% set COMOUT_OCEAN_LETKF_MEM = COM_OCEAN_LETKF_TMPL | replace_tmpl(tmpl_dict) %} + {% set COMOUT_ICE_LETKF_MEM = COM_ICE_LETKF_TMPL | replace_tmpl(tmpl_dict) %} + + - ["{{ DATA }}/letkf_output/ocn.letkf.ens.{{ mem }}.{{ timestr }}.PT3H.nc", "{{ COMOUT_OCEAN_LETKF_MEM }}/{{ GDUMP_ENS }}.ocean.t{{ cyc }}z.analysis.nc"] + - ["{{ DATA 
}}/letkf_output/ice.letkf.ens.{{ mem }}.{{ timestr }}.PT3H.nc", "{{ COMOUT_ICE_LETKF_MEM }}/{{ GDUMP_ENS }}.ice.t{{ cyc }}z.analysis.nc"] +{% endfor %} diff --git a/parm/soca/letkf/letkf_stage.yaml.j2 b/parm/soca/letkf/letkf_stage.yaml.j2 index 019e1ba37..233e45eb9 100644 --- a/parm/soca/letkf/letkf_stage.yaml.j2 +++ b/parm/soca/letkf/letkf_stage.yaml.j2 @@ -1,11 +1,30 @@ -###################################### -# create working directories +{% set PDY = current_cycle | to_YMD %} +{% set cyc = current_cycle | strftime("%H") %} +{% set gcyc = previous_cycle | strftime("%H") %} ###################################### mkdir: -- "{{ DATA }}/Data" +- "{{ DATA }}/letkf_output" - "{{ DATA }}/obs" -copy: +- "{{ DATA }}/diags" +- "{{ COMOUT_OCEAN_LETKF }}" +- "{{ COMOUT_ICE_LETKF }}" +###################################### +# make comout directories ###################################### -# copy mom input template +{% for mem in range(1, NMEM_ENS + 1) %} + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':GDUMP_ENS, + '${YMD}':PDY, + '${HH}':cyc, + '${MEMDIR}':"mem" + '%03d' % mem} %} +- "{{ COM_OCEAN_LETKF_TMPL | replace_tmpl(tmpl_dict) }}" +- "{{ COM_ICE_LETKF_TMPL | replace_tmpl(tmpl_dict) }}" +{% endfor %} ###################################### +# copy mom input template and det bkg +###################################### +copy: - ["{{ PARMgfs }}/gdas/soca/fms/input.nml", "{{ DATA }}/mom_input.nml.tmpl"] +- ["{{ PARMgfs }}/gdas/soca/fields_metadata.yaml", "{{ DATA }}/fields_metadata.yaml"] +- ["{{ COMIN_OCEAN_HISTORY_PREV }}/gdas.ocean.t{{ gcyc }}z.inst.f009.nc", "{{ DATA }}/INPUT/MOM.res.nc"] +- ["{{ COMIN_ICE_HISTORY_PREV }}/gdas.ice.t{{ gcyc }}z.inst.f009.nc", "{{ DATA }}/INPUT/cice.res.nc"] diff --git a/parm/soca/obs/config/adt_rads_all.yaml b/parm/soca/obs/config/adt_rads_all.yaml index 177e58a8f..e5b6dee26 100644 --- a/parm/soca/obs/config/adt_rads_all.yaml +++ b/parm/soca/obs/config/adt_rads_all.yaml @@ -17,6 +17,13 @@ obs operator: name: ADT obs error: 
covariance model: diagonal +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 obs filters: - filter: Domain Check where: diff --git a/parm/soca/obs/config/icec_amsr2_north.yaml b/parm/soca/obs/config/icec_amsr2_north.yaml index 1b1509671..bd454ae88 100644 --- a/parm/soca/obs/config/icec_amsr2_north.yaml +++ b/parm/soca/obs/config/icec_amsr2_north.yaml @@ -43,3 +43,10 @@ obs filters: where: - variable: {name: GeoVaLs/distance_from_coast} minvalue: 100e3 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/icec_amsr2_south.yaml b/parm/soca/obs/config/icec_amsr2_south.yaml index 5373b1bee..eedf6800d 100644 --- a/parm/soca/obs/config/icec_amsr2_south.yaml +++ b/parm/soca/obs/config/icec_amsr2_south.yaml @@ -43,3 +43,10 @@ obs filters: where: - variable: {name: GeoVaLs/distance_from_coast} minvalue: 100e3 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/insitu_profile_argo.yaml b/parm/soca/obs/config/insitu_profile_argo.yaml index b2abaab2c..e533966b8 100644 --- a/parm/soca/obs/config/insitu_profile_argo.yaml +++ b/parm/soca/obs/config/insitu_profile_argo.yaml @@ -625,3 +625,10 @@ obs filters: - ObsError/salinity coefs: - 1000.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/sst_abi_g16_l3c.yaml b/parm/soca/obs/config/sst_abi_g16_l3c.yaml index d96135409..e4bef888d 100644 --- a/parm/soca/obs/config/sst_abi_g16_l3c.yaml +++ b/parm/soca/obs/config/sst_abi_g16_l3c.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs 
localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/sst_abi_g17_l3c.yaml b/parm/soca/obs/config/sst_abi_g17_l3c.yaml index 8843da412..c34f5f777 100644 --- a/parm/soca/obs/config/sst_abi_g17_l3c.yaml +++ b/parm/soca/obs/config/sst_abi_g17_l3c.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/sst_ahi_h08_l3c.yaml b/parm/soca/obs/config/sst_ahi_h08_l3c.yaml index d1842320c..0bf07bab1 100644 --- a/parm/soca/obs/config/sst_ahi_h08_l3c.yaml +++ b/parm/soca/obs/config/sst_ahi_h08_l3c.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/sst_avhrr_ma_l3u.yaml b/parm/soca/obs/config/sst_avhrr_ma_l3u.yaml index 71f5947a7..1223c1f7f 100644 --- a/parm/soca/obs/config/sst_avhrr_ma_l3u.yaml +++ b/parm/soca/obs/config/sst_avhrr_ma_l3u.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/sst_avhrr_mb_l3u.yaml b/parm/soca/obs/config/sst_avhrr_mb_l3u.yaml index 090b47ae6..92cde48fe 100644 --- a/parm/soca/obs/config/sst_avhrr_mb_l3u.yaml +++ b/parm/soca/obs/config/sst_avhrr_mb_l3u.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 
900.0e3 diff --git a/parm/soca/obs/config/sst_avhrr_mc_l3u.yaml b/parm/soca/obs/config/sst_avhrr_mc_l3u.yaml index ab03d548f..fa706616c 100644 --- a/parm/soca/obs/config/sst_avhrr_mc_l3u.yaml +++ b/parm/soca/obs/config/sst_avhrr_mc_l3u.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/sst_viirs_n20_l3u.yaml b/parm/soca/obs/config/sst_viirs_n20_l3u.yaml index 5941d746c..e78f0f77a 100644 --- a/parm/soca/obs/config/sst_viirs_n20_l3u.yaml +++ b/parm/soca/obs/config/sst_viirs_n20_l3u.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/sst_viirs_npp_l3u.yaml b/parm/soca/obs/config/sst_viirs_npp_l3u.yaml index 1d0e447ed..6fd0e47e3 100644 --- a/parm/soca/obs/config/sst_viirs_npp_l3u.yaml +++ b/parm/soca/obs/config/sst_viirs_npp_l3u.yaml @@ -54,3 +54,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/soca_ens_bkg_stage.yaml.j2 b/parm/soca/soca_ens_bkg_stage.yaml.j2 index a74853b3b..b6f1416fb 100644 --- a/parm/soca/soca_ens_bkg_stage.yaml.j2 +++ b/parm/soca/soca_ens_bkg_stage.yaml.j2 @@ -18,7 +18,7 @@ copy: # define variables # Declare a dict of search and replace terms to GDUMP on each template {% set tmpl_dict = {'ROTDIR':ROTDIR, - 'RUN': RUN, + 'RUN': GDUMP_ENS, 'YMD':gPDY, 'HH':gcyc, 'MEMDIR':"mem" + '%03d' % mem} %} @@ -35,6 +35,6 @@ copy: {% set com_prev_ocn.COMIN_OCEAN_HISTORY_MEM = com_prev_ocn.COMIN_OCEAN_HISTORY_MEM.replace(search_term, 
replace_term) %} {% set com_prev_ice.COMIN_ICE_HISTORY_MEM = com_prev_ice.COMIN_ICE_HISTORY_MEM.replace(search_term, replace_term) %} {% endfor %} - - ["{{ com_prev_ocn.COMIN_OCEAN_HISTORY_MEM }}/{{ RUN }}.ocean.t{{ gcyc }}z.inst.f006.nc", "{{ DATAens }}/ens/mem{{ '%03d' % mem }}/{{ RUN }}.ocean.t{{ gcyc }}z.inst.f006.nc"] - - ["{{ com_prev_ice.COMIN_ICE_HISTORY_MEM }}/{{ RUN }}.ice.t{{ gcyc }}z.inst.f006.nc", "{{ DATAens }}/ens/mem{{ '%03d' % mem }}/{{ RUN }}.ice.t{{ gcyc }}z.inst.f006.nc"] + - ["{{ com_prev_ocn.COMIN_OCEAN_HISTORY_MEM }}/{{ GDUMP_ENS }}.ocean.t{{ gcyc }}z.inst.f006.nc", "{{ DATAens }}/ens/mem{{ '%03d' % mem }}/{{ GDUMP_ENS }}.ocean.t{{ gcyc }}z.inst.f006.nc"] + - ["{{ com_prev_ice.COMIN_ICE_HISTORY_MEM }}/{{ GDUMP_ENS }}.ice.t{{ gcyc }}z.inst.f006.nc", "{{ DATAens }}/ens/mem{{ '%03d' % mem }}/{{ GDUMP_ENS }}.ice.t{{ gcyc }}z.inst.f006.nc"] {% endfor %} diff --git a/parm/soca/soca_fix_stage_025.yaml.j2 b/parm/soca/soca_fix_stage_025.yaml.j2 index 2c41dcad4..f7b334e7d 100644 --- a/parm/soca/soca_fix_stage_025.yaml.j2 +++ b/parm/soca/soca_fix_stage_025.yaml.j2 @@ -1,4 +1,3 @@ -# TODO(AFE): make resolution dependent mkdir: - "{{ DATA }}/INPUT" ###################################### diff --git a/parm/soca/soca_fix_stage_100.yaml.j2 b/parm/soca/soca_fix_stage_100.yaml.j2 index 0869f7d34..e2f4137a2 100644 --- a/parm/soca/soca_fix_stage_100.yaml.j2 +++ b/parm/soca/soca_fix_stage_100.yaml.j2 @@ -1,4 +1,3 @@ -# TODO(AFE): make resolution dependent mkdir: - "{{ DATA }}/INPUT" ###################################### diff --git a/parm/soca/soca_fix_stage_500.yaml.j2 b/parm/soca/soca_fix_stage_500.yaml.j2 index 9f030a81f..6d6930e0b 100644 --- a/parm/soca/soca_fix_stage_500.yaml.j2 +++ b/parm/soca/soca_fix_stage_500.yaml.j2 @@ -1,4 +1,3 @@ -# TODO(AFE): make resolution dependent mkdir: - "{{ DATA }}/INPUT" ###################################### @@ -9,6 +8,7 @@ copy: - ["{{ SOCA_INPUT_FIX_DIR }}/field_table", "{{ DATA }}/field_table"] - ["{{ SOCA_INPUT_FIX_DIR 
}}/diag_table", "{{ DATA }}/diag_table"] - ["{{ SOCA_INPUT_FIX_DIR }}/MOM_input", "{{ DATA }}/MOM_input"] +- ["{{ SOCA_INPUT_FIX_DIR }}/obsop_name_map.yaml", "{{ DATA }}/obsop_name_map.yaml"] - ["{{ SOCA_INPUT_FIX_DIR }}/INPUT/grid_spec.nc", "{{ DATA }}/INPUT/grid_spec.nc"] - ["{{ SOCA_INPUT_FIX_DIR }}/INPUT/hycom1_25.nc", "{{ DATA }}/INPUT/hycom1_25.nc"] - ["{{ SOCA_INPUT_FIX_DIR }}/INPUT/layer_coord25.nc", "{{ DATA }}/INPUT/layer_coord25.nc"] diff --git a/test/gw-ci/CMakeLists.txt b/test/gw-ci/CMakeLists.txt index 6c9faedf1..4f1a969e3 100644 --- a/test/gw-ci/CMakeLists.txt +++ b/test/gw-ci/CMakeLists.txt @@ -19,18 +19,39 @@ function(add_cycling_tests pslot YAML_PATH HOMEgfs RUNTESTS PROJECT_SOURCE_DIR T list(GET DATES_LIST 1 FULL_CYCLE) # stage IC's - message(STATUS "staging the 1/2 cycle IC's for ${test_name} ctest") + message(STATUS "staging the 1/2 cycle IC's for ${pslot} ctest") add_test(NAME ${test_name}_gdas_stage_ic_${HALF_CYCLE} COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} gdas_stage_ic ${HALF_CYCLE}" WORKING_DIRECTORY ${RUNTESTS}) set_tests_properties(${test_name}_gdas_stage_ic_${HALF_CYCLE} PROPERTIES LABELS "manual") + # stage ensemble ics + if (letkf) + message(STATUS "preparing enkfgdas_stage_ic for ${pslot} ctest") + add_test(NAME ${test_name}_enkfgdas_stage_ic_${HALF_CYCLE} + COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} enkfgdas_stage_ic ${HALF_CYCLE}" + WORKING_DIRECTORY ${RUNTESTS}) + set_tests_properties(${test_name}_enkfgdas_stage_ic_${HALF_CYCLE} PROPERTIES LABELS "manual") + endif() + # 1/2 cycle gdas_fcst message(STATUS "preparing 1/2 cycle gdas_fcst for ${pslot} ctest") - add_test(NAME ${test_name}_gdas_fcst_${HALF_CYCLE} + add_test(NAME ${test_name}_gdas_fcst_seg0_${HALF_CYCLE} COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} gdas_fcst_seg0 ${HALF_CYCLE}" WORKING_DIRECTORY ${RUNTESTS}) - set_tests_properties(${test_name}_gdas_fcst_${HALF_CYCLE} PROPERTIES 
LABELS "manual") + set_tests_properties(${test_name}_gdas_fcst_seg0_${HALF_CYCLE} PROPERTIES LABELS "manual") + + # 1/2 cycle enkfgdas_fcst + if (letkf) + set(ENS_MEMS "mem001" "mem002" "mem003") + foreach(ENS_MEM ${ENS_MEMS}) + message(STATUS "preparing 1/2 cycle enkfgdas_fcst_${ENS_MEM} for ${pslot} ctest") + add_test(NAME ${test_name}_enkfgdas_fcst_${ENS_MEM}_${HALF_CYCLE} + COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} enkfgdas_fcst_${ENS_MEM} ${HALF_CYCLE}" + WORKING_DIRECTORY ${RUNTESTS}) + set_tests_properties(${test_name}_enkfgdas_fcst_${ENS_MEM}_${HALF_CYCLE} PROPERTIES LABELS "manual") + endforeach() + endif() # Select the list of tasks to run for the full cycle message(STATUS "Tasks ${TASK_LIST}") @@ -62,6 +83,24 @@ if (WORKFLOW_TESTS) "gdas_marineanlfinal" ) add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") + + # WCDA, low-res, ensemble da + # ------------- + set(pslot "WCDA-hyb-C48mx500") + set(letkf TRUE) + set(YAML_PATH ${HOMEgfs}/ci/cases/pr/C48mx500_hybAOWCDA.yaml) + set(TASK_LIST + "gdas_prepoceanobs" + "gdas_marineanlletkf" + # TODO(AFE) waiting until these are working for hybrid + # "gdas_marinebmat" + # "gdas_marineanlinit" + # "gdas_marineanlvar" + # "gdas_marineanlchkpt" + # "gdas_marineanlfinal" + ) + add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") + set(letkf FALSE) endif() option(RUN_GW_CI "Enable the global-workflow CI tests" OFF) diff --git a/ush/soca/marine_recenter.py b/ush/soca/marine_recenter.py index 5572e2c76..a3814db85 100644 --- a/ush/soca/marine_recenter.py +++ b/ush/soca/marine_recenter.py @@ -68,8 +68,6 @@ def __init__(self, config: Dict) -> None: 'ATM_WINDOW_MIDDLE': window_middle_iso, 'DATA': DATA, 'dump': self.task_config.RUN, - 'fv3jedi_stage_files': self.task_config.FV3JEDI_STAGE_YAML, - 'fv3jedi_stage': self.task_config.FV3JEDI_STAGE_YAML, 'stage_dir': DATA, 'soca_input_fix_dir': 
self.task_config.SOCA_INPUT_FIX_DIR, 'NMEM_ENS': self.task_config.NMEM_ENS, diff --git a/ush/soca/prep_ocean_obs.py b/ush/soca/prep_ocean_obs.py index 1d6b55e8e..be80bfcc5 100644 --- a/ush/soca/prep_ocean_obs.py +++ b/ush/soca/prep_ocean_obs.py @@ -252,8 +252,8 @@ def finalize(self): for obsspace_to_save in obsspaces_to_save['observations']: - output_file = obsspace_to_save['output file'] - conv_config_file = obsspace_to_save['conversion config file'] + output_file = os.path.basename(obsspace_to_save['output file']) + conv_config_file = os.path.basename(obsspace_to_save['conversion config file']) output_file_dest = os.path.join(COMOUT_OBS, output_file) conv_config_file_dest = os.path.join(COMOUT_OBS, conv_config_file) From 6bc27606f8490cca5faec88413aecf2fd8f8603b Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Tue, 12 Nov 2024 19:17:54 -0500 Subject: [PATCH 23/27] Changes prepoceanobs to using newly refactored BUFR converters (#1352) Just like the title. This mainly changes the converters used to the new ones that use yaml files, the templates for which are included, along with a little script that does most of the work of converting the earlier JSON templates to yaml. Also removes from `ocean.py` some methods for plotting, but can't go into global-workflow because of lack of the necessary python modules in that environment. 
--- .../bufr2ioda_insitu_profile_argo.yaml | 11 ++++++ .../bufr2ioda_insitu_profile_bathy.yaml | 11 ++++++ .../bufr2ioda_insitu_profile_glider.yaml | 11 ++++++ .../bufr2ioda_insitu_profile_tesac.yaml | 11 ++++++ .../bufr2ioda_insitu_profile_xbtctd.yaml | 11 ++++++ .../bufr2ioda_insitu_surface_trkob.yaml | 11 ++++++ parm/ioda/bufr2ioda/j2y.py | 24 ++++++++++++ parm/soca/obs/obs_list.yaml | 10 ++--- .../marine/b2i/b2iconverter/ioda_variables.py | 1 - ush/soca/prep_ocean_obs.py | 38 ++++++++++++------- ush/soca/prep_ocean_obs_utils.py | 7 ++-- 11 files changed, 122 insertions(+), 24 deletions(-) create mode 100644 parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_argo.yaml create mode 100644 parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_bathy.yaml create mode 100644 parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_glider.yaml create mode 100644 parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_tesac.yaml create mode 100644 parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_xbtctd.yaml create mode 100644 parm/ioda/bufr2ioda/bufr2ioda_insitu_surface_trkob.yaml create mode 100644 parm/ioda/bufr2ioda/j2y.py diff --git a/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_argo.yaml b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_argo.yaml new file mode 100644 index 000000000..7dd28f21c --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_argo.yaml @@ -0,0 +1,11 @@ +cycle_datetime: '{{ current_cycle | to_YMDH }}' +cycle_type: '{{ RUN }}' +data_description: 6-hrly in situ ARGO profiles +data_format: subpfl +data_provider: U.S. 
NOAA +data_type: argo +dump_directory: '{{ DMPDIR }}' +ioda_directory: '{{ COM_OBS }}' +source: NCEP data tank +subsets: SUBPFL +ocean_basin: '{{ OCEAN_BASIN_FILE }}' diff --git a/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_bathy.yaml b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_bathy.yaml new file mode 100644 index 000000000..f26d341ea --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_bathy.yaml @@ -0,0 +1,11 @@ +cycle_datetime: '{{ current_cycle | to_YMDH }}' +cycle_type: '{{ RUN }}' +data_description: 6-hrly in situ Bathythermal profiles +data_format: bathy +data_provider: U.S. NOAA +data_type: bathy +dump_directory: '{{ DMPDIR }}' +ioda_directory: '{{ COM_OBS }}' +source: NCEP data tank +subsets: BATHY +ocean_basin: '{{ OCEAN_BASIN_FILE }}' diff --git a/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_glider.yaml b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_glider.yaml new file mode 100644 index 000000000..cc732df94 --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_glider.yaml @@ -0,0 +1,11 @@ +cycle_datetime: '{{ current_cycle | to_YMDH }}' +cycle_type: '{{ RUN }}' +data_description: 6-hrly in situ GLIDER profiles +data_format: subpfl +data_provider: U.S. NOAA +data_type: glider +dump_directory: '{{ DMPDIR }}' +ioda_directory: '{{ COM_OBS }}' +source: NCEP data tank +subsets: SUBPFL +ocean_basin: '{{ OCEAN_BASIN_FILE }}' diff --git a/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_tesac.yaml b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_tesac.yaml new file mode 100644 index 000000000..01dc441ca --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_tesac.yaml @@ -0,0 +1,11 @@ +cycle_datetime: '{{ current_cycle | to_YMDH }}' +cycle_type: '{{ RUN }}' +data_description: 6-hrly in situ TESAC profiles +data_format: tesac +data_provider: U.S. 
NOAA +data_type: tesac +dump_directory: '{{ DMPDIR }}' +ioda_directory: '{{ COM_OBS }}' +source: NCEP data tank +subsets: TESAC +ocean_basin: '{{ OCEAN_BASIN_FILE }}' diff --git a/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_xbtctd.yaml b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_xbtctd.yaml new file mode 100644 index 000000000..49d7f13d2 --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_xbtctd.yaml @@ -0,0 +1,11 @@ +cycle_datetime: '{{ current_cycle | to_YMDH }}' +cycle_type: '{{ RUN }}' +data_description: 6-hrly in situ XBT/XCTD profiles +data_format: xbtctd +data_provider: U.S. NOAA +data_type: xbtctd +dump_directory: '{{ DMPDIR }}' +ioda_directory: '{{ COM_OBS }}' +source: NCEP data tank +subsets: XBTCTD +ocean_basin: '{{ OCEAN_BASIN_FILE }}' diff --git a/parm/ioda/bufr2ioda/bufr2ioda_insitu_surface_trkob.yaml b/parm/ioda/bufr2ioda/bufr2ioda_insitu_surface_trkob.yaml new file mode 100644 index 000000000..1385920c1 --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_insitu_surface_trkob.yaml @@ -0,0 +1,11 @@ +cycle_datetime: '{{ current_cycle | to_YMDH }}' +cycle_type: '{{ RUN }}' +data_description: 6-hrly in situ TRACKOB surface +data_format: trkob +data_provider: U.S. 
NOAA +data_type: trackob +dump_directory: '{{ DMPDIR }}' +ioda_directory: '{{ COM_OBS }}' +source: NCEP data tank +subsets: TRACKOB +ocean_basin: '{{ OCEAN_BASIN_FILE }}' diff --git a/parm/ioda/bufr2ioda/j2y.py b/parm/ioda/bufr2ioda/j2y.py new file mode 100644 index 000000000..c8b158517 --- /dev/null +++ b/parm/ioda/bufr2ioda/j2y.py @@ -0,0 +1,24 @@ +import json +import yaml +import argparse + +def convert_json_to_yaml(input_file, output_file): + # Load the JSON data from the input file + with open(input_file, 'r') as json_file: + json_data = json.load(json_file) + + # Convert and save as YAML in the output file + with open(output_file, 'w') as yaml_file: + yaml.dump(json_data, yaml_file, default_flow_style=False) + +if __name__ == '__main__': + # Set up argument parser + parser = argparse.ArgumentParser(description='Convert JSON to YAML.') + parser.add_argument('input_file', help='Path to the input JSON file') + parser.add_argument('output_file', help='Path to the output YAML file') + + args = parser.parse_args() + + # Perform the conversion + convert_json_to_yaml(args.input_file, args.output_file) + diff --git a/parm/soca/obs/obs_list.yaml b/parm/soca/obs/obs_list.yaml index c11dc1ace..0ac8ab5af 100644 --- a/parm/soca/obs/obs_list.yaml +++ b/parm/soca/obs/obs_list.yaml @@ -25,15 +25,15 @@ observers: #- !INC ${MARINE_OBS_YAML_DIR}/icec_ssmis_f17_l2.yaml # in situ: monthly -#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_bathy.yaml +- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_bathy.yaml - !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_argo.yaml -#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_glider.yaml -#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac.yaml +- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_glider.yaml +- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac_salinity.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_marinemammal.yaml -#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_xbtctd.yaml +- !INC 
${MARINE_OBS_YAML_DIR}/insitu_profile_xbtctd.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_altkob.yaml -#- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob.yaml +- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob_salinity.yaml # in situ: daily diff --git a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py index 5319e0ff0..83a07771e 100644 --- a/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py +++ b/ush/ioda/bufr2ioda/marine/b2i/b2iconverter/ioda_variables.py @@ -1,6 +1,5 @@ import numpy as np from pyiodaconv import bufr -from .ocean import OceanBasin from .util import * from .ioda_metadata import IODAMetadata from .ioda_addl_vars import IODAAdditionalVariables diff --git a/ush/soca/prep_ocean_obs.py b/ush/soca/prep_ocean_obs.py index be80bfcc5..da7b2da6a 100644 --- a/ush/soca/prep_ocean_obs.py +++ b/ush/soca/prep_ocean_obs.py @@ -10,6 +10,7 @@ from wxflow import (chdir, FileHandler, logit, + parse_j2yaml, save_as_yaml, Task, YAMLFile) @@ -74,6 +75,7 @@ def initialize(self): SOCA_INPUT_FIX_DIR = self.task_config['SOCA_INPUT_FIX_DIR'] ocean_mask_src = os.path.join(SOCA_INPUT_FIX_DIR, 'RECCAP2_region_masks_all_v20221025.nc') ocean_mask_dest = os.path.join(self.task_config.DATA, 'RECCAP2_region_masks_all_v20221025.nc') + self.task_config['OCEAN_BASIN_FILE'] = ocean_mask_dest try: FileHandler({'copy': [[ocean_mask_src, ocean_mask_dest]]}).sync() @@ -90,11 +92,15 @@ def initialize(self): logger.critical(f"OBSPREP_YAML file {OBSPREP_YAML} does not exist") raise FileNotFoundError - JSON_TMPL_DIR = self.task_config.JSON_TMPL_DIR - BUFR2IODA_PY_DIR = self.task_config.BUFR2IODA_PY_DIR + # TODO (AFE): this should be in the task config file in g-w + BUFR2IODA_TMPL_DIR = os.path.join(self.task_config.HOMEgfs, 'parm/gdas/ioda/bufr2ioda') + # TODO (AFE): this should be in the task config file in g-w, and reaches into GDASApp + # in 
order to avoid touching the g-w until we know this will remain a task + BUFR2IODA_PY_DIR = os.path.join(self.task_config.HOMEgfs, 'sorc/gdas.cd/ush/ioda/bufr2ioda/marine/b2i') COMIN_OBS = self.task_config.COMIN_OBS COMOUT_OBS = self.task_config['COMOUT_OBS'] + OCEAN_BASIN_FILE = self.task_config['OCEAN_BASIN_FILE'] if not os.path.exists(COMOUT_OBS): os.makedirs(COMOUT_OBS) @@ -146,32 +152,34 @@ def initialize(self): obsprep_space['window end'] = self.window_end ioda_filename = f"{RUN}.t{cyc:02d}z.{obs_space_name}.{cdatestr}.nc4" obsprep_space['output file'] = ioda_filename + ioda_config_file = obtype + '2ioda.yaml' # set up the config file for conversion to IODA for bufr and # netcdf files respectively if obsprep_space['type'] == 'bufr': - gen_bufr_json_config = {'RUN': RUN, - 'current_cycle': cdate, - 'DMPDIR': COMIN_OBS, - 'COM_OBS': COMIN_OBS} - json_config_file = os.path.join(COMIN_OBS, - f"{obtype}_{cdatestr}.json") - obsprep_space['conversion config file'] = json_config_file + bufrconv_config = { + 'RUN': RUN, + 'current_cycle': cdate, + 'DMPDIR': COMIN_OBS, + 'COM_OBS': COMIN_OBS, + 'OCEAN_BASIN_FILE': OCEAN_BASIN_FILE} + obsprep_space['conversion config file'] = ioda_config_file bufr2iodapy = BUFR2IODA_PY_DIR + '/bufr2ioda_' + obtype + '.py' obsprep_space['bufr2ioda converter'] = bufr2iodapy - tmpl_filename = 'bufr2ioda_' + obtype + '.json' - template = os.path.join(JSON_TMPL_DIR, tmpl_filename) + tmpl_filename = 'bufr2ioda_' + obtype + '.yaml' + bufrconv_template = os.path.join(BUFR2IODA_TMPL_DIR, tmpl_filename) + try: - gen_bufr_json(gen_bufr_json_config, template, json_config_file) + bufrconv = parse_j2yaml(bufrconv_template, bufrconv_config) + bufrconv.save(ioda_config_file) except Exception as e: - logger.warning(f"An exeception {e} occured while trying to run gen_bufr_json") + logger.warning(f"An exeception {e} occured while trying to create BUFR2IODA config") logger.warning(f"obtype {obtype} will be skipped") break # go to next observer in OBS_YAML 
obsspaces_to_convert.append({"obs space": obsprep_space}) elif obsprep_space['type'] == 'nc': - ioda_config_file = obtype + '2ioda.yaml' obsprep_space['conversion config file'] = ioda_config_file save_as_yaml(obsprep_space, ioda_config_file) @@ -260,6 +268,8 @@ def finalize(self): try: FileHandler({'copy': [[output_file, output_file_dest]]}).sync() FileHandler({'copy': [[conv_config_file, conv_config_file_dest]]}).sync() + except Exception as e: + logger.warning(f"An exeception {e} occured while trying to run gen_bufr_json") except OSError: logger.warning(f"Obs file not found, possible IODA converter failure)") continue diff --git a/ush/soca/prep_ocean_obs_utils.py b/ush/soca/prep_ocean_obs_utils.py index 11b18fd37..9ecb06464 100755 --- a/ush/soca/prep_ocean_obs_utils.py +++ b/ush/soca/prep_ocean_obs_utils.py @@ -68,13 +68,12 @@ def run_netcdf_to_ioda(obsspace_to_convert, OCNOBS2IODAEXEC): def run_bufr_to_ioda(obsspace_to_convert): logger.info(f"running run_bufr_to_ioda on {obsspace_to_convert['name']}") - json_output_file = obsspace_to_convert['conversion config file'] + bufrconv_yaml = obsspace_to_convert['conversion config file'] bufr2iodapy = obsspace_to_convert['bufr2ioda converter'] try: - subprocess.run(['python', bufr2iodapy, '-c', json_output_file, '-v'], check=True) - logger.info(f"ran ioda converter on obs space {obsspace_to_convert['name']} successfully") + subprocess.run(['python', bufr2iodapy, '-c', bufrconv_yaml], check=True) return 0 except subprocess.CalledProcessError as e: - logger.warning(f"bufr2ioda converter failed with error {e}, \ + logger.warning(f"bufr2ioda converter failed with error >{e}<, \ return code {e.returncode}") return e.returncode From 1c5cc2e66a3294babddb0ac974d4420ad60e9add Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Wed, 13 Nov 2024 13:02:30 -0500 Subject: [PATCH 24/27] Fix for ensmean option in gdas.x (#1369) When I added `ensmean` as an option, I didn't do it correctly... this PR should allow it to actually work. 
--- mains/gdas.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mains/gdas.cc b/mains/gdas.cc index 05afd244c..10d826bd9 100755 --- a/mains/gdas.cc +++ b/mains/gdas.cc @@ -56,7 +56,7 @@ int runApp(int argc, char** argv, const std::string traits, const std::string ap apps["converttostructuredgrid"] = []() { return std::make_unique>(); - }; + }; apps["convertstate"] = []() { return std::make_unique>(); }; @@ -106,6 +106,7 @@ int main(int argc, char ** argv) { const std::set validApps = { "converttostructuredgrid", "convertstate", + "ensmean", "hofx4d", "localensembleda", "variational" From 5fb52d368aeda52b205e8323b4d2db7ff3082e70 Mon Sep 17 00:00:00 2001 From: Ed Givelberg Date: Thu, 14 Nov 2024 07:09:24 -0500 Subject: [PATCH 25/27] fixed python version in cmake (#1374) cmake now detects the python version; previously hard-coded. This came up in #1362 --- test/marine/CMakeLists.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/marine/CMakeLists.txt b/test/marine/CMakeLists.txt index c75a5664b..46f26cb86 100644 --- a/test/marine/CMakeLists.txt +++ b/test/marine/CMakeLists.txt @@ -28,12 +28,16 @@ install(FILES ${test_input} # bufr to ioda tests: ########################################################################### +find_package(Python REQUIRED) +# Extract the major and minor version (e.g., "3.10" from "3.10.13") +string(REGEX REPLACE "^([0-9]+\\.[0-9]+).*" "\\1" PYTHON_MAJOR_MINOR ${Python_VERSION}) +set(PYIODACONV_DIR "${PROJECT_SOURCE_DIR}/build/lib/python${PYTHON_MAJOR_MINOR}/") + set(TEST_WORKING_DIR ${PROJECT_BINARY_DIR}/test/marine) set(MARINE_BUFR2IODA_DIR ${PROJECT_SOURCE_DIR}/ush/ioda/bufr2ioda/marine) set(MARINE_BUFR2IODA_DIR ${MARINE_BUFR2IODA_DIR}/b2i) set(CONFIG_DIR ${PROJECT_SOURCE_DIR}/test/marine/testinput) set(TESTREF_DIR ${PROJECT_SOURCE_DIR}/test/marine/testref) -set(PYIODACONV_DIR "${PROJECT_SOURCE_DIR}/build/lib/python3.10/") # prepare a test.yaml file from test.yaml.in by replacing From 
e514b926561bfc8fa3de741876505aff74255c95 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Fri, 15 Nov 2024 14:56:27 -0500 Subject: [PATCH 26/27] Update JEDI hashes to include Model Variable Renaming Sprint (#1355) --- ci/driver.sh | 139 ++++++---- ci/gw_driver.sh | 144 ++++++---- ci/run_ci.sh | 4 +- ci/run_gw_ci.sh | 4 +- parm/io/fv3jedi_fieldmetadata_fv3inc.yaml | 2 +- parm/io/fv3jedi_fieldmetadata_history.yaml | 12 +- parm/io/fv3jedi_fieldmetadata_restart.yaml | 2 +- parm/jcb-gdas | 2 +- parm/soca/fields_metadata.yaml | 256 +++++++++++++----- parm/soca/letkf/letkf.yaml.j2 | 2 +- parm/soca/marine-jcb-base.yaml | 2 +- .../soca/obs/config/insitu_profile_bathy.yaml | 7 + .../soca/obs/config/insitu_profile_tesac.yaml | 7 + .../soca/obs/config/insitu_surface_trkob.yaml | 7 + parm/soca/obs/obs_list.yaml | 10 +- sorc/fv3-jedi | 2 +- sorc/ioda | 2 +- sorc/iodaconv | 2 +- sorc/oops | 2 +- sorc/saber | 2 +- sorc/soca | 2 +- sorc/ufo | 2 +- sorc/vader | 2 +- .../testinput/gdasapp_fv3jedi_fv3inc.yaml | 2 +- utils/soca/gdas_soca_diagb.h | 21 +- 25 files changed, 432 insertions(+), 207 deletions(-) diff --git a/ci/driver.sh b/ci/driver.sh index ce6372c59..933b9223b 100755 --- a/ci/driver.sh +++ b/ci/driver.sh @@ -1,6 +1,9 @@ #!/bin/bash --login +echo "Start at $(date)" + my_dir="$( cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )" +echo "Set my_dir ${my_dir}" # ============================================================================== usage() { @@ -51,6 +54,14 @@ esac cd $GDAS_CI_ROOT/repo CI_LABEL="${GDAS_CI_HOST}-RT" gh pr list --label "$CI_LABEL" --state "open" | awk '{print $1;}' > $GDAS_CI_ROOT/open_pr_list + +open_pr=`cat $GDAS_CI_ROOT/open_pr_list | wc -l` +if (( $open_pr == 0 )); then + echo "No open PRs with ${CI_LABEL}, exit." 
+ echo "Finish at $(date)" + exit +fi + open_pr_list=$(cat $GDAS_CI_ROOT/open_pr_list) # ============================================================================== @@ -58,72 +69,86 @@ open_pr_list=$(cat $GDAS_CI_ROOT/open_pr_list) repo_url="https://github.com/NOAA-EMC/GDASApp.git" # loop through all open PRs for pr in $open_pr_list; do - gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running - echo "Processing Pull Request #${pr}" + echo " " + echo "Start processing Pull Request #${pr} at $(date)" # get the branch name used for the PR gdasapp_branch=$(gh pr view $pr --json headRefName -q ".headRefName") - - # get the fork information - pr_details=$(gh pr view $pr --repo ${repo_url} --json headRepository,headRepositoryOwner,headRefName) - # extract the necessary info - fork_owner=$(gh pr view $pr --repo ${repo_url} --json headRepositoryOwner --jq '.headRepositoryOwner.login') - fork_name=$(gh pr view $pr --repo ${repo_url} --json headRepository --jq '.headRepository.name') - - # construct the fork URL - gdasapp_url="https://github.com/$fork_owner/${fork_name}.git" + # get additional branch information + branch_owner=$(gh pr view $pr --repo ${repo_url} --json headRepositoryOwner --jq '.headRepositoryOwner.login') + branch_name=$(gh pr view $pr --repo ${repo_url} --json headRepository --jq '.headRepository.name') + pr_assignees=$(gh pr view $pr --repo ${repo_url} --json assignees --jq '.assignees[].login') + + # check if any assignee is authorized to run CI + authorized_by="" + for str in ${pr_assignees[@]}; do + grep $str /scratch1/NCEPDEV/da/role.jedipara/CI/GDASApp/authorized_users + rc=$? 
+ if (( rc == 0 )); then + authorized_by=${str} + echo "FOUND MATCH $str, rc $rc" + break + fi + done + + # Authorized to run CI + if (( rc == 0 )); then + echo "Run CI" + + # update PR label + gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running + + # construct the fork URL + gdasapp_url="https://github.com/$branch_owner/${branch_name}.git" - echo "Fork URL: $gdasapp_url" - echo "Branch Name: $gdasapp_branch" - - # create PR specific directory - if [ -d $GDAS_CI_ROOT/PR/$pr ]; then - rm -rf $GDAS_CI_ROOT/PR/$pr - fi - mkdir -p $GDAS_CI_ROOT/PR/$pr - cd $GDAS_CI_ROOT/PR/$pr - - # clone copy of repo - git clone --recursive --jobs 8 --branch $gdasapp_branch $gdasapp_url - cd GDASApp - - # checkout pull request - git pull - gh pr checkout $pr - git submodule update --init --recursive - - # get commit hash - commit=$(git log --pretty=format:'%h' -n 1) - echo "$commit" > $GDAS_CI_ROOT/PR/$pr/commit - - # load modules - case ${TARGET} in - hera | orion) - echo "Loading modules on $TARGET" - module purge - module use $GDAS_CI_ROOT/PR/$pr/GDASApp/modulefiles - module load GDAS/$TARGET - module list - ;; - *) - echo "Unsupported platform. Exiting with error." - exit 1 - ;; - esac - - # run build and testing command - $my_dir/run_ci.sh -d $GDAS_CI_ROOT/PR/$pr/GDASApp -o $GDAS_CI_ROOT/PR/$pr/output_${commit} - ci_status=$? 
- gh pr comment $pr --repo ${repo_url} --body-file $GDAS_CI_ROOT/PR/$pr/output_${commit} - if [ $ci_status -eq 0 ]; then - gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed + echo "GDASApp URL: $gdasapp_url" + echo "GDASApp branch Name: $gdasapp_branch" + echo "CI authorized by $authorized_by at $(date)" + + # create PR specific directory + if [ -d $GDAS_CI_ROOT/PR/$pr ]; then + rm -rf $GDAS_CI_ROOT/PR/$pr + fi + mkdir -p $GDAS_CI_ROOT/PR/$pr + cd $GDAS_CI_ROOT/PR/$pr + pwd + + # clone copy of repo + git clone --recursive --jobs 8 --branch $gdasapp_branch $gdasapp_url + cd GDASApp + pwd + + # checkout GDASApp pull request + git pull + gh pr checkout $pr + git submodule update --init --recursive + + # get commit hash + commit=$(git log --pretty=format:'%h' -n 1) + echo "$commit" > $GDAS_CI_ROOT/PR/$pr/commit + + # run build and testing command + echo "Execute $my_dir/run_ci.sh for $GDAS_CI_ROOT/PR/$pr/GDASApp at $(date)" + $my_dir/run_ci.sh -d $GDAS_CI_ROOT/PR/$pr/GDASApp -o $GDAS_CI_ROOT/PR/$pr/output_${commit} + ci_status=$? 
+ echo "After run_ci.sh with ci_status ${ci_status} at $(date)" + gh pr comment $pr --repo ${repo_url} --body-file $GDAS_CI_ROOT/PR/$pr/output_${commit} + if [ $ci_status -eq 0 ]; then + gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed + else + gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed + fi + + # Not authorized to run CI else - gh pr edit $pr --repo ${repo_url} --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed + echo "Do NOT run CI" fi + + echo "Finish processing Pull Request #{pr} at $(date)" done # ============================================================================== # scrub working directory for older files find $GDAS_CI_ROOT/PR/* -maxdepth 1 -mtime +3 -exec rm -rf {} \; - +echo "Finish at $(date)" diff --git a/ci/gw_driver.sh b/ci/gw_driver.sh index e85684f84..c40ff4026 100755 --- a/ci/gw_driver.sh +++ b/ci/gw_driver.sh @@ -1,6 +1,9 @@ #!/bin/bash --login +echo "Start at $(date)" + my_dir="$( cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )" +echo "Set my_dir ${my_dir}" # ============================================================================== usage() { @@ -32,7 +35,7 @@ done case ${TARGET} in hera | orion) - echo "Running automated testing with workflow on $TARGET" + echo "Running Automated GW Testing on $TARGET" source $MODULESHOME/init/sh source $my_dir/${TARGET}.sh module purge @@ -51,69 +54,116 @@ esac cd $GDAS_CI_ROOT/repo CI_LABEL="${GDAS_CI_HOST}-GW-RT" gh pr list --label "$CI_LABEL" --state "open" | awk '{print $1;}' > $GDAS_CI_ROOT/open_pr_list_gw + +open_pr=`cat $GDAS_CI_ROOT/open_pr_list_gw | wc -l` +if (( $open_pr == 0 )); then + echo "No open PRs with ${CI_LABEL}, exit." + echo "Finish at $(date)" + exit +fi + open_pr_list=$(cat $GDAS_CI_ROOT/open_pr_list_gw) # ============================================================================== # clone, checkout, build, test, etc. 
repo_url="https://github.com/NOAA-EMC/GDASApp.git" workflow_url="https://github.com/NOAA-EMC/global-workflow.git" +workflow_branch="develop" # loop through all open PRs for pr in $open_pr_list; do - gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running - echo "Processing Pull Request #${pr}" + echo " " + echo "Start processing Pull Request #${pr} at $(date)" # get the branch name used for the PR gdasapp_branch=$(gh pr view $pr --json headRefName -q ".headRefName") - # check for a companion PR in the global-workflow - companion_pr_exists=$(gh pr list --repo ${workflow_url} --head ${gdasapp_branch} --state open) - if [ -n "$companion_pr_exists" ]; then - # get the PR number - companion_pr=$(echo "$companion_pr_exists" | awk '{print $1;}') - - # extract the necessary info - fork_owner=$(gh pr view $companion_pr --repo $workflow_url --json headRepositoryOwner --jq '.headRepositoryOwner.login') - fork_name=$(gh pr view $companion_pr --repo $workflow_url --json headRepository --jq '.headRepository.name') - - # Construct the fork URL - workflow_url="https://github.com/$fork_owner/$fork_name.git" - - echo "Fork URL: $workflow_url" - echo "Branch Name: $gdasapp_branch" - fi - - # create PR specific directory - if [ -d $GDAS_CI_ROOT/workflow/PR/$pr ]; then - rm -rf $GDAS_CI_ROOT/workflow/PR/$pr - fi - mkdir -p $GDAS_CI_ROOT/workflow/PR/$pr - cd $GDAS_CI_ROOT/workflow/PR/$pr + # get additional branch information + branch_owner=$(gh pr view $pr --repo ${repo_url} --json headRepositoryOwner --jq '.headRepositoryOwner.login') + branch_name=$(gh pr view $pr --repo ${repo_url} --json headRepository --jq '.headRepository.name') + pr_assignees=$(gh pr view $pr --repo ${repo_url} --json assignees --jq '.assignees[].login') + + # check if any assignee is authorized to run CI + authorized_by="" + for str in ${pr_assignees[@]}; do + grep $str /scratch1/NCEPDEV/da/role.jedipara/CI/GDASApp/authorized_users + rc=$? 
+ if (( rc == 0 )); then + authorized_by=${str} + echo "FOUND MATCH $str, rc $rc" + break + fi + done + + # Authorized to run CI + if (( rc == 0 )); then + echo "Run CI" + + # update PR label + gh pr edit $pr --remove-label $CI_LABEL --add-label ${CI_LABEL}-Running + + # check for a companion PR in the global-workflow + companion_pr_exists=$(gh pr list --repo ${workflow_url} --head ${gdasapp_branch} --state open) + if [ -n "$companion_pr_exists" ]; then + # get the PR number + companion_pr=$(echo "$companion_pr_exists" | awk '{print $1;}') + + # extract the necessary info + branch_owner=$(gh pr view $companion_pr --repo $workflow_url --json headRepositoryOwner --jq '.headRepositoryOwner.login') + branch_name=$(gh pr view $companion_pr --repo $workflow_url --json headRepository --jq '.headRepository.name') + + # Construct fork URL. Update workflow branch name + workflow_url="https://github.com/$branch_owner/$branch_name.git" + workflow_branch=$gdasapp_branch + + fi + + echo "Workflow URL: $workflow_url" + echo "Workflow branch name: $workflow_branch" + echo "GDASApp branch name: $gdasapp_branch" + echo "CI authorized by $authorized_by at $(date)" + + # create PR specific directory + if [ -d $GDAS_CI_ROOT/workflow/PR/$pr ]; then + rm -rf $GDAS_CI_ROOT/workflow/PR/$pr + fi + mkdir -p $GDAS_CI_ROOT/workflow/PR/$pr + cd $GDAS_CI_ROOT/workflow/PR/$pr + pwd - # clone global workflow develop branch - git clone --recursive --jobs 8 --branch dev/gdasapp $workflow_url - - # checkout pull request - cd $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow/sorc/gdas.cd - git checkout develop - git pull - gh pr checkout $pr - git submodule update --init --recursive - - # get commit hash - commit=$(git log --pretty=format:'%h' -n 1) - echo "$commit" > $GDAS_CI_ROOT/workflow/PR/$pr/commit - - $my_dir/run_gw_ci.sh -d $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow -o $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit} - ci_status=$? 
- gh pr comment $pr --body-file $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit} - if [ $ci_status -eq 0 ]; then - gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed + # clone global workflow develop branch + git clone --recursive --jobs 8 --branch $workflow_branch $workflow_url + + # checkout GDASApp pull request + cd $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow/sorc/gdas.cd + git pull + gh pr checkout $pr + git submodule update --init --recursive + + # get commit hash + commit=$(git log --pretty=format:'%h' -n 1) + echo "$commit" > $GDAS_CI_ROOT/workflow/PR/$pr/commit + + # run build and testing command + echo "Execute $my_dir/run_gw_ci.sh for $GDAS_CI_ROOT/PR/workflow/PR/$pr/global-workflow at $(date)" + $my_dir/run_gw_ci.sh -d $GDAS_CI_ROOT/workflow/PR/$pr/global-workflow -o $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit} + ci_status=$? + echo "After run_gw_ci.sh with ci_status ${ci_status} at $(date)" + gh pr comment $pr --body-file $GDAS_CI_ROOT/workflow/PR/$pr/output_${commit} + if [ $ci_status -eq 0 ]; then + gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Passed + else + gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed + fi + + # Not authorized to run CI else - gh pr edit $pr --remove-label ${CI_LABEL}-Running --add-label ${CI_LABEL}-Failed + echo "Do NOT run CI" fi + + echo "Finish processing Pull Request #{pr} at $(date)" done # ============================================================================== # scrub working directory for older files find $GDAS_CI_ROOT/workflow/PR/* -maxdepth 1 -mtime +3 -exec rm -rf {} \; - +echo "Finish at $(date)" diff --git a/ci/run_ci.sh b/ci/run_ci.sh index b62f78a88..1b5f27231 100755 --- a/ci/run_ci.sh +++ b/ci/run_ci.sh @@ -1,5 +1,5 @@ #!/bin/bash -#set -eu +set -u # ============================================================================== usage() { @@ -61,7 +61,7 @@ module use $GDAS_MODULE_USE module load GDAS/$TARGET echo 
"---------------------------------------------------" >> $outfile rm -rf log.ctest -ctest -E "manual" -R gdasapp --output-on-failure &>> log.ctest +ctest -R gdasapp --output-on-failure &>> log.ctest ctest_status=$? npassed=$(cat log.ctest | grep "tests passed") if [ $ctest_status -eq 0 ]; then diff --git a/ci/run_gw_ci.sh b/ci/run_gw_ci.sh index 59758e37f..ba1874107 100755 --- a/ci/run_gw_ci.sh +++ b/ci/run_gw_ci.sh @@ -1,5 +1,5 @@ #!/bin/bash -#set -eu +set -u # ============================================================================== usage() { @@ -31,7 +31,7 @@ done # ============================================================================== # start output file -echo "Automated Global-Workflow GDASApp Testing Results:" > $outfile +echo "Automated GW GDASApp Testing Results:" > $outfile echo "Machine: ${TARGET}" >> $outfile echo '```' >> $outfile echo "Start: $(date) on $(hostname)" >> $outfile diff --git a/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml b/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml index 2f8acb839..4750967b1 100644 --- a/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml +++ b/parm/io/fv3jedi_fieldmetadata_fv3inc.yaml @@ -9,7 +9,7 @@ field metadata: - long name: air_temperature io name: T_inc -- long name: specific_humidity +- long name: water_vapor_mixing_ratio_wrt_moist_air io name: sphum_inc - long name: cloud_liquid_water diff --git a/parm/io/fv3jedi_fieldmetadata_history.yaml b/parm/io/fv3jedi_fieldmetadata_history.yaml index a8532c32c..2e59dccb1 100644 --- a/parm/io/fv3jedi_fieldmetadata_history.yaml +++ b/parm/io/fv3jedi_fieldmetadata_history.yaml @@ -6,7 +6,7 @@ field metadata: - long name: northward_wind io name: vgrd -- long name: specific_humidity +- long name: water_vapor_mixing_ratio_wrt_moist_air io name: spfh - long name: air_temperature @@ -45,11 +45,11 @@ field metadata: - long name: upward_air_velocity io name: dzdt -- long name: surface_pressure +- long name: air_pressure_at_surface io name: pressfc io file: atmosphere -- long name: 
surface_geopotential_height +- long name: geopotential_height_at_surface io name: hgtsfc - long name: u_component_of_native_D_grid_wind @@ -74,7 +74,7 @@ field metadata: - long name: sheleg io name: weasd -- long name: sea_surface_temperature +- long name: skin_temperature_at_surface io name: tmpsfc - long name: stype @@ -86,8 +86,8 @@ field metadata: - long name: totalSnowDepthMeters io name: snod -- long name: surface_eastward_wind +- long name: eastward_wind_at_surface io name: ugrd_hyblev1 -- long name: surface_northward_wind +- long name: northward_wind_at_surface io name: vgrd_hyblev1 diff --git a/parm/io/fv3jedi_fieldmetadata_restart.yaml b/parm/io/fv3jedi_fieldmetadata_restart.yaml index d4a4a3a09..ccba447dc 100644 --- a/parm/io/fv3jedi_fieldmetadata_restart.yaml +++ b/parm/io/fv3jedi_fieldmetadata_restart.yaml @@ -10,7 +10,7 @@ field metadata: io name: T - long name: air_pressure_thickness - io name: DELP + io name: delp - long name: layer_thickness io name: DZ diff --git a/parm/jcb-gdas b/parm/jcb-gdas index b8e995a4c..c41e7d7ac 160000 --- a/parm/jcb-gdas +++ b/parm/jcb-gdas @@ -1 +1 @@ -Subproject commit b8e995a4cbf01fa4a662c3da3e7d818f8457ec4e +Subproject commit c41e7d7aca3f4053b177709adaa66488f3643980 diff --git a/parm/soca/fields_metadata.yaml b/parm/soca/fields_metadata.yaml index 586d8557e..444bd8418 100644 --- a/parm/soca/fields_metadata.yaml +++ b/parm/soca/fields_metadata.yaml @@ -1,103 +1,150 @@ # -------------------------------------------------------------------------------------------------- # Field metadata for SOCA. 
Each field can contain the following information: # -# name: Internal name used by soca code and config files +# name: name used by soca and by the rest of JEDI +# name surface: JEDI variable name for 2D surface of a 3D field (Default: ) # grid: "h", "u", or "v" (Default: h) # masked: use land mask if true (Default: true) # levels: "1" or "full_ocn" (Default: 1) -# getval_name: variable name expected by GetValues (Default: ) -# getval_name_surface: GetValues variable name for 2D surface of a 3D field (Default: ) -# io_file: The restart file domain "ocn", "sfc", or "ice" (Default: ) -# io_name: The variable name used in the restart IO (Default: ) +# io file: The restart file domain "ocn", "sfc", or "ice" (Default: ) +# io name: The variable name used in the restart IO (Default: ) +# constant value: Used for "dummy" fields. Sets the entire field to the given constant globally +# This parameter cannot be used with io_file/io_name +# fill value: If the field is masked, this value will be used for the masked areas. 
+# (Default: 0.0) +# categories: Number of categories for a field with a category dimension (Default: -1) +# If > 0, then the fields "name", and "io name" can use the +# placeholder "" which will be replaced with the category number # -------------------------------------------------------------------------------------------------- # -------------------------------------------------------------------------------------------------- # Ocean state variables # -------------------------------------------------------------------------------------------------- -- name: tocn +- name: sea_water_potential_temperature + name surface: sea_surface_temperature levels: full_ocn - getval name: sea_water_potential_temperature - getval name surface: sea_surface_temperature io file: ocn io name: Temp - fill value: 0.0 -- name: socn +- name: sea_water_salinity + name surface: sea_surface_salinity levels: full_ocn - getval name: sea_water_salinity - getval name surface: sea_surface_salinity io file: ocn io name: Salt property: positive_definite - fill value: 0.0 -- name: uocn +- name: eastward_sea_water_velocity + name surface: surface_eastward_sea_water_velocity grid: u levels: full_ocn - getval name: eastward_sea_water_velocity - getval name surface: surface_eastward_sea_water_velocity io file: ocn io name: u - fill value: 0.0 -- name: vocn +- name: northward_sea_water_velocity + name surface: surface_northward_sea_water_velocity grid: v levels: full_ocn - getval name: northward_sea_water_velocity - getval name surface: surface_northward_sea_water_velocity io file: ocn io name: v - fill value: 0.0 -- name: hocn +- name: sea_water_cell_thickness levels: full_ocn - getval name: sea_water_cell_thickness io file: ocn io name: h - fill value: 0.001 vert interp: false -- name: ssh - getval name: sea_surface_height_above_geoid +- name: sea_surface_height_above_geoid io file: ocn io name: ave_ssh - fill value: 0.0 - name: mom6_mld io file: ocn io name: MLD fill value: 0.0 + +# 
-------------------------------------------------------------------------------------------------- +# ice state variables with no categories +# -------------------------------------------------------------------------------------------------- +- name: sea_ice_thickness + io file: ice + io name: hi_h #note, was hicen + property: positive_definite + +- name: sea_ice_area_fraction + io file: ice + io name: aice_h #note, was aicen + +- name: sea_ice_snow_thickness + io file: ice + io name: hs_h #note, was hsnon + property: positive_definite + +- name: snow_ice_surface_temperature + io file: ice + io name: Tsfc_h + +- name: air_temperature + io file: ice + io name: Tair_h + +- name: bulk_ice_salinity + io file: ice + io name: sice_h # -------------------------------------------------------------------------------------------------- -# ice state variables +# ice state variables with category dimension # -------------------------------------------------------------------------------------------------- -- name: hicen - getval name: sea_ice_category_thickness +- name: sea_ice_category_area_fraction + categories: 5 io file: ice - io name: hi_h + io sup name: aicen_h + io name: aice_h property: positive_definite - fill value: 0.0 -- name: cicen - getval name: sea_ice_category_area_fraction - getval name surface: sea_ice_area_fraction # note: not accurate, should be "sum" not "surface" +- name: sea_ice_category_volume + categories: 5 io file: ice - io name: aice_h - fill value: 0.0 + io sup name: vicen_h + io name: vice_h + property: positive_definite -- name: hsnon - getval name: sea_ice_category_snow_thickness +- name: sea_ice_snow_category_volume + categories: 5 io file: ice - io name: hs_h + io sup name: vsnon_h + io name: vsno_h property: positive_definite - fill value: 0.0 + +# -------------------------------------------------------------------------------------------------- +# Thermodynamic ice state variables with category and level dimension +# 
-------------------------------------------------------------------------------------------------- +- name: sea_ice_category_temperature + categories: 5 + levels: 7 + io file: ice + io sup name: Tinz_h + io name: tiz_h + +- name: sea_ice_category_salinity + categories: 5 + levels: 7 + io file: ice + io sup name: Sinz_h + io name: siz_h + +- name: sea_ice_snow_category_temperature + categories: 5 + levels: 1 + io file: ice + io sup name: Tsnz_h + io name: tsz_h + # -------------------------------------------------------------------------------------------------- # wave state variables # -------------------------------------------------------------------------------------------------- -- name: swh - getval name: sea_surface_wave_significant_height +- name: sea_surface_wave_significant_height io file: wav io name: hs property: positive_definite @@ -105,33 +152,28 @@ # -------------------------------------------------------------------------------------------------- # sea surface variables # -------------------------------------------------------------------------------------------------- -- name: sw +- name: net_downwelling_shortwave_radiation masked: false - getval name: net_downwelling_shortwave_radiation io file: sfc io name: sw_rad -- name: lw +- name: net_downwelling_longwave_radiation masked: false - getval name: net_downwelling_longwave_radiation io file: sfc io name: lw_rad -- name: lhf +- name: upward_latent_heat_flux_in_air masked: false - getval name: upward_latent_heat_flux_in_air io file: sfc io name: latent_heat -- name: shf +- name: upward_sensible_heat_flux_in_air masked: false - getval name: upward_sensible_heat_flux_in_air io file: sfc io name: sens_heat -- name: us +- name: friction_velocity_over_water masked: false - getval name: friction_velocity_over_water io file: sfc io name: fric_vel @@ -139,18 +181,16 @@ # -------------------------------------------------------------------------------------------------- # BGC # 
-------------------------------------------------------------------------------------------------- -- name: chl +- name: mass_concentration_of_chlorophyll_in_sea_water + name surface: sea_surface_chlorophyll levels: full_ocn - getval name: mass_concentration_of_chlorophyll_in_sea_water - getval name surface: sea_surface_chlorophyll io file: ocn io name: chl property: positive_definite -- name: biop +- name: molar_concentration_of_biomass_in_sea_water_in_p_units + name surface: sea_surface_biomass_in_p_units levels: full_ocn - getval name: molar_concentration_of_biomass_in_sea_water_in_p_units - getval name surface: sea_surface_biomass_in_p_units io file: ocn io name: biomass_p property: positive_definite @@ -161,20 +201,106 @@ - name: distance_from_coast masked: false -- name: layer_depth - levels: full_ocn - vert interp: false - - name: mesoscale_representation_error -- name: mld +- name: ocean_mixed_layer_thickness - name: sea_floor_depth_below_sea_surface - name: sea_area_fraction masked: false -- name: surface_temperature_where_sea +- name: skin_temperature_at_surface_where_sea - name: sea_water_depth levels: full_ocn + +- name: latitude +- name: longitude + +# -------------------------------------------------------------------------------------------------- +# variables that VADER should be responsible for +# -------------------------------------------------------------------------------------------------- +- name: sea_water_temperature + levels: full_ocn + +# -------------------------------------------------------------------------------------------------- +- name: dummy_atm1 + constant value: 5.0 + +- name: ozone_thickness + constant value: 275 #The average amount of ozone in the atm. 
is 300 Dobson Units + +- name: water_vapor #g/cm^2 + constant value: 1.2 + +- name: wind_speed_at_surface + constant value: 6 + +- name: air_pressure_at_surface + constant value: 999 + +- name: relative_humidity + constant value: 89 + +- name: cloud_liquid_water_path + constant value: 163 + +- name: cloud_area_fraction_in_atmosphere_layer + constant value: 80 + +- name: aerosol_optical_thickness + constant value: 0.16 + +- name: single_scattering_albedo + constant value: 0.71 + +- name: asymmetry_parameter + constant value: 0.97 + +#---------------------ocean bio +- name: Carbon_nitrogen_detritus_concentration + levels: full_ocn + io file: bio + io name: CDET + property: positive_definite + +- name: Particulate_inorganic_carbon + levels: full_ocn + io file: bio + io name: PIC + +- name: colored_dissolved_organic_carbon + levels: full_ocn + io file: bio + io name: CDC + +- name: diatom_concentration + levels: full_ocn + io file: bio + io name: DIATOM + +- name: chlorophyte_concentration + levels: full_ocn + io file: bio + io name: CHLORO + +- name: cyano-bacteria_concentration + levels: full_ocn + io file: bio + io name: CYANO + +- name: coccolithophore_concentration + levels: full_ocn + io file: bio + io name: COCCO + +- name: dinoflagellate_concentration + levels: full_ocn + io file: bio + io name: DINO + +- name: phaeocystis_concentration + levels: full_ocn + io file: bio + io name: PHAEO diff --git a/parm/soca/letkf/letkf.yaml.j2 b/parm/soca/letkf/letkf.yaml.j2 index 1d5e93d7f..ef60c6ec9 100644 --- a/parm/soca/letkf/letkf.yaml.j2 +++ b/parm/soca/letkf/letkf.yaml.j2 @@ -17,7 +17,7 @@ background: ice_filename: "ice.%mem%.nc" read_from_file: 1 basename: {{ ENSPERT_RELPATH }}/ens/ - state variables: [socn, tocn, ssh, hocn, uocn, vocn, cicen] + state variables: [sea_water_salinity, sea_water_potential_temperature, sea_surface_height_above_geoid, sea_water_cell_thickness, eastward_sea_water_velocity, northward_sea_water_velocity, sea_ice_area_fraction] pattern: 
'%mem%' nmembers: {{ NMEM_ENS }} diff --git a/parm/soca/marine-jcb-base.yaml b/parm/soca/marine-jcb-base.yaml index d07edcd8c..3a9d40223 100644 --- a/parm/soca/marine-jcb-base.yaml +++ b/parm/soca/marine-jcb-base.yaml @@ -24,7 +24,7 @@ minimizer: RPCG final_diagnostics_departures: oman final_prints_frequency: PT3H number_of_outer_loops: 1 -analysis_variables: [cicen, hicen, hsnon, socn, tocn, uocn, vocn, ssh] +analysis_variables: [sea_ice_area_fraction, sea_ice_thickness, sea_ice_snow_thickness, sea_water_salinity, sea_water_potential_temperature, eastward_sea_water_velocity, northward_sea_water_velocity, sea_surface_height_above_geoid] # Model things diff --git a/parm/soca/obs/config/insitu_profile_bathy.yaml b/parm/soca/obs/config/insitu_profile_bathy.yaml index 0dc2db0aa..d78cacdb6 100644 --- a/parm/soca/obs/config/insitu_profile_bathy.yaml +++ b/parm/soca/obs/config/insitu_profile_bathy.yaml @@ -26,3 +26,10 @@ obs filters: - ObsError/waterTemperature coefs: - 1000.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/insitu_profile_tesac.yaml b/parm/soca/obs/config/insitu_profile_tesac.yaml index 5c966f88a..b2cf1769d 100644 --- a/parm/soca/obs/config/insitu_profile_tesac.yaml +++ b/parm/soca/obs/config/insitu_profile_tesac.yaml @@ -29,3 +29,10 @@ obs filters: - ObsError/waterTemperature coefs: - 1000.0 +obs localizations: +- localization method: Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/config/insitu_surface_trkob.yaml b/parm/soca/obs/config/insitu_surface_trkob.yaml index 3b058e527..5846b83d6 100644 --- a/parm/soca/obs/config/insitu_surface_trkob.yaml +++ b/parm/soca/obs/config/insitu_surface_trkob.yaml @@ -27,3 +27,10 @@ obs filters: - ObsError/seaSurfaceTemperature coefs: - 1000.0 +obs localizations: +- localization method: 
Rossby + base value: 100.0e3 + rossby mult: 1.0 + min grid mult: 2.0 + min value: 200.0e3 + max value: 900.0e3 diff --git a/parm/soca/obs/obs_list.yaml b/parm/soca/obs/obs_list.yaml index 0ac8ab5af..c11dc1ace 100644 --- a/parm/soca/obs/obs_list.yaml +++ b/parm/soca/obs/obs_list.yaml @@ -25,15 +25,15 @@ observers: #- !INC ${MARINE_OBS_YAML_DIR}/icec_ssmis_f17_l2.yaml # in situ: monthly -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_bathy.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_bathy.yaml - !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_argo.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_glider.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_glider.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_tesac_salinity.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_marinemammal.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_xbtctd.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_profile_xbtctd.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_altkob.yaml -- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob.yaml +#- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob.yaml #- !INC ${MARINE_OBS_YAML_DIR}/insitu_surface_trkob_salinity.yaml # in situ: daily diff --git a/sorc/fv3-jedi b/sorc/fv3-jedi index 88279a632..136dfb9c2 160000 --- a/sorc/fv3-jedi +++ b/sorc/fv3-jedi @@ -1 +1 @@ -Subproject commit 88279a63280c23d6b8974991a8c89380afaf5db7 +Subproject commit 136dfb9c2f8541584e62fa74c616d686602bcdad diff --git a/sorc/ioda b/sorc/ioda index 3fa4a997e..22cd20eae 160000 --- a/sorc/ioda +++ b/sorc/ioda @@ -1 +1 @@ -Subproject commit 3fa4a997e25b3bd018d30e308a26b3e98af0fe6f +Subproject commit 22cd20eae0685914a5b967e13f95779b57bb448c diff --git a/sorc/iodaconv b/sorc/iodaconv index 23e58ed76..6f87a0f27 160000 --- a/sorc/iodaconv +++ b/sorc/iodaconv @@ -1 +1 @@ -Subproject commit 23e58ed76da3628cbd508bd4ac40f8a01c789d7d +Subproject commit 
6f87a0f279e836fd604e5b313a25bd1e54bff80e diff --git a/sorc/oops b/sorc/oops index 0d2c235d7..1ba321ff9 160000 --- a/sorc/oops +++ b/sorc/oops @@ -1 +1 @@ -Subproject commit 0d2c235d791e1ba0023ce300103174dddf71aed7 +Subproject commit 1ba321ff912c6338d7362667eff37ddbf569cb18 diff --git a/sorc/saber b/sorc/saber index 1f23a3665..de5015c83 160000 --- a/sorc/saber +++ b/sorc/saber @@ -1 +1 @@ -Subproject commit 1f23a36657f6d10b770348de0f5454e01d377105 +Subproject commit de5015c8328f5b3d64acc99739fbaa64ef571172 diff --git a/sorc/soca b/sorc/soca index 4d7ef21e7..7f2ddb61b 160000 --- a/sorc/soca +++ b/sorc/soca @@ -1 +1 @@ -Subproject commit 4d7ef21e74d78a065156c942a72806ef2e2eb08e +Subproject commit 7f2ddb61bc86796c83dfcd4801c91bffd829ffb4 diff --git a/sorc/ufo b/sorc/ufo index b0cd94558..85ef98cb9 160000 --- a/sorc/ufo +++ b/sorc/ufo @@ -1 +1 @@ -Subproject commit b0cd94558643380ccceea864abac2c34fa291677 +Subproject commit 85ef98cb99f3eae1ed15e39f0cb05046af36fef1 diff --git a/sorc/vader b/sorc/vader index 05eb007e2..3049658d1 160000 --- a/sorc/vader +++ b/sorc/vader @@ -1 +1 @@ -Subproject commit 05eb007e242af3fdc4969c7146a480e12663e452 +Subproject commit 3049658d185c8095caf0e506795d0e5995fa92cf diff --git a/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml b/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml index f59603d92..7ee403ccc 100644 --- a/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml +++ b/test/fv3jedi/testinput/gdasapp_fv3jedi_fv3inc.yaml @@ -1,6 +1,6 @@ variable change: variable change name: Model2GeoVaLs - input variables: &bkgvars [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr,surface_geopotential_height] + input variables: &bkgvars [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr,hgtsfc] output variables: &fv3incrvars [ua,va,t,sphum,ice_wat,liq_wat,o3mr,delp,delz] jedi increment variables: [ua,va,t,ps,sphum,ice_wat,liq_wat,o3mr] fv3 increment variables: *fv3incrvars diff --git a/utils/soca/gdas_soca_diagb.h b/utils/soca/gdas_soca_diagb.h index f45ac8313..563d303b9 
100644 --- a/utils/soca/gdas_soca_diagb.h +++ b/utils/soca/gdas_soca_diagb.h @@ -261,7 +261,7 @@ namespace gdasapp { // Get the layer thicknesses and convert to layer depth oops::Log::info() << "====================== calculate layer depth" << std::endl; - auto viewHocn = atlas::array::make_view(xbFs["hocn"]); + auto viewHocn = atlas::array::make_view(xbFs["sea_water_cell_thickness"]); atlas::array::ArrayT depth(viewHocn.shape(0), viewHocn.shape(1)); auto viewDepth = atlas::array::make_view(depth); for (atlas::idx_t jnode = 0; jnode < depth.shape(0); ++jnode) { @@ -299,7 +299,7 @@ namespace gdasapp { } // Update the layer thickness halo - nodeColumns.haloExchange(xbFs["hocn"]); + nodeColumns.haloExchange(xbFs["sea_water_cell_thickness"]); // Loop through variables for (auto & var : configD.socaVars.variables()) { @@ -307,7 +307,7 @@ namespace gdasapp { nodeColumns.haloExchange(xbFs[var]); // Skip the layer thickness variable - if (var == "hocn") { + if (var == "sea_water_cell_thickness") { continue; } oops::Log::info() << "====================== std dev for " << var << std::endl; @@ -330,7 +330,7 @@ namespace gdasapp { stdDevFilt(jnode, 0, 0, configD.depthMin, neighbors, 0, viewHocn, bkg, viewBathy, stdDevBkg, false, 4); - if (var == "ssh") { + if (var == "sea_surface_height_above_geoid") { // TODO(G): Extract the unbalanced ssh variance, in the mean time, do this: stdDevBkg(jnode, 0) = std::min(configD.sshMax, stdDevBkg(jnode, 0)); } @@ -353,7 +353,7 @@ namespace gdasapp { if (configD.simpleSmoothing) { for (auto & var : configD.socaVars.variables()) { // Skip the layer thickness variable - if (var == "hocn") { + if (var == "sea_water_cell_thickness") { continue; } @@ -365,7 +365,8 @@ namespace gdasapp { // Loops through nodes and levels for (atlas::idx_t level = 0; level < xbFs[var].shape(1); ++level) { - for (atlas::idx_t jnode = 0; jnode < xbFs["tocn"].shape(0); ++jnode) { + for (atlas::idx_t jnode = 0; + jnode < 
xbFs["sea_water_potential_temperature"].shape(0); ++jnode) { // Early exit if on a ghost cell if (ghostView(jnode) > 0) { continue; @@ -403,7 +404,8 @@ namespace gdasapp { auto stdDevBkg = atlas::array::make_view(bkgErrFs[var]); auto tmpArray(stdDevBkg); for (int iter = 0; iter < configD.niterVert; ++iter) { - for (atlas::idx_t jnode = 0; jnode < xbFs["tocn"].shape(0); ++jnode) { + for (atlas::idx_t jnode = 0; + jnode < xbFs["sea_water_potential_temperature"].shape(0); ++jnode) { for (atlas::idx_t level = 1; level < xbFs[var].shape(1)-1; ++level) { stdDevBkg(jnode, level) = (tmpArray(jnode, level-1) + tmpArray(jnode, level) + @@ -424,14 +426,15 @@ namespace gdasapp { << std::endl; // Create the diffusion object oops::GeometryData geometryData(geom.functionSpace(), - bkgErrFs["tocn"], true, this->getComm()); + bkgErrFs["sea_water_potential_temperature"], + true, this->getComm()); oops::Diffusion diffuse(geometryData); diffuse.calculateDerivedGeom(geometryData); // Lambda function to construct a field with a constant filtering value auto assignScale = [&](double scale, const std::string& fieldName) { atlas::Field field; - auto levels = xbFs["tocn"].shape(1); + auto levels = xbFs["sea_water_potential_temperature"].shape(1); field = geom.functionSpace().createField(atlas::option::levels(levels) | atlas::option::name(fieldName)); auto viewField = atlas::array::make_view(field); From 2bcedf26cea991fc61f640ccb5be39409b1bd931 Mon Sep 17 00:00:00 2001 From: Mindo Choi <141867620+apchoiCMD@users.noreply.github.com> Date: Fri, 15 Nov 2024 15:45:14 -0500 Subject: [PATCH 27/27] Enable the manual operation of the marine verification tool (#1373) #### This PR enables the Marine Verification Tool to run outside of the g-w CI workflow by submitting an `sbatch` job manually on Hera Includes, - Vrfy task run by a simple driver in the offline #1345 - Improve cosmetic issues we found #1349 - Bug fixes and more #1314 - ~~Move `exgdas_global_marine_analysis_vrfy.py` to `scripts/old` 
directory~~ Most up-to-date plots can be found at ``` /scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210827/00/analysis/ocean/vrfy_final_PR ``` The wall time is as follows: ``` [Mindo.Choi@hfe02 vrfy]$ sacct -j 2477688 --format=JobID,JobName,State,ExitCode,Elapsed JobID JobName State ExitCode Elapsed ------------ ---------- ---------- -------- ---------- 2477688 marine_vr+ COMPLETED 0:0 00:11:54 2477688.bat+ batch COMPLETED 0:0 00:11:54 2477688.ext+ extern COMPLETED 0:0 00:11:54 ``` Additional plotting work will be added by consolidating vrfy task as follows: - SST/SSH time series - Omb time series - Spatial SSH/SST/OHC - HTML (?) Close #1314 , Close #1345 , Close #1349 --------- Co-authored-by: Guillaume Vernieres --- scripts/exgdas_global_marine_analysis_vrfy.py | 0 ush/eva/marine_eva_post.py | 4 +- ush/eva/marine_gdas_plots.yaml | 6 +- ush/soca/soca_vrfy.py | 50 +++-- ...gdas_global_marine_analysis_vrfy_manual.py | 210 ++++++++++++++++++ .../run_marine_analysis_vrfy_manual.job | 45 ++++ 6 files changed, 297 insertions(+), 18 deletions(-) mode change 100755 => 100644 scripts/exgdas_global_marine_analysis_vrfy.py create mode 100644 utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py create mode 100644 utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job diff --git a/scripts/exgdas_global_marine_analysis_vrfy.py b/scripts/exgdas_global_marine_analysis_vrfy.py old mode 100755 new mode 100644 diff --git a/ush/eva/marine_eva_post.py b/ush/eva/marine_eva_post.py index a355621a1..b537ddb3a 100755 --- a/ush/eva/marine_eva_post.py +++ b/ush/eva/marine_eva_post.py @@ -12,7 +12,9 @@ vminmax = {'seaSurfaceTemperature': {'vmin': -2.0, 'vmax': 2.0}, 'seaIceFraction': {'vmin': -0.2, 'vmax': 0.2}, 'seaSurfaceSalinity': {'vmin': -0.2, 'vmax': 0.2}, # TODO: this should be changed - 'absoluteDynamicTopography': {'vmin': -0.2, 'vmax': 0.2}} + 'absoluteDynamicTopography': {'vmin': -0.2, 'vmax': 0.2}, + 'waterTemperature': {'vmin': -2.0, 'vmax': 2.0}, 
+ 'salinity': {'vmin': -0.2, 'vmax': 0.2}} def marine_eva_post(inputyaml, outputdir, diagdir): diff --git a/ush/eva/marine_gdas_plots.yaml b/ush/eva/marine_gdas_plots.yaml index 5bedd1f69..0a903d0c4 100644 --- a/ush/eva/marine_gdas_plots.yaml +++ b/ush/eva/marine_gdas_plots.yaml @@ -73,7 +73,7 @@ graphics: data variable: experiment::OmBQC::${variable} figure: layout: [1,1] - figure size: [11,5] + figure size: [20,10] title: 'OmB post QC | @NAME@ @CYCLE@ | ${variable_title}' output name: map_plots/@NAME@/${variable}/@CHANNELVAR@/@NAME@_${variable}@CHANNELVAR@OmBQC.png tight_layout: true @@ -94,11 +94,11 @@ graphics: data: variable: experiment::OmBQC::${variable} @CHANNELKEY@ - markersize: 1 + markersize: 0.01 label: '$(variable)' colorbar: true # below may need to be edited/removed - cmap: ${dynamic_cmap} + cmap: 'seismic' vmin: ${dynamic_vmin} vmax: ${dynamic_vmax} diff --git a/ush/soca/soca_vrfy.py b/ush/soca/soca_vrfy.py index 854d7ab69..a4060fecd 100755 --- a/ush/soca/soca_vrfy.py +++ b/ush/soca/soca_vrfy.py @@ -38,6 +38,18 @@ def plotConfig(grid_file=[], proj='set me', projs=['Global']): + # Map variable names to their units + variable_units = { + 'ave_ssh': 'meter', + 'Temp': 'deg C', + 'Salt': 'psu', + 'aice_h': 'meter', + 'hi_h': 'meter', + 'hs_h': 'meter', + 'u': 'm/s', + 'v': 'm/s' + } + """ Prepares the configuration for the plotting functions below """ @@ -64,6 +76,9 @@ def plotConfig(grid_file=[], config['variable'] = variable # the variable currently plotted config['projs'] = projs # all the projections etc. 
config['proj'] = proj + + # Add units to the config for each variable + config['variable_units'] = variable_units return config @@ -78,6 +93,7 @@ def plotHorizontalSlice(config): os.makedirs(dirname, exist_ok=True) variable = config['variable'] + unit = config['variable_units'].get(config['variable'], 'unknown') exp = config['exp'] PDY = config['PDY'] cyc = config['cyc'] @@ -85,12 +101,12 @@ def plotHorizontalSlice(config): if variable in ['Temp', 'Salt', 'u', 'v']: level = config['levels'][0] slice_data = np.squeeze(data[variable])[level, :, :] - label_colorbar = variable + ' Level ' + str(level) + label_colorbar = f"{variable} ({unit}) Level {level}" figname = os.path.join(dirname, variable + '_Level_' + str(level)) title = f"{exp} {PDY} {cyc} {variable} Level {level}" else: slice_data = np.squeeze(data[variable]) - label_colorbar = variable + label_colorbar = f"{variable} ({unit})" figname = os.path.join(dirname, variable + '_' + config['proj']) title = f"{exp} {PDY} {cyc} {variable}" @@ -99,17 +115,17 @@ def plotHorizontalSlice(config): fig, ax = plt.subplots(figsize=(8, 5), subplot_kw={'projection': projs[config['proj']]}) - # Plot the filled contours - contourf_plot = ax.contourf(np.squeeze(grid.lon), + # Use pcolor to plot the data + pcolor_plot = ax.pcolormesh(np.squeeze(grid.lon), np.squeeze(grid.lat), slice_data, - levels=100, vmin=bounds[0], vmax=bounds[1], transform=ccrs.PlateCarree(), - cmap=config['colormap']) + cmap=config['colormap'], + zorder=0) # Add colorbar for filled contours - cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.75, orientation='horizontal') + cbar = fig.colorbar(pcolor_plot, ax=ax, shrink=0.75, orientation='horizontal') cbar.set_label(label_colorbar) # Add contour lines with specified linewidths @@ -120,16 +136,20 @@ def plotHorizontalSlice(config): levels=contour_levels, colors='black', linewidths=0.1, - transform=ccrs.PlateCarree()) + transform=ccrs.PlateCarree(), + zorder=2) - ax.coastlines() # TODO: make this work on hpc + 
try: + ax.coastlines() # TODO: make this work on hpc + except Exception as e: + print(f"Warning: could not add coastlines. {e}") ax.set_title(title) if config['proj'] == 'South': ax.set_extent([-180, 180, -90, -50], ccrs.PlateCarree()) if config['proj'] == 'North': ax.set_extent([-180, 180, 50, 90], ccrs.PlateCarree()) # ax.add_feature(cartopy.feature.LAND) # TODO: make this work on hpc - plt.savefig(figname, bbox_inches='tight', dpi=600) + plt.savefig(figname, bbox_inches='tight', dpi=300) plt.close(fig) @@ -138,6 +158,7 @@ def plotZonalSlice(config): Contourf of a zonal slice of an ocean field """ variable = config['variable'] + unit = config['variable_units'].get(config['variable'], 'unknown') exp = config['exp'] PDY = config['PDY'] cyc = config['cyc'] @@ -171,7 +192,7 @@ def plotZonalSlice(config): # Add colorbar for filled contours cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.5, orientation='horizontal') - cbar.set_label(variable + ' Lat ' + str(lat)) + cbar.set_label(f"{config['variable']} ({unit}) Lat {lat}") # Set the colorbar ticks cbar.set_ticks(contour_levels) @@ -184,7 +205,7 @@ def plotZonalSlice(config): os.makedirs(dirname, exist_ok=True) figname = os.path.join(dirname, config['variable'] + 'zonal_lat_' + str(int(lat)) + '_' + str(int(config['max depth'])) + 'm') - plt.savefig(figname, bbox_inches='tight', dpi=600) + plt.savefig(figname, bbox_inches='tight', dpi=300) plt.close(fig) @@ -193,6 +214,7 @@ def plotMeridionalSlice(config): Contourf of a Meridional slice of an ocean field """ variable = config['variable'] + unit = config['variable_units'].get(config['variable'], 'unknown') exp = config['exp'] PDY = config['PDY'] cyc = config['cyc'] @@ -226,7 +248,7 @@ def plotMeridionalSlice(config): # Add colorbar for filled contours cbar = fig.colorbar(contourf_plot, ax=ax, shrink=0.5, orientation='horizontal') - cbar.set_label(variable + ' Lon ' + str(lon)) + cbar.set_label(f"{config['variable']} ({unit}) Lon {lon}") # Set the colorbar ticks 
cbar.set_ticks(contour_levels) @@ -239,7 +261,7 @@ def plotMeridionalSlice(config): os.makedirs(dirname, exist_ok=True) figname = os.path.join(dirname, config['variable'] + 'meridional_lon_' + str(int(lon)) + '_' + str(int(config['max depth'])) + 'm') - plt.savefig(figname, bbox_inches='tight', dpi=600) + plt.savefig(figname, bbox_inches='tight', dpi=300) plt.close(fig) diff --git a/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py b/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py new file mode 100644 index 000000000..7c8efd0a6 --- /dev/null +++ b/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py @@ -0,0 +1,210 @@ +import os +import numpy as np +import gen_eva_obs_yaml +import marine_eva_post +import diag_statistics +from multiprocessing import Process +from soca_vrfy import statePlotter, plotConfig +import subprocess + +comout = os.getenv('COM_OCEAN_ANALYSIS') +com_ice_history = os.getenv('COM_ICE_HISTORY_PREV') +com_ocean_history = os.getenv('COM_OCEAN_HISTORY_PREV') +cyc = os.getenv('cyc') +RUN = os.getenv('RUN') + +bcyc = str((int(cyc) - 3) % 24).zfill(2) +gcyc = str((int(cyc) - 6) % 24).zfill(2) +grid_file = os.path.join(comout, f'{RUN}.t'+bcyc+'z.ocngrid.nc') +layer_file = os.path.join(comout, f'{RUN}.t'+cyc+'z.ocninc.nc') + +# for eva +diagdir = os.path.join(comout, 'diags') +HOMEgfs = os.getenv('HOMEgfs') + +# Get flags from environment variables (set in the bash driver) +run_ensemble_analysis = os.getenv('RUN_ENSENBLE_ANALYSIS', 'OFF').upper() == 'ON' +run_bkgerr_analysis = os.getenv('RUN_BACKGROUND_ERROR_ANALYSIS', 'OFF').upper() == 'ON' +run_bkg_analysis = os.getenv('RUN_BACKGROUND_ANALYSIS', 'OFF').upper() == 'ON' +run_increment_analysis = os.getenv('RUN_INCREMENT_ANLYSIS', 'OFF').upper() == 'ON' + +# Initialize an empty list for the main config +configs = [plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ocnana.nc'), + variables_horiz={'ave_ssh': [-1.8, 1.3], + 
'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + colormap='nipy_spectral', + comout=os.path.join(comout, 'vrfy', 'ana')), # ocean surface analysis + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.iceana.nc'), + variables_horiz={'aice_h': [0.0, 1.0], + 'hi_h': [0.0, 4.0], + 'hs_h': [0.0, 0.5]}, + colormap='jet', + projs=['North', 'South', 'Global'], + comout=os.path.join(comout, 'vrfy', 'ana'))] # sea ice analysis + +# Define each config and add to main_config if its flag is True +if run_ensemble_analysis: + config_ens = [plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.recentering_error.nc'), + variables_horiz={'ave_ssh': [-1, 1]}, + colormap='seismic', + comout=os.path.join(comout, 'vrfy', 'recentering_error')), # recentering error + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_steric_stddev.nc'), + variables_horiz={'ave_ssh': [0, 0.8]}, + colormap='gist_ncar', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_steric_stddev')), # ssh steric stddev + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_unbal_stddev.nc'), + variables_horiz={'ave_ssh': [0, 0.8]}, + colormap='gist_ncar', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_unbal_stddev')), # ssh unbal stddev + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.ssh_total_stddev.nc'), + variables_horiz={'ave_ssh': [0, 0.8]}, + colormap='gist_ncar', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'ssh_total_stddev')), # ssh total stddev + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t{cyc}z.ocn.steric_explained_variance.nc'), + variables_horiz={'ave_ssh': [0, 1]}, + colormap='seismic', + comout=os.path.join(comout, 'vrfy', 'bkgerr', 'steric_explained_variance'))] # steric explained variance + configs.extend(config_ens) + +if run_bkgerr_analysis: + config_bkgerr = [plotConfig(grid_file=grid_file, 
+ layer_file=layer_file, + data_file=os.path.join(comout, os.path.pardir, os.path.pardir, + 'bmatrix', 'ocean', f'{RUN}.t'+cyc+'z.ocean.bkgerr_stddev.nc'), + lats=np.arange(-60, 60, 10), + lons=np.arange(-280, 80, 30), + variables_zonal={'Temp': [0, 2], + 'Salt': [0, 0.2], + 'u': [0, 0.2], + 'v': [0, 0.2]}, + variables_meridional={'Temp': [0, 2], + 'Salt': [0, 0.2], + 'u': [0, 0.2], + 'v': [0, 0.2]}, + variables_horiz={'Temp': [0, 2], + 'Salt': [0, 0.2], + 'u': [0, 0.2], + 'v': [0, 0.2], + 'ave_ssh': [0, 0.1]}, + colormap='jet', + comout=os.path.join(comout, 'vrfy', 'bkgerr'))] # ocn bkgerr stddev + configs.extend(config_bkgerr) + +if run_bkg_analysis: + config_bkg = [plotConfig(grid_file=grid_file, + data_file=os.path.join(com_ice_history, f'{RUN}.ice.t{gcyc}z.inst.f006.nc'), + variables_horiz={'aice_h': [0.0, 1.0], + 'hi_h': [0.0, 4.0], + 'hs_h': [0.0, 0.5]}, + colormap='jet', + projs=['North', 'South', 'Global'], + comout=os.path.join(comout, 'vrfy', 'bkg')), # sea ice background + plotConfig(grid_file=grid_file, + layer_file=layer_file, + data_file=os.path.join(com_ocean_history, f'{RUN}.ocean.t{gcyc}z.inst.f006.nc'), + lats=np.arange(-60, 60, 10), + lons=np.arange(-280, 80, 30), + variables_zonal={'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + variables_meridional={'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + variables_horiz={'ave_ssh': [-1.8, 1.3], + 'Temp': [-1.8, 34.0], + 'Salt': [32, 40]}, + colormap='nipy_spectral', + comout=os.path.join(comout, 'vrfy', 'bkg'))] + configs.extend(config_bkg) + +if run_increment_analysis: + config_incr = [plotConfig(grid_file=grid_file, + layer_file=layer_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ocninc.nc'), + lats=np.arange(-60, 60, 10), + lons=np.arange(-280, 80, 30), + variables_zonal={'Temp': [-0.5, 0.5], + 'Salt': [-0.1, 0.1]}, + variables_horiz={'Temp': [-0.5, 0.5], + 'Salt': [-0.1, 0.1], + 'ave_ssh': [-0.1, 0.1]}, + variables_meridional={'Temp': [-0.5, 0.5], + 'Salt': [-0.1, 0.1]}, + colormap='seismic', 
+ comout=os.path.join(comout, 'vrfy', 'incr')), # ocean increment + plotConfig(grid_file=grid_file, + data_file=os.path.join(comout, f'{RUN}.t'+cyc+'z.ice.incr.nc'), + lats=np.arange(-60, 60, 10), + variables_horiz={'aice_h': [-0.2, 0.2], + 'hi_h': [-0.5, 0.5], + 'hs_h': [-0.1, 0.1]}, + colormap='seismic', + projs=['North', 'South'], + comout=os.path.join(comout, 'vrfy', 'incr'))] # sea ice increment + configs.extend(config_incr) + + +# plot marine analysis vrfy + +def plot_marine_vrfy(config): + ocnvrfyPlotter = statePlotter(config) + ocnvrfyPlotter.plot() + + +# Number of processes +num_processes = len(configs) + +# Create a list to store the processes +processes = [] + +# Iterate over configs +for config in configs[:num_processes]: + process = Process(target=plot_marine_vrfy, args=(config,)) + process.start() + processes.append(process) + +# Wait for all processes to finish +for process in processes: + process.join() + +####################################### +# eva plots +####################################### + +evadir = os.path.join(HOMEgfs, 'sorc', f'{RUN}.cd', 'ush', 'eva') +marinetemplate = os.path.join(evadir, 'marine_gdas_plots.yaml') +varyaml = os.path.join(comout, 'yaml', 'var_original.yaml') + +# it would be better to refrence the dirs explicitly with the comout path +# but eva doesn't allow for specifying output directories +os.chdir(os.path.join(comout, 'vrfy')) +if not os.path.exists('preevayamls'): + os.makedirs('preevayamls') +if not os.path.exists('evayamls'): + os.makedirs('evayamls') + +gen_eva_obs_yaml.gen_eva_obs_yaml(varyaml, marinetemplate, 'preevayamls') + +files = os.listdir('preevayamls') +for file in files: + infile = os.path.join('preevayamls', file) + marine_eva_post.marine_eva_post(infile, 'evayamls', diagdir) + +files = os.listdir('evayamls') +for file in files: + infile = os.path.join('evayamls', file) + print('running eva on', infile) + subprocess.run(['eva', infile], check=True) + +####################################### +# 
calculate diag statistics +####################################### + +# As of 11/12/2024 not working +# diag_statistics.get_diag_stats() diff --git a/utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job b/utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job new file mode 100644 index 000000000..38ce48ffc --- /dev/null +++ b/utils/soca/fig_gallery/run_marine_analysis_vrfy_manual.job @@ -0,0 +1,45 @@ +#!/bin/bash +#SBATCH --job-name=marine_vrfy # Assign a name to the job (customize as needed) +#SBATCH --account=da-cpu +#SBATCH --qos=debug +#SBATCH -A da-cpu +#SBATCH --output=run_marine_vrfy_analysis.out +#SBATCH --nodes=1 # Request 1 node +#SBATCH --ntasks=40 # Request 40 total tasks (processors across nodes) +#SBATCH --partition=hera # Specify the partition (cluster) named "hera" +#SBATCH --cpus-per-task=1 # Set 1 CPU per task (equivalent to ppn=40 and tpp=1) +#SBATCH --mem=24GB # Request 24GB of memory +#SBATCH --time=00:30:00 # Set the walltime limit to 30 minutes + +# Define HOMEgfs +export HOMEgfs="/scratch1/NCEPDEV/da/Mindo.Choi/workflow_11122024/global-workflow/" + +# Load EVA module +module use ${HOMEgfs}sorc/gdas.cd/modulefiles +module load EVA/hera + +# Set PYTHONPATH using HOMEgfs +export PYTHONPATH="${HOMEgfs}sorc/gdas.cd/ush/:\ +${HOMEgfs}sorc/gdas.cd/ush/eva/:\ +${HOMEgfs}sorc/gdas.cd/ush/soca/:\ +$PYTHONPATH" + +# Set flags to control plotConfig in the Python script +export RUN_ENSENBLE_ANALYSIS=OFF # Check if ensemble run is ON +export RUN_BACKGROUND_ERROR_ANALYSIS=ON +export RUN_BACKGROUND_ANALYSIS=ON +export RUN_INCREMENT_ANLYSIS=ON + +# Define and export the environment variables +export cyc="00" +export RUN="gdas" +export PSLOT="gdas_test" +export PDY="20210827" + +# Define and export environment variables with paths +export COM_OCEAN_ANALYSIS="/scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210827/00/analysis/ocean" +export 
COM_ICE_HISTORY_PREV="/scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210826/18/model/ice/history"
+export COM_OCEAN_HISTORY_PREV="/scratch1/NCEPDEV/da/Mindo.Choi/sandbox/marine_vrfy/gdas.20210826/18/model/ocean/history"
+
+# Execute Marine Verify Analysis
+python3 ${HOMEgfs}sorc/gdas.cd/utils/soca/fig_gallery/exgdas_global_marine_analysis_vrfy_manual.py