Update Aurora file paths
amametjanov committed Dec 12, 2023
1 parent 7fdab8f commit 8f6720f
Showing 2 changed files with 16 additions and 30 deletions.
6 changes: 3 additions & 3 deletions cime_config/machines/config_batch.xml
@@ -551,10 +551,10 @@
</batch_system>

<batch_system MACH="aurora" type="pbspro">
<batch_submit>/lus/gecko/projects/CSC249ADSE15_auroratmp_CNDA/tools/qsub/throttle</batch_submit>
<batch_submit>/lus/gecko/projects/CSC249ADSE15_CNDA/tools/qsub/throttle</batch_submit>
<queues>
<queue walltimemax="00:59:00" jobmin="1" jobmax="3000" default="true">workq-route</queue>
<queue walltimemax="00:59:00" jobmin="1" jobmax="3000">workq</queue>
<queue walltimemax="00:59:00" jobmin="1" jobmax="10624" default="true">workq-route</queue>
<queue walltimemax="00:59:00" jobmin="1" jobmax="10624">workq</queue>
<queue walltimemax="00:59:00" jobmin="1" jobmax="128">diag</queue>
</queues>
</batch_system>
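For orientation, a minimal sketch of a PBS Pro job script consistent with the queue definition above; the node count, the script body, and the assumption that the throttle wrapper forwards qsub-style arguments are illustrative, while the queue name, walltime ceiling, and submit path come from this file (the -A account is the CHARGE_ACCOUNT set in config_machines.xml below).

    #!/bin/bash
    # Hypothetical job script; the select count and payload are placeholders.
    #PBS -q workq
    #PBS -l walltime=00:59:00
    #PBS -l select=8
    #PBS -A CSC249ADSE15_CNDA
    cd "$PBS_O_WORKDIR"
    # ... application launch goes here ...

Submission would then go through the wrapper rather than plain qsub:

    /lus/gecko/projects/CSC249ADSE15_CNDA/tools/qsub/throttle job.sh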
40 changes: 13 additions & 27 deletions cime_config/machines/config_machines.xml
@@ -3073,13 +3073,14 @@
</resource_limits>
</machine>

<machine MACH="aurora">
<DESC>ANL Aurora ECP machine, Aurora consists of multiple racks, each with many nodes, for a total of over 10,000 nodes, batch system is pbspro</DESC>
<NODENAME_REGEX>aurora-uan-.*</NODENAME_REGEX> <OS>LINUX</OS>
<COMPILERS>oneapi-ifx,oneapi-ifxgpu,oneapi-ifort,gnu</COMPILERS>
<MPILIBS>mpich,impi,openmpi</MPILIBS>
<machine MACH="aurora">
<DESC>ALCF Aurora, 10624 nodes, 2x52c SPR, 6x2s PVC, 2x512GB DDR5, 2x64GB CPU-HBM, 6x128GB GPU-HBM, Slingshot 11, PBSPro</DESC>
<NODENAME_REGEX>aurora-uan-.*</NODENAME_REGEX>
<OS>LINUX</OS>
<COMPILERS>oneapi-ifx,oneapi-ifxgpu,gnu</COMPILERS>
<MPILIBS>mpich</MPILIBS>
<CHARGE_ACCOUNT>CSC249ADSE15_CNDA</CHARGE_ACCOUNT>
<SAVE_TIMING_DIR>/lus/gecko/CSC249ADSE15_CNDA/performance_archive</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR>/lus/gecko/projects/CSC249ADSE15_CNDA/performance_archive</SAVE_TIMING_DIR>
<SAVE_TIMING_DIR_PROJECTS>.*</SAVE_TIMING_DIR_PROJECTS>
<CIME_OUTPUT_ROOT>/lus/gecko/projects/CSC249ADSE15_CNDA/$USER/scratch</CIME_OUTPUT_ROOT>
<DIN_LOC_ROOT>/lus/gecko/projects/CSC249ADSE15_CNDA/inputdata</DIN_LOC_ROOT>
@@ -3093,10 +3094,8 @@
<BATCH_SYSTEM>pbspro</BATCH_SYSTEM>
<SUPPORTED_BY>e3sm</SUPPORTED_BY>
<MAX_TASKS_PER_NODE>208</MAX_TASKS_PER_NODE>
<MAX_TASKS_PER_NODE compiler="oneapi-ifx">208</MAX_TASKS_PER_NODE>
<MAX_TASKS_PER_NODE compiler="oneapi-ifxgpu">104</MAX_TASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE>104</MAX_MPITASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE compiler="oneapi-ifx">104</MAX_MPITASKS_PER_NODE>
<MAX_MPITASKS_PER_NODE compiler="oneapi-ifxgpu">12</MAX_MPITASKS_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<mpirun mpilib="default">
@@ -3110,17 +3109,10 @@
<arg name="gpu_maps">$ENV{GPU_TILE_COMPACT}</arg>
</arguments>
</mpirun>
<mpirun mpilib="openmpi">
<executable>mpirun</executable>
<arguments>
<arg name="num_tasks">--tag-output -n {{ total_tasks }}</arg>
<arg name="tasks_per_node"> --map-by ppr:{{ tasks_per_numa }}:socket:PE=$ENV{OMP_NUM_THREADS} --bind-to hwthread</arg>
</arguments>
</mpirun>
<module_system type="module" allow_error="true">
<init_path lang="sh">$ENV{HOME}/lmod.sh</init_path>
<init_path lang="csh">/soft/sunspot_migrate/soft/packaging/lmod/lmod/csh</init_path>
<init_path lang="python">/soft/sunspot_migrate/soft/packaging/lmod/lmod/env_modules_python.py</init_path>
<init_path lang="sh">/lus/gecko/projects/CSC249ADSE15_CNDA/modules/lmod.sh</init_path>
<init_path lang="csh">/soft/sunspot_migrate/soft/packaging/lmod/lmod/init/csh</init_path>
<init_path lang="python">/soft/sunspot_migrate/soft/packaging/lmod/lmod/init/env_modules_python.py</init_path>
<cmd_path lang="sh">module</cmd_path>
<cmd_path lang="csh">module</cmd_path>
<cmd_path lang="python">/soft/sunspot_migrate/soft/packaging/lmod/lmod/libexec/lmod python</cmd_path>
@@ -3139,8 +3131,8 @@
</modules>
<modules>
<command name="load">cray-pals</command>
<!--command name="load">append-deps/default</command-->
<command name="load">libfabric/1.15.2.0</command>
<command name="load">cray-libpals/1.3.2</command>
</modules>
</module_system>
<RUNDIR>$CIME_OUTPUT_ROOT/$CASE/run</RUNDIR>
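The module_system block above moves the sh initializer to the project-owned lmod.sh while the csh and python initializers stay on the sunspot_migrate Lmod install. A minimal shell sketch of how those settings are exercised interactively; the module names are the ones this machine entry loads, the rest is standard Lmod usage:

    # Initialize Lmod from the <init_path lang="sh"> location above,
    # then load the same modules this machine entry loads.
    source /lus/gecko/projects/CSC249ADSE15_CNDA/modules/lmod.sh
    module load cray-pals
    module load libfabric/1.15.2.0
    module load cray-libpals/1.3.2
    module list   # confirm what ended up in the environment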
@@ -3154,15 +3146,9 @@
<env name="LD_LIBRARY_PATH">/lus/gecko/projects/CSC249ADSE15_auroratmp_CNDA/software/oneAPI.2022.12.30.003/netcdf/lib:/lus/gecko/projects/CSC249ADSE15_auroratmp_CNDA/software/oneAPI.2022.12.30.003/pnetcdf/lib:/lus/gecko/projects/CSC249ADSE15_auroratmp_CNDA/software/oneAPI.2022.12.30.003/hdf5/lib:$ENV{LD_LIBRARY_PATH}</env>
<env name="RANKS_BIND">list:0-7,104-111:8-15,112-119:16-23,120-127:24-31,128-135:32-39,136-143:40-47,144-151:52-59,156-163:60-67,164-171:68-75,172-179:76-83,180-187:84-91,188-195:92-99,196-203</env>
</environment_variables>
<environment_variables mpilib="mpich" DEBUG="TRUE">
<environment_variables DEBUG="TRUE">
<env name="HYDRA_TOPO_DEBUG">1</env>
</environment_variables>
<environment_variables mpilib="impi">
<env name="I_MPI_DEBUG">10</env>
<env name="I_MPI_PIN_DOMAIN">omp</env>
<env name="I_MPI_PIN_ORDER">spread</env>
<env name="I_MPI_PIN_CELL">unit</env>
</environment_variables>
<environment_variables compiler="oneapi-ifxgpu">
<env name="ONEAPI_DEVICE_SELECTOR">level_zero:gpu</env>
<env name="ONEAPI_MPICH_GPU">NO_GPU</env>
@@ -3196,7 +3182,7 @@
<resource name="RLIMIT_STACK">-1</resource>
</resource_limits>
</machine>
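As a cross-check on the per-node limits in the Aurora entry above: the DESC hardware of 2x52-core SPR sockets gives 104 physical cores per node (MAX_MPITASKS_PER_NODE) and, at two hardware threads per core, 208 hardware threads (MAX_TASKS_PER_NODE); 6 PVC GPUs with 2 stacks each give 12 tiles, matching the 12 MPI ranks per node set for oneapi-ifxgpu and the 12 core groups listed in RANKS_BIND.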

<machine MACH="sooty">
<DESC>PNL cluster, OS is Linux, batch system is SLURM</DESC>
<NODENAME_REGEX>sooty</NODENAME_REGEX>
