---
#
# Main stack definition for arvine
#
spack_release: v0.15.4
site: scitas
stack_release: "arvine"
stack_version: "v1"
spack_root: /ssoft/spack
spack_external: external
# List of the environments to be managed
environments:
  # - "fidis"
  # - "gacrux"
  # - "helvetios" == "gacrux"
  - "izar"
  # - "gcp-c2"
mirrors:
  local: spack-mirror
  restricted: spack-mirror-restricted
extra_repos:
  scitas-external:
    repo: https://github.com/epfl-scitas/spack-repo-externals.git
    path: scitas-repos-externals/
    tag: releases/arvine
  scitas-spack-packages:
    repo: https://github.com/epfl-scitas/scitas-spack-packages.git
    path: scitas-spack-packages/
    tag: releases/arvine
# default version of compilers mpi and openblas
default_environment:
  os: rhel7.7
  cpu: intel
  slurm: 20.02.5
  python:
    3: 3.7.7
    2: 2.7.18
    variant:
      3: +optimizations+tkinter~debug
      2: +tkinter~debug
  # compiler contains the arch since it is the highest arch this compiler can
  # compile for
  # NOTE(review): the "name@version" specs in this section were mangled by a
  # scraper's email obfuscation; versions below are restored from the
  # surrounding comments and the intel suite versions -- confirm upstream
  core_compiler: gcc@4.8.5 arch=linux-rhel7-haswell
  compilers: [gcc, intel]
  stack_types: [stable, bleeding_edge]
  stable:
    # intel 19.0 is the highest supported by cuda 10.2.89
    intel:
      compiler: "intel@19.0.5"
      mpi: "intel-mpi@2019.5.281"
      blas: "intel-mkl@2019.5.281"
      suite_version: '2019.5.281'
    # gcc@8.4.0 highest supported by cuda 10.2.89 important for izar
    gcc:
      compiler: "gcc@8.4.0"
      mpi: [ "mvapich2 process_managers=slurm fabrics=mrail threads=multiple" ]
      blas: [ "openblas threads=none" ]
  bleeding_edge:
    gcc:
      # gcc 9 is the highest supported by mvapich2@2.3.4 the current highest
      # version of mvapich2 a stack with gcc 10.2.0 will need a different mpi
      # cuda 11 does not support sm_30 anymore, gcc+nvptx need to be patched
      # and might be unstable
      compiler: "gcc@9.3.0"
      mpi: [ "mvapich2 process_managers=slurm fabrics=mrail threads=multiple" ]
      blas: [ "openblas threads=none" ]
    intel:
      # intel 19.1.1 is already considered an intel 20 hence the mixed versions
      compiler: "intel@19.1.1"
      mpi: "intel-mpi@2019.7.217"
      blas: "intel-mkl@2020.1.217"
      suite_version: '2020.1.217'
izar:
  gpu: nvidia
  compilers: [gcc, intel]
  stable:
    gcc:
      # NOTE(review): specs here were mangled by a scraper's email
      # obfuscation; gcc 8.4.0 / cuda 10.2.89 are restored from the comments
      # in this file, the cuda 11.x and nvhpc versions are best guesses for
      # the spack v0.15.4 era -- confirm upstream
      compiler: "gcc@8.4.0+nvptx+piclibs"
      mpi:
        - "mvapich2 process_managers=slurm fabrics=mrail threads=multiple"
      arch: 'skylake_avx512'
      cuda:
        package: "cuda@10.2.89"
        arch: "sm_70"
  bleeding_edge:
    gcc:
      compiler: "gcc@9.3.0+nvptx+piclibs"
      mpi:
        - "mvapich2 process_managers=slurm fabrics=mrail threads=multiple"
      cuda:
        package: "cuda@11.0.2"
        arch: "sm_70"
  # cannot use it as a compiler
  # nvhpc:
  #   compiler: "nvhpc@20.9+blas+lapack+mpi"
  #   blas: "nvhpc@20.9+blas+lapack+mpi"
  #   mpi: "nvhpc@20.9+blas+lapack+mpi"
gcp-c2:
  os: centos7.9
  slurm: 20.02.6
  compilers: [gcc]
  # NOTE(review): spec was mangled by a scraper's email obfuscation; the
  # centos7 system compiler is gcc 4.8.5 -- confirm upstream
  core_compiler: gcc@4.8.5 arch=linux-centos7-haswell
  cloud: gcp
  stable:
    gcc:
      mpi:
        - "mvapich2 process_managers=slurm fabrics=sock threads=multiple"
        - "mvapich2 process_managers=slurm fabrics=sock threads=multiple file_systems=nfs"
        - "mpich+slurm+verbs device=ch4 netmod=ofi pmi=pmi2"
        - "mpich+slurm pmi=pmi2"
        - "openmpi +thread_multiple schedulers=slurm fabrics=auto +pmi"
  bleeding_edge:
    gcc:
      mpi:
        - "mvapich2 process_managers=slurm fabrics=sock threads=multiple"
gacrux:
  os: rhel7.6
  slurm: 19.05.3-2
fidis:
  os: rhel7.6
  slurm: 19.05.3-2