diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml
index db64ca94d5e6a0..fd863b6abb8726 100644
--- a/.github/workflows/bootstrap.yml
+++ b/.github/workflows/bootstrap.yml
@@ -159,6 +159,9 @@ jobs:
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+ - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+ with:
+ python-version: "3.12"
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml
index e429dabc5284de..b09c959198eaf8 100644
--- a/.github/workflows/build-containers.yml
+++ b/.github/workflows/build-containers.yml
@@ -38,12 +38,11 @@ jobs:
# Meaning of the various items in the matrix list
# 0: Container name (e.g. ubuntu-bionic)
# 1: Platforms to build for
- # 2: Base image (e.g. ubuntu:18.04)
+ # 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
- [ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
@@ -58,18 +57,20 @@ jobs:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- - name: Set Container Tag Normal (Nightly)
- run: |
- container="${{ matrix.dockerfile[0] }}:latest"
- echo "container=${container}" >> $GITHUB_ENV
- echo "versioned=${container}" >> $GITHUB_ENV
-
- # On a new release create a container with the same tag as the release.
- - name: Set Container Tag on Release
- if: github.event_name == 'release'
- run: |
- versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
- echo "versioned=${versioned}" >> $GITHUB_ENV
+ - uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
+ id: docker_meta
+ with:
+ images: |
+ ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
+ ${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
+ tags: |
+ type=schedule,pattern=nightly
+ type=schedule,pattern=develop
+ type=semver,pattern={{version}}
+ type=semver,pattern={{major}}.{{minor}}
+ type=semver,pattern={{major}}
+ type=ref,event=branch
+ type=ref,event=pr
- name: Generate the Dockerfile
env:
@@ -92,13 +93,13 @@ jobs:
path: dockerfiles
- name: Set up QEMU
- uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
- name: Log in to GitHub Container Registry
- uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -106,21 +107,18 @@ jobs:
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
- uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
- uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # @v2
+ uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
- tags: |
- spack/${{ env.container }}
- spack/${{ env.versioned }}
- ghcr.io/spack/${{ env.container }}
- ghcr.io/spack/${{ env.versioned }}
+ tags: ${{ steps.docker_meta.outputs.tags }}
+ labels: ${{ steps.docker_meta.outputs.labels }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7cfdf00e67d3bb..9ec04198b22912 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,320 @@
+# v0.21.0 (2023-11-11)
+
+`v0.21.0` is a major feature release.
+
+## Features in this release
+
+1. **Better error messages with condition chaining**
+
+ In v0.18, we added better error messages that could tell you what problem happened,
+ but they couldn't tell you *why* it happened. `0.21` adds *condition chaining* to the
+ solver, and Spack can now trace back through the conditions that led to an error and
+   build a tree of potential causes and where they came from. For example:
+
+ ```console
+ $ spack solve hdf5 ^cmake@3.0.1
+ ==> Error: concretization failed for the following reasons:
+
+ 1. Cannot satisfy 'cmake@3.0.1'
+ 2. Cannot satisfy 'cmake@3.0.1'
+ required because hdf5 ^cmake@3.0.1 requested from CLI
+ 3. Cannot satisfy 'cmake@3.18:' and 'cmake@3.0.1
+ required because hdf5 ^cmake@3.0.1 requested from CLI
+ required because hdf5 depends on cmake@3.18: when @1.13:
+ required because hdf5 ^cmake@3.0.1 requested from CLI
+ 4. Cannot satisfy 'cmake@3.12:' and 'cmake@3.0.1
+ required because hdf5 depends on cmake@3.12:
+ required because hdf5 ^cmake@3.0.1 requested from CLI
+ required because hdf5 ^cmake@3.0.1 requested from CLI
+ ```
+
+ More details in #40173.
+
+2. **OCI build caches**
+
+ You can now use an arbitrary [OCI](https://opencontainers.org) registry as a build
+ cache:
+
+ ```console
+ $ spack mirror add my_registry oci://user/image # Dockerhub
+ $ spack mirror add my_registry oci://ghcr.io/haampie/spack-test # GHCR
+ $ spack mirror set --push --oci-username ... --oci-password ... my_registry # set login creds
+ $ spack buildcache push my_registry [specs...]
+ ```
+
+ And you can optionally add a base image to get *runnable* images:
+
+ ```console
+ $ spack buildcache push --base-image ubuntu:23.04 my_registry python
+ Pushed ... as [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack
+
+ $ docker run --rm -it [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack
+ ```
+
+ This creates a container image from the Spack installations on the host system,
+ without the need to run `spack install` from a `Dockerfile` or `sif` file. It also
+ addresses the inconvenience of losing binaries of dependencies when `RUN spack
+ install` fails inside `docker build`.
+
+ Further, the container image layers and build cache tarballs are the same files. This
+ means that `spack install` and `docker pull` use the exact same underlying binaries.
+ If you previously used `spack install` inside of `docker build`, this feature helps
+   you save storage by a factor of two.
+
+ More details in #38358.
+
+3. **Multiple versions of build dependencies**
+
+ Increasingly, complex package builds require multiple versions of some build
+ dependencies. For example, Python packages frequently require very specific versions
+ of `setuptools`, `cython`, and sometimes different physics packages require different
+ versions of Python to build. The concretizer enforced that every solve was *unified*,
+ i.e., that there only be one version of every package. The concretizer now supports
+ "duplicate" nodes for *build dependencies*, but enforces unification through
+ transitive link and run dependencies. This will allow it to better resolve complex
+ dependency graphs in ecosystems like Python, and it also gets us very close to
+ modeling compilers as proper dependencies.
+
+ This change required a major overhaul of the concretizer, as well as a number of
+ performance optimizations. See #38447, #39621.
+
+4. **Cherry-picking virtual dependencies**
+
+ You can now select only a subset of virtual dependencies from a spec that may provide
+ more. For example, if you want `mpich` to be your `mpi` provider, you can be explicit
+ by writing:
+
+ ```
+ hdf5 ^[virtuals=mpi] mpich
+ ```
+
+ Or, if you want to use, e.g., `intel-parallel-studio` for `blas` along with an external
+ `lapack` like `openblas`, you could write:
+
+ ```
+ strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
+ ```
+
+ The `virtuals=mpi` is an edge attribute, and dependency edges in Spack graphs now
+ track which virtuals they satisfied. More details in #17229 and #35322.
+
+ Note for packaging: in Spack 0.21 `spec.satisfies("^virtual")` is true if and only if
+ the package specifies `depends_on("virtual")`. This is different from Spack 0.20,
+ where depending on a provider implied depending on the virtual provided. See #41002
+ for an example where `^mkl` was being used to test for several `mkl` providers in a
+ package that did not depend on `mkl`.
+
+5. **License directive**
+
+ Spack packages can now have license metadata, with the new `license()` directive:
+
+ ```python
+ license("Apache-2.0")
+ ```
+
+ Licenses use [SPDX identifiers](https://spdx.org/licenses), and you can use SPDX
+ expressions to combine them:
+
+ ```python
+ license("Apache-2.0 OR MIT")
+ ```
+
+ Like other directives in Spack, it's conditional, so you can handle complex cases like
+ Spack itself:
+
+ ```python
+ license("LGPL-2.1", when="@:0.11")
+ license("Apache-2.0 OR MIT", when="@0.12:")
+ ```
+
+ More details in #39346, #40598.
+
+6. **`spack deconcretize` command**
+
+ We are getting close to having a `spack update` command for environments, but we're
+ not quite there yet. This is the next best thing. `spack deconcretize` gives you
+ control over what you want to update in an already concrete environment. If you have
+ an environment built with, say, `meson`, and you want to update your `meson` version,
+ you can run:
+
+ ```console
+ spack deconcretize meson
+ ```
+
+ and have everything that depends on `meson` rebuilt the next time you run `spack
+ concretize`. In a future Spack version, we'll handle all of this in a single command,
+ but for now you can use this to drop bits of your lockfile and resolve your
+ dependencies again. More in #38803.
+
+7. **UI Improvements**
+
+ The venerable `spack info` command was looking shabby compared to the rest of Spack's
+ UI, so we reworked it to have a bit more flair. `spack info` now makes much better
+ use of terminal space and shows variants, their values, and their descriptions much
+ more clearly. Conditional variants are grouped separately so you can more easily
+ understand how packages are structured. More in #40998.
+
+ `spack checksum` now allows you to filter versions from your editor, or by version
+ range. It also notifies you about potential download URL changes. See #40403.
+
+8. **Environments can include definitions**
+
+   Spack did not previously support using `include:` with the
+ [definitions](https://spack.readthedocs.io/en/latest/environments.html#spec-list-references)
+ section of an environment, but now it does. You can use this to curate lists of specs
+ and more easily reuse them across environments. See #33960.
+
+9. **Aliases**
+
+ You can now add aliases to Spack commands in `config.yaml`, e.g. this might enshrine
+ your favorite args to `spack find` as `spack f`:
+
+ ```yaml
+ config:
+ aliases:
+ f: find -lv
+ ```
+
+ See #17229.
+
+10. **Improved autoloading of modules**
+
+ Spack 0.20 was the first release to enable autoloading of direct dependencies in
+ module files.
+
+ The downside of this was that `module avail` and `module load` tab completion would
+ show users too many modules to choose from, and many users disabled generating
+ modules for dependencies through `exclude_implicits: true`. Further, it was
+ necessary to keep hashes in module names to avoid file name clashes.
+
+ In this release, you can start using `hide_implicits: true` instead, which exposes
+ only explicitly installed packages to the user, while still autoloading
+ dependencies. On top of that, you can safely use `hash_length: 0`, as this config
+ now only applies to the modules exposed to the user -- you don't have to worry about
+ file name clashes for hidden dependencies.
+
+ Note: for `tcl` this feature requires Modules 4.7 or higher
+
+11. **Updated container labeling**
+
+ Nightly Docker images from the `develop` branch will now be tagged as `:develop` and
+ `:nightly`. The `:latest` tag is no longer associated with `:develop`, but with the
+ latest stable release. Releases will be tagged with `:{major}`, `:{major}.{minor}`
+ and `:{major}.{minor}.{patch}`. `ubuntu:18.04` has also been removed from the list of
+ generated Docker images, as it is no longer supported. See #40593.
+
+## Other new commands and directives
+
+* `spack env activate` without arguments now loads a `default` environment that you do
+ not have to create (#40756).
+* `spack find -H` / `--hashes`: a new shortcut for piping `spack find` output to
+ other commands (#38663)
+* Add `spack checksum --verify`, fix `--add` (#38458)
+* New `default_args` context manager factors out common args for directives (#39964)
+* `spack compiler find --[no]-mixed-toolchain` lets you easily mix `clang` and
+ `gfortran` on Linux (#40902)
+
+## Performance improvements
+
+* `spack external find` execution is now much faster (#39843)
+* `spack location -i` now much faster on success (#40898)
+* Drop redundant rpaths post install (#38976)
+* ASP-based solver: avoid cycles in clingo using hidden directive (#40720)
+* Fix multiple quadratic complexity issues in environments (#38771)
+
+## Other new features of note
+
+* archspec: update to v0.2.2, support for Sapphire Rapids, Power10, Neoverse V2 (#40917)
+* Propagate variants across nodes that don't have that variant (#38512)
+* Implement fish completion (#29549)
+* Can now distinguish between source/binary mirror; don't ping mirror.spack.io as much (#34523)
+* Improve status reporting on install (add [n/total] display) (#37903)
+
+## Windows
+
+This release has the best Windows support of any Spack release yet, with numerous
+improvements and much larger swaths of tests passing:
+
+* MSVC and SDK improvements (#37711, #37930, #38500, #39823, #39180)
+* Windows external finding: update default paths; treat .bat as executable on Windows (#39850)
+* Windows decompression: fix removal of intermediate file (#38958)
+* Windows: executable/path handling (#37762)
+* Windows build systems: use ninja and enable tests (#33589)
+* Windows testing (#36970, #36972, #36973, #36840, #36977, #36792, #36834, #34696, #36971)
+* Windows PowerShell support (#39118, #37951)
+* Windows symlinking and libraries (#39933, #38599, #34701, #38578, #34701)
+
+## Notable refactors
+* User-specified flags take precedence over others in Spack compiler wrappers (#37376)
+* Improve setup of build, run, and test environments (#35737, #40916)
+* `make` is no longer a required system dependency of Spack (#40380)
+* Support Python 3.12 (#40404, #40155, #40153)
+* docs: Replace package list with packages.spack.io (#40251)
+* Drop Python 2 constructs in Spack (#38720, #38718, #38703)
+
+## Binary cache and stack updates
+* e4s arm stack: duplicate and target neoverse v1 (#40369)
+* Add macOS ML CI stacks (#36586)
+* E4S Cray CI Stack (#37837)
+* e4s cray: expand spec list (#38947)
+* e4s cray sles ci: expand spec list (#39081)
+
+## Removals, deprecations, and syntax changes
+* ASP: targets, compilers and providers soft-preferences are only global (#31261)
+* Parser: fix ambiguity with whitespace in version ranges (#40344)
+* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
+* Remove deprecated "extra_instructions" option for containers (#40365)
+* Stand-alone test feature deprecation postponed to v0.22 (#40600)
+* buildcache push: make `--allow-root` the default and deprecate the option (#38878)
+
+## Notable Bugfixes
+* Bugfix: propagation of multivalued variants (#39833)
+* Allow `/` in git versions (#39398)
+* Fetch & patch: actually acquire stage lock, and many more issues (#38903)
+* Environment/depfile: better escaping of targets with Git versions (#37560)
+* Prevent "spack external find" to error out on wrong permissions (#38755)
+* lmod: allow core compiler to be specified with a version range (#37789)
+
+## Spack community stats
+
+* 7,469 total packages, 303 new since `v0.20.0`
+ * 150 new Python packages
+ * 34 new R packages
+* 353 people contributed to this release
+ * 336 committers to packages
+ * 65 committers to core
+
+
+# v0.20.3 (2023-10-31)
+
+## Bugfixes
+
+- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210)
+- Fix a minor issue with package hash computation for Python 3.12 (#40328)
+
+
+# v0.20.2 (2023-10-03)
+
+## Features in this release
+
+Spack now supports Python 3.12 (#40155)
+
+## Bugfixes
+
+- Improve escaping in Tcl module files (#38375)
+- Make repo cache work on repositories with zero mtime (#39214)
+- Ignore errors for newer, incompatible buildcache version (#40279)
+- Print an error when git is required, but missing (#40254)
+- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252)
+- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015)
+- Fix a bug where installation failures cause an unrelated `NameError` to be thrown (#39017)
+- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414)
+- Fix a bug triggered when file locking fails internally (#39188)
+- Prevent "spack external find" to error out when a directory cannot be accessed (#38755)
+- Fix multiple performance regressions in environments (#38771)
+- Add more ignored modules to `pyproject.toml` for `mypy` (#38769)
+
+
# v0.20.1 (2023-07-10)
## Spack Bugfixes
diff --git a/README.md b/README.md
index c4c784cd1a6aba..d048140ec1badc 100644
--- a/README.md
+++ b/README.md
@@ -66,7 +66,7 @@ Resources:
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
- not just for discussions, also Q&A.
+ not just for discussions, but also Q&A.
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml
index b4d81f69da6629..018e8deb55ba17 100644
--- a/etc/spack/defaults/config.yaml
+++ b/etc/spack/defaults/config.yaml
@@ -229,3 +229,11 @@ config:
flags:
# Whether to keep -Werror flags active in package builds.
keep_werror: 'none'
+
+ # A mapping of aliases that can be used to define new commands. For instance,
+ # `sp: spec -I` will define a new command `sp` that will execute `spec` with
+ # the `-I` argument. Aliases cannot override existing commands.
+ aliases:
+ concretise: concretize
+ containerise: containerize
+ rm: remove
diff --git a/etc/spack/defaults/darwin/packages.yaml b/etc/spack/defaults/darwin/packages.yaml
index 63f85bbbd98b65..9fd54f2e6e0991 100644
--- a/etc/spack/defaults/darwin/packages.yaml
+++ b/etc/spack/defaults/darwin/packages.yaml
@@ -50,4 +50,4 @@ packages:
# Apple bundles libuuid in libsystem_c version 1353.100.2,
# although the version number used here isn't critical
- spec: apple-libuuid@1353.100.2
- prefix: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
+ prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index d1f048ac055acc..52054a9405653e 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -1526,6 +1526,30 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Explicit binding of virtual dependencies
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+There are packages that provide more than just one virtual dependency. When interacting with them, users
+might want to utilize just a subset of what they could provide, and use other providers for virtuals they
+need.
+
+It is possible to be more explicit and tell Spack which dependency should provide which virtual, using a
+special syntax:
+
+.. code-block:: console
+
+ $ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
+
+Concretizing the spec above produces the following DAG:
+
+.. figure:: images/strumpack_virtuals.svg
+ :scale: 60 %
+ :align: center
+
+where ``intel-parallel-studio`` *could* provide ``mpi``, ``lapack``, and ``blas`` but is used only for the former. The ``lapack``
+and ``blas`` dependencies are satisfied by ``openblas``.
+
^^^^^^^^^^^^^^^^^^^^^^^^
Specifying Specs by Hash
^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst
index 280d957c086e56..dcc09910c74080 100644
--- a/lib/spack/docs/binary_caches.rst
+++ b/lib/spack/docs/binary_caches.rst
@@ -155,16 +155,183 @@ List of popular build caches
* `Extreme-scale Scientific Software Stack (E4S) `_: `build cache `_
-
----------
Relocation
----------
-Initial build and later installation do not necessarily happen at the same
-location. Spack provides a relocation capability and corrects for RPATHs and
-non-relocatable scripts. However, many packages compile paths into binary
-artifacts directly. In such cases, the build instructions of this package would
-need to be adjusted for better re-locatability.
+When using buildcaches across different machines, it is likely that the install
+root will be different from the one used to build the binaries.
+
+To address this issue, Spack automatically relocates all paths encoded in binaries
+and scripts to their new location upon install.
+
+Note that there are some cases where this is not possible: if binaries are built in
+a relatively short path, and then installed to a longer path, there may not be enough
+space in the binary to encode the new path. In this case, Spack will fail to install
+the package from the build cache, and a source build is required.
+
+To reduce the likelihood of this happening, it is highly recommended to add padding to
+the install root during the build, as specified in the :ref:`config `
+section of the configuration:
+
+.. code-block:: yaml
+
+ config:
+ install_tree:
+ root: /opt/spack
+ padded_length: 128
+
+
+.. _binary_caches_oci:
+
+-----------------------------------------
+OCI / Docker V2 registries as build cache
+-----------------------------------------
+
+Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io,
+Github Packages, GitLab Container Registry, JFrog Artifactory, and others
+as build caches. This is a convenient way to share binaries using public
+infrastructure, or to cache Spack built binaries in Github Actions and
+GitLab CI.
+
+To get started, configure an OCI mirror using ``oci://`` as the scheme,
+and optionally specify a username and password (or personal access token):
+
+.. code-block:: console
+
+ $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
+
+Spack follows the naming conventions of Docker, with Dockerhub as the default
+registry. To use Dockerhub, you can omit the registry domain:
+
+.. code-block:: console
+
+ $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
+
+From here, you can use the mirror as any other build cache:
+
+.. code-block:: console
+
+ $ spack buildcache push my_registry # push to the registry
+ $ spack install # install from the registry
+
+A unique feature of buildcaches on top of OCI registries is that it's incredibly
+easy to get a runnable container image with the binaries installed. This
+is a great way to make applications available to users without requiring them to
+install Spack -- all you need is Docker, Podman or any other OCI-compatible container
+runtime.
+
+To produce container images, all you need to do is add the ``--base-image`` flag
+when pushing to the build cache:
+
+.. code-block:: console
+
+ $ spack buildcache push --base-image ubuntu:20.04 my_registry ninja
+ Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
+
+ $ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
+ root@e4c2b6f6b3f4:/# ninja --version
+ 1.11.1
+
+If ``--base-image`` is not specified, distroless images are produced. In practice,
+you won't be able to run these as containers, since they don't come with libc and
+other system dependencies. However, they are still compatible with tools like
+``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
+
+.. note::
+ The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
+ ``max depth exceeded`` error may be produced when pulling the image. There
+ are `alternative drivers `_.
+
+------------------------------------
+Spack build cache for GitHub Actions
+------------------------------------
+
+To significantly speed up Spack in GitHub Actions, binaries can be cached in
+GitHub Packages. This service is an OCI registry that can be linked to a GitHub
+repository.
+
+A typical workflow is to include a ``spack.yaml`` environment in your repository
+that specifies the packages to install, the target architecture, and the build
+cache to use under ``mirrors``:
+
+.. code-block:: yaml
+
+ spack:
+ specs:
+ - python@3.11
+ config:
+ install_tree:
+ root: /opt/spack
+ padded_length: 128
+ packages:
+ all:
+ require: target=x86_64_v2
+ mirrors:
+ local-buildcache: oci://ghcr.io//
+
+A GitHub action can then be used to install the packages and push them to the
+build cache:
+
+.. code-block:: yaml
+
+ name: Install Spack packages
+
+ on: push
+
+ env:
+ SPACK_COLOR: always
+
+ jobs:
+ example:
+ runs-on: ubuntu-22.04
+ permissions:
+ packages: write
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Checkout Spack
+ uses: actions/checkout@v3
+ with:
+ repository: spack/spack
+ path: spack
+
+ - name: Setup Spack
+ run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"
+
+ - name: Concretize
+ run: spack -e . concretize
+
+ - name: Install
+ run: spack -e . install --no-check-signature
+
+ - name: Run tests
+ run: ./my_view/bin/python3 -c 'print("hello world")'
+
+ - name: Push to buildcache
+ run: |
+ spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
+ spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
+ if: ${{ !cancelled() }}
+
+The first time this action runs, it will build the packages from source and
+push them to the build cache. Subsequent runs will pull the binaries from the
+build cache. The concretizer will ensure that prebuilt binaries are favored
+over source builds.
+
+The build cache entries appear in the GitHub Packages section of your repository,
+and contain instructions for pulling and running them with ``docker`` or ``podman``.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using Spack's public build cache for GitHub Actions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack offers a public build cache for GitHub Actions with a set of common packages,
+which lets you get started quickly. See the following resources for more information:
+
+* `spack/github-actions-buildcache `_
.. _cmd-spack-buildcache:
diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst
index 402b33f6a2585b..0f53355a81dd80 100644
--- a/lib/spack/docs/build_settings.rst
+++ b/lib/spack/docs/build_settings.rst
@@ -37,7 +37,11 @@ to enable reuse for a single installation, and you can use:
spack install --fresh
to do a fresh install if ``reuse`` is enabled by default.
-``reuse: true`` is the default.
+``reuse: dependencies`` is the default.
+
+.. seealso::
+
+ FAQ: :ref:`Why does Spack pick particular versions and variants? `
------------------------------------------
Selection of the target microarchitectures
@@ -99,551 +103,3 @@ while `py-numpy` still needs an older version:
Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
default behavior is ``duplicates:strategy:minimal``.
-
-.. _build-settings:
-
-================================
-Package Settings (packages.yaml)
-================================
-
-Spack allows you to customize how your software is built through the
-``packages.yaml`` file. Using it, you can make Spack prefer particular
-implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK),
-or you can make it prefer to build with particular compilers. You can
-also tell Spack to use *external* software installations already
-present on your system.
-
-At a high level, the ``packages.yaml`` file is structured like this:
-
-.. code-block:: yaml
-
- packages:
- package1:
- # settings for package1
- package2:
- # settings for package2
- # ...
- all:
- # settings that apply to all packages.
-
-So you can either set build preferences specifically for *one* package,
-or you can specify that certain settings should apply to *all* packages.
-The types of settings you can customize are described in detail below.
-
-Spack's build defaults are in the default
-``etc/spack/defaults/packages.yaml`` file. You can override them in
-``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
-details on how this works, see :ref:`configuration-scopes`.
-
-.. _sec-external-packages:
-
------------------
-External Packages
------------------
-
-Spack can be configured to use externally-installed
-packages rather than building its own packages. This may be desirable
-if machines ship with system packages, such as a customized MPI
-that should be used instead of Spack building its own MPI.
-
-External packages are configured through the ``packages.yaml`` file.
-Here's an example of an external configuration:
-
-.. code-block:: yaml
-
- packages:
- openmpi:
- externals:
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.4.3
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
- prefix: /opt/openmpi-1.4.3-debug
- - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.6.5-intel
-
-This example lists three installations of OpenMPI, one built with GCC,
-one built with GCC and debug information, and another built with Intel.
-If Spack is asked to build a package that uses one of these MPIs as a
-dependency, it will use the pre-installed OpenMPI in
-the given directory. Note that the specified path is the top-level
-install prefix, not the ``bin`` subdirectory.
-
-``packages.yaml`` can also be used to specify modules to load instead
-of the installation prefixes. The following example says that module
-``CMake/3.7.2`` provides cmake version 3.7.2.
-
-.. code-block:: yaml
-
- cmake:
- externals:
- - spec: cmake@3.7.2
- modules:
- - CMake/3.7.2
-
-Each ``packages.yaml`` begins with a ``packages:`` attribute, followed
-by a list of package names. To specify externals, add an ``externals:``
-attribute under the package name, which lists externals.
-Each external should specify a ``spec:`` string that should be as
-well-defined as reasonably possible. If a
-package lacks a spec component, such as missing a compiler or
-package version, then Spack will guess the missing component based
-on its most-favored packages, and it may guess incorrectly.
-
-Each package version and compiler listed in an external should
-have entries in Spack's packages and compiler configuration, even
-though the package and compiler may not ever be built.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Prevent packages from being built from sources
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
-but it does not prevent Spack from building packages from sources. In the above example,
-Spack might choose for many valid reasons to start building and linking with the
-latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.
-
-To prevent this, the ``packages.yaml`` configuration also allows packages
-to be flagged as non-buildable. The previous example could be modified to
-be:
-
-.. code-block:: yaml
-
- packages:
- openmpi:
- externals:
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.4.3
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
- prefix: /opt/openmpi-1.4.3-debug
- - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.6.5-intel
- buildable: False
-
-The addition of the ``buildable`` flag tells Spack that it should never build
-its own version of OpenMPI from sources, and it will instead always rely on a pre-built
-OpenMPI.
-
-.. note::
-
- If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
- pre-built specs include specs already available from a local store, an upstream store, a registered
- buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
- external specs in ``packages.yaml`` are included in the list of pre-built specs.
-
-If an external module is specified as not buildable, then Spack will load the
-external module into the build environment which can be used for linking.
-
-The ``buildable`` does not need to be paired with external packages.
-It could also be used alone to forbid packages that may be
-buggy or otherwise undesirable.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Non-buildable virtual packages
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Virtual packages in Spack can also be specified as not buildable, and
-external implementations can be provided. In the example above,
-OpenMPI is configured as not buildable, but Spack will often prefer
-other MPI implementations over the externally available OpenMPI. Spack
-can be configured with every MPI provider not buildable individually,
-but more conveniently:
-
-.. code-block:: yaml
-
- packages:
- mpi:
- buildable: False
- openmpi:
- externals:
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.4.3
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
- prefix: /opt/openmpi-1.4.3-debug
- - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.6.5-intel
-
-Spack can then use any of the listed external implementations of MPI
-to satisfy a dependency, and will choose depending on the compiler and
-architecture.
-
-In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
-(available via stores or buildcaches) are not wanted, Spack can be configured to require
-specs matching only the available externals:
-
-.. code-block:: yaml
-
- packages:
- mpi:
- buildable: False
- require:
- - one_of: [
- "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
- "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
- "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
- ]
- openmpi:
- externals:
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.4.3
- - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
- prefix: /opt/openmpi-1.4.3-debug
- - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
- prefix: /opt/openmpi-1.6.5-intel
-
-This configuration prevents any spec using MPI and originating from stores or buildcaches to be reused,
-unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see
-:ref:`package-requirements`.
-
-.. _cmd-spack-external-find:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Automatically Find External Packages
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-You can run the :ref:`spack external find ` command
-to search for system-provided packages and add them to ``packages.yaml``.
-After running this command your ``packages.yaml`` may include new entries:
-
-.. code-block:: yaml
-
- packages:
- cmake:
- externals:
- - spec: cmake@3.17.2
- prefix: /usr
-
-Generally this is useful for detecting a small set of commonly-used packages;
-for now this is generally limited to finding build-only dependencies.
-Specific limitations include:
-
-* Packages are not discoverable by default: For a package to be
- discoverable with ``spack external find``, it needs to add special
- logic. See :ref:`here ` for more details.
-* The logic does not search through module files, it can only detect
- packages with executables defined in ``PATH``; you can help Spack locate
- externals which use module files by loading any associated modules for
- packages that you want Spack to know about before running
- ``spack external find``.
-* Spack does not overwrite existing entries in the package configuration:
- If there is an external defined for a spec at any configuration scope,
- then Spack will not add a new external entry (``spack config blame packages``
- can help locate all external entries).
-
-.. _package-requirements:
-
---------------------
-Package Requirements
---------------------
-
-Spack can be configured to always use certain compilers, package
-versions, and variants during concretization through package
-requirements.
-
-Package requirements are useful when you find yourself repeatedly
-specifying the same constraints on the command line, and wish that
-Spack respects these constraints whether you mention them explicitly
-or not. Another use case is specifying constraints that should apply
-to all root specs in an environment, without having to repeat the
-constraint everywhere.
-
-Apart from that, requirements config is more flexible than constraints
-on the command line, because it can specify constraints on packages
-*when they occur* as a dependency. In contrast, on the command line it
-is not possible to specify constraints on dependencies while also keeping
-those dependencies optional.
-
-^^^^^^^^^^^^^^^^^^^
-Requirements syntax
-^^^^^^^^^^^^^^^^^^^
-
-The package requirements configuration is specified in ``packages.yaml``,
-keyed by package name and expressed using the Spec syntax. In the simplest
-case you can specify attributes that you always want the package to have
-by providing a single spec string to ``require``:
-
-.. code-block:: yaml
-
- packages:
- libfabric:
- require: "@1.13.2"
-
-In the above example, ``libfabric`` will always build with version 1.13.2. If you
-need to compose multiple configuration scopes ``require`` accepts a list of
-strings:
-
-.. code-block:: yaml
-
- packages:
- libfabric:
- require:
- - "@1.13.2"
- - "%gcc"
-
-In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
-as a compiler.
-
-For more complex use cases, require accepts also a list of objects. These objects
-must have either a ``any_of`` or a ``one_of`` field, containing a list of spec strings,
-and they can optionally have a ``when`` and a ``message`` attribute:
-
-.. code-block:: yaml
-
- packages:
- openmpi:
- require:
- - any_of: ["@4.1.5", "%gcc"]
- message: "in this example only 4.1.5 can build with other compilers"
-
-``any_of`` is a list of specs. One of those specs must be satisfied
-and it is also allowed for the concretized spec to match more than one.
-In the above example, that means you could build ``openmpi@4.1.5%gcc``,
-``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
-not ``openmpi@3.9%clang``.
-
-If a custom message is provided, and the requirement is not satisfiable,
-Spack will print the custom error message:
-
-.. code-block:: console
-
- $ spack spec openmpi@3.9%clang
- ==> Error: in this example only 4.1.5 can build with other compilers
-
-We could express a similar requirement using the ``when`` attribute:
-
-.. code-block:: yaml
-
- packages:
- openmpi:
- require:
- - any_of: ["%gcc"]
- when: "@:4.1.4"
- message: "in this example only 4.1.5 can build with other compilers"
-
-In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
-For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
-constraint:
-
-.. code-block:: yaml
-
- packages:
- openmpi:
- require:
- - spec: "%gcc"
- when: "@:4.1.4"
- message: "in this example only 4.1.5 can build with other compilers"
-
-This code snippet and the one before it are semantically equivalent.
-
-Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
-concretized spec must match one and only one of them:
-
-.. code-block:: yaml
-
- packages:
- mpich:
- require:
- - one_of: ["+cuda", "+rocm"]
-
-In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.
-
-.. note::
-
- For ``any_of`` and ``one_of``, the order of specs indicates a
- preference: items that appear earlier in the list are preferred
- (note that these preferences can be ignored in favor of others).
-
-.. note::
-
- When using a conditional requirement, Spack is allowed to actively avoid the triggering
- condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
- the optimization criteria. To check the current optimization criteria and their
- priorities you can run ``spack solve zlib``.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Setting default requirements
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-You can also set default requirements for all packages under ``all``
-like this:
-
-.. code-block:: yaml
-
- packages:
- all:
- require: '%clang'
-
-which means every spec will be required to use ``clang`` as a compiler.
-
-Note that in this case ``all`` represents a *default set of requirements* -
-if there are specific package requirements, then the default requirements
-under ``all`` are disregarded. For example, with a configuration like this:
-
-.. code-block:: yaml
-
- packages:
- all:
- require: '%clang'
- cmake:
- require: '%gcc'
-
-Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
-dependencies) to use ``clang``.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Setting requirements on virtual specs
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-A requirement on a virtual spec applies whenever that virtual is present in the DAG.
-This can be useful for fixing which virtual provider you want to use:
-
-.. code-block:: yaml
-
- packages:
- mpi:
- require: 'mvapich2 %gcc'
-
-With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.
-
-Requirements on the virtual spec and on the specific provider are both applied, if
-present. For instance with a configuration like:
-
-.. code-block:: yaml
-
- packages:
- mpi:
- require: 'mvapich2 %gcc'
- mvapich2:
- require: '~cuda'
-
-you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
-
-.. _package-preferences:
-
--------------------
-Package Preferences
--------------------
-
-In some cases package requirements can be too strong, and package
-preferences are the better option. Package preferences do not impose
-constraints on packages for particular versions or variants values,
-they rather only set defaults -- the concretizer is free to change
-them if it must due to other constraints. Also note that package
-preferences are of lower priority than reuse of already installed
-packages.
-
-Here's an example ``packages.yaml`` file that sets preferred packages:
-
-.. code-block:: yaml
-
- packages:
- opencv:
- compiler: [gcc@4.9]
- variants: +debug
- gperftools:
- version: [2.2, 2.4, 2.3]
- all:
- compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
- target: [sandybridge]
- providers:
- mpi: [mvapich2, mpich, openmpi]
-
-At a high level, this example is specifying how packages are preferably
-concretized. The opencv package should prefer using GCC 4.9 and
-be built with debug options. The gperftools package should prefer version
-2.2 over 2.4. Every package on the system should prefer mvapich2 for
-its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
-These options are used to fill in implicit defaults. Any of them can be overwritten
-on the command line if explicitly requested.
-
-Package preferences accept the follow keys or components under
-the specific package (or ``all``) section: ``compiler``, ``variants``,
-``version``, ``providers``, and ``target``. Each component has an
-ordered list of spec ``constraints``, with earlier entries in the
-list being preferred over later entries.
-
-Sometimes a package installation may have constraints that forbid
-the first concretization rule, in which case Spack will use the first
-legal concretization rule. Going back to the example, if a user
-requests gperftools 2.3 or later, then Spack will install version 2.4
-as the 2.4 version of gperftools is preferred over 2.3.
-
-An explicit concretization rule in the preferred section will always
-take preference over unlisted concretizations. In the above example,
-xlc isn't listed in the compiler list. Every listed compiler from
-gcc to pgi will thus be preferred over the xlc compiler.
-
-The syntax for the ``provider`` section differs slightly from other
-concretization rules. A provider lists a value that packages may
-``depends_on`` (e.g, MPI) and a list of rules for fulfilling that
-dependency.
-
-.. _package_permissions:
-
--------------------
-Package Permissions
--------------------
-
-Spack can be configured to assign permissions to the files installed
-by a package.
-
-In the ``packages.yaml`` file under ``permissions``, the attributes
-``read``, ``write``, and ``group`` control the package
-permissions. These attributes can be set per-package, or for all
-packages under ``all``. If permissions are set under ``all`` and for a
-specific package, the package-specific settings take precedence.
-
-The ``read`` and ``write`` attributes take one of ``user``, ``group``,
-and ``world``.
-
-.. code-block:: yaml
-
- packages:
- all:
- permissions:
- write: group
- group: spack
- my_app:
- permissions:
- read: group
- group: my_team
-
-The permissions settings describe the broadest level of access to
-installations of the specified packages. The execute permissions of
-the file are set to the same level as read permissions for those files
-that are executable. The default setting for ``read`` is ``world``,
-and for ``write`` is ``user``. In the example above, installations of
-``my_app`` will be installed with user and group permissions but no
-world permissions, and owned by the group ``my_team``. All other
-packages will be installed with user and group write privileges, and
-world read privileges. Those packages will be owned by the group
-``spack``.
-
-The ``group`` attribute assigns a Unix-style group to a package. All
-files installed by the package will be owned by the assigned group,
-and the sticky group bit will be set on the install prefix and all
-directories inside the install prefix. This will ensure that even
-manually placed files within the install prefix are owned by the
-assigned group. If no group is assigned, Spack will allow the OS
-default behavior to go as expected.
-
-----------------------------
-Assigning Package Attributes
-----------------------------
-
-You can assign class-level attributes in the configuration:
-
-.. code-block:: yaml
-
- packages:
- mpileaks:
- # Override existing attributes
- url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
- # ... or add new ones
- x: 1
-
-Attributes set this way will be accessible to any method executed
-in the package.py file (e.g. the ``install()`` method). Values for these
-attributes may be any value parseable by yaml.
-
-These can only be applied to specific packages, not "all" or
-virtual packages.
diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst
index abf25f149bc59a..8b8ccb8f35c1c7 100644
--- a/lib/spack/docs/build_systems/autotoolspackage.rst
+++ b/lib/spack/docs/build_systems/autotoolspackage.rst
@@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add:
.. code-block:: python
- depends_on('autoconf', type='build', when='@master')
- depends_on('automake', type='build', when='@master')
- depends_on('libtool', type='build', when='@master')
+ depends_on("autoconf", type="build", when="@master")
+ depends_on("automake", type="build", when="@master")
+ depends_on("libtool", type="build", when="@master")
It is typically redundant to list the ``m4`` macro processor package as a
dependency, since ``autoconf`` already depends on it.
@@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
.. code-block:: python
def autoreconf(self, spec, prefix):
- which('bash')('autogen.sh')
+ which("bash")("autogen.sh")
"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
@@ -186,9 +186,9 @@ To opt out of this feature, use the following setting:
To enable it conditionally on different architectures, define a property and
make the package depend on ``gnuconfig`` as a build dependency:
-.. code-block
+.. code-block:: python
- depends_on('gnuconfig', when='@1.0:')
+ depends_on("gnuconfig", when="@1.0:")
@property
def patch_config_files(self):
@@ -230,7 +230,7 @@ version, this can be done like so:
@property
def force_autoreconf(self):
- return self.version == Version('1.2.3')
+ return self.version == Version("1.2.3")
^^^^^^^^^^^^^^^^^^^^^^^
Finding configure flags
@@ -278,13 +278,22 @@ function like so:
def configure_args(self):
args = []
- if '+mpi' in self.spec:
- args.append('--enable-mpi')
+ if self.spec.satisfies("+mpi"):
+ args.append("--enable-mpi")
else:
- args.append('--disable-mpi')
+ args.append("--disable-mpi")
return args
+
+Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_disable>` helper:
+
+.. code-block:: python
+
+ def configure_args(self):
+ return [self.enable_or_disable("mpi")]
+
+
Note that we are explicitly disabling MPI support if it is not
requested. This is important, as many Autotools packages will enable
options by default if the dependencies are found, and disable them
@@ -295,9 +304,11 @@ and `here `.
+Before 2024, the directory structure was different::
+
+ spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64
+ spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin
+
+
Libraries
---------
diff --git a/lib/spack/docs/build_systems/intelpackage.rst b/lib/spack/docs/build_systems/intelpackage.rst
index d64fd469712299..9afe1a8b919543 100644
--- a/lib/spack/docs/build_systems/intelpackage.rst
+++ b/lib/spack/docs/build_systems/intelpackage.rst
@@ -392,7 +392,7 @@ See section
:ref:`Configuration Scopes `
for an explanation about the different files
and section
-:ref:`Build customization `
+:ref:`Build customization `
for specifics and examples for ``packages.yaml`` files.
.. If your system administrator did not provide modules for pre-installed Intel
diff --git a/lib/spack/docs/build_systems/makefilepackage.rst b/lib/spack/docs/build_systems/makefilepackage.rst
index 66f54a1c4bb97a..af027aab1c69c7 100644
--- a/lib/spack/docs/build_systems/makefilepackage.rst
+++ b/lib/spack/docs/build_systems/makefilepackage.rst
@@ -59,7 +59,7 @@ using GNU Make, you should add a dependency on ``gmake``:
.. code-block:: python
- depends_on('gmake', type='build')
+ depends_on("gmake", type="build")
^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -93,8 +93,8 @@ there are any other variables you need to set, you can do this in the
.. code-block:: python
def edit(self, spec, prefix):
- env['PREFIX'] = prefix
- env['BLASLIB'] = spec['blas'].libs.ld_flags
+ env["PREFIX"] = prefix
+ env["BLASLIB"] = spec["blas"].libs.ld_flags
`cbench `_
@@ -113,7 +113,7 @@ you can do this like so:
.. code-block:: python
- build_targets = ['CC=cc']
+ build_targets = ["CC=cc"]
If you do need access to the spec, you can create a property like so:
@@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so:
spec = self.spec
return [
- 'CC=cc',
- 'BLASLIB={0}'.format(spec['blas'].libs.ld_flags),
+ "CC=cc",
+ f"BLASLIB={spec['blas'].libs.ld_flags}",
]
@@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example:
.. code-block:: python
def edit(self, spec, prefix):
- makefile = FileFilter('Makefile')
+ makefile = FileFilter("Makefile")
- makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc)
- makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx)
- makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77)
- makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc)
+ makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}")
+ makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}")
+ makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}")
+ makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
`stream `_
@@ -181,16 +181,16 @@ well for storing variables:
def edit(self, spec, prefix):
config = {
- 'CC': 'cc',
- 'MAKE': 'make',
+ "CC": "cc",
+ "MAKE": "make",
}
- if '+blas' in spec:
- config['BLAS_LIBS'] = spec['blas'].libs.joined()
+ if spec.satisfies("+blas"):
+ config["BLAS_LIBS"] = spec["blas"].libs.joined()
- with open('make.inc', 'w') as inc:
+ with open("make.inc", "w") as inc:
for key in config:
- inc.write('{0} = {1}\n'.format(key, config[key]))
+ inc.write(f"{key} = {config[key]}\n")
`elk `_
@@ -204,14 +204,14 @@ them in a list:
def edit(self, spec, prefix):
config = [
- 'INSTALL_DIR = {0}'.format(prefix),
- 'INCLUDE_DIR = $(INSTALL_DIR)/include',
- 'LIBRARY_DIR = $(INSTALL_DIR)/lib',
+ f"INSTALL_DIR = {prefix}",
+ "INCLUDE_DIR = $(INSTALL_DIR)/include",
+ "LIBRARY_DIR = $(INSTALL_DIR)/lib",
]
- with open('make.inc', 'w') as inc:
+ with open("make.inc", "w") as inc:
for var in config:
- inc.write('{0}\n'.format(var))
+ inc.write(f"{var}\n")
`hpl `_
@@ -284,7 +284,7 @@ can tell Spack where to locate it like so:
.. code-block:: python
- build_directory = 'src'
+ build_directory = "src"
^^^^^^^^^^^^^^^^^^^
@@ -299,8 +299,8 @@ install the package:
def install(self, spec, prefix):
mkdir(prefix.bin)
- install('foo', prefix.bin)
- install_tree('lib', prefix.lib)
+ install("foo", prefix.bin)
+ install_tree("lib", prefix.lib)
^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst
index 17295a457fe139..168ff5dc88223c 100644
--- a/lib/spack/docs/build_systems/pythonpackage.rst
+++ b/lib/spack/docs/build_systems/pythonpackage.rst
@@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``,
.. code-block:: python
- homepage = 'https://pypi.org/project/setuptools/'
- url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
- list_url = 'https://pypi.org/simple/setuptools/'
+ homepage = "https://pypi.org/project/setuptools/"
+ url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip"
+ list_url = "https://pypi.org/simple/setuptools/"
is equivalent to:
.. code-block:: python
- pypi = 'setuptools/setuptools-49.2.0.zip'
+ pypi = "setuptools/setuptools-49.2.0.zip"
If a package has a different homepage listed on PyPI, you can
@@ -208,7 +208,7 @@ dependencies to your package:
.. code-block:: python
- depends_on('py-setuptools@42:', type='build')
+ depends_on("py-setuptools@42:", type="build")
Note that ``py-wheel`` is already listed as a build dependency in the
@@ -232,7 +232,7 @@ Look for dependencies under the following keys:
* ``dependencies`` under ``[project]``
These packages are required for building and installation. You can
- add them with ``type=('build', 'run')``.
+ add them with ``type=("build", "run")``.
* ``[project.optional-dependencies]``
@@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. In addition to
* ``setup_requires``
These packages are usually only needed at build-time, so you can
- add them with ``type='build'``.
+ add them with ``type="build"``.
* ``install_requires``
These packages are required for building and installation. You can
- add them with ``type=('build', 'run')``.
+ add them with ``type=("build", "run")``.
* ``extras_require``
@@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to
These are packages that are required to run the unit tests for the
package. These dependencies can be specified using the
- ``type='test'`` dependency type. However, the PyPI tarballs rarely
+ ``type="test"`` dependency type. However, the PyPI tarballs rarely
contain unit tests, so there is usually no reason to add these.
See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html
@@ -321,7 +321,7 @@ older versions of flit may use the following keys:
* ``requires`` under ``[tool.flit.metadata]``
These packages are required for building and installation. You can
- add them with ``type=('build', 'run')``.
+ add them with ``type=("build", "run")``.
* ``[tool.flit.metadata.requires-extra]``
@@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for:
.. code-block:: python
- depends_on('py-pip@22.1:', type='build')
+ depends_on("py-pip@22.1:", type="build")
def config_settings(self, spec, prefix):
return {
- 'blas': spec['blas'].libs.names[0],
- 'lapack': spec['lapack'].libs.names[0],
+ "blas": spec["blas"].libs.names[0],
+ "lapack": spec["lapack"].libs.names[0],
}
@@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so:
def global_options(self, spec, prefix):
options = []
- if '+libyaml' in spec:
- options.append('--with-libyaml')
+ if spec.satisfies("+libyaml"):
+ options.append("--with-libyaml")
else:
- options.append('--without-libyaml')
+ options.append("--without-libyaml")
return options
@@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``:
def install_options(self, spec, prefix):
options = []
- if '+libyaml' in spec:
+ if spec.satisfies("+libyaml"):
options.extend([
- spec['libyaml'].libs.search_flags,
- spec['libyaml'].headers.include_flags,
+ spec["libyaml"].libs.search_flags,
+ spec["libyaml"].headers.include_flags,
])
return options
@@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding
.. code-block:: python
- import_modules = ['six']
+ import_modules = ["six"]
Sometimes the list of module names to import depends on how the
@@ -571,9 +571,9 @@ This can be expressed like so:
@property
def import_modules(self):
- modules = ['yaml']
- if '+libyaml' in self.spec:
- modules.append('yaml.cyaml')
+ modules = ["yaml"]
+ if self.spec.satisfies("+libyaml"):
+ modules.append("yaml.cyaml")
return modules
@@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset
of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g.,
in case the ``plotting`` modules should be excluded from the
-automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
-'nilearn.plotting', 'nilearn.plotting.data']`` set:
+automatically detected ``import_modules`` ``["nilearn", "nilearn.surface",
+"nilearn.plotting", "nilearn.plotting.data"]`` set:
.. code-block:: python
- skip_modules = ['nilearn.plotting']
+ skip_modules = ["nilearn.plotting"]
-This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``
+This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]``
Import tests can be run during the installation using ``spack install
--test=root`` or at any time after the installation using
@@ -612,11 +612,11 @@ after the ``install`` phase:
.. code-block:: python
- @run_after('install')
+ @run_after("install")
@on_package_attributes(run_tests=True)
def install_test(self):
- with working_dir('spack-test', create=True):
- python('-c', 'import numpy; numpy.test("full", verbose=2)')
+ with working_dir("spack-test", create=True):
+ python("-c", "import numpy; numpy.test('full', verbose=2)")
when testing is enabled during the installation (i.e., ``spack install
@@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use:
.. code-block:: python
- build_directory = 'python'
+ build_directory = "python"
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/build_systems/rocmpackage.rst b/lib/spack/docs/build_systems/rocmpackage.rst
index 636e5b812623f2..8f90794dfb7df5 100644
--- a/lib/spack/docs/build_systems/rocmpackage.rst
+++ b/lib/spack/docs/build_systems/rocmpackage.rst
@@ -81,28 +81,27 @@ class of your package. For example, you can add it to your
class MyRocmPackage(CMakePackage, ROCmPackage):
...
# Ensure +rocm and amdgpu_targets are passed to dependencies
- depends_on('mydeppackage', when='+rocm')
+ depends_on("mydeppackage", when="+rocm")
for val in ROCmPackage.amdgpu_targets:
- depends_on('mydeppackage amdgpu_target={0}'.format(val),
- when='amdgpu_target={0}'.format(val))
+ depends_on(f"mydeppackage amdgpu_target={val}",
+ when=f"amdgpu_target={val}")
...
def cmake_args(self):
spec = self.spec
args = []
...
- if '+rocm' in spec:
+ if spec.satisfies("+rocm"):
# Set up the hip macros needed by the build
args.extend([
- '-DENABLE_HIP=ON',
- '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
- rocm_archs = spec.variants['amdgpu_target'].value
- if 'none' not in rocm_archs:
- args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
- .format(",".join(rocm_archs)))
+ "-DENABLE_HIP=ON",
+ f"-DHIP_ROOT_DIR={spec['hip'].prefix}"])
+ rocm_archs = spec.variants["amdgpu_target"].value
+ if "none" not in rocm_archs:
+ args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs)}")
else:
# Ensure build with hip is disabled
- args.append('-DENABLE_HIP=OFF')
+ args.append("-DENABLE_HIP=OFF")
...
return args
...
@@ -114,7 +113,7 @@ build.
This example also illustrates how to check for the ``rocm`` variant using
``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value
-using ``self.spec.variants['amdgpu_target'].value``.
+using ``self.spec.variants["amdgpu_target"].value``.
All five packages using ``ROCmPackage`` as of January 2021 also use the
:ref:`CudaPackage `. So it is worth looking at those packages
diff --git a/lib/spack/docs/build_systems/sconspackage.rst b/lib/spack/docs/build_systems/sconspackage.rst
index 18002586a06c75..a17e1271b86d3b 100644
--- a/lib/spack/docs/build_systems/sconspackage.rst
+++ b/lib/spack/docs/build_systems/sconspackage.rst
@@ -57,7 +57,7 @@ overridden like so:
.. code-block:: python
def test(self):
- scons('check')
+ scons("check")
^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ base class already contains:
.. code-block:: python
- depends_on('scons', type='build')
+ depends_on("scons", type="build")
If you want to specify a particular version requirement, you can override
@@ -96,7 +96,7 @@ this in your package:
.. code-block:: python
- depends_on('scons@2.3.0:', type='build')
+ depends_on("scons@2.3.0:", type="build")
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so:
def build_args(self, spec, prefix):
args = [
- 'PREFIX={0}'.format(prefix),
- 'ZLIB={0}'.format(spec['zlib'].prefix),
+ f"PREFIX={prefix}",
+ f"ZLIB={spec['zlib'].prefix}",
]
- if '+debug' in spec:
- args.append('DEBUG=yes')
+ if spec.satisfies("+debug"):
+ args.append("DEBUG=yes")
else:
- args.append('DEBUG=no')
+ args.append("DEBUG=no")
return args
@@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option:
* env_vars: [ string ]
Environment variables to propagate through to SCons. Either the
string "all" or a comma separated list of variable names, e.g.
- 'LD_LIBRARY_PATH,HOME'.
- - default: 'LD_LIBRARY_PATH,PYTHONPATH'
+ "LD_LIBRARY_PATH,HOME".
+ - default: "LD_LIBRARY_PATH,PYTHONPATH"
In the case of cantera, using ``env_vars=all`` allows us to use
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index f1bde9c9fbdfd0..250a600e7f75ec 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -204,6 +204,7 @@ def setup(sphinx):
("py:class", "clingo.Control"),
("py:class", "six.moves.urllib.parse.ParseResult"),
("py:class", "TextIO"),
+ ("py:class", "hashlib._Hash"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),
diff --git a/lib/spack/docs/config_yaml.rst b/lib/spack/docs/config_yaml.rst
index 294f7c34369e20..d54977bebab21c 100644
--- a/lib/spack/docs/config_yaml.rst
+++ b/lib/spack/docs/config_yaml.rst
@@ -304,3 +304,17 @@ To work properly, this requires your terminal to reset its title after
Spack has finished its work, otherwise Spack's status information will
remain in the terminal's title indefinitely. Most terminals should already
be set up this way and clear Spack's status information.
+
+-----------
+``aliases``
+-----------
+
+Aliases can be used to define new Spack commands. They can be either shortcuts
+for longer commands or include specific arguments for convenience. For instance,
+if users want to use ``spack install``'s ``-v`` argument all the time, they can
+create a new alias called ``inst`` that will always call ``install -v``:
+
+.. code-block:: yaml
+
+ aliases:
+ inst: install -v
diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst
index 7026825fa8b3d0..f79f300f4c7f3f 100644
--- a/lib/spack/docs/configuration.rst
+++ b/lib/spack/docs/configuration.rst
@@ -17,7 +17,7 @@ case you want to skip directly to specific docs:
* :ref:`config.yaml `
* :ref:`mirrors.yaml `
* :ref:`modules.yaml `
-* :ref:`packages.yaml `
+* :ref:`packages.yaml `
* :ref:`repos.yaml `
You can also add any of these as inline configuration in the YAML
@@ -243,9 +243,11 @@ lower-precedence settings. Completely ignoring higher-level configuration
options is supported with the ``::`` notation for keys (see
:ref:`config-overrides` below).
-There are also special notations for string concatenation and precendense override.
-Using the ``+:`` notation can be used to force *prepending* strings or lists. For lists, this is identical
-to the default behavior. Using the ``-:`` works similarly, but for *appending* values.
+There are also special notations for string concatenation and precedence override:
+
+* ``+:`` will force *prepending* strings or lists. For lists, this is the default behavior.
+* ``-:`` works similarly, but for *appending* values.
+
:ref:`config-prepend-append`
^^^^^^^^^^^
diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst
index 64ca1df926bbec..acf48e3eae2c20 100644
--- a/lib/spack/docs/containers.rst
+++ b/lib/spack/docs/containers.rst
@@ -24,6 +24,16 @@ image, or to set up a proper entrypoint to run the image. These tasks are
usually both necessary and repetitive, so Spack comes with a command
to generate recipes for container images starting from a ``spack.yaml``.
+.. seealso::
+
+ This page is a reference for generating recipes to build container images.
+ It means that your environment is built from scratch inside the container
+ runtime.
+
+ Since v0.21, Spack can also create container images from existing package installations
+ on your host system. See :ref:`binary_caches_oci` for more information on
+ that topic.
+
--------------------
A Quick Introduction
--------------------
diff --git a/lib/spack/docs/frequently_asked_questions.rst b/lib/spack/docs/frequently_asked_questions.rst
new file mode 100644
index 00000000000000..345fa1a81a816a
--- /dev/null
+++ b/lib/spack/docs/frequently_asked_questions.rst
@@ -0,0 +1,77 @@
+.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+ Spack Project Developers. See the top-level COPYRIGHT file for details.
+
+ SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+==========================
+Frequently Asked Questions
+==========================
+
+This page contains answers to frequently asked questions about Spack.
+If you have questions that are not answered here, feel free to ask on
+`Slack `_ or `GitHub Discussions
+`_. If you've learned the
+answer to a question that you think should be here, please consider
+contributing to this page.
+
+.. _faq-concretizer-precedence:
+
+-----------------------------------------------------
+Why does Spack pick particular versions and variants?
+-----------------------------------------------------
+
+This question comes up in a variety of forms:
+
+ 1. Why does Spack seem to ignore my package preferences from ``packages.yaml`` config?
+ 2. Why does Spack toggle a variant instead of using the default from the ``package.py`` file?
+
+The short answer is that Spack always picks an optimal configuration
+based on a complex set of criteria\ [#f1]_. These criteria are more nuanced
+than always choosing the latest versions or default variants.
+
+.. note::
+
+ As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
+
+The following set of criteria (from lowest to highest precedence) explain
+common cases where concretization output may seem surprising at first.
+
+1. :ref:`Package preferences ` configured in ``packages.yaml``
+ override variant defaults from ``package.py`` files, and influence the optimal
+ ordering of versions. Preferences are specified as follows:
+
+ .. code-block:: yaml
+
+ packages:
+ foo:
+ version: [1.0, 1.1]
+ variants: ~mpi
+
+2. :ref:`Reuse concretization ` configured in ``concretizer.yaml``
+ overrides preferences, since it's typically faster to reuse an existing spec than to
+ build a preferred one from sources. When build caches are enabled, specs may be reused
+ from a remote location too. Reuse concretization is configured as follows:
+
+ .. code-block:: yaml
+
+ concretizer:
+ reuse: dependencies # other options are 'true' and 'false'
+
+3. :ref:`Package requirements ` configured in ``packages.yaml``,
+ and constraints from the command line as well as ``package.py`` files override all
+ of the above. Requirements are specified as follows:
+
+ .. code-block:: yaml
+
+ packages:
+ foo:
+ require:
+ - "@1.2: +mpi"
+
+Requirements and constraints restrict the set of possible solutions, while reuse
+behavior and preferences influence what an optimal solution looks like.
+
+
+.. rubric:: Footnotes
+
+.. [#f1] The exact list of criteria can be retrieved with the ``spack solve`` command
diff --git a/lib/spack/docs/images/setup_env.png b/lib/spack/docs/images/setup_env.png
new file mode 100644
index 00000000000000..4b16cac281f981
Binary files /dev/null and b/lib/spack/docs/images/setup_env.png differ
diff --git a/lib/spack/docs/images/strumpack_virtuals.svg b/lib/spack/docs/images/strumpack_virtuals.svg
new file mode 100644
index 00000000000000..eb580f0a5805c7
--- /dev/null
+++ b/lib/spack/docs/images/strumpack_virtuals.svg
@@ -0,0 +1,534 @@
+
\ No newline at end of file
diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst
index 0dd27a2444516a..7607181ada5947 100644
--- a/lib/spack/docs/index.rst
+++ b/lib/spack/docs/index.rst
@@ -55,6 +55,7 @@ or refer to the full manual below.
getting_started
basic_usage
replace_conda_homebrew
+ frequently_asked_questions
.. toctree::
:maxdepth: 2
@@ -70,7 +71,7 @@ or refer to the full manual below.
configuration
config_yaml
- bootstrapping
+ packages_yaml
build_settings
environments
containers
@@ -78,6 +79,7 @@ or refer to the full manual below.
module_file_support
repositories
binary_caches
+ bootstrapping
command_index
chain
extensions
diff --git a/lib/spack/docs/module_file_support.rst b/lib/spack/docs/module_file_support.rst
index 52d74a56699e3a..f6b292e7553dab 100644
--- a/lib/spack/docs/module_file_support.rst
+++ b/lib/spack/docs/module_file_support.rst
@@ -519,11 +519,11 @@ inspections and customize them per-module-set.
modules:
prefix_inspections:
- bin:
+ ./bin:
- PATH
- man:
+ ./man:
- MANPATH
- '':
+ ./:
- CMAKE_PREFIX_PATH
Prefix inspections are only applied if the relative path inside the
@@ -579,7 +579,7 @@ the view.
view_relative_modules:
use_view: my_view
prefix_inspections:
- bin:
+ ./bin:
- PATH
view:
my_view:
diff --git a/lib/spack/docs/packages_yaml.rst b/lib/spack/docs/packages_yaml.rst
new file mode 100644
index 00000000000000..af0acf0f9a4e63
--- /dev/null
+++ b/lib/spack/docs/packages_yaml.rst
@@ -0,0 +1,591 @@
+.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+ Spack Project Developers. See the top-level COPYRIGHT file for details.
+
+ SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+.. _packages-config:
+
+================================
+Package Settings (packages.yaml)
+================================
+
+Spack allows you to customize how your software is built through the
+``packages.yaml`` file. Using it, you can make Spack prefer particular
+implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK),
+or you can make it prefer to build with particular compilers. You can
+also tell Spack to use *external* software installations already
+present on your system.
+
+At a high level, the ``packages.yaml`` file is structured like this:
+
+.. code-block:: yaml
+
+ packages:
+ package1:
+ # settings for package1
+ package2:
+ # settings for package2
+ # ...
+ all:
+ # settings that apply to all packages.
+
+So you can either set build preferences specifically for *one* package,
+or you can specify that certain settings should apply to *all* packages.
+The types of settings you can customize are described in detail below.
+
+Spack's build defaults are in the default
+``etc/spack/defaults/packages.yaml`` file. You can override them in
+``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
+details on how this works, see :ref:`configuration-scopes`.
+
+.. _sec-external-packages:
+
+-----------------
+External Packages
+-----------------
+
+Spack can be configured to use externally-installed
+packages rather than building its own packages. This may be desirable
+if machines ship with system packages, such as a customized MPI
+that should be used instead of Spack building its own MPI.
+
+External packages are configured through the ``packages.yaml`` file.
+Here's an example of an external configuration:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ externals:
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.4.3
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
+ prefix: /opt/openmpi-1.4.3-debug
+ - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.6.5-intel
+
+This example lists three installations of OpenMPI, one built with GCC,
+one built with GCC and debug information, and another built with Intel.
+If Spack is asked to build a package that uses one of these MPIs as a
+dependency, it will use the pre-installed OpenMPI in
+the given directory. Note that the specified path is the top-level
+install prefix, not the ``bin`` subdirectory.
+
+``packages.yaml`` can also be used to specify modules to load instead
+of the installation prefixes. The following example says that module
+``CMake/3.7.2`` provides cmake version 3.7.2.
+
+.. code-block:: yaml
+
+ cmake:
+ externals:
+ - spec: cmake@3.7.2
+ modules:
+ - CMake/3.7.2
+
+Each ``packages.yaml`` begins with a ``packages:`` attribute, followed
+by a list of package names. To specify externals, add an ``externals:``
+attribute under the package name, which lists externals.
+Each external should specify a ``spec:`` string that should be as
+well-defined as reasonably possible. If a
+package lacks a spec component, such as missing a compiler or
+package version, then Spack will guess the missing component based
+on its most-favored packages, and it may guess incorrectly.
+
+Each package version and compiler listed in an external should
+have entries in Spack's packages and compiler configuration, even
+though the package and compiler may not ever be built.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Prevent packages from being built from sources
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
+but it does not prevent Spack from building packages from sources. In the above example,
+Spack might choose for many valid reasons to start building and linking with the
+latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.
+
+To prevent this, the ``packages.yaml`` configuration also allows packages
+to be flagged as non-buildable. The previous example could be modified to
+be:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ externals:
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.4.3
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
+ prefix: /opt/openmpi-1.4.3-debug
+ - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.6.5-intel
+ buildable: False
+
+The addition of the ``buildable`` flag tells Spack that it should never build
+its own version of OpenMPI from sources, and it will instead always rely on a pre-built
+OpenMPI.
+
+.. note::
+
+ If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
+ pre-built specs include specs already available from a local store, an upstream store, a registered
+ buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
+ external specs in ``packages.yaml`` are included in the list of pre-built specs.
+
+If an external module is specified as not buildable, then Spack will load the
+external module into the build environment which can be used for linking.
+
+The ``buildable`` does not need to be paired with external packages.
+It could also be used alone to forbid packages that may be
+buggy or otherwise undesirable.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Non-buildable virtual packages
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Virtual packages in Spack can also be specified as not buildable, and
+external implementations can be provided. In the example above,
+OpenMPI is configured as not buildable, but Spack will often prefer
+other MPI implementations over the externally available OpenMPI. Spack
+can be configured with every MPI provider not buildable individually,
+but more conveniently:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ buildable: False
+ openmpi:
+ externals:
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.4.3
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
+ prefix: /opt/openmpi-1.4.3-debug
+ - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.6.5-intel
+
+Spack can then use any of the listed external implementations of MPI
+to satisfy a dependency, and will choose depending on the compiler and
+architecture.
+
+In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
+(available via stores or buildcaches) are not wanted, Spack can be configured to require
+specs matching only the available externals:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ buildable: False
+ require:
+ - one_of: [
+ "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
+ "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
+ "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
+ ]
+ openmpi:
+ externals:
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.4.3
+ - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
+ prefix: /opt/openmpi-1.4.3-debug
+ - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
+ prefix: /opt/openmpi-1.6.5-intel
+
+This configuration prevents any spec using MPI and originating from stores or buildcaches to be reused,
+unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see
+:ref:`package-requirements`.
+
+.. _cmd-spack-external-find:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Automatically Find External Packages
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You can run the :ref:`spack external find ` command
+to search for system-provided packages and add them to ``packages.yaml``.
+After running this command your ``packages.yaml`` may include new entries:
+
+.. code-block:: yaml
+
+ packages:
+ cmake:
+ externals:
+ - spec: cmake@3.17.2
+ prefix: /usr
+
+Generally this is useful for detecting a small set of commonly-used packages;
+for now this is generally limited to finding build-only dependencies.
+Specific limitations include:
+
+* Packages are not discoverable by default: For a package to be
+ discoverable with ``spack external find``, it needs to add special
+ logic. See :ref:`here ` for more details.
+* The logic does not search through module files, it can only detect
+ packages with executables defined in ``PATH``; you can help Spack locate
+ externals which use module files by loading any associated modules for
+ packages that you want Spack to know about before running
+ ``spack external find``.
+* Spack does not overwrite existing entries in the package configuration:
+ If there is an external defined for a spec at any configuration scope,
+ then Spack will not add a new external entry (``spack config blame packages``
+ can help locate all external entries).
+
+.. _package-requirements:
+
+--------------------
+Package Requirements
+--------------------
+
+Spack can be configured to always use certain compilers, package
+versions, and variants during concretization through package
+requirements.
+
+Package requirements are useful when you find yourself repeatedly
+specifying the same constraints on the command line, and wish that
+Spack respects these constraints whether you mention them explicitly
+or not. Another use case is specifying constraints that should apply
+to all root specs in an environment, without having to repeat the
+constraint everywhere.
+
+Apart from that, requirements config is more flexible than constraints
+on the command line, because it can specify constraints on packages
+*when they occur* as a dependency. In contrast, on the command line it
+is not possible to specify constraints on dependencies while also keeping
+those dependencies optional.
+
+.. seealso::
+
+ FAQ: :ref:`Why does Spack pick particular versions and variants? `
+
+
+^^^^^^^^^^^^^^^^^^^
+Requirements syntax
+^^^^^^^^^^^^^^^^^^^
+
+The package requirements configuration is specified in ``packages.yaml``,
+keyed by package name and expressed using the Spec syntax. In the simplest
+case you can specify attributes that you always want the package to have
+by providing a single spec string to ``require``:
+
+.. code-block:: yaml
+
+ packages:
+ libfabric:
+ require: "@1.13.2"
+
+In the above example, ``libfabric`` will always build with version 1.13.2. If you
+need to compose multiple configuration scopes ``require`` accepts a list of
+strings:
+
+.. code-block:: yaml
+
+ packages:
+ libfabric:
+ require:
+ - "@1.13.2"
+ - "%gcc"
+
+In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
+as a compiler.
+
+For more complex use cases, require accepts also a list of objects. These objects
+must have either a ``any_of`` or a ``one_of`` field, containing a list of spec strings,
+and they can optionally have a ``when`` and a ``message`` attribute:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ require:
+ - any_of: ["@4.1.5", "%gcc"]
+ message: "in this example only 4.1.5 can build with other compilers"
+
+``any_of`` is a list of specs. One of those specs must be satisfied
+and it is also allowed for the concretized spec to match more than one.
+In the above example, that means you could build ``openmpi@4.1.5%gcc``,
+``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
+not ``openmpi@3.9%clang``.
+
+If a custom message is provided, and the requirement is not satisfiable,
+Spack will print the custom error message:
+
+.. code-block:: console
+
+ $ spack spec openmpi@3.9%clang
+ ==> Error: in this example only 4.1.5 can build with other compilers
+
+We could express a similar requirement using the ``when`` attribute:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ require:
+ - any_of: ["%gcc"]
+ when: "@:4.1.4"
+ message: "in this example only 4.1.5 can build with other compilers"
+
+In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
+For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
+constraint:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ require:
+ - spec: "%gcc"
+ when: "@:4.1.4"
+ message: "in this example only 4.1.5 can build with other compilers"
+
+This code snippet and the one before it are semantically equivalent.
+
+Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
+concretized spec must match one and only one of them:
+
+.. code-block:: yaml
+
+ packages:
+ mpich:
+ require:
+ - one_of: ["+cuda", "+rocm"]
+
+In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.
+
+.. note::
+
+ For ``any_of`` and ``one_of``, the order of specs indicates a
+ preference: items that appear earlier in the list are preferred
+ (note that these preferences can be ignored in favor of others).
+
+.. note::
+
+ When using a conditional requirement, Spack is allowed to actively avoid the triggering
+ condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
+ the optimization criteria. To check the current optimization criteria and their
+ priorities you can run ``spack solve zlib``.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Setting default requirements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You can also set default requirements for all packages under ``all``
+like this:
+
+.. code-block:: yaml
+
+ packages:
+ all:
+ require: '%clang'
+
+which means every spec will be required to use ``clang`` as a compiler.
+
+Requirements on variants for all packages are possible too, but note that they
+are only enforced for those packages that define these variants, otherwise they
+are disregarded. For example:
+
+.. code-block:: yaml
+
+ packages:
+ all:
+ require:
+ - "+shared"
+ - "+cuda"
+
+will just enforce ``+shared`` on ``zlib``, which has a boolean ``shared`` variant but
+no ``cuda`` variant.
+
+Constraints in a single spec literal are always considered as a whole, so in a case like:
+
+.. code-block:: yaml
+
+ packages:
+ all:
+ require: "+shared +cuda"
+
+the default requirement will be enforced only if a package has both a ``cuda`` and
+a ``shared`` variant, and will never be partially enforced.
+
+Finally, ``all`` represents a *default set of requirements* -
+if there are specific package requirements, then the default requirements
+under ``all`` are disregarded. For example, with a configuration like this:
+
+.. code-block:: yaml
+
+ packages:
+ all:
+ require:
+ - 'build_type=Debug'
+ - '%clang'
+ cmake:
+ require:
+ - 'build_type=Debug'
+ - '%gcc'
+
+Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
+dependencies) to use ``clang``. If enforcing ``build_type=Debug`` is needed also
+on ``cmake``, it must be repeated in the specific ``cmake`` requirements.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Setting requirements on virtual specs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A requirement on a virtual spec applies whenever that virtual is present in the DAG.
+This can be useful for fixing which virtual provider you want to use:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ require: 'mvapich2 %gcc'
+
+With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.
+
+Requirements on the virtual spec and on the specific provider are both applied, if
+present. For instance with a configuration like:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ require: 'mvapich2 %gcc'
+ mvapich2:
+ require: '~cuda'
+
+you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
+
+.. _package-preferences:
+
+-------------------
+Package Preferences
+-------------------
+
+In some cases package requirements can be too strong, and package
+preferences are the better option. Package preferences do not impose
+constraints on packages for particular versions or variants values,
+they rather only set defaults. The concretizer is free to change
+them if it must, due to other constraints, and also prefers reusing
+installed packages over building new ones that are a better match for
+preferences.
+
+.. seealso::
+
+ FAQ: :ref:`Why does Spack pick particular versions and variants? `
+
+
+Most package preferences (``compilers``, ``target`` and ``providers``)
+can only be set globally under the ``all`` section of ``packages.yaml``:
+
+.. code-block:: yaml
+
+ packages:
+ all:
+ compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
+ target: [x86_64_v3]
+ providers:
+ mpi: [mvapich2, mpich, openmpi]
+
+These preferences override Spack's default and effectively reorder priorities
+when looking for the best compiler, target or virtual package provider. Each
+preference takes an ordered list of spec constraints, with earlier entries in
+the list being preferred over later entries.
+
+In the example above all packages prefer to be compiled with ``gcc@12.2.0``,
+to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they
+depend on ``mpi``.
+
+The ``variants`` and ``version`` preferences can be set under
+package specific sections of the ``packages.yaml`` file:
+
+.. code-block:: yaml
+
+ packages:
+ opencv:
+ variants: +debug
+ gperftools:
+ version: [2.2, 2.4, 2.3]
+
+In this case, the preference for ``opencv`` is to build with debug options, while
+``gperftools`` prefers version 2.2 over 2.4.
+
+Any preference can be overwritten on the command line if explicitly requested.
+
+Preferences cannot overcome explicit constraints, as they only set a preferred
+ordering among homogeneous attribute values. Going back to the example, if
+``gperftools@2.3:`` was requested, then Spack will install version 2.4
+since the most preferred version 2.2 is prohibited by the version constraint.
+
+.. _package_permissions:
+
+-------------------
+Package Permissions
+-------------------
+
+Spack can be configured to assign permissions to the files installed
+by a package.
+
+In the ``packages.yaml`` file under ``permissions``, the attributes
+``read``, ``write``, and ``group`` control the package
+permissions. These attributes can be set per-package, or for all
+packages under ``all``. If permissions are set under ``all`` and for a
+specific package, the package-specific settings take precedence.
+
+The ``read`` and ``write`` attributes take one of ``user``, ``group``,
+and ``world``.
+
+.. code-block:: yaml
+
+ packages:
+ all:
+ permissions:
+ write: group
+ group: spack
+ my_app:
+ permissions:
+ read: group
+ group: my_team
+
+The permissions settings describe the broadest level of access to
+installations of the specified packages. The execute permissions of
+the file are set to the same level as read permissions for those files
+that are executable. The default setting for ``read`` is ``world``,
+and for ``write`` is ``user``. In the example above, installations of
+``my_app`` will be installed with user and group permissions but no
+world permissions, and owned by the group ``my_team``. All other
+packages will be installed with user and group write privileges, and
+world read privileges. Those packages will be owned by the group
+``spack``.
+
+The ``group`` attribute assigns a Unix-style group to a package. All
+files installed by the package will be owned by the assigned group,
+and the sticky group bit will be set on the install prefix and all
+directories inside the install prefix. This will ensure that even
+manually placed files within the install prefix are owned by the
+assigned group. If no group is assigned, Spack will allow the OS
+default behavior to go as expected.
+
+----------------------------
+Assigning Package Attributes
+----------------------------
+
+You can assign class-level attributes in the configuration:
+
+.. code-block:: yaml
+
+ packages:
+ mpileaks:
+ # Override existing attributes
+ url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
+ # ... or add new ones
+ x: 1
+
+Attributes set this way will be accessible to any method executed
+in the package.py file (e.g. the ``install()`` method). Values for these
+attributes may be any value parseable by YAML.
+
+These can only be applied to specific packages, not "all" or
+virtual packages.
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index ae6be5b4a6eb3e..84046a654e93a4 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1549,7 +1549,7 @@ its value:
def configure_args(self):
...
- if "+shared" in self.spec:
+ if self.spec.satisfies("+shared"):
extra_args.append("--enable-shared")
else:
extra_args.append("--disable-shared")
@@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s
.. code-block:: python
- if "languages=jit" in spec:
+ if spec.satisfies("languages=jit"):
options.append("--enable-host-shared")
"""""""""""""""""""""""""""""""""""""""""""
@@ -2337,7 +2337,7 @@ window while a batch job is running ``spack install`` on the same or
overlapping dependencies without any process trying to re-do the work of
another.
-For example, if you are using SLURM, you could launch an installation
+For example, if you are using Slurm, you could launch an installation
of ``mpich`` using the following command:
.. code-block:: console
@@ -2352,7 +2352,7 @@ the following at the command line of a bash shell:
.. code-block:: console
- $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 &; done
+ $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done
.. note::
@@ -2557,9 +2557,10 @@ Conditional dependencies
^^^^^^^^^^^^^^^^^^^^^^^^
You may have a package that only requires a dependency under certain
-conditions. For example, you may have a package that has optional MPI support,
-- MPI is only a dependency when you want to enable MPI support for the
-package. In that case, you could say something like:
+conditions. For example, you may have a package with optional MPI support.
+You would then provide a variant to reflect that the feature is optional
+and specify the MPI dependency only applies when MPI support is enabled.
+In that case, you could say something like:
.. code-block:: python
@@ -2567,13 +2568,39 @@ package. In that case, you could say something like:
depends_on("mpi", when="+mpi")
-``when`` can include constraints on the variant, version, compiler, etc. and
-the :mod:`syntax` is the same as for Specs written on the command
-line.
-If a dependency/feature of a package isn't typically used, you can save time
-by making it conditional (since Spack will not build the dependency unless it
-is required for the Spec).
+Suppose the above package also has, since version 3, optional `Trilinos`
+support and you want them both to build either with or without MPI. Further
+suppose you require a version of `Trilinos` no older than 12.6. In that case,
+the `trilinos` variant and dependency directives would be:
+
+.. code-block:: python
+
+ variant("trilinos", default=False, description="Enable Trilinos support")
+
+ depends_on("trilinos@12.6:", when="@3: +trilinos")
+ depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi")
+
+
+Alternatively, you could use the `when` context manager to equivalently specify
+the `trilinos` variant dependencies as follows:
+
+.. code-block:: python
+
+ with when("@3: +trilinos"):
+ depends_on("trilinos@12.6:")
+ depends_on("trilinos +mpi", when="+mpi")
+
+
+The argument to ``when`` in either case can include any Spec constraints that
+are supported on the command line using the same :ref:`syntax <sec-specs>`.
+
+.. note::
+
+ If a dependency isn't typically used, you can save time by making it
+ conditional since Spack will not build the dependency unless it is
+ required for the Spec.
+
.. _dependency_dependency_patching:
@@ -2661,60 +2688,6 @@ appear in the package file (or in this case, in the list).
right version. If two packages depend on ``binutils`` patched *the
same* way, they can both use a single installation of ``binutils``.
-.. _setup-dependent-environment:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Influence how dependents are built or run
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack provides a mechanism for dependencies to influence the
-environment of their dependents by overriding the
-:meth:`setup_dependent_run_environment `
-or the
-:meth:`setup_dependent_build_environment `
-methods.
-The Qt package, for instance, uses this call:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
- :pyobject: Qt.setup_dependent_build_environment
- :linenos:
-
-to set the ``QTDIR`` environment variable so that packages
-that depend on a particular Qt installation will find it.
-Another good example of how a dependency can influence
-the build environment of dependents is the Python package:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
- :pyobject: Python.setup_dependent_build_environment
- :linenos:
-
-In the method above it is ensured that any package that depends on Python
-will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
-variables set appropriately before starting the installation. To make things
-even simpler the ``python setup.py`` command is also inserted into the module
-scope of dependents by overriding a third method called
-:meth:`setup_dependent_package `
-:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
- :pyobject: Python.setup_dependent_package
- :linenos:
-
-This allows most python packages to have a very simple install procedure,
-like the following:
-
-.. code-block:: python
-
- def install(self, spec, prefix):
- setup_py("install", "--prefix={0}".format(prefix))
-
-Finally the Python package takes also care of the modifications to ``PYTHONPATH``
-to allow dependencies to run correctly:
-
-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
- :pyobject: Python.setup_dependent_run_environment
- :linenos:
-
.. _packaging_conflicts:
@@ -2859,6 +2832,70 @@ variant(s) are selected. This may be accomplished with conditional
extends("python", when="+python")
...
+.. _setup-environment:
+
+--------------------------------------------
+Runtime and build time environment variables
+--------------------------------------------
+
+Spack provides a few methods to help package authors set up the required environment variables for
+their package. Environment variables typically depend on how the package is used: variables that
+make sense during the build phase may not be needed at runtime, and vice versa. Further, sometimes
+it makes sense to let a dependency set the environment variables for its dependents. To allow all
+this, Spack provides four different methods that can be overridden in a package:
+
+1. :meth:`setup_build_environment `
+2. :meth:`setup_run_environment `
+3. :meth:`setup_dependent_build_environment `
+4. :meth:`setup_dependent_run_environment `
+
+The Qt package, for instance, uses this call:
+
+.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
+ :pyobject: Qt.setup_dependent_build_environment
+ :linenos:
+
+to set the ``QTDIR`` environment variable so that packages that depend on a particular Qt
+installation will find it.
+
+The following diagram will give you an idea when each of these methods is called in a build
+context:
+
+.. image:: images/setup_env.png
+ :align: center
+
+Notice that ``setup_dependent_run_environment`` can be called multiple times, once for each
+dependent package, whereas ``setup_run_environment`` is called only once for the package itself.
+This means that the former should only be used if the environment variables depend on the dependent
+package, whereas the latter should be used if the environment variables depend only on the package
+itself.
+
+--------------------------------
+Setting package module variables
+--------------------------------
+
+Apart from modifying environment variables of the dependent package, you can also define Python
+variables to be used by the dependent. This is done by implementing
+:meth:`setup_dependent_package `. An
+example of this can be found in the ``Python`` package:
+
+.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
+ :pyobject: Python.setup_dependent_package
+ :linenos:
+
+This allows Python packages to directly use these variables:
+
+.. code-block:: python
+
+ def install(self, spec, prefix):
+ ...
+ install("script.py", python_platlib)
+
+.. note::
+
+ We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where
+ global variables are coming from when editing a ``package.py`` file.
+
-----
Views
-----
@@ -2937,6 +2974,33 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package
can be used to satisfy the dependency of any package that
``depends_on("mpi")``.
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Providing multiple virtuals simultaneously
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Packages can provide more than one virtual dependency. Sometimes, due to implementation details,
+there are subsets of those virtuals that need to be provided together by the same package.
+
+A well-known example is ``openblas``, which provides both the ``lapack`` and ``blas`` API in a single ``libopenblas``
+library. A package that needs ``lapack`` and ``blas`` must either use ``openblas`` to provide both, or not use
+``openblas`` at all. It cannot pick one or the other.
+
+To express this constraint in a package, the two virtual dependencies must be listed in the same ``provides`` directive:
+
+.. code-block:: python
+
+ provides('blas', 'lapack')
+
+This makes it impossible to select ``openblas`` as a provider for one of the two
+virtual dependencies and not for the other. If you try to, Spack will report an error:
+
+.. code-block:: console
+
+ $ spack spec netlib-scalapack ^[virtuals=lapack] openblas ^[virtuals=blas] atlas
+ ==> Error: concretization failed for the following reasons:
+
+ 1. Package 'openblas' needs to provide both 'lapack' and 'blas' together, but provides only 'lapack'
+
^^^^^^^^^^^^^^^^^^^^
Versioned Interfaces
^^^^^^^^^^^^^^^^^^^^
@@ -3439,6 +3503,56 @@ is equivalent to:
Constraints from nested context managers are also combined together, but they are rarely
needed or recommended.
+.. _default_args:
+
+------------------------
+Common default arguments
+------------------------
+
+Similarly, if directives have a common set of default arguments, you can
+group them together in a ``with default_args()`` block:
+
+.. code-block:: python
+
+ class PyExample(PythonPackage):
+
+ with default_args(type=("build", "run")):
+ depends_on("py-foo")
+ depends_on("py-foo@2:", when="@2:")
+ depends_on("py-bar")
+ depends_on("py-bz")
+
+The above is short for:
+
+.. code-block:: python
+
+ class PyExample(PythonPackage):
+
+ depends_on("py-foo", type=("build", "run"))
+ depends_on("py-foo@2:", when="@2:", type=("build", "run"))
+ depends_on("py-bar", type=("build", "run"))
+ depends_on("py-bz", type=("build", "run"))
+
+.. note::
+
+ The ``with when()`` context manager is composable, while ``with default_args()``
+ merely overrides the default. For example:
+
+ .. code-block:: python
+
+ with default_args(when="+feature"):
+ depends_on("foo")
+ depends_on("bar")
+ depends_on("baz", when="+baz")
+
+ is equivalent to:
+
+ .. code-block:: python
+
+ depends_on("foo", when="+feature")
+ depends_on("bar", when="+feature")
+ depends_on("baz", when="+baz") # Note: not when="+feature+baz"
+
.. _install-method:
------------------
@@ -3501,7 +3615,7 @@ need to override methods like ``configure_args``:
def configure_args(self):
args = ["--enable-cxx"] + self.enable_or_disable("libs")
- if "libs=static" in self.spec:
+ if self.spec.satisfies("libs=static"):
args.append("--with-pic")
return args
@@ -3738,7 +3852,7 @@ Similarly, ``spack install example +feature build_system=autotools`` will pick
the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.
Dependencies are always specified in the package class. When some dependencies
-depend on the choice of the build system, it is possible to use when conditions as
+depend on the choice of the build system, it is possible to use when conditions as
usual:
.. code-block:: python
@@ -3756,7 +3870,7 @@ usual:
depends_on("cmake@3.18:", when="@2.0:", type="build")
depends_on("cmake@3:", type="build")
- # Specify extra build dependencies used only in the configure script
+ # Specify extra build dependencies used only in the configure script
with when("build_system=autotools"):
depends_on("perl", type="build")
depends_on("pkgconfig", type="build")
@@ -4364,7 +4478,7 @@ for supported features, for instance:
.. code-block:: python
- if "avx512" in spec.target:
+ if spec.satisfies("target=avx512"):
args.append("--with-avx512")
The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding
@@ -6804,25 +6918,58 @@ the adapter role is to "emulate" a method resolution order like the one represen
Specifying License Information
------------------------------
-A significant portion of software that Spack packages is open source. Most open
-source software is released under one or more common open source licenses.
-Specifying the specific license that a package is released under in a project's
-`package.py` is good practice. To specify a license, find the SPDX identifier for
-a project and then add it using the license directive:
+Most of the software in Spack is open source, and most open source software is released
+under one or more `common open source licenses `_.
+Specifying the license that a package is released under in a project's
+`package.py` is good practice. To specify a license, find the `SPDX identifier
+`_ for a project and then add it using the license
+directive:
.. code-block:: python
license("")
+For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``,
+so you'd write:
+
+.. code-block:: python
+
+ license("Apache-2.0")
+
+Or, for a dual-licensed package like Spack, you would use an `SPDX Expression
+`_ with both of its
+licenses:
+
+.. code-block:: python
+
+ license("Apache-2.0 OR MIT")
+
Note that specifying a license without a when clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
-To account for this, you can specify when licenses should be applied. For
-example, to specify that a specific license identifier should only apply
-to versionup to and including 1.5, you could write the following directive:
+Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed
+in version ``0.12`` in 2018.
+
+You can specify when a ``license()`` directive applies using a ``when=``
+clause, just like other directives. For example, to specify that a specific
+license identifier should only apply to versions up to ``0.11``, but another
+license should apply for later versions, you could write:
.. code-block:: python
- license("...", when="@:1.5")
+ license("LGPL-2.1", when="@:0.11")
+ license("Apache-2.0 OR MIT", when="@0.12:")
+
+Note that unlike for most other directives, the ``when=`` constraints in the
+``license()`` directive can't intersect. Spack needs to be able to resolve
+exactly one license identifier expression for any given version. To specify
+*multiple* licenses, use SPDX expressions and operators as above. The operators
+you probably care most about are:
+
+* ``OR``: user chooses one license to adhere to; and
+* ``AND``: user has to adhere to all the licenses.
+You may also care about `license exceptions
+`_ that use the ``WITH`` operator,
+e.g. ``Apache-2.0 WITH LLVM-exception``.
diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt
index 319fce03e25471..ab1c80a791de26 100644
--- a/lib/spack/docs/requirements.txt
+++ b/lib/spack/docs/requirements.txt
@@ -6,7 +6,7 @@ python-levenshtein==0.23.0
docutils==0.18.1
pygments==2.17.2
urllib3==2.1.0
-pytest==7.4.2
+pytest==7.4.3
isort==5.12.0
black==23.11.0
flake8==6.1.0
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py
index 23cd44e6044a3f..2e8bf3a4f8b80b 100644
--- a/lib/spack/external/__init__.py
+++ b/lib/spack/external/__init__.py
@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)
+* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e)
astunparse
----------------
diff --git a/lib/spack/external/archspec/__init__.py b/lib/spack/external/archspec/__init__.py
index dfad9f3743d526..22a430894b4af0 100644
--- a/lib/spack/external/archspec/__init__.py
+++ b/lib/spack/external/archspec/__init__.py
@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
-__version__ = "0.2.1"
+__version__ = "0.2.2"
diff --git a/lib/spack/external/archspec/json/cpu/microarchitectures.json b/lib/spack/external/archspec/json/cpu/microarchitectures.json
index 2ddad29345441b..1e77caba4aea00 100644
--- a/lib/spack/external/archspec/json/cpu/microarchitectures.json
+++ b/lib/spack/external/archspec/json/cpu/microarchitectures.json
@@ -2318,6 +2318,26 @@
]
}
},
+ "power10": {
+ "from": ["power9"],
+ "vendor": "IBM",
+ "generation": 10,
+ "features": [],
+ "compilers": {
+ "gcc": [
+ {
+ "versions": "11.1:",
+ "flags": "-mcpu={name} -mtune={name}"
+ }
+ ],
+ "clang": [
+ {
+ "versions": "11.0:",
+ "flags": "-mcpu={name} -mtune={name}"
+ }
+ ]
+ }
+ },
"ppc64le": {
"from": [],
"vendor": "generic",
@@ -2405,6 +2425,29 @@
]
}
},
+ "power10le": {
+ "from": ["power9le"],
+ "vendor": "IBM",
+ "generation": 10,
+ "features": [],
+ "compilers": {
+ "gcc": [
+ {
+ "name": "power10",
+ "versions": "11.1:",
+ "flags": "-mcpu={name} -mtune={name}"
+ }
+ ],
+ "clang": [
+ {
+ "versions": "11.0:",
+ "family": "ppc64le",
+ "name": "power10",
+ "flags": "-mcpu={name} -mtune={name}"
+ }
+ ]
+ }
+ },
"aarch64": {
"from": [],
"vendor": "generic",
@@ -2592,6 +2635,37 @@
]
}
},
+ "armv9.0a": {
+ "from": ["armv8.5a"],
+ "vendor": "generic",
+ "features": [],
+ "compilers": {
+ "gcc": [
+ {
+ "versions": "12:",
+ "flags": "-march=armv9-a -mtune=generic"
+ }
+ ],
+ "clang": [
+ {
+ "versions": "14:",
+ "flags": "-march=armv9-a -mtune=generic"
+ }
+ ],
+ "apple-clang": [
+ {
+ "versions": ":",
+ "flags": "-march=armv9-a -mtune=generic"
+ }
+ ],
+ "arm": [
+ {
+ "versions": ":",
+ "flags": "-march=armv9-a -mtune=generic"
+ }
+ ]
+ }
+ },
"thunderx2": {
"from": ["armv8.1a"],
"vendor": "Cavium",
@@ -2813,8 +2887,12 @@
],
"arm" : [
{
- "versions": "20:",
+ "versions": "20:21.9",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
+ },
+ {
+ "versions": "22:",
+ "flags" : "-mcpu=neoverse-n1"
}
],
"nvhpc" : [
@@ -2942,7 +3020,7 @@
},
{
"versions": "22:",
- "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
+ "flags" : "-mcpu=neoverse-v1"
}
],
"nvhpc" : [
@@ -2954,6 +3032,126 @@
]
}
},
+ "neoverse_v2": {
+ "from": ["neoverse_n1", "armv9.0a"],
+ "vendor": "ARM",
+ "features": [
+ "fp",
+ "asimd",
+ "evtstrm",
+ "aes",
+ "pmull",
+ "sha1",
+ "sha2",
+ "crc32",
+ "atomics",
+ "fphp",
+ "asimdhp",
+ "cpuid",
+ "asimdrdm",
+ "jscvt",
+ "fcma",
+ "lrcpc",
+ "dcpop",
+ "sha3",
+ "sm3",
+ "sm4",
+ "asimddp",
+ "sha512",
+ "sve",
+ "asimdfhm",
+ "dit",
+ "uscat",
+ "ilrcpc",
+ "flagm",
+ "ssbs",
+ "sb",
+ "paca",
+ "pacg",
+ "dcpodp",
+ "sve2",
+ "sveaes",
+ "svepmull",
+ "svebitperm",
+ "svesha3",
+ "svesm4",
+ "flagm2",
+ "frint",
+ "svei8mm",
+ "svebf16",
+ "i8mm",
+ "bf16",
+ "dgh",
+ "bti"
+ ],
+ "compilers" : {
+ "gcc": [
+ {
+ "versions": "4.8:5.99",
+ "flags": "-march=armv8-a"
+ },
+ {
+ "versions": "6:6.99",
+ "flags" : "-march=armv8.1-a"
+ },
+ {
+ "versions": "7.0:7.99",
+ "flags" : "-march=armv8.2-a -mtune=cortex-a72"
+ },
+ {
+ "versions": "8.0:8.99",
+ "flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
+ },
+ {
+ "versions": "9.0:9.99",
+ "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
+ },
+ {
+ "versions": "10.0:11.99",
+ "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
+ },
+ {
+ "versions": "12.0:12.99",
+ "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
+ },
+ {
+ "versions": "13.0:",
+ "flags" : "-mcpu=neoverse-v2"
+ }
+ ],
+ "clang" : [
+ {
+ "versions": "9.0:10.99",
+ "flags" : "-march=armv8.5-a+sve"
+ },
+ {
+ "versions": "11.0:13.99",
+ "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
+ },
+ {
+ "versions": "14.0:15.99",
+ "flags" : "-march=armv9-a+i8mm+bf16"
+ },
+ {
+ "versions": "16.0:",
+ "flags" : "-mcpu=neoverse-v2"
+ }
+ ],
+ "arm" : [
+ {
+ "versions": "23.04.0:",
+ "flags" : "-mcpu=neoverse-v2"
+ }
+ ],
+ "nvhpc" : [
+ {
+ "versions": "23.3:",
+ "name": "neoverse-v2",
+ "flags": "-tp {name}"
+ }
+ ]
+ }
+ },
"m1": {
"from": ["armv8.4a"],
"vendor": "Apple",
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index b3975cc08d7d88..ec7bd665374c08 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -211,6 +211,7 @@ def info(message, *args, **kwargs):
stream.write(line + "\n")
else:
stream.write(indent + _output_filter(str(arg)) + "\n")
+ stream.flush()
def verbose(message, *args, **kwargs):
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 2fd13d0fe38b83..c2fa6aa90e6e39 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: PEP440 canonical ... string
-__version__ = "0.21.0.dev0"
+__version__ = "0.22.0.dev0"
spack_version = __version__
diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py
index 176c45487f51d0..970e4a3b367ff9 100644
--- a/lib/spack/spack/audit.py
+++ b/lib/spack/spack/audit.py
@@ -40,6 +40,7 @@ def _search_duplicate_compilers(error_cls):
import collections.abc
import glob
import inspect
+import io
import itertools
import pathlib
import pickle
@@ -54,6 +55,7 @@ def _search_duplicate_compilers(error_cls):
import spack.repo
import spack.spec
import spack.util.crypto
+import spack.util.spack_yaml as syaml
import spack.variant
#: Map an audit tag to a list of callables implementing checks
@@ -250,6 +252,88 @@ def _search_duplicate_specs_in_externals(error_cls):
return errors
+@config_packages
+def _deprecated_preferences(error_cls):
+ """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
+ # TODO (v0.22): remove this audit as the attributes will not be allowed in config
+ errors = []
+ packages_yaml = spack.config.CONFIG.get_config("packages")
+
+ def make_error(attribute_name, config_data, summary):
+ s = io.StringIO()
+ s.write("Occurring in the following file:\n")
+ dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
+ syaml.dump_config(dict_view, stream=s, blame=True)
+ return error_cls(summary=summary, details=[s.getvalue()])
+
+ if "all" in packages_yaml and "version" in packages_yaml["all"]:
+ summary = "Using the deprecated 'version' attribute under 'packages:all'"
+ errors.append(make_error("version", packages_yaml["all"], summary))
+
+ for package_name in packages_yaml:
+ if package_name == "all":
+ continue
+
+ package_conf = packages_yaml[package_name]
+ for attribute in ("compiler", "providers", "target"):
+ if attribute not in package_conf:
+ continue
+ summary = (
+ f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
+ )
+ errors.append(make_error(attribute, package_conf, summary))
+
+ return errors
+
+
+@config_packages
+def _avoid_mismatched_variants(error_cls):
+ """Warns if variant preferences have mismatched types or names."""
+ errors = []
+ packages_yaml = spack.config.CONFIG.get_config("packages")
+
+ def make_error(config_data, summary):
+ s = io.StringIO()
+ s.write("Occurring in the following file:\n")
+ syaml.dump_config(config_data, stream=s, blame=True)
+ return error_cls(summary=summary, details=[s.getvalue()])
+
+ for pkg_name in packages_yaml:
+ # 'all:' must be more forgiving, since it is setting defaults for everything
+ if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:
+ continue
+
+ preferences = packages_yaml[pkg_name]["variants"]
+ if not isinstance(preferences, list):
+ preferences = [preferences]
+
+ for variants in preferences:
+ current_spec = spack.spec.Spec(variants)
+ pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+ for variant in current_spec.variants.values():
+ # Variant does not exist at all
+ if variant.name not in pkg_cls.variants:
+ summary = (
+ f"Setting a preference for the '{pkg_name}' package to the "
+ f"non-existing variant '{variant.name}'"
+ )
+ errors.append(make_error(preferences, summary))
+ continue
+
+ # Variant cannot accept this value
+ s = spack.spec.Spec(pkg_name)
+ try:
+ s.update_variant_validate(variant.name, variant.value)
+ except Exception:
+ summary = (
+ f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
+ f"to the invalid value '{str(variant)}'"
+ )
+ errors.append(make_error(preferences, summary))
+
+ return errors
+
+
#: Sanity checks on package directives
package_directives = AuditClass(
group="packages",
@@ -307,10 +391,17 @@ def _check_build_test_callbacks(pkgs, error_cls):
@package_directives
def _check_patch_urls(pkgs, error_cls):
- """Ensure that patches fetched from GitHub have stable sha256 hashes."""
+ """Ensure that patches fetched from GitHub and GitLab have stable sha256
+ hashes."""
github_patch_url_re = (
r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
- ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
+ r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
+ )
+ # Only .diff URLs have stable/full hashes:
+ # https://forum.gitlab.com/t/patches-with-full-index/29313
+ gitlab_patch_url_re = (
+ r"^https?://(?:.+)?gitlab(?:.+)/"
+ r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)"
)
errors = []
@@ -321,19 +412,27 @@ def _check_patch_urls(pkgs, error_cls):
if not isinstance(patch, spack.patch.UrlPatch):
continue
- if not re.match(github_patch_url_re, patch.url):
- continue
-
- full_index_arg = "?full_index=1"
- if not patch.url.endswith(full_index_arg):
- errors.append(
- error_cls(
- "patch URL in package {0} must end with {1}".format(
- pkg_cls.name, full_index_arg
- ),
- [patch.url],
+ if re.match(github_patch_url_re, patch.url):
+ full_index_arg = "?full_index=1"
+ if not patch.url.endswith(full_index_arg):
+ errors.append(
+ error_cls(
+ "patch URL in package {0} must end with {1}".format(
+ pkg_cls.name, full_index_arg
+ ),
+ [patch.url],
+ )
+ )
+ elif re.match(gitlab_patch_url_re, patch.url):
+ if not patch.url.endswith(".diff"):
+ errors.append(
+ error_cls(
+ "patch URL in package {0} must end with .diff".format(
+ pkg_cls.name
+ ),
+ [patch.url],
+ )
)
- )
return errors
@@ -761,7 +860,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
)
except Exception:
summary = (
- "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
+ "{0}: dependency on {1} cannot be satisfied by known versions of {1.name}"
).format(pkg_name, s)
details = ["happening in " + filename]
if dependency_pkg_cls is not None:
@@ -803,6 +902,53 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
return errors
+@package_directives
+def _named_specs_in_when_arguments(pkgs, error_cls):
+ """Reports named specs in the 'when=' attribute of a directive.
+
+ Note that 'conflicts' is the only directive allowing that.
+ """
+ errors = []
+ for pkg_name in pkgs:
+ pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+
+ def _extracts_errors(triggers, summary):
+ _errors = []
+ for trigger in list(triggers):
+ when_spec = spack.spec.Spec(trigger)
+ if when_spec.name is not None and when_spec.name != pkg_name:
+ details = [f"using '{trigger}', should be '^{trigger}'"]
+ _errors.append(error_cls(summary=summary, details=details))
+ return _errors
+
+ for dname, triggers in pkg_cls.dependencies.items():
+ summary = f"{pkg_name}: wrong 'when=' condition for the '{dname}' dependency"
+ errors.extend(_extracts_errors(triggers, summary))
+
+ for vname, (variant, triggers) in pkg_cls.variants.items():
+ summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
+ errors.extend(_extracts_errors(triggers, summary))
+
+ for provided, triggers in pkg_cls.provided.items():
+ summary = f"{pkg_name}: wrong 'when=' condition for the '{provided}' virtual"
+ errors.extend(_extracts_errors(triggers, summary))
+
+ for _, triggers in pkg_cls.requirements.items():
+ triggers = [when_spec for when_spec, _, _ in triggers]
+ summary = f"{pkg_name}: wrong 'when=' condition in 'requires' directive"
+ errors.extend(_extracts_errors(triggers, summary))
+
+ triggers = list(pkg_cls.patches)
+ summary = f"{pkg_name}: wrong 'when=' condition in 'patch' directives"
+ errors.extend(_extracts_errors(triggers, summary))
+
+ triggers = list(pkg_cls.resources)
+ summary = f"{pkg_name}: wrong 'when=' condition in 'resource' directives"
+ errors.extend(_extracts_errors(triggers, summary))
+
+ return llnl.util.lang.dedupe(errors)
+
+
#: Sanity checks on package directives
external_detection = AuditClass(
group="externals",
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 2632e3036e1d66..a22fbf92f32a09 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -5,11 +5,13 @@
import codecs
import collections
+import errno
import hashlib
import io
import itertools
import json
import os
+import pathlib
import re
import shutil
import sys
@@ -23,7 +25,7 @@
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
-from typing import Dict, List, NamedTuple, Optional, Tuple, Union
+from typing import Dict, List, NamedTuple, Optional, Set, Tuple
from urllib.error import HTTPError, URLError
import llnl.util.filesystem as fsys
@@ -31,6 +33,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
+import spack.caches
import spack.cmd
import spack.config as config
import spack.database as spack_db
@@ -39,6 +42,9 @@
import spack.hooks
import spack.hooks.sbang
import spack.mirror
+import spack.oci.image
+import spack.oci.oci
+import spack.oci.opener
import spack.platforms
import spack.relocate as relocate
import spack.repo
@@ -48,6 +54,7 @@
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
+import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
@@ -60,8 +67,9 @@
from spack.stage import Stage
from spack.util.executable import which
-_build_cache_relative_path = "build_cache"
-_build_cache_keys_relative_path = "_pgp"
+BUILD_CACHE_RELATIVE_PATH = "build_cache"
+BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"
+CURRENT_BUILD_CACHE_LAYOUT_VERSION = 1
class BuildCacheDatabase(spack_db.Database):
@@ -125,25 +133,25 @@ class BinaryCacheIndex:
mean we should have paid the price to update the cache earlier?
"""
- def __init__(self, cache_root):
- self._index_cache_root = cache_root
+ def __init__(self, cache_root: Optional[str] = None):
+ self._index_cache_root: str = cache_root or binary_index_location()
# the key associated with the serialized _local_index_cache
self._index_contents_key = "contents.json"
# a FileCache instance storing copies of remote binary cache indices
- self._index_file_cache = None
+ self._index_file_cache: Optional[file_cache.FileCache] = None
# stores a map of mirror URL to index hash and cache key (index path)
- self._local_index_cache = None
+ self._local_index_cache: Optional[dict] = None
# hashes of remote indices already ingested into the concrete spec
# cache (_mirrors_for_spec)
- self._specs_already_associated = set()
+ self._specs_already_associated: Set[str] = set()
# mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
# whether the fetch succeeded or not.
- self._last_fetch_times = {}
+ self._last_fetch_times: Dict[str, float] = {}
# _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
# entries indicating mirrors where that concrete spec can be found.
@@ -153,7 +161,7 @@ def __init__(self, cache_root):
# - the concrete spec itself, keyed by ``spec`` (including the
# full hash, since the dag hash may match but we want to
# use the updated source if available)
- self._mirrors_for_spec = {}
+ self._mirrors_for_spec: Dict[str, dict] = {}
def _init_local_index_cache(self):
if not self._index_file_cache:
@@ -478,14 +486,18 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
FetchIndexError
"""
# TODO: get rid of this request, handle 404 better
- if not web_util.url_exists(
- url_util.join(mirror_url, _build_cache_relative_path, "index.json")
+ scheme = urllib.parse.urlparse(mirror_url).scheme
+
+ if scheme != "oci" and not web_util.url_exists(
+ url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
):
return False
- etag = cache_entry.get("etag", None)
- if etag:
- fetcher = EtagIndexFetcher(mirror_url, etag)
+ if scheme == "oci":
+ # TODO: Actually etag and OCI are not mutually exclusive...
+ fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None))
+ elif cache_entry.get("etag"):
+ fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"])
else:
fetcher = DefaultIndexFetcher(
mirror_url, local_hash=cache_entry.get("index_hash", None)
@@ -526,15 +538,8 @@ def binary_index_location():
return spack.util.path.canonicalize_path(cache_root)
-def _binary_index():
- """Get the singleton store instance."""
- return BinaryCacheIndex(binary_index_location())
-
-
-#: Singleton binary_index instance
-binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
- _binary_index
-)
+#: Default binary cache index instance
+BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore
class NoOverwriteException(spack.error.SpackError):
@@ -603,6 +608,10 @@ def __init__(self, msg):
super().__init__(msg)
+class InvalidMetadataFile(spack.error.SpackError):
+ pass
+
+
class UnsignedPackageException(spack.error.SpackError):
"""
Raised if installation of unsigned package is attempted without
@@ -617,11 +626,11 @@ def compute_hash(data):
def build_cache_relative_path():
- return _build_cache_relative_path
+ return BUILD_CACHE_RELATIVE_PATH
def build_cache_keys_relative_path():
- return _build_cache_keys_relative_path
+ return BUILD_CACHE_KEYS_RELATIVE_PATH
def build_cache_prefix(prefix):
@@ -629,21 +638,14 @@ def build_cache_prefix(prefix):
def buildinfo_file_name(prefix):
- """
- Filename of the binary package meta-data file
- """
- return os.path.join(prefix, ".spack/binary_distribution")
+ """Filename of the binary package meta-data file"""
+ return os.path.join(prefix, ".spack", "binary_distribution")
def read_buildinfo_file(prefix):
- """
- Read buildinfo file
- """
- filename = buildinfo_file_name(prefix)
- with open(filename, "r") as inputfile:
- content = inputfile.read()
- buildinfo = syaml.load(content)
- return buildinfo
+ """Read buildinfo file"""
+ with open(buildinfo_file_name(prefix), "r") as f:
+ return syaml.load(f)
class BuildManifestVisitor(BaseDirectoryVisitor):
@@ -826,18 +828,6 @@ def tarball_path_name(spec, ext):
return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext))
-def checksum_tarball(file):
- # calculate sha256 hash of tar file
- block_size = 65536
- hasher = hashlib.sha256()
- with open(file, "rb") as tfile:
- buf = tfile.read(block_size)
- while len(buf) > 0:
- hasher.update(buf)
- buf = tfile.read(block_size)
- return hasher.hexdigest()
-
-
def select_signing_key(key=None):
if key is None:
keys = spack.util.gpg.signing_keys()
@@ -1154,14 +1144,17 @@ def gzip_compressed_tarfile(path):
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
- with open(path, "wb") as fileobj, closing(
- GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
- ) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
- yield tar
+ with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
+ GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
+ ) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
+ name="", mode="w", fileobj=outer_checksum
+ ) as tar:
+ yield tar, inner_checksum, outer_checksum
-def _tarinfo_name(p: str):
- return p.lstrip("/")
+def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
+ """Compute tarfile entry name as the relative path from the (system) root."""
+ return _path(*_path(absolute_path).parts[1:]).as_posix()
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1241,8 +1234,88 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical
+class ChecksumWriter(io.BufferedIOBase):
+ """Checksum writer computes a checksum while writing to a file."""
+
+ myfileobj = None
+
+ def __init__(self, fileobj, algorithm=hashlib.sha256):
+ self.fileobj = fileobj
+ self.hasher = algorithm()
+ self.length = 0
+
+ def hexdigest(self):
+ return self.hasher.hexdigest()
+
+ def write(self, data):
+ if isinstance(data, (bytes, bytearray)):
+ length = len(data)
+ else:
+ data = memoryview(data)
+ length = data.nbytes
+
+ if length > 0:
+ self.fileobj.write(data)
+ self.hasher.update(data)
+
+ self.length += length
+
+ return length
+
+ def read(self, size=-1):
+ raise OSError(errno.EBADF, "read() on write-only object")
+
+ def read1(self, size=-1):
+ raise OSError(errno.EBADF, "read1() on write-only object")
+
+ def peek(self, n):
+ raise OSError(errno.EBADF, "peek() on write-only object")
+
+ @property
+ def closed(self):
+ return self.fileobj is None
+
+ def close(self):
+ fileobj = self.fileobj
+ if fileobj is None:
+ return
+ self.fileobj.close()
+ self.fileobj = None
+
+ def flush(self):
+ self.fileobj.flush()
+
+ def fileno(self):
+ return self.fileobj.fileno()
+
+ def rewind(self):
+ raise OSError("Can't rewind while computing checksum")
+
+ def readable(self):
+ return False
+
+ def writable(self):
+ return True
+
+ def seekable(self):
+ return True
+
+ def tell(self):
+ return self.fileobj.tell()
+
+ def seek(self, offset, whence=io.SEEK_SET):
+        # In principle forward seek is possible with b"\0" padding,
+ # but this is not implemented.
+ if offset == 0 and whence == io.SEEK_CUR:
+ return
+ raise OSError("Can't seek while computing checksum")
+
+ def readline(self, size=-1):
+ raise OSError(errno.EBADF, "readline() on write-only object")
+
+
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
- with gzip_compressed_tarfile(tarfile_path) as tar:
+ with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
# Tarball the install prefix
tarfile_of_spec_prefix(tar, binaries_dir)
@@ -1254,6 +1327,8 @@ def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
tarinfo.mode = 0o644
tar.addfile(tarinfo, io.BytesIO(bstring))
+ return inner_checksum.hexdigest(), outer_checksum.hexdigest()
+
class PushOptions(NamedTuple):
#: Overwrite existing tarball/metadata files in buildcache
@@ -1329,20 +1404,16 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec)
- _do_create_tarball(tarfile_path, binaries_dir, buildinfo)
-
- # get the sha256 checksum of the tarball
- checksum = checksum_tarball(tarfile_path)
+ checksum, _ = _do_create_tarball(tarfile_path, binaries_dir, buildinfo)
# add sha256 checksum to spec.json
-
with open(spec_file, "r") as inputfile:
content = inputfile.read()
if spec_file.endswith(".json"):
spec_dict = sjson.load(content)
else:
raise ValueError("{0} not a valid spec file type".format(spec_file))
- spec_dict["buildcache_layout_version"] = 1
+ spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}
with open(specfile_path, "w") as outfile:
@@ -1378,6 +1449,16 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
return None
+class NotInstalledError(spack.error.SpackError):
+ """Raised when a spec is not installed but picked to be packaged."""
+
+ def __init__(self, specs: List[Spec]):
+ super().__init__(
+ "Cannot push non-installed packages",
+ ", ".join(s.cformat("{name}{@version}{/hash:7}") for s in specs),
+ )
+
+
def specs_to_be_packaged(
specs: List[Spec],
deptype: dt.DepTypes = dt.DEFAULT_TYPES,
@@ -1385,6 +1466,7 @@ def specs_to_be_packaged(
dependencies: bool = True,
) -> List[Spec]:
"""Return the list of nodes to be packaged, given a list of specs.
+ Raises NotInstalledError if a spec is not installed but picked to be packaged.
Args:
specs: list of root specs to be processed
@@ -1393,19 +1475,35 @@ def specs_to_be_packaged(
dependencies: include the dependencies of each
spec in the nodes
"""
+
if not root and not dependencies:
return []
- elif dependencies:
- nodes = traverse.traverse_nodes(specs, root=root, deptype=deptype)
- else:
- nodes = set(specs)
-
- # Limit to installed non-externals.
- packageable = lambda n: not n.external and n.installed
- # Mass install check
+ # Filter packageable roots
with spack.store.STORE.db.read_transaction():
- return list(filter(packageable, nodes))
+ if root:
+ # Error on uninstalled roots, when roots are requested
+ uninstalled_roots = list(s for s in specs if not s.installed)
+ if uninstalled_roots:
+ raise NotInstalledError(uninstalled_roots)
+ roots = specs
+ else:
+ roots = []
+
+ if dependencies:
+ # Error on uninstalled deps, when deps are requested
+ deps = list(
+ traverse.traverse_nodes(
+                specs, deptype=deptype, order="breadth", root=False, key=traverse.by_dag_hash
+ )
+ )
+ uninstalled_deps = list(s for s in deps if not s.installed)
+ if uninstalled_deps:
+ raise NotInstalledError(uninstalled_deps)
+ else:
+ deps = []
+
+ return [s for s in itertools.chain(roots, deps) if not s.external]
def push(spec: Spec, mirror_url: str, options: PushOptions):
@@ -1478,6 +1576,42 @@ def _delete_staged_downloads(download_result):
download_result["specfile_stage"].destroy()
+def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]:
+ """Read and validate a spec file, returning the spec dict with its layout version, or raising
+ InvalidMetadataFile if invalid."""
+ try:
+ with open(path, "rb") as f:
+ binary_content = f.read()
+ except OSError:
+ raise InvalidMetadataFile(f"No such file: {path}")
+
+ # In the future we may support transparently decompressing compressed spec files.
+ if binary_content[:2] == b"\x1f\x8b":
+ raise InvalidMetadataFile("Compressed spec files are not supported")
+
+ try:
+ as_string = binary_content.decode("utf-8")
+ if path.endswith(".json.sig"):
+ spec_dict = Spec.extract_json_from_clearsig(as_string)
+ else:
+ spec_dict = json.loads(as_string)
+ except Exception as e:
+ raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e
+
+ # Ensure this version is not too new.
+ try:
+ layout_version = int(spec_dict.get("buildcache_layout_version", 0))
+ except ValueError as e:
+ raise InvalidMetadataFile("Could not parse layout version") from e
+
+ if layout_version > max_supported_layout:
+ raise InvalidMetadataFile(
+ f"Layout version {layout_version} is too new for this version of Spack"
+ )
+
+ return spec_dict, layout_version
+
+
def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
"""
Download binary tarball for given package into stage area, returning
@@ -1513,8 +1647,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
tarball = tarball_path_name(spec, ".spack")
specfile_prefix = tarball_name(spec, ".spec")
- mirrors_to_try = []
-
# Note on try_first and try_next:
# mirrors_for_spec mostly likely came from spack caching remote
# mirror indices locally and adding their specs to a local data
@@ -1527,63 +1659,140 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
- for url in try_first + try_next:
- mirrors_to_try.append(
- {
- "specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix),
- "spackfile": url_util.join(url, _build_cache_relative_path, tarball),
- }
- )
+ mirrors = try_first + try_next
tried_to_verify_sigs = []
# Assumes we care more about finding a spec file by preferred ext
# than by mirrory priority. This can be made less complicated as
# we remove support for deprecated spec formats and buildcache layouts.
- for ext in ["json.sig", "json"]:
- for mirror_to_try in mirrors_to_try:
- specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
- spackfile_url = mirror_to_try["spackfile"]
- local_specfile_stage = try_fetch(specfile_url)
- if local_specfile_stage:
- local_specfile_path = local_specfile_stage.save_filename
- signature_verified = False
-
- if ext.endswith(".sig") and not unsigned:
- # If we found a signed specfile at the root, try to verify
- # the signature immediately. We will not download the
- # tarball if we could not verify the signature.
- tried_to_verify_sigs.append(specfile_url)
- signature_verified = try_verify(local_specfile_path)
- if not signature_verified:
- tty.warn("Failed to verify: {0}".format(specfile_url))
-
- if unsigned or signature_verified or not ext.endswith(".sig"):
- # We will download the tarball in one of three cases:
- # 1. user asked for --no-check-signature
- # 2. user didn't ask for --no-check-signature, but we
- # found a spec.json.sig and verified the signature already
- # 3. neither of the first two cases are true, but this file
- # is *not* a signed json (not a spec.json.sig file). That
- # means we already looked at all the mirrors and either didn't
- # find any .sig files or couldn't verify any of them. But it
- # is still possible to find an old style binary package where
- # the signature is a detached .asc file in the outer archive
- # of the tarball, and in that case, the only way to know is to
- # download the tarball. This is a deprecated use case, so if
- # something goes wrong during the extraction process (can't
- # verify signature, checksum doesn't match) we will fail at
- # that point instead of trying to download more tarballs from
- # the remaining mirrors, looking for one we can use.
- tarball_stage = try_fetch(spackfile_url)
- if tarball_stage:
- return {
- "tarball_stage": tarball_stage,
- "specfile_stage": local_specfile_stage,
- "signature_verified": signature_verified,
- }
+ for try_signed in (True, False):
+ for mirror in mirrors:
+ # If it's an OCI index, do things differently, since we cannot compose URLs.
+ parsed = urllib.parse.urlparse(mirror)
- local_specfile_stage.destroy()
+ # TODO: refactor this to some "nice" place.
+ if parsed.scheme == "oci":
+ ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
+ spack.oci.image.default_tag(spec)
+ )
+
+ # Fetch the manifest
+ try:
+ response = spack.oci.opener.urlopen(
+ urllib.request.Request(
+ url=ref.manifest_url(),
+ headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
+ )
+ )
+ except Exception:
+ continue
+
+ # Download the config = spec.json and the relevant tarball
+ try:
+ manifest = json.loads(response.read())
+ spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
+ tarball_digest = spack.oci.image.Digest.from_string(
+ manifest["layers"][-1]["digest"]
+ )
+ except Exception:
+ continue
+
+ with spack.oci.oci.make_stage(
+ ref.blob_url(spec_digest), spec_digest, keep=True
+ ) as local_specfile_stage:
+ try:
+ local_specfile_stage.fetch()
+ local_specfile_stage.check()
+ try:
+ _get_valid_spec_file(
+ local_specfile_stage.save_filename,
+ CURRENT_BUILD_CACHE_LAYOUT_VERSION,
+ )
+ except InvalidMetadataFile as e:
+ tty.warn(
+ f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
+ f"from {mirror} due to invalid metadata file: {e}"
+ )
+ local_specfile_stage.destroy()
+ continue
+ except Exception:
+ continue
+ local_specfile_stage.cache_local()
+
+ with spack.oci.oci.make_stage(
+ ref.blob_url(tarball_digest), tarball_digest, keep=True
+ ) as tarball_stage:
+ try:
+ tarball_stage.fetch()
+ tarball_stage.check()
+ except Exception:
+ continue
+ tarball_stage.cache_local()
+
+ return {
+ "tarball_stage": tarball_stage,
+ "specfile_stage": local_specfile_stage,
+ "signature_verified": False,
+ }
+
+ else:
+ ext = "json.sig" if try_signed else "json"
+ specfile_path = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, specfile_prefix)
+ specfile_url = f"{specfile_path}.{ext}"
+ spackfile_url = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, tarball)
+ local_specfile_stage = try_fetch(specfile_url)
+ if local_specfile_stage:
+ local_specfile_path = local_specfile_stage.save_filename
+ signature_verified = False
+
+ try:
+ _get_valid_spec_file(
+ local_specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION
+ )
+ except InvalidMetadataFile as e:
+ tty.warn(
+ f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
+ f"from {mirror} due to invalid metadata file: {e}"
+ )
+ local_specfile_stage.destroy()
+ continue
+
+ if try_signed and not unsigned:
+ # If we found a signed specfile at the root, try to verify
+ # the signature immediately. We will not download the
+ # tarball if we could not verify the signature.
+ tried_to_verify_sigs.append(specfile_url)
+ signature_verified = try_verify(local_specfile_path)
+ if not signature_verified:
+ tty.warn("Failed to verify: {0}".format(specfile_url))
+
+ if unsigned or signature_verified or not try_signed:
+ # We will download the tarball in one of three cases:
+ # 1. user asked for --no-check-signature
+ # 2. user didn't ask for --no-check-signature, but we
+ # found a spec.json.sig and verified the signature already
+ # 3. neither of the first two cases are true, but this file
+ # is *not* a signed json (not a spec.json.sig file). That
+ # means we already looked at all the mirrors and either didn't
+ # find any .sig files or couldn't verify any of them. But it
+ # is still possible to find an old style binary package where
+ # the signature is a detached .asc file in the outer archive
+ # of the tarball, and in that case, the only way to know is to
+ # download the tarball. This is a deprecated use case, so if
+ # something goes wrong during the extraction process (can't
+ # verify signature, checksum doesn't match) we will fail at
+ # that point instead of trying to download more tarballs from
+ # the remaining mirrors, looking for one we can use.
+ tarball_stage = try_fetch(spackfile_url)
+ if tarball_stage:
+ return {
+ "tarball_stage": tarball_stage,
+ "specfile_stage": local_specfile_stage,
+ "signature_verified": signature_verified,
+ }
+
+ local_specfile_stage.destroy()
# Falling through the nested loops meeans we exhaustively searched
# for all known kinds of spec files on all mirrors and did not find
@@ -1816,7 +2025,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
)
# compute the sha256 checksum of the tarball
- local_checksum = checksum_tarball(tarfile_path)
+ local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
expected = remote_checksum["hash"]
# if the checksums don't match don't install
@@ -1868,23 +2077,16 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
)
specfile_path = download_result["specfile_stage"].save_filename
-
- with open(specfile_path, "r") as inputfile:
- content = inputfile.read()
- if specfile_path.endswith(".json.sig"):
- spec_dict = Spec.extract_json_from_clearsig(content)
- else:
- spec_dict = sjson.load(content)
-
+ spec_dict, layout_version = _get_valid_spec_file(
+ specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION
+ )
bchecksum = spec_dict["binary_cache_checksum"]
+
filename = download_result["tarball_stage"].save_filename
signature_verified = download_result["signature_verified"]
tmpdir = None
- if (
- "buildcache_layout_version" not in spec_dict
- or int(spec_dict["buildcache_layout_version"]) < 1
- ):
+ if layout_version == 0:
# Handle the older buildcache layout where the .spack file
# contains a spec json, maybe an .asc file (signature),
# and another tarball containing the actual install tree.
@@ -1895,7 +2097,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
_delete_staged_downloads(download_result)
shutil.rmtree(tmpdir)
raise e
- else:
+ elif layout_version == 1:
# Newer buildcache layout: the .spack file contains just
# in the install tree, the signature, if it exists, is
# wrapped around the spec.json at the root. If sig verify
@@ -1909,7 +2111,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
)
# compute the sha256 checksum of the tarball
- local_checksum = checksum_tarball(tarfile_path)
+ local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
expected = bchecksum["hash"]
# if the checksums don't match don't install
@@ -1919,7 +2121,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
raise NoChecksumException(
tarfile_path, size, contents, "sha256", expected, local_checksum
)
-
try:
with closing(tarfile.open(tarfile_path, "r")) as tar:
# Remove install prefix from tarfil to extract directly into spec.prefix
@@ -2050,10 +2251,10 @@ def try_direct_fetch(spec, mirrors=None):
for mirror in binary_mirrors:
buildcache_fetch_url_json = url_util.join(
- mirror.fetch_url, _build_cache_relative_path, specfile_name
+ mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_name
)
buildcache_fetch_url_signed_json = url_util.join(
- mirror.fetch_url, _build_cache_relative_path, signed_specfile_name
+ mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name
)
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
@@ -2115,7 +2316,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
tty.debug("No Spack mirrors are currently configured")
return {}
- results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
+ results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
# The index may be out-of-date. If we aren't only considering indices, try
# to fetch directly since we know where the file should be.
@@ -2124,7 +2325,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
# We found a spec by the direct fetch approach, we might as well
# add it to our mapping.
if results:
- binary_index.update_spec(spec, results)
+ BINARY_INDEX.update_spec(spec, results)
return results
@@ -2140,12 +2341,12 @@ def update_cache_and_get_specs():
Throws:
FetchCacheError
"""
- binary_index.update()
- return binary_index.get_all_built_specs()
+ BINARY_INDEX.update()
+ return BINARY_INDEX.get_all_built_specs()
def clear_spec_cache():
- binary_index.clear()
+ BINARY_INDEX.clear()
def get_keys(install=False, trust=False, force=False, mirrors=None):
@@ -2158,7 +2359,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
for mirror in mirror_collection.values():
fetch_url = mirror.fetch_url
keys_url = url_util.join(
- fetch_url, _build_cache_relative_path, _build_cache_keys_relative_path
+ fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
)
keys_index = url_util.join(keys_url, "index.json")
@@ -2223,7 +2424,7 @@ def push_keys(*mirrors, **kwargs):
for mirror in mirrors:
push_url = getattr(mirror, "push_url", mirror)
keys_url = url_util.join(
- push_url, _build_cache_relative_path, _build_cache_keys_relative_path
+ push_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
)
keys_local = url_util.local_file_path(keys_url)
@@ -2363,11 +2564,11 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
)
if mirror_url:
- mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
+ mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
return _download_buildcache_entry(mirror_root, file_descriptions)
for mirror in spack.mirror.MirrorCollection(binary=True).values():
- mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path)
+ mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
if _download_buildcache_entry(mirror_root, file_descriptions):
return True
@@ -2458,7 +2659,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):
def get_remote_hash(self):
# Failure to fetch index.json.hash is not fatal
- url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash")
+ url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
except urllib.error.URLError:
@@ -2470,7 +2671,7 @@ def get_remote_hash(self):
return None
return remote_hash.decode("utf-8")
- def conditional_fetch(self):
+ def conditional_fetch(self) -> FetchIndexResult:
# Do an intermediate fetch for the hash
# and a conditional fetch for the contents
@@ -2479,17 +2680,17 @@ def conditional_fetch(self):
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
# Otherwise, download index.json
- url_index = url_util.join(self.url, _build_cache_relative_path, "index.json")
+ url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
except urllib.error.URLError as e:
- raise FetchIndexError("Could not fetch index from {}".format(url_index), e)
+ raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
try:
result = codecs.getreader("utf-8")(response).read()
except ValueError as e:
- return FetchCacheError("Remote index {} is invalid".format(url_index), e)
+ raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
computed_hash = compute_hash(result)
@@ -2521,9 +2722,9 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
self.etag = etag
self.urlopen = urlopen
- def conditional_fetch(self):
+ def conditional_fetch(self) -> FetchIndexResult:
# Just do a conditional fetch immediately
- url = url_util.join(self.url, _build_cache_relative_path, "index.json")
+ url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
headers = {
"User-Agent": web_util.SPACK_USER_AGENT,
"If-None-Match": '"{}"'.format(self.etag),
@@ -2552,3 +2753,59 @@ def conditional_fetch(self):
data=result,
fresh=False,
)
+
+
+class OCIIndexFetcher:
+ def __init__(self, url: str, local_hash, urlopen=None) -> None:
+ self.local_hash = local_hash
+
+ # Remove oci:// prefix
+ assert url.startswith("oci://")
+ self.ref = spack.oci.image.ImageReference.from_string(url[6:])
+ self.urlopen = urlopen or spack.oci.opener.urlopen
+
+ def conditional_fetch(self) -> FetchIndexResult:
+ """Download an index from an OCI registry type mirror."""
+ url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
+ try:
+ response = self.urlopen(
+ urllib.request.Request(
+ url=url_manifest,
+ headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
+ )
+ )
+ except urllib.error.URLError as e:
+ raise FetchIndexError(
+ "Could not fetch manifest from {}".format(url_manifest), e
+ ) from e
+
+ try:
+ manifest = json.loads(response.read())
+ except Exception as e:
+ raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+
+ # Get first blob hash, which should be the index.json
+ try:
+ index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
+ except Exception as e:
+ raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+
+ # Fresh?
+ if index_digest.digest == self.local_hash:
+ return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
+
+ # Otherwise fetch the blob / index.json
+ response = self.urlopen(
+ urllib.request.Request(
+ url=self.ref.blob_url(index_digest),
+ headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+ )
+ )
+
+ result = codecs.getreader("utf-8")(response).read()
+
+ # Make sure the blob we download has the advertised hash
+ if compute_hash(result) != index_digest.digest:
+ raise FetchIndexError(f"Remote index {url_manifest} is invalid")
+
+ return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
diff --git a/lib/spack/spack/bootstrap/_common.py b/lib/spack/spack/bootstrap/_common.py
index 69f32d62639d81..0b8192f77f807d 100644
--- a/lib/spack/spack/bootstrap/_common.py
+++ b/lib/spack/spack/bootstrap/_common.py
@@ -213,7 +213,8 @@ def _root_spec(spec_str: str) -> str:
if str(spack.platforms.host()) == "darwin":
spec_str += " %apple-clang"
elif str(spack.platforms.host()) == "windows":
- spec_str += " %msvc"
+ # TODO (johnwparent): Remove version constraint when clingo patch is up
+ spec_str += " %msvc@:19.37"
else:
spec_str += " %gcc"
diff --git a/lib/spack/spack/bootstrap/config.py b/lib/spack/spack/bootstrap/config.py
index e38c5669d993a8..6786bc0d3ead00 100644
--- a/lib/spack/spack/bootstrap/config.py
+++ b/lib/spack/spack/bootstrap/config.py
@@ -143,7 +143,9 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
- new_compilers = spack.compilers.find_new_compilers()
+ new_compilers = spack.compilers.find_new_compilers(
+ mixed_toolchain=sys.platform == "darwin"
+ )
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, init_config=False)
diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py
index d7b39b02e0cc38..5f73c7bfaf49c8 100644
--- a/lib/spack/spack/bootstrap/core.py
+++ b/lib/spack/spack/bootstrap/core.py
@@ -214,7 +214,7 @@ def _install_and_test(
with spack.config.override(self.mirror_scope):
# This index is currently needed to get the compiler used to build some
# specs that we know by dag hash.
- spack.binary_distribution.binary_index.regenerate_spec_cache()
+ spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache()
index = spack.binary_distribution.update_cache_and_get_specs()
if not index:
@@ -291,6 +291,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
+ # This is needed to help the old concretizer taking the `setuptools` dependency
+ # only when bootstrapping from sources on Python 3.12
+ if spec_for_current_python() == "python@3.12":
+ concrete_spec.constrain("+force_setuptools")
if module == "clingo":
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
diff --git a/lib/spack/spack/bootstrap/environment.py b/lib/spack/spack/bootstrap/environment.py
index 2a2fc37b454afa..71d54a8ad145b0 100644
--- a/lib/spack/spack/bootstrap/environment.py
+++ b/lib/spack/spack/bootstrap/environment.py
@@ -161,7 +161,7 @@ def _write_spack_yaml_file(self) -> None:
def isort_root_spec() -> str:
"""Return the root spec used to bootstrap isort"""
- return _root_spec("py-isort@4.3.5:")
+ return _root_spec("py-isort@5")
def mypy_root_spec() -> str:
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 96c8cb8a4ad71a..20d8e75f9bc47b 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -324,19 +324,29 @@ def set_compiler_environment_variables(pkg, env):
# ttyout, ttyerr, etc.
link_dir = spack.paths.build_env_path
- # Set SPACK compiler variables so that our wrapper knows what to call
+ # Set SPACK compiler variables so that our wrapper knows what to
+ # call. If there is no compiler configured then use a default
+ # wrapper which will emit an error if it is used.
if compiler.cc:
env.set("SPACK_CC", compiler.cc)
env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
+ else:
+ env.set("CC", os.path.join(link_dir, "cc"))
if compiler.cxx:
env.set("SPACK_CXX", compiler.cxx)
env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
+ else:
+ env.set("CXX", os.path.join(link_dir, "c++"))
if compiler.f77:
env.set("SPACK_F77", compiler.f77)
env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
+ else:
+ env.set("F77", os.path.join(link_dir, "f77"))
if compiler.fc:
env.set("SPACK_FC", compiler.fc)
env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
+ else:
+ env.set("FC", os.path.join(link_dir, "fc"))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
@@ -743,28 +753,23 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
- tty.debug("setup_package: grabbing modifications from dependencies")
- env_mods.extend(setup_context.get_env_modifications())
- tty.debug("setup_package: collected all modifications from dependencies")
-
- # architecture specific setup
+ # Platform specific setup goes before package specific setup. This is for setting
+ # defaults like MACOSX_DEPLOYMENT_TARGET on macOS.
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
target = platform.target(pkg.spec.architecture.target)
platform.setup_platform_environment(pkg, env_mods)
- if context == Context.BUILD:
- tty.debug("setup_package: setup build environment for root")
- builder = spack.builder.create(pkg)
- builder.setup_build_environment(env_mods)
-
- if (not dirty) and (not env_mods.is_unset("CPATH")):
- tty.debug(
- "A dependency has updated CPATH, this may lead pkg-"
- "config to assume that the package is part of the system"
- " includes and omit it when invoked with '--cflags'."
- )
- elif context == Context.TEST:
+ tty.debug("setup_package: grabbing modifications from dependencies")
+ env_mods.extend(setup_context.get_env_modifications())
+ tty.debug("setup_package: collected all modifications from dependencies")
+
+ if context == Context.TEST:
env_mods.prepend_path("PATH", ".")
+ elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
+ tty.debug(
+ "A dependency has updated CPATH, this may lead pkg-config to assume that the package "
+ "is part of the system includes and omit it when invoked with '--cflags'."
+ )
# First apply the clean environment changes
env_base.apply_modifications()
@@ -953,8 +958,11 @@ def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
reversed(specs_with_type), lambda t: t[0].external
)
self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
- self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
+ self.should_setup_run_env = (
+ UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
+ )
self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT
+ self.should_setup_build_env = UseMode.ROOT if context == Context.BUILD else UseMode(0)
if context == Context.RUN or context == Context.TEST:
self.should_be_runnable |= UseMode.ROOT
@@ -994,8 +1002,9 @@ def get_env_modifications(self) -> EnvironmentModifications:
- Updating PATH for packages that are required at runtime
- Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
tools can find Spack-built dependencies (when context=build)
- - Running custom package environment modifications (setup_run_environment,
- setup_dependent_build_environment, setup_dependent_run_environment)
+ - Running custom package environment modifications: setup_run_environment,
+ setup_dependent_run_environment, setup_build_environment,
+ setup_dependent_build_environment.
The (partial) order imposed on the specs is externals first, then topological
from leaf to root. That way externals cannot contribute search paths that would shadow
@@ -1008,19 +1017,27 @@ def get_env_modifications(self) -> EnvironmentModifications:
if self.should_setup_dependent_build_env & flag:
self._make_buildtime_detectable(dspec, env)
- for spec in self.specs:
- builder = spack.builder.create(pkg)
- builder.setup_dependent_build_environment(env, spec)
+ for root in self.specs: # there is only one root in build context
+ spack.builder.create(pkg).setup_dependent_build_environment(env, root)
+
+ if self.should_setup_build_env & flag:
+ spack.builder.create(pkg).setup_build_environment(env)
if self.should_be_runnable & flag:
self._make_runnable(dspec, env)
if self.should_setup_run_env & flag:
- # TODO: remove setup_dependent_run_environment...
- for spec in dspec.dependents(deptype=dt.RUN):
+ run_env_mods = EnvironmentModifications()
+ for spec in dspec.dependents(deptype=dt.LINK | dt.RUN):
if id(spec) in self.nodes_in_subdag:
- pkg.setup_dependent_run_environment(env, spec)
- pkg.setup_run_environment(env)
+ pkg.setup_dependent_run_environment(run_env_mods, spec)
+ pkg.setup_run_environment(run_env_mods)
+ if self.context == Context.BUILD:
+ # Don't let the runtime environment of compiler-like dependencies leak into the
+ # build env
+ run_env_mods.drop("CC", "CXX", "F77", "FC")
+ env.extend(run_env_mods)
+
return env
def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
diff --git a/lib/spack/spack/build_systems/cached_cmake.py b/lib/spack/spack/build_systems/cached_cmake.py
index d85c2b7e199352..74304f1dc63293 100644
--- a/lib/spack/spack/build_systems/cached_cmake.py
+++ b/lib/spack/spack/build_systems/cached_cmake.py
@@ -34,6 +34,11 @@ def cmake_cache_option(name, boolean_value, comment="", force=False):
return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
+def cmake_cache_filepath(name, value, comment=""):
+ """Generate a string for a cmake cache variable of type FILEPATH"""
+ return 'set({0} "{1}" CACHE FILEPATH "{2}")\n'.format(name, value, comment)
+
+
class CachedCMakeBuilder(CMakeBuilder):
#: Phases of a Cached CMake package
#: Note: the initconfig phase is used for developer builds as a final phase to stop on
@@ -257,6 +262,15 @@ def initconfig_hardware_entries(self):
entries.append(
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
+ llvm_bin = spec["llvm-amdgpu"].prefix.bin
+ llvm_prefix = spec["llvm-amdgpu"].prefix
+ # Some ROCm systems seem to point to <path>/rocm-<ver>/ and
+ # others point to <path>/rocm-<ver>/llvm
+ if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
+ llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
+ entries.append(
+ cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
+ )
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
arch_str = ";".join(archs)
@@ -277,7 +291,7 @@ def std_initconfig_entries(self):
"#------------------{0}".format("-" * 60),
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
"#------------------{0}\n".format("-" * 60),
- cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
+ cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
]
diff --git a/lib/spack/spack/build_systems/cargo.py b/lib/spack/spack/build_systems/cargo.py
new file mode 100644
index 00000000000000..28da47595642e1
--- /dev/null
+++ b/lib/spack/spack/build_systems/cargo.py
@@ -0,0 +1,89 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import inspect
+
+import llnl.util.filesystem as fs
+
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, depends_on
+from spack.multimethod import when
+
+from ._checks import BaseBuilder, execute_install_time_tests
+
+
+class CargoPackage(spack.package_base.PackageBase):
+ """Specialized class for packages built using Cargo."""
+
+ #: This attribute is used in UI queries that need to know the build
+ #: system base class
+ build_system_class = "CargoPackage"
+
+ build_system("cargo")
+
+ with when("build_system=cargo"):
+ depends_on("rust", type="build")
+
+
+@spack.builder.builder("cargo")
+class CargoBuilder(BaseBuilder):
+ """The Cargo builder encodes the most common way of building software with
+ a rust Cargo.toml file. It has two phases that can be overridden, if need be:
+
+ 1. :py:meth:`~.CargoBuilder.build`
+ 2. :py:meth:`~.CargoBuilder.install`
+
+ For a finer tuning you may override:
+
+ +-----------------------------------------------+----------------------+
+ | **Method** | **Purpose** |
+ +===============================================+======================+
+ | :py:meth:`~.CargoBuilder.build_args` | Specify arguments |
+ | | to ``cargo install`` |
+ +-----------------------------------------------+----------------------+
+ | :py:meth:`~.CargoBuilder.check_args` | Specify arguments |
+ | | to ``cargo test`` |
+ +-----------------------------------------------+----------------------+
+ """
+
+ phases = ("build", "install")
+
+ #: Callback names for install-time test
+ install_time_test_callbacks = ["check"]
+
+ @property
+ def build_directory(self):
+ """Return the directory containing the main Cargo.toml."""
+ return self.pkg.stage.source_path
+
+ @property
+ def build_args(self):
+ """Arguments for ``cargo build``."""
+ return []
+
+ @property
+ def check_args(self):
+ """Argument for ``cargo test`` during check phase"""
+ return []
+
+ def build(self, pkg, spec, prefix):
+ """Runs ``cargo install`` in the source directory"""
+ with fs.working_dir(self.build_directory):
+ inspect.getmodule(pkg).cargo(
+ "install", "--root", "out", "--path", ".", *self.build_args
+ )
+
+ def install(self, pkg, spec, prefix):
+ """Copy build files into package prefix."""
+ with fs.working_dir(self.build_directory):
+ fs.install_tree("out", prefix)
+
+ spack.builder.run_after("install")(execute_install_time_tests)
+
+ def check(self):
+ """Run "cargo test"."""
+ with fs.working_dir(self.build_directory):
+ inspect.getmodule(self.pkg).cargo("test", *self.check_args)
diff --git a/lib/spack/spack/build_systems/go.py b/lib/spack/spack/build_systems/go.py
new file mode 100644
index 00000000000000..a7dd04fcf6c649
--- /dev/null
+++ b/lib/spack/spack/build_systems/go.py
@@ -0,0 +1,98 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import inspect
+
+import llnl.util.filesystem as fs
+
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, extends
+from spack.multimethod import when
+
+from ._checks import BaseBuilder, execute_install_time_tests
+
+
+class GoPackage(spack.package_base.PackageBase):
+ """Specialized class for packages built using the Go toolchain."""
+
+ #: This attribute is used in UI queries that need to know the build
+ #: system base class
+ build_system_class = "GoPackage"
+
+ #: Legacy buildsystem attribute used to deserialize and install old specs
+ legacy_buildsystem = "go"
+
+ build_system("go")
+
+ with when("build_system=go"):
+ # TODO: this seems like it should be depends_on, see
+ # setup_dependent_build_environment in go for why I kept it like this
+ extends("go@1.14:", type="build")
+
+
+@spack.builder.builder("go")
+class GoBuilder(BaseBuilder):
+ """The Go builder encodes the most common way of building software with
+ a golang go.mod file. It has two phases that can be overridden, if need be:
+
+ 1. :py:meth:`~.GoBuilder.build`
+ 2. :py:meth:`~.GoBuilder.install`
+
+ For a finer tuning you may override:
+
+ +-----------------------------------------------+--------------------+
+ | **Method** | **Purpose** |
+ +===============================================+====================+
+ | :py:meth:`~.GoBuilder.build_args` | Specify arguments |
+ | | to ``go build`` |
+ +-----------------------------------------------+--------------------+
+ | :py:meth:`~.GoBuilder.check_args` | Specify arguments |
+ | | to ``go test`` |
+ +-----------------------------------------------+--------------------+
+ """
+
+ phases = ("build", "install")
+
+ #: Callback names for install-time test
+ install_time_test_callbacks = ["check"]
+
+ def setup_build_environment(self, env):
+ env.set("GO111MODULE", "on")
+ env.set("GOTOOLCHAIN", "local")
+
+ @property
+ def build_directory(self):
+ """Return the directory containing the main go.mod."""
+ return self.pkg.stage.source_path
+
+ @property
+ def build_args(self):
+ """Arguments for ``go build``."""
+ # Pass ldflags -s (omit symbol table) and -w (omit DWARF debug info) by default
+ return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
+
+ @property
+ def check_args(self):
+ """Argument for ``go test`` during check phase"""
+ return []
+
+ def build(self, pkg, spec, prefix):
+ """Runs ``go build`` in the source directory"""
+ with fs.working_dir(self.build_directory):
+ inspect.getmodule(pkg).go("build", *self.build_args)
+
+ def install(self, pkg, spec, prefix):
+ """Install built binaries into prefix bin."""
+ with fs.working_dir(self.build_directory):
+ fs.mkdirp(prefix.bin)
+ fs.install(pkg.name, prefix.bin)
+
+ spack.builder.run_after("install")(execute_install_time_tests)
+
+ def check(self):
+ """Run ``go test .`` in the source directory"""
+ with fs.working_dir(self.build_directory):
+ inspect.getmodule(self.pkg).go("test", *self.check_args)
diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py
index 1961eb312cee3c..234a4c9ae0be26 100644
--- a/lib/spack/spack/build_systems/oneapi.py
+++ b/lib/spack/spack/build_systems/oneapi.py
@@ -7,13 +7,12 @@
import os
import platform
import shutil
-from os.path import basename, dirname, isdir
+from os.path import basename, isdir
-from llnl.util.filesystem import find_headers, find_libraries, join_path
+from llnl.util.filesystem import HeaderList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree
from spack.directives import conflicts, variant
-from spack.package import mkdirp
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -56,10 +55,21 @@ def component_dir(self):
"""Subdirectory for this component in the install prefix."""
raise NotImplementedError
+ @property
+ def v2_layout_versions(self):
+ """Version that implements the v2 directory layout."""
+ raise NotImplementedError
+
+ @property
+ def v2_layout(self):
+ """Returns true if this version implements the v2 directory layout."""
+ return self.spec.satisfies(self.v2_layout_versions)
+
@property
def component_prefix(self):
"""Path to component //."""
- return self.prefix.join(join_path(self.component_dir, self.spec.version))
+ v = self.spec.version.up_to(2) if self.v2_layout else self.spec.version
+ return self.prefix.join(self.component_dir).join(str(v))
@property
def env_script_args(self):
@@ -113,8 +123,9 @@ def install_component(self, installer_path):
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
# Some installers have a bug and do not return an error code when failing
- if not isdir(join_path(self.prefix, self.component_dir)):
- raise RuntimeError("install failed")
+ install_dir = self.component_prefix
+ if not isdir(install_dir):
+ raise RuntimeError("install failed to directory: {0}".format(install_dir))
def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
@@ -129,7 +140,7 @@ def setup_run_environment(self, env):
if "~envmods" not in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
- join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args
+ self.component_prefix.env.join("vars.sh"), *self.env_script_args
)
)
@@ -168,16 +179,40 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):
"""
+ def header_directories(self, dirs):
+ h = HeaderList([])
+ h.directories = dirs
+ return h
+
+ @property
+ def headers(self):
+ return self.header_directories(
+ [self.component_prefix.include, self.component_prefix.include.join(self.component_dir)]
+ )
+
+ @property
+ def libs(self):
+ # for v2_layout all libraries are in the top level, v1 sometimes put them in intel64
+ return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)
+
+
+class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
+ """Base class for Intel oneAPI library packages with SDK components.
+
+ Contains some convenient default implementations for libraries
+ that expose functionality in sdk subdirectories.
+ Implement the method directly in the package if something
+ different is needed.
+
+ """
+
@property
def headers(self):
- include_path = join_path(self.component_prefix, "include")
- return find_headers("*", include_path, recursive=True)
+ return self.header_directories([self.component_prefix.sdk.include])
@property
def libs(self):
- lib_path = join_path(self.component_prefix, "lib", "intel64")
- lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
- return find_libraries("*", root=lib_path, shared=True, recursive=True)
+ return find_libraries("*", self.component_prefix.sdk.lib64)
class IntelOneApiStaticLibraryList:
@@ -212,3 +247,7 @@ def link_flags(self):
@property
def ld_flags(self):
return "{0} {1}".format(self.search_flags, self.link_flags)
+
+
+#: Tuple of Intel math libraries, exported to packages
+INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio")
diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
index 518968db432a67..19db10d40a8c6c 100644
--- a/lib/spack/spack/build_systems/python.py
+++ b/lib/spack/spack/build_systems/python.py
@@ -6,8 +6,7 @@
import os
import re
import shutil
-import stat
-from typing import List, Optional # novm
+from typing import List, Optional
import archspec
@@ -24,10 +23,8 @@
import spack.spec
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
-from spack.error import NoHeadersError, NoLibrariesError, SpackError, SpecError
+from spack.error import NoHeadersError, NoLibrariesError, SpackError
from spack.install_test import test_part
-from spack.util.executable import Executable
-from spack.version import Version
from ._checks import BaseBuilder, execute_install_time_tests
@@ -47,6 +44,23 @@ def __init__(self, name, provided, selected):
self.selected = selected
+def _flatten_dict(dictionary):
+ """Iterable that yields KEY=VALUE paths through a dictionary.
+ Args:
+ dictionary: Possibly nested dictionary of arbitrary keys and values.
+ Yields:
+ A single path through the dictionary.
+ """
+ for key, item in dictionary.items():
+ if isinstance(item, dict):
+ # Recursive case
+ for value in _flatten_dict(item):
+ yield f"{key}={value}"
+ else:
+ # Base case
+ yield f"{key}={item}"
+
+
class PythonExtension(spack.package_base.PackageBase):
maintainers("adamjstewart")
@@ -372,51 +386,6 @@ def libs(self):
raise NoLibrariesError(msg.format(self.spec.name, root))
-def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes):
- # Recurse into the install prefix and fixup shebangs
- exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
- dirs = [path]
- hardlinks = set()
-
- while dirs:
- with os.scandir(dirs.pop()) as entries:
- for entry in entries:
- if entry.is_dir(follow_symlinks=False):
- dirs.append(entry.path)
- continue
-
- # Only consider files, not symlinks
- if not entry.is_file(follow_symlinks=False):
- continue
-
- lstat = entry.stat(follow_symlinks=False)
-
- # Skip over files that are not executable
- if not (lstat.st_mode & exe):
- continue
-
- # Don't modify hardlinks more than once
- if lstat.st_nlink > 1:
- key = (lstat.st_ino, lstat.st_dev)
- if key in hardlinks:
- continue
- hardlinks.add(key)
-
- # Finally replace shebangs if any.
- with open(entry.path, "rb+") as f:
- contents = f.read(2)
- if contents != b"#!":
- continue
- contents += f.read()
-
- if old_interpreter not in contents:
- continue
-
- f.seek(0)
- f.write(contents.replace(old_interpreter, new_interpreter))
- f.truncate()
-
-
@spack.builder.builder("python_pip")
class PythonPipBuilder(BaseBuilder):
phases = ("install",)
@@ -428,7 +397,7 @@ class PythonPipBuilder(BaseBuilder):
legacy_long_methods = ("install_options", "global_options", "config_settings")
#: Names associated with package attributes in the old build-system format
- legacy_attributes = ("build_directory", "install_time_test_callbacks")
+ legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")
#: Callback names for install-time test
install_time_test_callbacks = ["test"]
@@ -473,14 +442,15 @@ def build_directory(self):
def config_settings(self, spec, prefix):
"""Configuration settings to be passed to the PEP 517 build backend.
- Requires pip 22.1 or newer.
+ Requires pip 22.1 or newer for keys that appear only a single time,
+ or pip 23.1 or newer if the same key appears multiple times.
Args:
spec (spack.spec.Spec): build spec
prefix (spack.util.prefix.Prefix): installation prefix
Returns:
- dict: dictionary of KEY, VALUE settings
+ dict: Possibly nested dictionary of KEY, VALUE settings
"""
return {}
@@ -517,56 +487,17 @@ def optional_extras(self, spec, prefix):
"""Specify optional extra features to build"""
return []
- @property
- def _build_venv_path(self):
- """Return the path to the virtual environment used for building when
- python is external."""
- return os.path.join(self.spec.package.stage.path, "build_env")
-
- @property
- def _build_venv_python(self) -> Executable:
- """Return the Python executable in the build virtual environment when
- python is external."""
- return Executable(os.path.join(self._build_venv_path, "bin", "python"))
-
def install(self, pkg, spec, prefix):
"""Install everything from build directory."""
- python: Executable = spec["python"].command
- # Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot
- # execute hooks from user and system site-packages.
- if spec["python"].external:
- # There are no environment variables to disable the system site-packages, so we use a
- # virtual environment instead. The downside of this approach is that pip produces
- # incorrect shebangs that refer to the virtual environment, which we have to fix up.
- python("-m", "venv", "--without-pip", self._build_venv_path)
- pip = self._build_venv_python
- else:
- # For a Spack managed Python, system site-packages is empty/unused by design, so it
- # suffices to disable user site-packages, for which there is an environment variable.
- pip = python
- pip.add_default_env("PYTHONNOUSERSITE", "1")
- pip.add_default_arg("-m")
- pip.add_default_arg("pip")
-
- args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]
- config_settings = self.config_settings(spec, prefix)
- if config_settings:
- if spec["py-pip"].version < Version("22.1"):
- raise SpecError(
- "'{}' package uses 'config_settings' which is only supported by "
- "pip 22.1+. Add the following line to the package to fix this:\n\n"
- ' depends_on("py-pip@22.1:", type="build")'.format(spec.name)
- )
- args += [
- "--config-settings={}={}".format(key, value)
- for key, value in config_settings.items()
- ]
+ args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
+ for setting in _flatten_dict(self.config_settings(spec, prefix)):
+ args.append(f"--config-settings={setting}")
for option in self.install_options(spec, prefix):
- args.append("--install-option=" + option)
+ args.append(f"--install-option={option}")
for option in self.global_options(spec, prefix):
- args.append("--global-option=" + option)
+ args.append(f"--global-option={option}")
pip_project = (
self.stage.archive_file
@@ -583,31 +514,15 @@ def install(self, pkg, spec, prefix):
args.append(pip_project)
+ pip = spec["python"].command
+ # Hide user packages, since we don't have build isolation. This is
+ # necessary because pip / setuptools may run hooks from arbitrary
+ # packages during the build. There is no equivalent variable to hide
+ # system packages, so this is not reliable for external Python.
+ pip.add_default_env("PYTHONNOUSERSITE", "1")
+ pip.add_default_arg("-m")
+ pip.add_default_arg("pip")
with fs.working_dir(self.build_directory):
pip(*args)
- @spack.builder.run_after("install")
- def fixup_shebangs_pointing_to_build(self):
- """When installing a package using an external python, we use a temporary virtual
- environment which improves build isolation. The downside is that pip produces shebangs
- that point to the temporary virtual environment. This method fixes them up to point to the
- underlying Python."""
- # No need to fixup shebangs if no build venv was used. (this post install function also
- # runs when install was overridden in another package, so check existence of the venv path)
- if not os.path.exists(self._build_venv_path):
- return
-
- # Use sys.executable, since that's what pip uses.
- interpreter = (
- lambda python: python("-c", "import sys; print(sys.executable)", output=str)
- .strip()
- .encode("utf-8")
- )
-
- fixup_shebangs(
- path=self.spec.prefix,
- old_interpreter=interpreter(self._build_venv_python),
- new_interpreter=interpreter(self.spec["python"].command),
- )
-
spack.builder.run_after("install")(execute_install_time_tests)
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index fca28362540623..ac308045742f9b 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -25,6 +25,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
+from llnl.util.tty.color import cescape, colorize
import spack
import spack.binary_distribution as bindist
@@ -45,7 +46,22 @@
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import build_stamp as cdash_build_stamp
-JOB_RETRY_CONDITIONS = ["always"]
+# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
+JOB_RETRY_CONDITIONS = [
+ # "always",
+ "unknown_failure",
+ "script_failure",
+ "api_failure",
+ "stuck_or_timeout_failure",
+ "runner_system_failure",
+ "runner_unsupported",
+ "stale_schedule",
+ # "job_execution_timeout",
+ "archived_failure",
+ "unmet_prerequisites",
+ "scheduler_failure",
+ "data_integrity_failure",
+]
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -97,15 +113,6 @@ def _remove_reserved_tags(tags):
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
-def _get_spec_string(spec):
- format_elements = ["{name}{@version}", "{%compiler}"]
-
- if spec.architecture:
- format_elements.append(" {arch=architecture}")
-
- return spec.format("".join(format_elements))
-
-
def _spec_deps_key(s):
return "{0}/{1}".format(s.name, s.dag_hash(7))
@@ -210,22 +217,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
tty.msg("Staging summary ([x] means a job needs rebuilding):")
for stage_index, stage in enumerate(stages):
- tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
+ tty.msg(f" stage {stage_index} ({len(stage)} jobs):")
- for job in sorted(stage):
+ for job in sorted(stage, key=lambda j: (not rebuild_decisions[j].rebuild, j)):
s = spec_labels[job]
- rebuild = rebuild_decisions[job].rebuild
reason = rebuild_decisions[job].reason
- reason_msg = " ({0})".format(reason) if reason else ""
- tty.msg(
- " [{1}] {0} -> {2}{3}".format(
- job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
- )
- )
- if rebuild_decisions[job].mirrors:
- tty.msg(" found on the following mirrors:")
- for murl in rebuild_decisions[job].mirrors:
- tty.msg(" {0}".format(murl))
+ reason_msg = f" ({reason})" if reason else ""
+ spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
+ if rebuild_decisions[job].rebuild:
+ status = colorize("@*g{[x]} ")
+ msg = f" {status}{s.cformat(spec_fmt)}{reason_msg}"
+ else:
+ msg = f"{s.format(spec_fmt)}{reason_msg}"
+ if rebuild_decisions[job].mirrors:
+ msg += f" [{', '.join(rebuild_decisions[job].mirrors)}]"
+ msg = colorize(f" @K - {cescape(msg)}@.")
+ tty.msg(msg)
def _compute_spec_deps(spec_list):
@@ -932,7 +939,7 @@ def generate_gitlab_ci_yaml(
# Speed up staging by first fetching binary indices from all mirrors
try:
- bindist.binary_index.update()
+ bindist.BINARY_INDEX.update()
except bindist.FetchCacheError as e:
tty.warn(e)
@@ -2258,13 +2265,13 @@ def build_name(self):
spec.architecture,
self.build_group,
)
- tty.verbose(
+ tty.debug(
"Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
)
return build_name
build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
- tty.verbose("Using CDash build name ({0}) from the environment".format(build_name))
+ tty.debug("Using CDash build name ({0}) from the environment".format(build_name))
return build_name
@property # type: ignore
@@ -2278,11 +2285,11 @@ def build_stamp(self):
Returns: (str) current CDash build stamp"""
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
if build_stamp:
- tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp))
+ tty.debug("Using build stamp ({0}) from the environment".format(build_stamp))
return build_stamp
build_stamp = cdash_build_stamp(self.build_group, time.time())
- tty.verbose("Generated new build stamp ({0})".format(build_stamp))
+ tty.debug("Generated new build stamp ({0})".format(build_stamp))
return build_stamp
@property # type: ignore
diff --git a/lib/spack/spack/cmd/audit.py b/lib/spack/spack/cmd/audit.py
index 86eea9f7bc8b9a..58d7a5362cf56d 100644
--- a/lib/spack/spack/cmd/audit.py
+++ b/lib/spack/spack/cmd/audit.py
@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import warnings
+
import llnl.util.tty as tty
import llnl.util.tty.colify
import llnl.util.tty.color as cl
@@ -52,8 +54,10 @@ def setup_parser(subparser):
def configs(parser, args):
- reports = spack.audit.run_group(args.subcommand)
- _process_reports(reports)
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ reports = spack.audit.run_group(args.subcommand)
+ _process_reports(reports)
def packages(parser, args):
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 2b2d47fd6b199a..853566bb9b1af2 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -3,16 +3,19 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
+import copy
import glob
+import hashlib
import json
+import multiprocessing.pool
import os
import shutil
import sys
import tempfile
-from typing import List
+import urllib.request
+from typing import Dict, List, Optional, Tuple
import llnl.util.tty as tty
-import llnl.util.tty.color as clr
from llnl.string import plural
from llnl.util.lang import elide_list
@@ -23,17 +26,37 @@
import spack.deptypes as dt
import spack.environment as ev
import spack.error
+import spack.hash_types as ht
import spack.mirror
+import spack.oci.oci
+import spack.oci.opener
import spack.relocate
import spack.repo
import spack.spec
+import spack.stage
import spack.store
+import spack.user_environment
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
+from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
+from spack.oci.image import (
+ Digest,
+ ImageReference,
+ default_config,
+ default_index_tag,
+ default_manifest,
+ default_tag,
+ tag_is_spec,
+)
+from spack.oci.oci import (
+ copy_missing_layers_with_retry,
+ get_manifest_and_config_with_retry,
+ upload_blob_with_retry,
+ upload_manifest_with_retry,
+)
from spack.spec import Spec, save_dependency_specfiles
-from spack.stage import Stage
from spack.util.pattern import Args
description = "create, download and install binary packages"
@@ -72,7 +95,9 @@ def setup_parser(subparser: argparse.ArgumentParser):
push_sign.add_argument(
"--key", "-k", metavar="key", type=str, default=None, help="key for signing"
)
- push.add_argument("mirror", type=str, help="mirror name, path, or URL")
+ push.add_argument(
+ "mirror", type=arguments.mirror_name_or_url, help="mirror name, path, or URL"
+ )
push.add_argument(
"--update-index",
"--rebuild-index",
@@ -98,7 +123,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
action="store_true",
help="stop pushing on first failure (default is best effort)",
)
- arguments.add_common_arguments(push, ["specs", "deptype_default_default_deptype"])
+ push.add_argument(
+ "--base-image", default=None, help="specify the base image for the buildcache. "
+ )
+ arguments.add_common_arguments(push, ["specs", "deptype_default_default_deptype", "jobs"])
push.set_defaults(func=push_fn)
install = subparsers.add_parser("install", help=install_fn.__doc__)
@@ -282,7 +310,22 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]:
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
-def push_fn(args: argparse.Namespace):
+def _format_spec(spec: Spec) -> str:
+ return spec.cformat("{name}{@version}{/hash:7}")
+
+
+def _progress(i: int, total: int):
+ if total > 1:
+ digits = len(str(total))
+ return f"[{i+1:{digits}}/{total}] "
+ return ""
+
+
+def _make_pool():
+ return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
+
+
+def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
tty.warn(
@@ -295,15 +338,28 @@ def push_fn(args: argparse.Namespace):
else:
specs = spack.cmd.require_active_env("buildcache push").all_specs()
- mirror = arguments.mirror_name_or_url(args.mirror)
-
if args.allow_root:
tty.warn(
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
)
- url = mirror.push_url
+ # Check if this is an OCI image.
+ try:
+ image_ref = spack.oci.oci.image_from_mirror(args.mirror)
+ except ValueError:
+ image_ref = None
+
+ # For OCI images, we require dependencies to be pushed for now.
+ if image_ref:
+ if "dependencies" not in args.things_to_install:
+ tty.die("Dependencies must be pushed for OCI images.")
+ if not args.unsigned:
+ tty.warn(
+ "Code signing is currently not supported for OCI images. "
+ "Use --unsigned to silence this warning."
+ )
+ # This is a list of installed, non-external specs.
specs = bindist.specs_to_be_packaged(
specs,
root="package" in args.things_to_install,
@@ -311,48 +367,52 @@ def push_fn(args: argparse.Namespace):
deptype=args.deptype,
)
+ url = args.mirror.push_url
+
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
if len(specs) > 1:
tty.info(f"Selected {len(specs)} specs to push to {url}")
- skipped = []
failed = []
- # tty printing
- color = clr.get_color_when()
- format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color)
- total_specs = len(specs)
- digits = len(str(total_specs))
-
- for i, spec in enumerate(specs):
- try:
- bindist.push_or_raise(
- spec,
- url,
- bindist.PushOptions(
- force=args.force,
- unsigned=args.unsigned,
- key=args.key,
- regenerate_index=args.update_index,
- ),
- )
-
- if total_specs > 1:
- msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}"
- else:
- msg = f"Pushed {format_spec(spec)} to {url}"
+ # TODO: unify this logic in the future.
+ if image_ref:
+ with tempfile.TemporaryDirectory(
+ dir=spack.stage.get_stage_root()
+ ) as tmpdir, _make_pool() as pool:
+ skipped = _push_oci(args, image_ref, specs, tmpdir, pool)
+ else:
+ skipped = []
+
+ for i, spec in enumerate(specs):
+ try:
+ bindist.push_or_raise(
+ spec,
+ url,
+ bindist.PushOptions(
+ force=args.force,
+ unsigned=args.unsigned,
+ key=args.key,
+ regenerate_index=args.update_index,
+ ),
+ )
- tty.info(msg)
+ msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
+ if len(specs) == 1:
+ msg += f" to {url}"
+ tty.info(msg)
- except bindist.NoOverwriteException:
- skipped.append(format_spec(spec))
+ except bindist.NoOverwriteException:
+ skipped.append(_format_spec(spec))
- # Catch any other exception unless the fail fast option is set
- except Exception as e:
- if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
- raise
- failed.append((format_spec(spec), e))
+ # Catch any other exception unless the fail fast option is set
+ except Exception as e:
+ if args.fail_fast or isinstance(
+ e, (bindist.PickKeyException, bindist.NoKeyException)
+ ):
+ raise
+ failed.append((_format_spec(spec), e))
if skipped:
if len(specs) == 1:
@@ -379,6 +439,341 @@ def push_fn(args: argparse.Namespace):
),
)
+ # Update the index if requested
+ # TODO: move the update-index logic out of bindist; it should run once after all specs are pushed,
+ # not once per spec.
+ if image_ref and len(skipped) < len(specs) and args.update_index:
+ with tempfile.TemporaryDirectory(
+ dir=spack.stage.get_stage_root()
+ ) as tmpdir, _make_pool() as pool:
+ _update_index_oci(image_ref, tmpdir, pool)
+
+
+def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
+ """Get the spack tarball layer digest and size for this tag, if it exists"""
+ try:
+ manifest, config = get_manifest_and_config_with_retry(image_ref)
+
+ return spack.oci.oci.Blob(
+ compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
+ uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
+ size=manifest["layers"][-1]["size"],
+ )
+ except Exception:
+ return None
+
+
+def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
+ filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
+
+ # Create an oci.image.layer aka tarball of the package
+ compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
+
+ blob = spack.oci.oci.Blob(
+ Digest.from_sha256(compressed_tarfile_checksum),
+ Digest.from_sha256(tarfile_checksum),
+ os.path.getsize(filename),
+ )
+
+ # Upload the blob
+ upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
+
+ # delete the file
+ os.unlink(filename)
+
+ return blob
+
+
+def _retrieve_env_dict_from_config(config: dict) -> dict:
+ """Retrieve the environment variables from the image config file.
+ Sets a default value for PATH if it is not present.
+
+ Args:
+ config (dict): The image config file.
+
+ Returns:
+ dict: The environment variables.
+ """
+ env = {"PATH": "/bin:/usr/bin"}
+
+ if "Env" in config.get("config", {}):
+ for entry in config["config"]["Env"]:
+ key, value = entry.split("=", 1)
+ env[key] = value
+ return env
+
+
+def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
+ name = spec.target.family.name
+ name_map = {"aarch64": "arm64", "x86_64": "amd64"}
+ return name_map.get(name, name)
+
+
+def _put_manifest(
+ base_images: Dict[str, Tuple[dict, dict]],
+ checksums: Dict[str, spack.oci.oci.Blob],
+ spec: spack.spec.Spec,
+ image_ref: ImageReference,
+ tmpdir: str,
+):
+ architecture = _archspec_to_gooarch(spec)
+
+ dependencies = list(
+ reversed(
+ list(
+ s
+ for s in spec.traverse(order="topo", deptype=("link", "run"), root=True)
+ if not s.external
+ )
+ )
+ )
+
+ base_manifest, base_config = base_images[architecture]
+ env = _retrieve_env_dict_from_config(base_config)
+
+ spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env)
+
+ # Create an oci.image.config file
+ config = copy.deepcopy(base_config)
+
+ # Add the diff ids of the dependencies
+ for s in dependencies:
+ config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
+
+ # Set the environment variables
+ config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
+
+ # From the OCI v1.0 spec:
+ # > Any extra fields in the Image JSON struct are considered implementation
+ # > specific and MUST be ignored by any implementations which are unable to
+ # > interpret them.
+ # We use this to store the Spack spec, so we can use it to create an index.
+ spec_dict = spec.to_dict(hash=ht.dag_hash)
+ spec_dict["buildcache_layout_version"] = 1
+ spec_dict["binary_cache_checksum"] = {
+ "hash_algorithm": "sha256",
+ "hash": checksums[spec.dag_hash()].compressed_digest.digest,
+ }
+ config.update(spec_dict)
+
+ config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json")
+
+ with open(config_file, "w") as f:
+ json.dump(config, f, separators=(",", ":"))
+
+ config_file_checksum = Digest.from_sha256(
+ spack.util.crypto.checksum(hashlib.sha256, config_file)
+ )
+
+ # Upload the config file
+ upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
+
+ oci_manifest = {
+ "mediaType": "application/vnd.oci.image.manifest.v1+json",
+ "schemaVersion": 2,
+ "config": {
+ "mediaType": base_manifest["config"]["mediaType"],
+ "digest": str(config_file_checksum),
+ "size": os.path.getsize(config_file),
+ },
+ "layers": [
+ *(layer for layer in base_manifest["layers"]),
+ *(
+ {
+ "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
+ "digest": str(checksums[s.dag_hash()].compressed_digest),
+ "size": checksums[s.dag_hash()].size,
+ }
+ for s in dependencies
+ ),
+ ],
+ "annotations": {"org.opencontainers.image.description": spec.format()},
+ }
+
+ image_ref_for_spec = image_ref.with_tag(default_tag(spec))
+
+ # Finally upload the manifest
+ upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest)
+
+ # delete the config file
+ os.unlink(config_file)
+
+ return image_ref_for_spec
+
+
+def _push_oci(
+ args,
+ image_ref: ImageReference,
+ installed_specs_with_deps: List[Spec],
+ tmpdir: str,
+ pool: multiprocessing.pool.Pool,
+) -> List[str]:
+ """Push specs to an OCI registry
+
+ Args:
+ args: The command line arguments.
+ image_ref: The image reference.
+ installed_specs_with_deps: The installed specs to push, excluding externals,
+ including deps, ordered from roots to leaves.
+
+ Returns:
+ List[str]: The list of skipped specs (already in the buildcache).
+ """
+
+ # Reverse the order
+ installed_specs_with_deps = list(reversed(installed_specs_with_deps))
+
+ # The base image to use for the package. When not set, we use
+ # the OCI registry only for storage, and do not use any base image.
+ base_image_ref: Optional[ImageReference] = (
+ ImageReference.from_string(args.base_image) if args.base_image else None
+ )
+
+ # Spec dag hash -> blob
+ checksums: Dict[str, spack.oci.oci.Blob] = {}
+
+ # arch -> (manifest, config)
+ base_images: Dict[str, Tuple[dict, dict]] = {}
+
+ # Specs not uploaded because they already exist
+ skipped = []
+
+ if not args.force:
+ tty.info("Checking for existing specs in the buildcache")
+ to_be_uploaded = []
+
+ tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps)
+ available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
+
+ for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
+ if maybe_blob is not None:
+ checksums[spec.dag_hash()] = maybe_blob
+ skipped.append(_format_spec(spec))
+ else:
+ to_be_uploaded.append(spec)
+ else:
+ to_be_uploaded = installed_specs_with_deps
+
+ if not to_be_uploaded:
+ return skipped
+
+ tty.info(
+ f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}"
+ )
+
+ # Upload blobs
+ new_blobs = pool.starmap(
+ _push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded)
+ )
+
+ # And update the spec to blob mapping
+ for spec, blob in zip(to_be_uploaded, new_blobs):
+ checksums[spec.dag_hash()] = blob
+
+ # Copy base image layers, probably fine to do sequentially.
+ for spec in to_be_uploaded:
+ architecture = _archspec_to_gooarch(spec)
+ # Get base image details, if we don't have them yet
+ if architecture in base_images:
+ continue
+ if base_image_ref is None:
+ base_images[architecture] = (default_manifest(), default_config(architecture, "linux"))
+ else:
+ base_images[architecture] = copy_missing_layers_with_retry(
+ base_image_ref, image_ref, architecture
+ )
+
+ # Upload manifests
+ tty.info("Uploading manifests")
+ pushed_image_ref = pool.starmap(
+ _put_manifest,
+ ((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded),
+ )
+
+ # Print the image names of the top-level specs
+ for spec, ref in zip(to_be_uploaded, pushed_image_ref):
+ tty.info(f"Pushed {_format_spec(spec)} to {ref}")
+
+ return skipped
+
+
+def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
+ # Don't allow recursion here, since Spack itself always uploads
+ # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
+ _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)
+
+ # Do very basic validation: if "spec" is a key in the config, it
+ # must be a Spec object too.
+ return config if "spec" in config else None
+
+
+def _update_index_oci(
+ image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool
+) -> None:
+ response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url()))
+ spack.oci.opener.ensure_status(response, 200)
+ tags = json.load(response)["tags"]
+
+ # Fetch all image config files in parallel
+ spec_dicts = pool.starmap(
+ _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
+ )
+
+ # Populate the database
+ db_root_dir = os.path.join(tmpdir, "db_root")
+ db = bindist.BuildCacheDatabase(db_root_dir)
+
+ for spec_dict in spec_dicts:
+ spec = Spec.from_dict(spec_dict)
+ db.add(spec, directory_layout=None)
+ db.mark(spec, "in_buildcache", True)
+
+ # Create the index.json file
+ index_json_path = os.path.join(tmpdir, "index.json")
+ with open(index_json_path, "w") as f:
+ db._write_to_file(f)
+
+ # Create an empty config.json file
+ empty_config_json_path = os.path.join(tmpdir, "config.json")
+ with open(empty_config_json_path, "wb") as f:
+ f.write(b"{}")
+
+ # Upload the index.json file
+ index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
+ upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)
+
+ # Upload the config.json file
+ empty_config_digest = Digest.from_sha256(
+ spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
+ )
+ upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)
+
+ # Push a manifest file that references the index.json file as a layer
+ # Notice that we push this as if it is an image, which it of course is not.
+ # When the ORAS spec becomes official, we can use that instead of a fake image.
+ # For now we just use the OCI image spec, so that we don't run into issues with
+ # automatic garbage collection of blobs that are not referenced by any image manifest.
+ oci_manifest = {
+ "mediaType": "application/vnd.oci.image.manifest.v1+json",
+ "schemaVersion": 2,
+ # Config is just an empty {} file for now, and irrelevant
+ "config": {
+ "mediaType": "application/vnd.oci.image.config.v1+json",
+ "digest": str(empty_config_digest),
+ "size": os.path.getsize(empty_config_json_path),
+ },
+ # The buildcache index is the only layer; it is not actually a tarball, so the media type is a lie.
+ "layers": [
+ {
+ "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
+ "digest": str(index_shasum),
+ "size": os.path.getsize(index_json_path),
+ }
+ ],
+ }
+
+ upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
+
def install_fn(args):
"""install from a binary package"""
@@ -537,7 +932,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
local_path = os.path.join(tmpdir, os.path.basename(src_url))
try:
- temp_stage = Stage(src_url, path=os.path.dirname(local_path))
+ temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
try:
temp_stage.create()
temp_stage.fetch()
@@ -631,6 +1026,20 @@ def manifest_copy(manifest_file_list):
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
+ # Special case OCI images for now.
+ try:
+ image_ref = spack.oci.oci.image_from_mirror(mirror)
+ except ValueError:
+ image_ref = None
+
+ if image_ref:
+ with tempfile.TemporaryDirectory(
+ dir=spack.stage.get_stage_root()
+ ) as tmpdir, _make_pool() as pool:
+ _update_index_oci(image_ref, tmpdir, pool)
+ return
+
+ # Otherwise, assume a normal mirror.
url = mirror.push_url
bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index efa4a268c16b5b..f927d2d922a26d 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import argparse
import re
import sys
@@ -21,7 +20,6 @@
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
-from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version
description = "checksum available versions of a package"
@@ -37,30 +35,30 @@ def setup_parser(subparser):
help="don't clean up staging area when command completes",
)
subparser.add_argument(
- "-b",
"--batch",
+ "-b",
action="store_true",
default=False,
help="don't ask which versions to checksum",
)
subparser.add_argument(
- "-l",
"--latest",
+ "-l",
action="store_true",
default=False,
help="checksum the latest available version",
)
subparser.add_argument(
- "-p",
"--preferred",
+ "-p",
action="store_true",
default=False,
help="checksum the known Spack preferred version",
)
modes_parser = subparser.add_mutually_exclusive_group()
modes_parser.add_argument(
- "-a",
"--add-to-package",
+ "-a",
action="store_true",
default=False,
help="add new versions to package",
@@ -68,27 +66,26 @@ def setup_parser(subparser):
modes_parser.add_argument(
"--verify", action="store_true", default=False, help="verify known package checksums"
)
- arguments.add_common_arguments(subparser, ["package", "jobs"])
+ subparser.add_argument("package", help="name or spec (e.g. `cmake` or `cmake@3.18`)")
subparser.add_argument(
- "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
+ "versions",
+ nargs="*",
+ help="checksum these specific versions (if omitted, Spack searches for remote versions)",
+ )
+ arguments.add_common_arguments(subparser, ["jobs"])
+ subparser.epilog = (
+ "examples:\n"
+ " `spack checksum zlib@1.2` autodetects versions 1.2.0 to 1.2.13 from the remote\n"
+ " `spack checksum zlib 1.2.13` checksums exact version 1.2.13 directly without search\n"
)
def checksum(parser, args):
- # Did the user pass 'package@version' string?
- if len(args.versions) == 0 and "@" in args.package:
- args.versions = [args.package.split("@")[1]]
- args.package = args.package.split("@")[0]
-
- # Make sure the user provided a package and not a URL
- if not valid_fully_qualified_module_name(args.package):
- tty.die("`spack checksum` accepts package names, not URLs.")
+ spec = spack.spec.Spec(args.package)
# Get the package we're going to generate checksums for
- pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
- pkg = pkg_cls(spack.spec.Spec(args.package))
+ pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
- # Build a list of versions to checksum
versions = [Version(v) for v in args.versions]
# Define placeholder for remote versions.
@@ -152,7 +149,10 @@ def checksum(parser, args):
tty.die(f"Could not find any remote versions for {pkg.name}")
elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
filtered_url_dict = spack.stage.interactive_version_filter(
- url_dict, pkg.versions, url_changes=url_changed_for_version
+ url_dict,
+ pkg.versions,
+ url_changes=url_changed_for_version,
+ initial_verion_filter=spec.versions,
)
if not filtered_url_dict:
exit(0)
diff --git a/lib/spack/spack/cmd/commands.py b/lib/spack/spack/cmd/commands.py
index 9ebaa62239f26e..25e1a24d0077d0 100644
--- a/lib/spack/spack/cmd/commands.py
+++ b/lib/spack/spack/cmd/commands.py
@@ -796,7 +796,9 @@ def names(args: Namespace, out: IO) -> None:
commands = copy.copy(spack.cmd.all_commands())
if args.aliases:
- commands.extend(spack.main.aliases.keys())
+ aliases = spack.config.get("config:aliases")
+ if aliases:
+ commands.extend(aliases.keys())
colify(commands, output=out)
@@ -812,8 +814,10 @@ def bash(args: Namespace, out: IO) -> None:
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
- aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
- out.write(f'SPACK_ALIASES="{aliases}"\n\n')
+ aliases_config = spack.config.get("config:aliases")
+ if aliases_config:
+ aliases = ";".join(f"{key}:{val}" for key, val in aliases_config.items())
+ out.write(f'SPACK_ALIASES="{aliases}"\n\n')
writer = BashCompletionWriter(parser.prog, out, args.aliases)
writer.write(parser)
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
index 2b343923c5f9e9..9aa3edac479f50 100644
--- a/lib/spack/spack/cmd/common/arguments.py
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -543,7 +543,7 @@ def add_concretizer_args(subparser):
)
-def add_s3_connection_args(subparser, add_help):
+def add_connection_args(subparser, add_help):
subparser.add_argument(
"--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
)
@@ -559,6 +559,8 @@ def add_s3_connection_args(subparser, add_help):
subparser.add_argument(
"--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
)
+ subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
+ subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")
def use_buildcache(cli_arg_value):
diff --git a/lib/spack/spack/cmd/common/confirmation.py b/lib/spack/spack/cmd/common/confirmation.py
new file mode 100644
index 00000000000000..8a5cd2592b44e9
--- /dev/null
+++ b/lib/spack/spack/cmd/common/confirmation.py
@@ -0,0 +1,30 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import sys
+from typing import List
+
+import llnl.util.tty as tty
+
+import spack.cmd
+
+display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
+
+
+def confirm_action(specs: List[spack.spec.Spec], participle: str, noun: str):
+ """Display the list of specs to be acted on and ask for confirmation.
+
+ Args:
+ specs: specs the action will be applied to
+ participle: action expressed as a participle, e.g. "uninstalled"
+ noun: action expressed as a noun, e.g. "uninstallation"
+ """
+ tty.msg(f"The following {len(specs)} packages will be {participle}:\n")
+ spack.cmd.display_specs(specs, **display_args)
+ print("")
+ answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
+ if not answer:
+ tty.msg(f"Aborting {noun}")
+ sys.exit(0)
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index 07006afc2cc8f0..76eb8d31508a67 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -31,6 +31,19 @@ def setup_parser(subparser):
aliases=["add"],
help="search the system for compilers to add to Spack configuration",
)
+ mixed_toolchain_group = find_parser.add_mutually_exclusive_group()
+ mixed_toolchain_group.add_argument(
+ "--mixed-toolchain",
+ action="store_true",
+ default=sys.platform == "darwin",
+ help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
+ )
+ mixed_toolchain_group.add_argument(
+ "--no-mixed-toolchain",
+ action="store_false",
+ dest="mixed_toolchain",
+ help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
+ )
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument(
"--scope",
@@ -86,7 +99,9 @@ def compiler_find(args):
# Below scope=None because we want new compilers that don't appear
# in any other configuration.
- new_compilers = spack.compilers.find_new_compilers(paths, scope=None)
+ new_compilers = spack.compilers.find_new_compilers(
+ paths, scope=None, mixed_toolchain=args.mixed_toolchain
+ )
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
n = len(new_compilers)
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index c4446b475afcbe..14514400a86ceb 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -407,7 +407,9 @@ def config_prefer_upstream(args):
pkgs = {}
for spec in pref_specs:
# Collect all the upstream compilers and versions for this package.
- pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
+ pkg = pkgs.get(spec.name, {"version": []})
+ all = pkgs.get("all", {"compiler": []})
+ pkgs["all"] = all
pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version.
@@ -418,8 +420,8 @@ def config_prefer_upstream(args):
pkg["version"].append(version)
compiler = str(spec.compiler)
- if compiler not in pkg["compiler"]:
- pkg["compiler"].append(compiler)
+ if compiler not in all["compiler"]:
+ all["compiler"].append(compiler)
# Get and list all the variants that differ from the default.
variants = []
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index ef60330beb3171..2ffade1f53acfd 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -64,6 +64,7 @@ class {class_name}({base_class_name}):
# maintainers("github_user1", "github_user2")
# FIXME: Add the SPDX identifier of the project's license below.
+ # See https://spdx.org/licenses/ for a list.
license("UNKNOWN")
{versions}
@@ -171,6 +172,14 @@ def configure_args(self):
return args"""
+class CargoPackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for cargo-based packages"""
+
+ base_class_name = "CargoPackage"
+
+ body_def = ""
+
+
class CMakePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for CMake-based packages"""
@@ -185,6 +194,14 @@ def cmake_args(self):
return args"""
+class GoPackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for Go-module-based packages"""
+
+ base_class_name = "GoPackage"
+
+ body_def = ""
+
+
class LuaPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for LuaRocks-based packages"""
@@ -598,29 +615,31 @@ def __init__(self, name, *args, **kwargs):
templates = {
- "autotools": AutotoolsPackageTemplate,
"autoreconf": AutoreconfPackageTemplate,
- "cmake": CMakePackageTemplate,
+ "autotools": AutotoolsPackageTemplate,
+ "bazel": BazelPackageTemplate,
"bundle": BundlePackageTemplate,
- "qmake": QMakePackageTemplate,
+ "cargo": CargoPackageTemplate,
+ "cmake": CMakePackageTemplate,
+ "generic": PackageTemplate,
+ "go": GoPackageTemplate,
+ "intel": IntelPackageTemplate,
+ "lua": LuaPackageTemplate,
+ "makefile": MakefilePackageTemplate,
"maven": MavenPackageTemplate,
- "scons": SconsPackageTemplate,
- "waf": WafPackageTemplate,
- "bazel": BazelPackageTemplate,
+ "meson": MesonPackageTemplate,
+ "octave": OctavePackageTemplate,
+ "perlbuild": PerlbuildPackageTemplate,
+ "perlinstall": PerlbuildPackageTemplate,
+ "perlmake": PerlmakePackageTemplate,
"python": PythonPackageTemplate,
+ "qmake": QMakePackageTemplate,
"r": RPackageTemplate,
"racket": RacketPackageTemplate,
- "perlmake": PerlmakePackageTemplate,
- "perlbuild": PerlbuildPackageTemplate,
- "perlinstall": PerlbuildPackageTemplate,
- "octave": OctavePackageTemplate,
"ruby": RubyPackageTemplate,
- "makefile": MakefilePackageTemplate,
- "intel": IntelPackageTemplate,
- "meson": MesonPackageTemplate,
- "lua": LuaPackageTemplate,
+ "scons": SconsPackageTemplate,
"sip": SIPPackageTemplate,
- "generic": PackageTemplate,
+ "waf": WafPackageTemplate,
}
@@ -710,6 +729,8 @@ def __call__(self, stage, url):
clues = [
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
+ (r"/Cargo\.toml$", "cargo"),
+ (r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
diff --git a/lib/spack/spack/cmd/deconcretize.py b/lib/spack/spack/cmd/deconcretize.py
new file mode 100644
index 00000000000000..dbcf72ea8b3a29
--- /dev/null
+++ b/lib/spack/spack/cmd/deconcretize.py
@@ -0,0 +1,103 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import argparse
+import sys
+from typing import List
+
+import llnl.util.tty as tty
+
+import spack.cmd
+import spack.cmd.common.arguments as arguments
+import spack.cmd.common.confirmation as confirmation
+import spack.environment as ev
+import spack.spec
+
+description = "remove specs from the concretized lockfile of an environment"
+section = "environments"
+level = "long"
+
+# Arguments for display_specs when we find ambiguity
+display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
+
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ "--root", action="store_true", help="deconcretize only specific environment roots"
+ )
+ arguments.add_common_arguments(subparser, ["yes_to_all", "specs"])
+ subparser.add_argument(
+ "-a",
+ "--all",
+ action="store_true",
+ dest="all",
+ help="deconcretize ALL specs that match each supplied spec",
+ )
+
+
+def get_deconcretize_list(
+ args: argparse.Namespace, specs: List[spack.spec.Spec], env: ev.Environment
+) -> List[spack.spec.Spec]:
+ """
+ Get list of environment roots to deconcretize
+ """
+ env_specs = [s for _, s in env.concretized_specs()]
+ to_deconcretize = []
+ errors = []
+
+ for s in specs:
+ if args.root:
+ # find all roots matching given spec
+ to_deconc = [e for e in env_specs if e.satisfies(s)]
+ else:
+ # find all roots matching or depending on a matching spec
+ to_deconc = [e for e in env_specs if any(d.satisfies(s) for d in e.traverse())]
+
+ if len(to_deconc) < 1:
+ tty.warn(f"No matching specs to deconcretize for {s}")
+
+ elif len(to_deconc) > 1 and not args.all:
+ errors.append((s, to_deconc))
+
+ to_deconcretize.extend(to_deconc)
+
+ if errors:
+ for spec, matching in errors:
+ tty.error(f"{spec} matches multiple concrete specs:")
+ sys.stderr.write("\n")
+ spack.cmd.display_specs(matching, output=sys.stderr, **display_args)
+ sys.stderr.write("\n")
+ sys.stderr.flush()
+ tty.die("Use '--all' to deconcretize all matching specs, or be more specific")
+
+ return to_deconcretize
+
+
+def deconcretize_specs(args, specs):
+ env = spack.cmd.require_active_env(cmd_name="deconcretize")
+
+ if args.specs:
+ deconcretize_list = get_deconcretize_list(args, specs, env)
+ else:
+ deconcretize_list = [s for _, s in env.concretized_specs()]
+
+ if not args.yes_to_all:
+ confirmation.confirm_action(deconcretize_list, "deconcretized", "deconcretization")
+
+ with env.write_transaction():
+ for spec in deconcretize_list:
+ env.deconcretize(spec)
+ env.write()
+
+
+def deconcretize(parser, args):
+ if not args.specs and not args.all:
+ tty.die(
+ "deconcretize requires at least one spec argument.",
+ " Use `spack deconcretize --all` to deconcretize ALL specs.",
+ )
+
+ specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
+ deconcretize_specs(args, specs)
diff --git a/lib/spack/spack/cmd/dev_build.py b/lib/spack/spack/cmd/dev_build.py
index d8a7b447a26fb8..90008c8b3ef601 100644
--- a/lib/spack/spack/cmd/dev_build.py
+++ b/lib/spack/spack/cmd/dev_build.py
@@ -99,10 +99,7 @@ def dev_build(self, args):
spec = specs[0]
if not spack.repo.PATH.exists(spec.name):
- tty.die(
- "No package for '{0}' was found.".format(spec.name),
- " Use `spack create` to create a new package",
- )
+ raise spack.repo.UnknownPackageError(spec.name)
if not spec.versions.concrete_range_as_version:
tty.die(
diff --git a/lib/spack/spack/cmd/diff.py b/lib/spack/spack/cmd/diff.py
index c654f6a5b8c6db..e321c28afbf43f 100644
--- a/lib/spack/spack/cmd/diff.py
+++ b/lib/spack/spack/cmd/diff.py
@@ -200,6 +200,8 @@ def diff(parser, args):
specs = []
for spec in spack.cmd.parse_specs(args.specs):
+ # If the spec has a hash, check it before disambiguating
+ spec.replace_hash()
if spec.concrete:
specs.append(spec)
else:
diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py
index 15aeea31b3f22d..79f441a67adf57 100644
--- a/lib/spack/spack/cmd/edit.py
+++ b/lib/spack/spack/cmd/edit.py
@@ -43,10 +43,7 @@ def edit_package(name, repo_path, namespace):
if not os.access(path, os.R_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
- tty.die(
- "No package for '{0}' was found.".format(spec.name),
- " Use `spack create` to create a new package",
- )
+ raise spack.repo.UnknownPackageError(spec.name)
editor(path)
diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py
index eb92e0c46b473b..011ce3e498563d 100644
--- a/lib/spack/spack/cmd/env.py
+++ b/lib/spack/spack/cmd/env.py
@@ -5,6 +5,7 @@
import argparse
import os
+import shlex
import shutil
import sys
import tempfile
@@ -143,10 +144,13 @@ def create_temp_env_directory():
return tempfile.mkdtemp(prefix="spack-")
-def env_activate(args):
- if not args.activate_env and not args.dir and not args.temp:
- tty.die("spack env activate requires an environment name, directory, or --temp")
+def _tty_info(msg):
+ """tty.info like function that prints the equivalent printf statement for eval."""
+ decorated = f'{colorize("@*b{==>}")} {msg}\n'
+ print(f"printf {shlex.quote(decorated)};")
+
+def env_activate(args):
if not args.shell:
spack.cmd.common.shell_init_instructions(
"spack env activate", " eval `spack env activate {sh_arg} [...]`"
@@ -159,12 +163,25 @@ def env_activate(args):
env_name_or_dir = args.activate_env or args.dir
+ # When executing `spack env activate` without further arguments, activate
+ # the default environment. It's created when it doesn't exist yet.
+ if not env_name_or_dir and not args.temp:
+ short_name = "default"
+ if not ev.exists(short_name):
+ ev.create(short_name)
+ action = "Created and activated"
+ else:
+ action = "Activated"
+ env_path = ev.root(short_name)
+ _tty_info(f"{action} default environment in {env_path}")
+
# Temporary environment
- if args.temp:
+ elif args.temp:
env = create_temp_env_directory()
env_path = os.path.abspath(env)
short_name = os.path.basename(env_path)
ev.create_in_dir(env).write(regenerate=False)
+ _tty_info(f"Created and activated temporary environment in {env_path}")
# Managed environment
elif ev.exists(env_name_or_dir) and not args.dir:
@@ -379,28 +396,33 @@ def env_remove(args):
and manifests embedded in repositories should be removed manually.
"""
read_envs = []
+ bad_envs = []
for env_name in args.rm_env:
- env = ev.read(env_name)
- read_envs.append(env)
+ try:
+ env = ev.read(env_name)
+ read_envs.append(env)
+ except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
+ bad_envs.append(env_name)
if not args.yes_to_all:
- answer = tty.get_yes_or_no(
- "Really remove %s %s?"
- % (
- string.plural(len(args.rm_env), "environment", show_n=False),
- string.comma_and(args.rm_env),
- ),
- default=False,
- )
+ environments = string.plural(len(args.rm_env), "environment", show_n=False)
+ envs = string.comma_and(args.rm_env)
+ answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
if not answer:
tty.die("Will not remove any environments")
for env in read_envs:
+ name = env.name
if env.active:
- tty.die("Environment %s can't be removed while activated." % env.name)
-
+ tty.die(f"Environment {name} can't be removed while activated.")
env.destroy()
- tty.msg("Successfully removed environment '%s'" % env.name)
+ tty.msg(f"Successfully removed environment '{name}'")
+
+ for bad_env_name in bad_envs:
+ shutil.rmtree(
+ spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
+ )
+ tty.msg(f"Successfully removed environment '{bad_env_name}'")
#
@@ -547,8 +569,8 @@ def env_update_setup_parser(subparser):
def env_update(args):
manifest_file = ev.manifest_file(args.update_env)
backup_file = manifest_file + ".bkp"
- needs_update = not ev.is_latest_format(manifest_file)
+ needs_update = not ev.is_latest_format(manifest_file)
if not needs_update:
tty.msg('No update needed for the environment "{0}"'.format(args.update_env))
return
@@ -666,18 +688,31 @@ def env_depfile(args):
# Currently only make is supported.
spack.cmd.require_active_env(cmd_name="env depfile")
+ env = ev.active_environment()
+
# What things do we build when running make? By default, we build the
# root specs. If specific specs are provided as input, we build those.
filter_specs = spack.cmd.parse_specs(args.specs) if args.specs else None
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
model = depfile.MakefileModel.from_env(
- ev.active_environment(),
+ env,
filter_specs=filter_specs,
pkg_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[0]),
dep_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[1]),
make_prefix=args.make_prefix,
jobserver=args.jobserver,
)
+
+ # Warn in case we're generating a depfile for an empty environment. We don't automatically
+ # concretize; the user should do that explicitly. Could be changed in the future if requested.
+ if model.empty:
+ if not env.user_specs:
+ tty.warn("no specs in the environment")
+ elif filter_specs is not None:
+ tty.warn("no concrete matching specs found in environment")
+ else:
+ tty.warn("environment is not concretized. Run `spack concretize` first")
+
makefile = template.render(model.to_dict())
# Finally write to stdout/file.
diff --git a/lib/spack/spack/cmd/gc.py b/lib/spack/spack/cmd/gc.py
index e4da6a103daf75..9918bf7479fd05 100644
--- a/lib/spack/spack/cmd/gc.py
+++ b/lib/spack/spack/cmd/gc.py
@@ -6,6 +6,7 @@
import llnl.util.tty as tty
import spack.cmd.common.arguments
+import spack.cmd.common.confirmation
import spack.cmd.uninstall
import spack.environment as ev
import spack.store
@@ -41,6 +42,6 @@ def gc(parser, args):
return
if not args.yes_to_all:
- spack.cmd.uninstall.confirm_removal(specs)
+ spack.cmd.common.confirmation.confirm_action(specs, "uninstalled", "uninstallation")
spack.cmd.uninstall.do_uninstall(specs, force=False)
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index eeced40720e07f..4122d8d58894e2 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -61,7 +61,7 @@ def graph(parser, args):
args.dot = True
env = ev.active_environment()
if env:
- specs = env.all_specs()
+ specs = env.concrete_roots()
else:
specs = spack.store.STORE.db.query()
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 5e667f487686e1..1f90831f65ebd3 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import sys
import textwrap
from itertools import zip_longest
@@ -16,6 +17,7 @@
import spack.install_test
import spack.repo
import spack.spec
+import spack.version
from spack.package_base import preferred_version
description = "get detailed information on a particular package"
@@ -53,6 +55,7 @@ def setup_parser(subparser):
("--tags", print_tags.__doc__),
("--tests", print_tests.__doc__),
("--virtuals", print_virtuals.__doc__),
+ ("--variants-by-name", "list variants in strict name order; don't group by condition"),
]
for opt, help_comment in options:
subparser.add_argument(opt, action="store_true", help=help_comment)
@@ -77,35 +80,10 @@ def license(s):
class VariantFormatter:
- def __init__(self, variants):
- self.variants = variants
+ def __init__(self, pkg):
+ self.variants = pkg.variants
self.headers = ("Name [Default]", "When", "Allowed values", "Description")
- # Formats
- fmt_name = "{0} [{1}]"
-
- # Initialize column widths with the length of the
- # corresponding headers, as they cannot be shorter
- # than that
- self.column_widths = [len(x) for x in self.headers]
-
- # Expand columns based on max line lengths
- for k, e in variants.items():
- v, w = e
- candidate_max_widths = (
- len(fmt_name.format(k, self.default(v))), # Name [Default]
- len(str(w)),
- len(v.allowed_values), # Allowed values
- len(v.description), # Description
- )
-
- self.column_widths = (
- max(self.column_widths[0], candidate_max_widths[0]),
- max(self.column_widths[1], candidate_max_widths[1]),
- max(self.column_widths[2], candidate_max_widths[2]),
- max(self.column_widths[3], candidate_max_widths[3]),
- )
-
# Don't let name or possible values be less than max widths
_, cols = tty.terminal_size()
max_name = min(self.column_widths[0], 30)
@@ -137,6 +115,8 @@ def default(self, v):
def lines(self):
if not self.variants:
yield " None"
+ return
+
else:
yield " " + self.fmt % self.headers
underline = tuple([w * "=" for w in self.column_widths])
@@ -159,7 +139,7 @@ def lines(self):
yield " " + self.fmt % t
-def print_dependencies(pkg):
+def print_dependencies(pkg, args):
"""output build, link, and run package dependencies"""
for deptype in ("build", "link", "run"):
@@ -172,7 +152,7 @@ def print_dependencies(pkg):
color.cprint(" None")
-def print_detectable(pkg):
+def print_detectable(pkg, args):
"""output information on external detection"""
color.cprint("")
@@ -200,7 +180,7 @@ def print_detectable(pkg):
color.cprint(" False")
-def print_maintainers(pkg):
+def print_maintainers(pkg, args):
"""output package maintainers"""
if len(pkg.maintainers) > 0:
@@ -209,7 +189,7 @@ def print_maintainers(pkg):
color.cprint(section_title("Maintainers: ") + mnt)
-def print_phases(pkg):
+def print_phases(pkg, args):
"""output installation phases"""
if hasattr(pkg.builder, "phases") and pkg.builder.phases:
@@ -221,7 +201,7 @@ def print_phases(pkg):
color.cprint(phase_str)
-def print_tags(pkg):
+def print_tags(pkg, args):
"""output package tags"""
color.cprint("")
@@ -233,7 +213,7 @@ def print_tags(pkg):
color.cprint(" None")
-def print_tests(pkg):
+def print_tests(pkg, args):
"""output relevant build-time and stand-alone tests"""
# Some built-in base packages (e.g., Autotools) define callback (e.g.,
@@ -271,18 +251,171 @@ def print_tests(pkg):
color.cprint(" None")
-def print_variants(pkg):
+def _fmt_value(v):
+ if v is None or isinstance(v, bool):
+ return str(v).lower()
+ else:
+ return str(v)
+
+
+def _fmt_name_and_default(variant):
+ """Print colorized name [default] for a variant."""
+ return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")
+
+
+def _fmt_when(when, indent):
+ return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
+
+
+def _fmt_variant_description(variant, width, indent):
+ """Format a variant's description, preserving explicit line breaks."""
+ return "\n".join(
+ textwrap.fill(
+ line, width=width, initial_indent=indent * " ", subsequent_indent=indent * " "
+ )
+ for line in variant.description.split("\n")
+ )
+
+
+def _fmt_variant(variant, max_name_default_len, indent, when=None, out=None):
+ out = out or sys.stdout
+
+ _, cols = tty.terminal_size()
+
+ name_and_default = _fmt_name_and_default(variant)
+ name_default_len = color.clen(name_and_default)
+
+ values = variant.values
+ if not isinstance(variant.values, (tuple, list, spack.variant.DisjointSetsOfValues)):
+ values = [variant.values]
+
+ # put 'none' first, sort the rest by value
+ sorted_values = sorted(values, key=lambda v: (v != "none", v))
+
+ pad = 4 # min padding between 'name [default]' and values
+ value_indent = (indent + max_name_default_len + pad) * " " # left edge of values
+
+ # This preserves any formatting (i.e., newlines) from how the description was
+ # written in package.py, but still wraps long lines for small terminals.
+ # This allows some packages to provide detailed help on their variants (see, e.g., gasnet).
+ formatted_values = "\n".join(
+ textwrap.wrap(
+ f"{', '.join(_fmt_value(v) for v in sorted_values)}",
+ width=cols - 2,
+ initial_indent=value_indent,
+ subsequent_indent=value_indent,
+ )
+ )
+ formatted_values = formatted_values[indent + name_default_len + pad :]
+
+ # name [default] value1, value2, value3, ...
+ padding = pad * " "
+ color.cprint(f"{indent * ' '}{name_and_default}{padding}@c{{{formatted_values}}}", stream=out)
+
+ # when
+ description_indent = indent + 4
+ if when is not None and when != spack.spec.Spec():
+ out.write(_fmt_when(when, description_indent - 2))
+ out.write("\n")
+
+ # description, preserving explicit line breaks from the way it's written in the package file
+ out.write(_fmt_variant_description(variant, cols - 2, description_indent))
+ out.write("\n")
+
+
+def _variants_by_name_when(pkg):
+ """Adaptor to get variants keyed by { name: { when: [Variant...] } }."""
+ # TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged.
+ variants = {}
+ for name, (variant, whens) in pkg.variants.items():
+ for when in whens:
+ variants.setdefault(name, {}).setdefault(when, []).append(variant)
+ return variants
+
+
+def _variants_by_when_name(pkg):
+ """Adaptor to get variants keyed by { when: { name: Variant } }"""
+ # TODO: replace with pkg.variants when unified directive dicts are merged.
+ variants = {}
+ for name, (variant, whens) in pkg.variants.items():
+ for when in whens:
+ variants.setdefault(when, {})[name] = variant
+ return variants
+
+
+def _print_variants_header(pkg):
"""output variants"""
+ if not pkg.variants:
+ print(" None")
+ return
+
color.cprint("")
color.cprint(section_title("Variants:"))
- formatter = VariantFormatter(pkg.variants)
- for line in formatter.lines:
- color.cprint(color.cescape(line))
+ variants_by_name = _variants_by_name_when(pkg)
+
+ # Calculate the max length of the "name [default]" part of the variant display
+ # This lets us know where to print variant values.
+ max_name_default_len = max(
+ color.clen(_fmt_name_and_default(variant))
+ for name, when_variants in variants_by_name.items()
+ for variants in when_variants.values()
+ for variant in variants
+ )
+
+ return max_name_default_len, variants_by_name
+
+
+def _unconstrained_ver_first(item):
+ """sort key that puts specs with open version ranges first"""
+ spec, _ = item
+ return (spack.version.any_version not in spec.versions, spec)
+
+
+def print_variants_grouped_by_when(pkg):
+ max_name_default_len, _ = _print_variants_header(pkg)
+ indent = 4
+ variants = _variants_by_when_name(pkg)
+ for when, variants_by_name in sorted(variants.items(), key=_unconstrained_ver_first):
+ padded_values = max_name_default_len + 4
+ start_indent = indent
-def print_versions(pkg):
+ if when != spack.spec.Spec():
+ sys.stdout.write("\n")
+ sys.stdout.write(_fmt_when(when, indent))
+ sys.stdout.write("\n")
+
+ # indent names slightly inside 'when', but line up values
+ padded_values -= 2
+ start_indent += 2
+
+ for name, variant in sorted(variants_by_name.items()):
+ _fmt_variant(variant, padded_values, start_indent, None, out=sys.stdout)
+
+
+def print_variants_by_name(pkg):
+ max_name_default_len, variants_by_name = _print_variants_header(pkg)
+ max_name_default_len += 4
+
+ indent = 4
+ for name, when_variants in variants_by_name.items():
+ for when, variants in sorted(when_variants.items(), key=_unconstrained_ver_first):
+ for variant in variants:
+ _fmt_variant(variant, max_name_default_len, indent, when, out=sys.stdout)
+ sys.stdout.write("\n")
+
+
+def print_variants(pkg, args):
+ """output variants"""
+ if args.variants_by_name:
+ print_variants_by_name(pkg)
+ else:
+ print_variants_grouped_by_when(pkg)
+
+
+def print_versions(pkg, args):
"""output versions"""
color.cprint("")
@@ -300,18 +433,24 @@ def print_versions(pkg):
pad = padder(pkg.versions, 4)
preferred = preferred_version(pkg)
- url = ""
- if pkg.has_code:
- url = fs.for_package_version(pkg, preferred)
+ def get_url(version):
+ try:
+ return fs.for_package_version(pkg, version)
+ except spack.fetch_strategy.InvalidArgsError:
+ return "No URL"
+
+ url = get_url(preferred) if pkg.has_code else ""
line = version(" {0}".format(pad(preferred))) + color.cescape(url)
- color.cprint(line)
+ color.cwrite(line)
+
+ print()
safe = []
deprecated = []
for v in reversed(sorted(pkg.versions)):
if pkg.has_code:
- url = fs.for_package_version(pkg, v)
+ url = get_url(v)
if pkg.versions[v].get("deprecated", False):
deprecated.append((v, url))
else:
@@ -329,7 +468,7 @@ def print_versions(pkg):
color.cprint(line)
-def print_virtuals(pkg):
+def print_virtuals(pkg, args):
"""output virtual packages"""
color.cprint("")
@@ -352,7 +491,7 @@ def print_virtuals(pkg):
color.cprint(" None")
-def print_licenses(pkg):
+def print_licenses(pkg, args):
"""Output the licenses of the project."""
color.cprint("")
@@ -384,7 +523,8 @@ def info(parser, args):
else:
color.cprint(" None")
- color.cprint(section_title("Homepage: ") + pkg.homepage)
+ if getattr(pkg, "homepage"):
+ color.cprint(section_title("Homepage: ") + pkg.homepage)
# Now output optional information in expected order
sections = [
@@ -401,6 +541,6 @@ def info(parser, args):
]
for print_it, func in sections:
if print_it:
- func(pkg)
+ func(pkg, args)
color.cprint("")
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index 6edae785a01769..1036dcbe917e35 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -111,7 +111,7 @@ def setup_parser(subparser):
"and source use `--type binary --type source` (default)"
),
)
- arguments.add_s3_connection_args(add_parser, False)
+ arguments.add_connection_args(add_parser, False)
# Remove
remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
@@ -141,7 +141,7 @@ def setup_parser(subparser):
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
- arguments.add_s3_connection_args(set_url_parser, False)
+ arguments.add_connection_args(set_url_parser, False)
# Set
set_parser = sp.add_parser("set", help=mirror_set.__doc__)
@@ -170,7 +170,7 @@ def setup_parser(subparser):
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
- arguments.add_s3_connection_args(set_parser, False)
+ arguments.add_connection_args(set_parser, False)
# List
list_parser = sp.add_parser("list", help=mirror_list.__doc__)
@@ -192,6 +192,8 @@ def mirror_add(args):
or args.s3_profile
or args.s3_endpoint_url
or args.type
+ or args.oci_username
+ or args.oci_password
):
connection = {"url": args.url}
if args.s3_access_key_id and args.s3_access_key_secret:
@@ -202,6 +204,8 @@ def mirror_add(args):
connection["profile"] = args.s3_profile
if args.s3_endpoint_url:
connection["endpoint_url"] = args.s3_endpoint_url
+ if args.oci_username and args.oci_password:
+ connection["access_pair"] = [args.oci_username, args.oci_password]
if args.type:
connection["binary"] = "binary" in args.type
connection["source"] = "source" in args.type
@@ -235,6 +239,8 @@ def _configure_mirror(args):
changes["profile"] = args.s3_profile
if args.s3_endpoint_url:
changes["endpoint_url"] = args.s3_endpoint_url
+ if args.oci_username and args.oci_password:
+ changes["access_pair"] = [args.oci_username, args.oci_password]
# argparse cannot distinguish between --binary and --no-binary when same dest :(
# notice that set-url does not have these args, so getattr
diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py
index a07824f922a74d..5759912b66ffc7 100644
--- a/lib/spack/spack/cmd/tutorial.py
+++ b/lib/spack/spack/cmd/tutorial.py
@@ -23,7 +23,7 @@
# tutorial configuration parameters
-tutorial_branch = "releases/v0.20"
+tutorial_branch = "releases/v0.21"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index bc6a71cef10f1f..3288404151d230 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -11,10 +11,9 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
+import spack.cmd.common.confirmation as confirmation
import spack.environment as ev
-import spack.error
import spack.package_base
-import spack.repo
import spack.spec
import spack.store
import spack.traverse as traverse
@@ -278,7 +277,7 @@ def uninstall_specs(args, specs):
return
if not args.yes_to_all:
- confirm_removal(uninstall_list)
+ confirmation.confirm_action(uninstall_list, "uninstalled", "uninstallation")
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
@@ -292,21 +291,6 @@ def uninstall_specs(args, specs):
env.regenerate_views()
-def confirm_removal(specs: List[spack.spec.Spec]):
- """Display the list of specs to be removed and ask for confirmation.
-
- Args:
- specs: specs to be removed
- """
- tty.msg("The following {} packages will be uninstalled:\n".format(len(specs)))
- spack.cmd.display_specs(specs, **display_args)
- print("")
- answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
- if not answer:
- tty.msg("Aborting uninstallation")
- sys.exit(0)
-
-
def uninstall(parser, args):
if not args.specs and not args.all:
tty.die(
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 3f9663d21eaff0..be9edeecb4226a 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -10,7 +10,7 @@
import itertools
import multiprocessing.pool
import os
-from typing import Dict, List
+from typing import Dict, List, Optional, Tuple
import archspec.cpu
@@ -21,6 +21,7 @@
import spack.compiler
import spack.config
import spack.error
+import spack.operating_systems
import spack.paths
import spack.platforms
import spack.spec
@@ -153,6 +154,14 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
"""
compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
+ if not compiler.cc:
+ tty.debug(f"{compiler.spec} does not have a C compiler")
+ if not compiler.cxx:
+ tty.debug(f"{compiler.spec} does not have a C++ compiler")
+ if not compiler.f77:
+ tty.debug(f"{compiler.spec} does not have a Fortran77 compiler")
+ if not compiler.fc:
+ tty.debug(f"{compiler.spec} does not have a Fortran compiler")
compiler_config.append(_to_dict(compiler))
spack.config.set("compilers", compiler_config, scope=scope)
@@ -223,13 +232,16 @@ def all_compiler_specs(scope=None, init_config=True):
]
-def find_compilers(path_hints=None):
+def find_compilers(
+ path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
+) -> List["spack.compiler.Compiler"]:
"""Return the list of compilers found in the paths given as arguments.
Args:
- path_hints (list or None): list of path hints where to look for.
- A sensible default based on the ``PATH`` environment variable
- will be used if the value is None
+ path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
+ environment variable will be used if the value is None
+ mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
+ a certain language
"""
if path_hints is None:
path_hints = get_path("PATH")
@@ -250,7 +262,7 @@ def find_compilers(path_hints=None):
finally:
tp.close()
- def valid_version(item):
+ def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
value, error = item
if error is None:
return True
@@ -262,25 +274,37 @@ def valid_version(item):
pass
return False
- def remove_errors(item):
+ def remove_errors(
+ item: Tuple[Optional[DetectVersionArgs], Optional[str]]
+ ) -> DetectVersionArgs:
value, _ = item
+ assert value is not None
return value
- return make_compiler_list(map(remove_errors, filter(valid_version, detected_versions)))
+ return make_compiler_list(
+ [remove_errors(detected) for detected in detected_versions if valid_version(detected)],
+ mixed_toolchain=mixed_toolchain,
+ )
-def find_new_compilers(path_hints=None, scope=None):
+def find_new_compilers(
+ path_hints: Optional[List[str]] = None,
+ scope: Optional[str] = None,
+ *,
+ mixed_toolchain: bool = False,
+):
"""Same as ``find_compilers`` but return only the compilers that are not
already in compilers.yaml.
Args:
- path_hints (list or None): list of path hints where to look for.
- A sensible default based on the ``PATH`` environment variable
- will be used if the value is None
- scope (str): scope to look for a compiler. If None consider the
- merged configuration.
+ path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
+ environment variable will be used if the value is None
+ scope: scope to look for a compiler. If None consider the merged configuration.
+ mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
+ a certain language
"""
- compilers = find_compilers(path_hints)
+ compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
+
return select_new_compilers(compilers, scope)
@@ -638,7 +662,9 @@ def all_compiler_types():
)
-def arguments_to_detect_version_fn(operating_system, paths):
+def arguments_to_detect_version_fn(
+ operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
+) -> List[DetectVersionArgs]:
"""Returns a list of DetectVersionArgs tuples to be used in a
corresponding function to detect compiler versions.
@@ -646,8 +672,7 @@ def arguments_to_detect_version_fn(operating_system, paths):
function by providing a method called with the same name.
Args:
- operating_system (spack.operating_systems.OperatingSystem): the operating system
- on which we are looking for compilers
+ operating_system: the operating system on which we are looking for compilers
paths: paths to search for compilers
Returns:
@@ -656,10 +681,10 @@ def arguments_to_detect_version_fn(operating_system, paths):
compilers in this OS.
"""
- def _default(search_paths):
- command_arguments = []
+ def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
+ command_arguments: List[DetectVersionArgs] = []
files_to_be_tested = fs.files_in(*search_paths)
- for compiler_name in spack.compilers.supported_compilers_for_host_platform():
+ for compiler_name in supported_compilers_for_host_platform():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
@@ -684,7 +709,9 @@ def _default(search_paths):
return fn(paths)
-def detect_version(detect_version_args):
+def detect_version(
+ detect_version_args: DetectVersionArgs,
+) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
"""Computes the version of a compiler and adds it to the information
passed as input.
@@ -693,8 +720,7 @@ def detect_version(detect_version_args):
needs to be checked by the code dispatching the calls.
Args:
- detect_version_args (DetectVersionArgs): information on the
- compiler for which we should detect the version.
+ detect_version_args: information on the compiler for which we should detect the version.
Returns:
A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
@@ -710,7 +736,7 @@ def _default(fn_args):
path = fn_args.path
# Get compiler names and the callback to detect their versions
- callback = getattr(compiler_cls, "{0}_version".format(language))
+ callback = getattr(compiler_cls, f"{language}_version")
try:
version = callback(path)
@@ -736,13 +762,15 @@ def _default(fn_args):
return fn(detect_version_args)
-def make_compiler_list(detected_versions):
+def make_compiler_list(
+ detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
+) -> List["spack.compiler.Compiler"]:
"""Process a list of detected versions and turn them into a list of
compiler specs.
Args:
- detected_versions (list): list of DetectVersionArgs containing a
- valid version
+ detected_versions: list of DetectVersionArgs containing a valid version
+ mixed_toolchain: allow mixing compilers from different toolchains if language is missing
Returns:
list: list of Compiler objects
@@ -751,7 +779,7 @@ def make_compiler_list(detected_versions):
sorted_compilers = sorted(detected_versions, key=group_fn)
# Gather items in a dictionary by the id, name variation and language
- compilers_d = {}
+ compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
@@ -760,7 +788,7 @@ def make_compiler_list(detected_versions):
def _default_make_compilers(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
- compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
+ compiler_cls = class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
# TODO: johnwparent - revist the following line as per discussion at:
@@ -782,13 +810,14 @@ def _default_make_compilers(cmp_id, paths):
getattr(variation, "suffix", None),
)
- compilers = []
+ # Flatten to a list of compiler id, primary variation and compiler dictionary
+ flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
- # fill any missing parts from subsequent entries
+ # Fill any missing parts from subsequent entries (without mixing toolchains)
for lang in ["cxx", "f77", "fc"]:
if lang not in selected:
next_lang = next(
@@ -797,14 +826,63 @@ def _default_make_compilers(cmp_id, paths):
if next_lang:
selected[lang] = next_lang
- operating_system, _, _ = compiler_id
- make_compilers = getattr(operating_system, "make_compilers", _default_make_compilers)
+ flat_compilers.append((compiler_id, selected_variation, selected))
- compilers.extend(make_compilers(compiler_id, selected))
+ # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
+ if mixed_toolchain:
+ make_mixed_toolchain(flat_compilers)
+
+ # Finally, create the compiler list
+ compilers = []
+ for compiler_id, _, compiler in flat_compilers:
+ make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
+ compilers.extend(make_compilers(compiler_id, compiler))
return compilers
+def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
+ """Add missing compilers across toolchains when they are missing for a particular language.
+ This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
+ fortran compiler (no flang)."""
+
+ # First collect the clangs that are missing a fortran compiler
+ clangs_without_flang = [
+ (id, variation, compiler)
+ for id, variation, compiler in compilers
+ if id.compiler_name in ("clang", "apple-clang")
+ and "f77" not in compiler
+ and "fc" not in compiler
+ ]
+ if not clangs_without_flang:
+ return
+
+ # Filter on GCCs with fortran compiler
+ gccs_with_fortran = [
+ (id, variation, compiler)
+ for id, variation, compiler in compilers
+ if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
+ ]
+
+ # Sort these GCCs by "best variation" (no prefix / suffix first)
+ gccs_with_fortran.sort(
+ key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
+ )
+
+ # Attach the optimal GCC fortran compiler to the clangs that don't have one
+ for clang_id, _, clang_compiler in clangs_without_flang:
+ gcc_compiler = next(
+ (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
+ )
+
+ if not gcc_compiler:
+ continue
+
+ # Update the fc / f77 entries
+ clang_compiler["f77"] = gcc_compiler["f77"]
+ clang_compiler["fc"] = gcc_compiler["fc"]
+
+
def is_mixed_toolchain(compiler):
"""Returns True if the current compiler is a mixed toolchain,
False otherwise.
diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py
index a642960b7df522..33039bf07d1874 100644
--- a/lib/spack/spack/compilers/aocc.py
+++ b/lib/spack/spack/compilers/aocc.py
@@ -5,7 +5,6 @@
import os
import re
-import sys
import llnl.util.lang
@@ -41,7 +40,6 @@ def debug_flags(self):
"-gdwarf-5",
"-gline-tables-only",
"-gmodules",
- "-gz",
"-g",
]
@@ -114,17 +112,6 @@ def extract_version_from_output(cls, output):
return ".".join(match.groups())
return "unknown"
- @classmethod
- def fc_version(cls, fortran_compiler):
- if sys.platform == "darwin":
- return cls.default_version("clang")
-
- return cls.default_version(fortran_compiler)
-
- @classmethod
- def f77_version(cls, f77):
- return cls.fc_version(f77)
-
@property
def stdcxx_libs(self):
return ("-lstdc++",)
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index a9356227de5d76..5e63526df619c2 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -5,7 +5,6 @@
import os
import re
-import sys
import llnl.util.lang
@@ -39,10 +38,10 @@ class Clang(Compiler):
cxx_names = ["clang++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ["flang", "gfortran", "xlf_r"]
+ f77_names = ["flang"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ["flang", "gfortran", "xlf90_r"]
+ fc_names = ["flang"]
version_argument = "--version"
@@ -56,7 +55,6 @@ def debug_flags(self):
"-gdwarf-5",
"-gline-tables-only",
"-gmodules",
- "-gz",
"-g",
]
@@ -182,16 +180,3 @@ def extract_version_from_output(cls, output):
if match:
ver = match.group(match.lastindex)
return ver
-
- @classmethod
- def fc_version(cls, fc):
- # We could map from gcc/gfortran version to clang version, but on macOS
- # we normally mix any version of gfortran with any version of clang.
- if sys.platform == "darwin":
- return cls.default_version("clang")
- else:
- return cls.default_version(fc)
-
- @classmethod
- def f77_version(cls, f77):
- return cls.fc_version(f77)
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 4ec29605259739..d4ff7c5ebc9067 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -85,6 +85,14 @@ def cxx14_flag(self):
else:
return "-std=c++14"
+ @property
+ def cxx17_flag(self):
+ # https://www.intel.com/content/www/us/en/developer/articles/news/c17-features-supported-by-c-compiler.html
+ if self.real_version < Version("19"):
+ raise UnsupportedCompilerFlag(self, "the C++17 standard", "cxx17_flag", "< 19")
+ else:
+ return "-std=c++17"
+
@property
def c99_flag(self):
if self.real_version < Version("12"):
diff --git a/lib/spack/spack/compilers/oneapi.py b/lib/spack/spack/compilers/oneapi.py
index fde6fa677ae7ad..63eb3859831eb3 100644
--- a/lib/spack/spack/compilers/oneapi.py
+++ b/lib/spack/spack/compilers/oneapi.py
@@ -6,6 +6,8 @@
import os
from os.path import dirname
+from llnl.util import tty
+
from spack.compiler import Compiler
@@ -135,3 +137,13 @@ def setup_custom_environment(self, pkg, env):
# Executable "sycl-post-link" doesn't exist!
if self.cxx:
env.prepend_path("PATH", dirname(self.cxx))
+
+ # 2024 release bumped the libsycl version because of an ABI
+ # change, 2024 compilers are required. You will see this
+ # error:
+ #
+ # /usr/bin/ld: warning: libsycl.so.7, needed by ...., not found
+ if pkg.spec.satisfies("%oneapi@:2023"):
+ for c in ["dnn"]:
+ if pkg.spec.satisfies(f"^intel-oneapi-{c}@2024:"):
+ tty.warn(f"intel-oneapi-{c}@2024 SYCL APIs requires %oneapi@2024:")
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 9c6fb6366e0460..9ef22addc8c897 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -69,6 +69,7 @@
SECTION_SCHEMAS = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
+ "definitions": spack.schema.definitions.schema,
"mirrors": spack.schema.mirrors.schema,
"repos": spack.schema.repos.schema,
"packages": spack.schema.packages.schema,
@@ -1008,6 +1009,7 @@ def read_config_file(filename, schema=None):
key = next(iter(data))
schema = _ALL_SCHEMAS[key]
validate(data, schema)
+
return data
except StopIteration:
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index f252fbc05df944..ecda8c36b0f0ba 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -1522,14 +1522,18 @@ def _query(
# TODO: like installed and known that can be queried? Or are
# TODO: these really special cases that only belong here?
- # Just look up concrete specs with hashes; no fancy search.
- if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete:
- # TODO: handling of hashes restriction is not particularly elegant.
- hash_key = query_spec.dag_hash()
- if hash_key in self._data and (not hashes or hash_key in hashes):
- return [self._data[hash_key].spec]
- else:
- return []
+ if query_spec is not any:
+ if not isinstance(query_spec, spack.spec.Spec):
+ query_spec = spack.spec.Spec(query_spec)
+
+ # Just look up concrete specs with hashes; no fancy search.
+ if query_spec.concrete:
+ # TODO: handling of hashes restriction is not particularly elegant.
+ hash_key = query_spec.dag_hash()
+ if hash_key in self._data and (not hashes or hash_key in hashes):
+ return [self._data[hash_key].spec]
+ else:
+ return []
# Abstract specs require more work -- currently we test
# against everything.
@@ -1537,6 +1541,9 @@ def _query(
start_date = start_date or datetime.datetime.min
end_date = end_date or datetime.datetime.max
+ # save specs whose name doesn't match for last, to avoid a virtual check
+ deferred = []
+
for key, rec in self._data.items():
if hashes is not None and rec.spec.dag_hash() not in hashes:
continue
@@ -1561,8 +1568,26 @@ def _query(
if not (start_date < inst_date < end_date):
continue
- if query_spec is any or rec.spec.satisfies(query_spec):
+ if query_spec is any:
results.append(rec.spec)
+ continue
+
+ # check anon specs and exact name matches first
+ if not query_spec.name or rec.spec.name == query_spec.name:
+ if rec.spec.satisfies(query_spec):
+ results.append(rec.spec)
+
+ # save potential virtual matches for later, but not if we already found a match
+ elif not results:
+ deferred.append(rec.spec)
+
+ # Checking for virtuals is expensive, so we save it for last and only if needed.
+ # If we get here, we didn't find anything in the DB that matched by name.
+ # If we did find something, the query spec can't be virtual b/c we matched an actual
+ # package installation, so skip the virtual check entirely. If we *didn't* find anything,
+ # check all the deferred specs *if* the query is virtual.
+ if not results and query_spec is not any and deferred and query_spec.virtual:
+ results = [spec for spec in deferred if spec.satisfies(query_spec)]
return results
diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py
index 0e873c3f555095..6fba021b336b0c 100644
--- a/lib/spack/spack/detection/common.py
+++ b/lib/spack/spack/detection/common.py
@@ -269,7 +269,7 @@ def find_windows_compiler_root_paths() -> List[str]:
At the moment simply returns location of VS install paths from VSWhere
But should be extended to include more information as relevant"""
- return list(winOs.WindowsOs.vs_install_paths)
+ return list(winOs.WindowsOs().vs_install_paths)
@staticmethod
def find_windows_compiler_cmake_paths() -> List[str]:
diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py
index 4de703ac97b0f3..f5da02bede1842 100644
--- a/lib/spack/spack/detection/path.py
+++ b/lib/spack/spack/detection/path.py
@@ -15,9 +15,12 @@
from typing import Dict, List, Optional, Set, Tuple
import llnl.util.filesystem
+import llnl.util.lang
import llnl.util.tty
+import spack.util.elf as elf_utils
import spack.util.environment
+import spack.util.environment as environment
import spack.util.ld_so_conf
from .common import (
@@ -39,15 +42,29 @@
DETECTION_TIMEOUT = 120
-def common_windows_package_paths() -> List[str]:
+def common_windows_package_paths(pkg_cls=None) -> List[str]:
+ """Get the paths for common package installation location on Windows
+ that are outside the PATH
+ Returns [] on unix
+ """
+ if sys.platform != "win32":
+ return []
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
paths.extend(find_win32_additional_install_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
+ if pkg_cls:
+ paths.extend(compute_windows_user_path_for_package(pkg_cls))
+ paths.extend(compute_windows_program_path_for_package(pkg_cls))
return paths
+def file_identifier(path):
+ s = os.stat(path)
+ return (s.st_dev, s.st_ino)
+
+
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
"""Get the paths of all executables available from the current PATH.
@@ -62,18 +79,44 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
path_hints: list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
- if sys.platform == "win32":
- path_hints.extend(common_windows_package_paths())
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
return path_to_dict(search_paths)
+def get_elf_compat(path):
+ """For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
+ it is host-compatible."""
+ # On platforms supporting ELF, we try to be a bit smarter when it comes to shared
+ # libraries, by dropping those that are not host compatible.
+ with open(path, "rb") as f:
+ elf = elf_utils.parse_elf(f, only_header=True)
+ return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
+
+
+def accept_elf(path, host_compat):
+ """Accept an ELF file if the header matches the given compat triplet,
+ obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g.
+ static library, or some arbitrary file, fall back to is_readable_file)."""
+ # Fast path: assume libraries at least have .so in their basename.
+ # Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
+ if ".so" not in os.path.basename(path):
+ return llnl.util.filesystem.is_readable_file(path)
+ try:
+ return host_compat == get_elf_compat(path)
+ except (OSError, elf_utils.ElfParsingError):
+ return llnl.util.filesystem.is_readable_file(path)
+
+
def libraries_in_ld_and_system_library_path(
path_hints: Optional[List[str]] = None,
) -> Dict[str, str]:
- """Get the paths of all libraries available from LD_LIBRARY_PATH,
- LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
- standard system library paths.
+ """Get the paths of all libraries available from ``path_hints`` or the
+ following defaults:
+
+ - Environment variables (Linux: ``LD_LIBRARY_PATH``, Darwin: ``DYLD_LIBRARY_PATH``,
+ and ``DYLD_FALLBACK_LIBRARY_PATH``)
+ - Dynamic linker default paths (glibc: ld.so.conf, musl: ld-musl-.path)
+ - Default system library paths.
For convenience, this is constructed as a dictionary where the keys are
the library paths and the values are the names of the libraries
@@ -87,31 +130,71 @@ def libraries_in_ld_and_system_library_path(
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
variables as well as the standard system library paths.
+ path_hints (list): list of paths to be searched. If ``None``, the default
+ system paths are used.
"""
- path_hints = (
- path_hints
- or spack.util.environment.get_path("LD_LIBRARY_PATH")
- + spack.util.environment.get_path("DYLD_LIBRARY_PATH")
- + spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH")
- + spack.util.ld_so_conf.host_dynamic_linker_search_paths()
- )
- search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
- return path_to_dict(search_paths)
-
+ if path_hints:
+ search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
+ else:
+ search_paths = []
+
+ # Environment variables
+ if sys.platform == "darwin":
+ search_paths.extend(environment.get_path("DYLD_LIBRARY_PATH"))
+ search_paths.extend(environment.get_path("DYLD_FALLBACK_LIBRARY_PATH"))
+ elif sys.platform.startswith("linux"):
+ search_paths.extend(environment.get_path("LD_LIBRARY_PATH"))
+
+ # Dynamic linker paths
+ search_paths.extend(spack.util.ld_so_conf.host_dynamic_linker_search_paths())
+
+ # Drop redundant paths
+ search_paths = list(filter(os.path.isdir, search_paths))
+
+ # Make sure we don't doubly list /usr/lib and /lib etc
+ search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
+
+ try:
+ host_compat = get_elf_compat(sys.executable)
+ accept = lambda path: accept_elf(path, host_compat)
+ except (OSError, elf_utils.ElfParsingError):
+ accept = llnl.util.filesystem.is_readable_file
+
+ path_to_lib = {}
+ # Reverse order of search directories so that a lib in the first
+ # search path entry overrides later entries
+ for search_path in reversed(search_paths):
+ for lib in os.listdir(search_path):
+ lib_path = os.path.join(search_path, lib)
+ if accept(lib_path):
+ path_to_lib[lib_path] = lib
+ return path_to_lib
+
+
+def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]:
+ """Get the paths of all libraries available from the system PATH paths.
+
+ For more details, see `libraries_in_ld_and_system_library_path` regarding
+ return type and contents.
-def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
- path_hints.extend(spack.util.environment.get_path("PATH"))
- search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
+ Args:
+ path_hints: list of paths to be searched. If None the list will be
+ constructed based on the set of PATH environment
+ variables as well as the standard system library paths.
+ """
+ search_hints = (
+ path_hints if path_hints is not None else spack.util.environment.get_path("PATH")
+ )
+ search_paths = llnl.util.filesystem.search_paths_for_libraries(*search_hints)
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
# at the search root. Add both of those options to the search scheme
- search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*path_hints))
- search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
- search_paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
- search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
- search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
- # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
- # location, so we handle that case specifically.
- search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
+ search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints))
+ if path_hints is None:
+ # if no user provided path was given, add defaults to the search
+ search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
+ # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
+ # location, so we handle that case specifically.
+ search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
return path_to_dict(search_paths)
@@ -125,19 +208,8 @@ def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
class Finder:
"""Inspects the file-system looking for packages. Guesses places where to look using PATH."""
- def path_hints(
- self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
- ) -> List[str]:
- """Returns the list of paths to be searched.
-
- Args:
- pkg: package being detected
- initial_guess: initial list of paths from caller
- """
- result = initial_guess or []
- result.extend(compute_windows_user_path_for_package(pkg))
- result.extend(compute_windows_program_path_for_package(pkg))
- return result
+ def default_path_hints(self) -> List[str]:
+ return []
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
"""Returns the list of patterns used to match candidate files.
@@ -245,6 +317,8 @@ def find(
Args:
pkg_name: package being detected
initial_guess: initial list of paths to search from the caller
+ if None, default paths are searched. If this
+ is an empty list, nothing will be searched.
"""
import spack.repo
@@ -252,13 +326,18 @@ def find(
patterns = self.search_patterns(pkg=pkg_cls)
if not patterns:
return []
- path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
- candidates = self.candidate_files(patterns=patterns, paths=path_hints)
+ if initial_guess is None:
+ initial_guess = self.default_path_hints()
+ initial_guess.extend(common_windows_package_paths(pkg_cls))
+ candidates = self.candidate_files(patterns=patterns, paths=initial_guess)
result = self.detect_specs(pkg=pkg_cls, paths=candidates)
return result
class ExecutablesFinder(Finder):
+ def default_path_hints(self) -> List[str]:
+ return spack.util.environment.get_path("PATH")
+
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
result = []
if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
@@ -298,7 +377,7 @@ def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]
libraries_by_path = (
libraries_in_ld_and_system_library_path(path_hints=paths)
if sys.platform != "win32"
- else libraries_in_windows_paths(paths)
+ else libraries_in_windows_paths(path_hints=paths)
)
patterns = [re.compile(x) for x in patterns]
result = []
@@ -334,21 +413,16 @@ def by_path(
# TODO: Packages should be able to define both .libraries and .executables in the future
# TODO: determine_spec_details should get all relevant libraries and executables in one call
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
-
- executables_path_guess = (
- spack.util.environment.get_path("PATH") if path_hints is None else path_hints
- )
- libraries_path_guess = [] if path_hints is None else path_hints
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
result = collections.defaultdict(list)
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
for pkg in packages_to_search:
executable_future = executor.submit(
- executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
+ executables_finder.find, pkg_name=pkg, initial_guess=path_hints
)
library_future = executor.submit(
- libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
+ libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
)
detected_specs_by_package[pkg] = executable_future, library_future
@@ -359,9 +433,13 @@ def by_path(
if detected:
_, unqualified_name = spack.repo.partition_package_name(pkg_name)
result[unqualified_name].extend(detected)
- except Exception:
+ except concurrent.futures.TimeoutError:
llnl.util.tty.debug(
f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
)
+ except Exception as e:
+ llnl.util.tty.debug(
+ f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occured {e}"
+ )
return result
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index ad8a75078fe442..14c5e420c428c1 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -137,6 +137,7 @@ class DirectiveMeta(type):
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
+ _default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
@@ -199,6 +200,16 @@ def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
+ @staticmethod
+ def push_default_args(default_args):
+ """Push default arguments"""
+ DirectiveMeta._default_args.append(default_args)
+
+ @staticmethod
+ def pop_default_args():
+ """Pop default arguments"""
+ return DirectiveMeta._default_args.pop()
+
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
@@ -259,7 +270,13 @@ def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
- def _wrapper(*args, **kwargs):
+ def _wrapper(*args, **_kwargs):
+ # First merge default args with kwargs
+ kwargs = dict()
+ for default_args in DirectiveMeta._default_args:
+ kwargs.update(default_args)
+ kwargs.update(_kwargs)
+
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
@@ -601,17 +618,21 @@ def _execute_extends(pkg):
return _execute_extends
-@directive("provided")
-def provides(*specs, **kwargs):
- """Allows packages to provide a virtual dependency. If a package provides
- 'mpi', other packages can declare that they depend on "mpi", and spack
- can use the providing package to satisfy the dependency.
+@directive(dicts=("provided", "provided_together"))
+def provides(*specs, when: Optional[str] = None):
+ """Allows packages to provide a virtual dependency.
+
+ If a package provides "mpi", other packages can declare that they depend on "mpi",
+ and spack can use the providing package to satisfy the dependency.
+
+ Args:
+ *specs: virtual specs provided by this package
+ when: condition when this provides clause needs to be considered
"""
def _execute_provides(pkg):
import spack.parser # Avoid circular dependency
- when = kwargs.get("when")
when_spec = make_when_spec(when)
if not when_spec:
return
@@ -619,15 +640,18 @@ def _execute_provides(pkg):
# ``when`` specs for ``provides()`` need a name, as they are used
# to build the ProviderIndex.
when_spec.name = pkg.name
-
- for string in specs:
- for provided_spec in spack.parser.parse(string):
- if pkg.name == provided_spec.name:
- raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
-
- if provided_spec not in pkg.provided:
- pkg.provided[provided_spec] = set()
- pkg.provided[provided_spec].add(when_spec)
+ spec_objs = [spack.spec.Spec(x) for x in specs]
+ spec_names = [x.name for x in spec_objs]
+ if len(spec_names) > 1:
+ pkg.provided_together.setdefault(when_spec, []).append(set(spec_names))
+
+ for provided_spec in spec_objs:
+ if pkg.name == provided_spec.name:
+ raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
+
+ if provided_spec not in pkg.provided:
+ pkg.provided[provided_spec] = set()
+ pkg.provided[provided_spec].add(when_spec)
return _execute_provides
diff --git a/lib/spack/spack/environment/__init__.py b/lib/spack/spack/environment/__init__.py
index ac598e8421d2ad..2f293d9eb8f81b 100644
--- a/lib/spack/spack/environment/__init__.py
+++ b/lib/spack/spack/environment/__init__.py
@@ -339,6 +339,7 @@
from .environment import (
TOP_LEVEL_KEY,
Environment,
+ SpackEnvironmentConfigError,
SpackEnvironmentError,
SpackEnvironmentViewError,
activate,
@@ -372,6 +373,7 @@
__all__ = [
"TOP_LEVEL_KEY",
"Environment",
+ "SpackEnvironmentConfigError",
"SpackEnvironmentError",
"SpackEnvironmentViewError",
"activate",
diff --git a/lib/spack/spack/environment/depfile.py b/lib/spack/spack/environment/depfile.py
index f3a28331bd94dd..34e2481fa916c3 100644
--- a/lib/spack/spack/environment/depfile.py
+++ b/lib/spack/spack/environment/depfile.py
@@ -232,6 +232,10 @@ def to_dict(self):
"pkg_ids": " ".join(self.all_pkg_identifiers),
}
+ @property
+ def empty(self):
+ return len(self.roots) == 0
+
@staticmethod
def from_env(
env: ev.Environment,
@@ -254,15 +258,10 @@ def from_env(
jobserver: when enabled, make will invoke Spack with jobserver support. For
dry-run this should be disabled.
"""
- # If no specs are provided as a filter, build all the specs in the environment.
- if filter_specs:
- entrypoints = [env.matching_spec(s) for s in filter_specs]
- else:
- entrypoints = [s for _, s in env.concretized_specs()]
-
+ roots = env.all_matching_specs(*filter_specs) if filter_specs else env.concrete_roots()
visitor = DepfileSpecVisitor(pkg_buildcache, dep_buildcache)
traverse.traverse_breadth_first_with_visitor(
- entrypoints, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
+ roots, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
)
- return MakefileModel(env, entrypoints, visitor.adjacency_list, make_prefix, jobserver)
+ return MakefileModel(env, roots, visitor.adjacency_list, make_prefix, jobserver)
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index 51ea453c39ef3c..5d6273506ec9c6 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -330,16 +330,21 @@ def create_in_dir(
if with_view is None and keep_relative:
return Environment(manifest_dir)
- manifest = EnvironmentManifestFile(manifest_dir)
+ try:
+ manifest = EnvironmentManifestFile(manifest_dir)
- if with_view is not None:
- manifest.set_default_view(with_view)
+ if with_view is not None:
+ manifest.set_default_view(with_view)
- if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
- init_file = pathlib.Path(init_file)
- manifest.absolutify_dev_paths(init_file.parent)
+ if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
+ init_file = pathlib.Path(init_file)
+ manifest.absolutify_dev_paths(init_file.parent)
+
+ manifest.flush()
- manifest.flush()
+ except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
+ shutil.rmtree(manifest_dir)
+ raise e
return Environment(manifest_dir)
@@ -391,7 +396,13 @@ def all_environments():
def _read_yaml(str_or_file):
"""Read YAML from a file for round-trip parsing."""
- data = syaml.load_config(str_or_file)
+ try:
+ data = syaml.load_config(str_or_file)
+ except syaml.SpackYAMLError as e:
+ raise SpackEnvironmentConfigError(
+ f"Invalid environment configuration detected: {e.message}"
+ )
+
filename = getattr(str_or_file, "name", None)
default_data = spack.config.validate(data, spack.schema.env.schema, filename)
return data, default_data
@@ -776,10 +787,18 @@ def _re_read(self):
"""Reinitialize the environment object."""
self.clear(re_read=True)
self.manifest = EnvironmentManifestFile(self.path)
- self._read()
+ self._read(re_read=True)
- def _read(self):
- self._construct_state_from_manifest()
+ def _read(self, re_read=False):
+ # If the manifest has included files, then some of the information
+ # (e.g., definitions) MAY be in those files. So we need to ensure
+ # the config is populated with any associated spec lists in order
+ # to fully construct the manifest state.
+ includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
+ if includes and not re_read:
+ prepare_config_scope(self)
+
+ self._construct_state_from_manifest(re_read)
if os.path.exists(self.lock_path):
with open(self.lock_path) as f:
@@ -793,21 +812,30 @@ def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(self.txlock, acquire=self._re_read)
- def _construct_state_from_manifest(self):
+ def _process_definition(self, item):
+ """Process a single spec definition item."""
+ entry = copy.deepcopy(item)
+ when = _eval_conditional(entry.pop("when", "True"))
+ assert len(entry) == 1
+ if when:
+ name, spec_list = next(iter(entry.items()))
+ user_specs = SpecList(name, spec_list, self.spec_lists.copy())
+ if name in self.spec_lists:
+ self.spec_lists[name].extend(user_specs)
+ else:
+ self.spec_lists[name] = user_specs
+
+ def _construct_state_from_manifest(self, re_read=False):
"""Read manifest file and set up user specs."""
self.spec_lists = collections.OrderedDict()
+
+ if not re_read:
+ for item in spack.config.get("definitions", []):
+ self._process_definition(item)
+
env_configuration = self.manifest[TOP_LEVEL_KEY]
for item in env_configuration.get("definitions", []):
- entry = copy.deepcopy(item)
- when = _eval_conditional(entry.pop("when", "True"))
- assert len(entry) == 1
- if when:
- name, spec_list = next(iter(entry.items()))
- user_specs = SpecList(name, spec_list, self.spec_lists.copy())
- if name in self.spec_lists:
- self.spec_lists[name].extend(user_specs)
- else:
- self.spec_lists[name] = user_specs
+ self._process_definition(item)
spec_list = env_configuration.get(user_speclist_name, [])
user_specs = SpecList(
@@ -852,7 +880,9 @@ def clear(self, re_read=False):
yaml, and need to be maintained when re-reading an existing
environment.
"""
- self.spec_lists = {user_speclist_name: SpecList()} # specs from yaml
+ self.spec_lists = collections.OrderedDict()
+ self.spec_lists[user_speclist_name] = SpecList()
+
self.dev_specs = {} # dev-build specs from yaml
self.concretized_user_specs = [] # user specs from last concretize
self.concretized_order = [] # roots of last concretize, in order
@@ -1001,7 +1031,8 @@ def included_config_scopes(self):
elif include_url.scheme:
raise ValueError(
- "Unsupported URL scheme for environment include: {}".format(config_path)
+ f"Unsupported URL scheme ({include_url.scheme}) for "
+ f"environment include: {config_path}"
)
# treat relative paths as relative to the environment
@@ -1063,8 +1094,10 @@ def update_stale_references(self, from_list=None):
from_list = next(iter(self.spec_lists.keys()))
index = list(self.spec_lists.keys()).index(from_list)
- # spec_lists is an OrderedDict, all list entries after the modified
- # list may refer to the modified list. Update stale references
+ # spec_lists is an OrderedDict to ensure lists read from the manifest
+ # are maintainted in order, hence, all list entries after the modified
+ # list may refer to the modified list requiring stale references to be
+ # updated.
for i, (name, speclist) in enumerate(
list(self.spec_lists.items())[index + 1 :], index + 1
):
@@ -1162,7 +1195,7 @@ def change_existing_spec(
def remove(self, query_spec, list_name=user_speclist_name, force=False):
"""Remove specs from an environment that match a query_spec"""
err_msg_header = (
- f"cannot remove {query_spec} from '{list_name}' definition "
+ f"Cannot remove '{query_spec}' from '{list_name}' definition "
f"in {self.manifest.manifest_file}"
)
query_spec = Spec(query_spec)
@@ -1193,11 +1226,10 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
list_to_change.remove(spec)
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
- except spack.spec_list.SpecListError:
+ except spack.spec_list.SpecListError as e:
# define new specs list
new_specs = set(self.user_specs)
- msg = f"Spec '{spec}' is part of a spec matrix and "
- msg += f"cannot be removed from list '{list_to_change}'."
+ msg = str(e)
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs, so we can remove this spec from concrete spec lists
@@ -1326,7 +1358,7 @@ def concretize(self, force=False, tests=False):
# Remove concrete specs that no longer correlate to a user spec
for spec in set(self.concretized_user_specs) - set(self.user_specs):
- self.deconcretize(spec)
+ self.deconcretize(spec, concrete=False)
# Pick the right concretization strategy
if self.unify == "when_possible":
@@ -1341,15 +1373,36 @@ def concretize(self, force=False, tests=False):
msg = "concretization strategy not implemented [{0}]"
raise SpackEnvironmentError(msg.format(self.unify))
- def deconcretize(self, spec):
+ def deconcretize(self, spec: spack.spec.Spec, concrete: bool = True):
+ """
+ Remove specified spec from environment concretization
+
+ Arguments:
+ spec: Spec to deconcretize. This must be a root of the environment
+ concrete: If True, find all instances of spec as concrete in the environemnt.
+ If False, find a single instance of the abstract spec as root of the environment.
+ """
# spec has to be a root of the environment
- index = self.concretized_user_specs.index(spec)
- dag_hash = self.concretized_order.pop(index)
- del self.concretized_user_specs[index]
+ if concrete:
+ dag_hash = spec.dag_hash()
+
+ pairs = zip(self.concretized_user_specs, self.concretized_order)
+ filtered = [(spec, h) for spec, h in pairs if h != dag_hash]
+ # Cannot use zip and unpack two values; it fails if filtered is empty
+ self.concretized_user_specs = [s for s, _ in filtered]
+ self.concretized_order = [h for _, h in filtered]
+ else:
+ index = self.concretized_user_specs.index(spec)
+ dag_hash = self.concretized_order.pop(index)
+
+ del self.concretized_user_specs[index]
# If this was the only user spec that concretized to this concrete spec, remove it
if dag_hash not in self.concretized_order:
- del self.specs_by_hash[dag_hash]
+ # if we deconcretized a dependency that doesn't correspond to a root, it
+ # won't be here.
+ if dag_hash in self.specs_by_hash:
+ del self.specs_by_hash[dag_hash]
def _get_specs_to_concretize(
self,
@@ -1484,7 +1537,7 @@ def _concretize_separately(self, tests=False):
for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
if uspec not in old_concretized_user_specs:
root_specs.append(uspec)
- args.append((i, uspec_constraints, tests))
+ args.append((i, [str(x) for x in uspec_constraints], tests))
i += 1
# Ensure we don't try to bootstrap clingo in parallel
@@ -1518,11 +1571,21 @@ def _concretize_separately(self, tests=False):
tty.msg(msg)
batch = []
- for i, concrete, duration in spack.util.parallel.imap_unordered(
- _concretize_task, args, processes=num_procs, debug=tty.is_debug()
+ for j, (i, concrete, duration) in enumerate(
+ spack.util.parallel.imap_unordered(
+ _concretize_task,
+ args,
+ processes=num_procs,
+ debug=tty.is_debug(),
+ maxtaskperchild=1,
+ )
):
batch.append((i, concrete))
- tty.verbose(f"[{duration:7.2f}s] {root_specs[i]}")
+ percentage = (j + 1) / len(args) * 100
+ tty.verbose(
+ f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
+ f"{root_specs[i].colored_str}"
+ )
sys.stdout.flush()
# Add specs in original order
@@ -1697,11 +1760,14 @@ def _env_modifications_for_view(
self, view: ViewDescriptor, reverse: bool = False
) -> spack.util.environment.EnvironmentModifications:
try:
- mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view)
+ with spack.store.STORE.db.read_transaction():
+ installed_roots = [s for s in self.concrete_roots() if s.installed]
+ mods = uenv.environment_modifications_for_specs(*installed_roots, view=view)
except Exception as e:
# Failing to setup spec-specific changes shouldn't be a hard error.
tty.warn(
- "couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e)
+ f"could not {'unload' if reverse else 'load'} runtime environment due "
+ f"to {e.__class__.__name__}: {e}"
)
return spack.util.environment.EnvironmentModifications()
return mods.reversed() if reverse else mods
@@ -2052,7 +2118,7 @@ def matching_spec(self, spec):
def removed_specs(self):
"""Tuples of (user spec, concrete spec) for all specs that will be
- removed on nexg concretize."""
+ removed on next concretize."""
needed = set()
for s, c in self.concretized_specs():
if s in self.user_specs:
@@ -2397,6 +2463,7 @@ def _concretize_from_constraints(spec_constraints, tests=False):
def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
index, spec_constraints, tests = packed_arguments
+ spec_constraints = [Spec(x) for x in spec_constraints]
with tty.SuppressOutput(msg_enabled=False):
start = time.time()
spec = _concretize_from_constraints(spec_constraints, tests)
@@ -2710,7 +2777,7 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
self.changed = True
def add_definition(self, user_spec: str, list_name: str) -> None:
- """Appends a user spec to the first active definition mathing the name passed as argument.
+ """Appends a user spec to the first active definition matching the name passed as argument.
Args:
user_spec: user spec to be appended
@@ -2923,3 +2990,7 @@ class SpackEnvironmentError(spack.error.SpackError):
class SpackEnvironmentViewError(SpackEnvironmentError):
"""Class for errors regarding view generation."""
+
+
+class SpackEnvironmentConfigError(SpackEnvironmentError):
+ """Class for Spack environment-specific configuration errors."""
diff --git a/lib/spack/spack/extensions.py b/lib/spack/spack/extensions.py
index af900722cc9cfd..0ee01a22a12a99 100644
--- a/lib/spack/spack/extensions.py
+++ b/lib/spack/spack/extensions.py
@@ -5,6 +5,7 @@
"""Service functions and classes to implement the hooks
for Spack's command extensions.
"""
+import difflib
import importlib
import os
import re
@@ -176,10 +177,19 @@ class CommandNotFoundError(spack.error.SpackError):
"""
def __init__(self, cmd_name):
- super().__init__(
+ msg = (
"{0} is not a recognized Spack command or extension command;"
" check with `spack commands`.".format(cmd_name)
)
+ long_msg = None
+
+ similar = difflib.get_close_matches(cmd_name, spack.cmd.all_commands())
+
+ if 1 <= len(similar) <= 5:
+ long_msg = "\nDid you mean one of the following commands?\n "
+ long_msg += "\n ".join(similar)
+
+ super().__init__(msg, long_msg)
class ExtensionNamingError(spack.error.SpackError):
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index aa96bbbe5106d9..a922d9caf4c085 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -28,6 +28,7 @@
import os.path
import re
import shutil
+import urllib.error
import urllib.parse
from typing import List, Optional
@@ -41,6 +42,7 @@
import spack.config
import spack.error
+import spack.oci.opener
import spack.url
import spack.util.crypto as crypto
import spack.util.git
@@ -537,6 +539,34 @@ def fetch(self):
tty.msg("Using cached archive: {0}".format(path))
+class OCIRegistryFetchStrategy(URLFetchStrategy):
+ def __init__(self, url=None, checksum=None, **kwargs):
+ super().__init__(url, checksum, **kwargs)
+
+ self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)
+
+ @_needs_stage
+ def fetch(self):
+ file = self.stage.save_filename
+ tty.msg(f"Fetching {self.url}")
+
+ try:
+ response = self._urlopen(self.url)
+ except urllib.error.URLError as e:
+ # clean up archive on failure.
+ if self.archive_file:
+ os.remove(self.archive_file)
+ if os.path.lexists(file):
+ os.remove(file)
+ raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e
+
+ if os.path.lexists(file):
+ os.remove(file)
+
+ with open(file, "wb") as f:
+ shutil.copyfileobj(response, f)
+
+
class VCSFetchStrategy(FetchStrategy):
"""Superclass for version control system fetch strategies.
@@ -743,8 +773,7 @@ def git(self):
# Disable advice for a quieter fetch
# https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
if self.git_version >= spack.version.Version("1.7.2"):
- self._git.add_default_arg("-c")
- self._git.add_default_arg("advice.detachedHead=false")
+ self._git.add_default_arg("-c", "advice.detachedHead=false")
# If the user asked for insecure fetching, make that work
# with git as well.
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 76ebbf636ebc95..78bf38ec0e7e43 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -528,10 +528,15 @@ def node_entry(self, node):
def edge_entry(self, edge):
colormap = {"build": "dodgerblue", "link": "crimson", "run": "goldenrod"}
+ label = ""
+ if edge.virtuals:
+ label = f" xlabel=\"virtuals={','.join(edge.virtuals)}\""
return (
edge.parent.dag_hash(),
edge.spec.dag_hash(),
- f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"]",
+ f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\""
+ + label
+ + "]",
)
diff --git a/lib/spack/spack/hooks/module_file_generation.py b/lib/spack/spack/hooks/module_file_generation.py
index 0c6428ebd44198..1a2bbfdfe42d5c 100644
--- a/lib/spack/spack/hooks/module_file_generation.py
+++ b/lib/spack/spack/hooks/module_file_generation.py
@@ -3,17 +3,22 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from typing import Optional, Set
+
from llnl.util import tty
import spack.config
import spack.modules
+import spack.spec
-def _for_each_enabled(spec, method_name, explicit=None):
+def _for_each_enabled(
+ spec: spack.spec.Spec, method_name: str, explicit: Optional[bool] = None
+) -> None:
"""Calls a method for each enabled module"""
- set_names = set(spack.config.get("modules", {}).keys())
+ set_names: Set[str] = set(spack.config.get("modules", {}).keys())
for name in set_names:
- enabled = spack.config.get("modules:%s:enable" % name)
+ enabled = spack.config.get(f"modules:{name}:enable")
if not enabled:
tty.debug("NO MODULE WRITTEN: list of enabled module files is empty")
continue
@@ -28,7 +33,7 @@ def _for_each_enabled(spec, method_name, explicit=None):
tty.warn(msg.format(method_name, str(e)))
-def post_install(spec, explicit):
+def post_install(spec, explicit: bool):
import spack.environment as ev # break import cycle
if ev.active_environment():
diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py
index 75f0ef3d8a52a3..e1f19656f5edaa 100644
--- a/lib/spack/spack/main.py
+++ b/lib/spack/spack/main.py
@@ -16,11 +16,13 @@
import os.path
import pstats
import re
+import shlex
import signal
import subprocess as sp
import sys
import traceback
import warnings
+from typing import List, Tuple
import archspec.cpu
@@ -49,9 +51,6 @@
#: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default
-#: top-level aliases for Spack commands
-aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}
-
#: help levels in order of detail (i.e., number of commands shown)
levels = ["short", "long"]
@@ -359,7 +358,10 @@ def add_command(self, cmd_name):
module = spack.cmd.get_module(cmd_name)
# build a list of aliases
- alias_list = [k for k, v in aliases.items() if v == cmd_name]
+ alias_list = []
+ aliases = spack.config.get("config:aliases")
+ if aliases:
+ alias_list = [k for k, v in aliases.items() if shlex.split(v)[0] == cmd_name]
subparser = self.subparsers.add_parser(
cmd_name,
@@ -670,7 +672,6 @@ def __init__(self, command_name, subprocess=False):
Windows, where it is always False.
"""
self.parser = make_argument_parser()
- self.command = self.parser.add_command(command_name)
self.command_name = command_name
# TODO: figure out how to support this on windows
self.subprocess = subprocess if sys.platform != "win32" else False
@@ -702,13 +703,14 @@ def __call__(self, *argv, **kwargs):
if self.subprocess:
p = sp.Popen(
- [spack.paths.spack_script, self.command_name] + prepend + list(argv),
+ [spack.paths.spack_script] + prepend + [self.command_name] + list(argv),
stdout=sp.PIPE,
stderr=sp.STDOUT,
)
out, self.returncode = p.communicate()
out = out.decode()
else:
+ command = self.parser.add_command(self.command_name)
args, unknown = self.parser.parse_known_args(
prepend + [self.command_name] + list(argv)
)
@@ -716,7 +718,7 @@ def __call__(self, *argv, **kwargs):
out = io.StringIO()
try:
with log_output(out, echo=True):
- self.returncode = _invoke_command(self.command, self.parser, args, unknown)
+ self.returncode = _invoke_command(command, self.parser, args, unknown)
except SystemExit as e:
self.returncode = e.code
@@ -870,6 +872,46 @@ def restore_macos_dyld_vars():
os.environ[dyld_var] = os.environ[stored_var_name]
+def resolve_alias(cmd_name: str, cmd: List[str]) -> Tuple[str, List[str]]:
+ """Resolves aliases in the given command.
+
+ Args:
+ cmd_name: command name.
+ cmd: command line arguments.
+
+ Returns:
+ new command name and arguments.
+ """
+ all_commands = spack.cmd.all_commands()
+ aliases = spack.config.get("config:aliases")
+
+ if aliases:
+ for key, value in aliases.items():
+ if " " in key:
+ tty.warn(
+ f"Alias '{key}' (mapping to '{value}') contains a space"
+ ", which is not supported."
+ )
+ if key in all_commands:
+ tty.warn(
+ f"Alias '{key}' (mapping to '{value}') attempts to override"
+ " built-in command."
+ )
+
+ if cmd_name not in all_commands:
+ alias = None
+
+ if aliases:
+ alias = aliases.get(cmd_name)
+
+ if alias is not None:
+ alias_parts = shlex.split(alias)
+ cmd_name = alias_parts[0]
+ cmd = alias_parts + cmd[1:]
+
+ return cmd_name, cmd
+
+
def _main(argv=None):
"""Logic for the main entry point for the Spack command.
@@ -962,7 +1004,7 @@ def _main(argv=None):
# Try to load the particular command the caller asked for.
cmd_name = args.command[0]
- cmd_name = aliases.get(cmd_name, cmd_name)
+ cmd_name, args.command = resolve_alias(cmd_name, args.command)
# set up a bootstrap context, if asked.
# bootstrap context needs to include parsing the command, b/c things
@@ -974,14 +1016,16 @@ def _main(argv=None):
bootstrap_context = bootstrap.ensure_bootstrap_configuration()
with bootstrap_context:
- return finish_parse_and_run(parser, cmd_name, env_format_error)
+ return finish_parse_and_run(parser, cmd_name, args, env_format_error)
-def finish_parse_and_run(parser, cmd_name, env_format_error):
+def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
"""Finish parsing after we know the command to run."""
# add the found command to the parser and re-run then re-parse
command = parser.add_command(cmd_name)
- args, unknown = parser.parse_known_args()
+ args, unknown = parser.parse_known_args(main_args.command)
+ # we need to inherit verbose since the install command checks for it
+ args.verbose = main_args.verbose
# Now that we know what command this is and what its args are, determine
# whether we can continue with a bad environment and raise if not.
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 32037502c580a1..d5425772cdd3be 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -18,7 +18,7 @@
import sys
import traceback
import urllib.parse
-from typing import Optional, Union
+from typing import List, Optional, Union
import llnl.url
import llnl.util.tty as tty
@@ -27,18 +27,18 @@
import spack.caches
import spack.config
import spack.error
-import spack.fetch_strategy as fs
+import spack.fetch_strategy
import spack.mirror
+import spack.oci.image
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
-from spack.util.spack_yaml import syaml_dict
-from spack.version import VersionList
+import spack.version
#: What schemes do we support
-supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs")
+supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci")
def _url_or_path_to_url(url_or_path: str) -> str:
@@ -230,12 +230,12 @@ def _get_value(self, attribute: str, direction: str):
value = self._data.get(direction, {})
# Return top-level entry if only a URL was set.
- if isinstance(value, str):
- return self._data.get(attribute, None)
+ if isinstance(value, str) or attribute not in value:
+ return self._data.get(attribute)
- return self._data.get(direction, {}).get(attribute, None)
+ return value[attribute]
- def get_url(self, direction: str):
+ def get_url(self, direction: str) -> str:
if direction not in ("fetch", "push"):
raise ValueError(f"direction must be either 'fetch' or 'push', not {direction}")
@@ -255,18 +255,21 @@ def get_url(self, direction: str):
elif "url" in info:
url = info["url"]
- return _url_or_path_to_url(url) if url else None
+ if not url:
+ raise ValueError(f"Mirror {self.name} has no URL configured")
- def get_access_token(self, direction: str):
+ return _url_or_path_to_url(url)
+
+ def get_access_token(self, direction: str) -> Optional[str]:
return self._get_value("access_token", direction)
- def get_access_pair(self, direction: str):
+ def get_access_pair(self, direction: str) -> Optional[List]:
return self._get_value("access_pair", direction)
- def get_profile(self, direction: str):
+ def get_profile(self, direction: str) -> Optional[str]:
return self._get_value("profile", direction)
- def get_endpoint_url(self, direction: str):
+ def get_endpoint_url(self, direction: str) -> Optional[str]:
return self._get_value("endpoint_url", direction)
@@ -330,7 +333,7 @@ def from_json(stream, name=None):
raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e
def to_dict(self, recursive=False):
- return syaml_dict(
+ return syaml.syaml_dict(
sorted(
((k, (v.to_dict() if recursive else v)) for (k, v) in self._mirrors.items()),
key=operator.itemgetter(0),
@@ -372,7 +375,7 @@ def __len__(self):
def _determine_extension(fetcher):
- if isinstance(fetcher, fs.URLFetchStrategy):
+ if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
@@ -437,6 +440,19 @@ def __iter__(self):
yield self.cosmetic_path
+class OCIImageLayout:
+ """Follow the OCI Image Layout Specification to archive blobs
+
+ Paths are of the form `blobs/<algorithm>/<digest>`
+ """
+
+ def __init__(self, digest: spack.oci.image.Digest) -> None:
+ self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest)
+
+ def __iter__(self):
+ yield self.storage_path
+
+
def mirror_archive_paths(fetcher, per_package_ref, spec=None):
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
@@ -482,7 +498,7 @@ def get_all_versions(specs):
for version in pkg_cls.versions:
version_spec = spack.spec.Spec(pkg_cls.name)
- version_spec.versions = VersionList([version])
+ version_spec.versions = spack.version.VersionList([version])
version_specs.append(version_spec)
return version_specs
@@ -521,7 +537,7 @@ def get_matching_versions(specs, num_versions=1):
# Generate only versions that satisfy the spec.
if spec.concrete or v.intersects(spec.versions):
s = spack.spec.Spec(pkg.name)
- s.versions = VersionList([v])
+ s.versions = spack.version.VersionList([v])
s.variants = spec.variants.copy()
# This is needed to avoid hanging references during the
# concretization phase
@@ -591,14 +607,14 @@ def add(mirror: Mirror, scope=None):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
- mirrors = syaml_dict()
+ mirrors = syaml.syaml_dict()
if mirror.name in mirrors:
tty.die("Mirror with name {} already exists.".format(mirror.name))
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (mirror.name, mirror.to_dict()))
- mirrors = syaml_dict(items)
+ mirrors = syaml.syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=scope)
@@ -606,7 +622,7 @@ def remove(name, scope):
"""Remove the named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
- mirrors = syaml_dict()
+ mirrors = syaml.syaml_dict()
if name not in mirrors:
tty.die("No mirror with name %s" % name)
diff --git a/lib/spack/spack/modules/__init__.py b/lib/spack/spack/modules/__init__.py
index 468e1334230b47..ec197be679804a 100644
--- a/lib/spack/spack/modules/__init__.py
+++ b/lib/spack/spack/modules/__init__.py
@@ -7,7 +7,9 @@
include Tcl non-hierarchical modules, Lua hierarchical modules, and others.
"""
-from .common import disable_modules
+from typing import Dict, Type
+
+from .common import BaseModuleFileWriter, disable_modules
from .lmod import LmodModulefileWriter
from .tcl import TclModulefileWriter
from .ups_table import UpsTableModulefileWriter
@@ -19,10 +21,9 @@
"UpsTableModulefileWriter",
"UpsVersionModulefileWriter",
"disable_modules",
- "ensure_modules_are_enabled_or_warn",
]
-module_types = {
+module_types: Dict[str, Type[BaseModuleFileWriter]] = {
"tcl": TclModulefileWriter,
"lmod": LmodModulefileWriter,
"ups_table": UpsTableModulefileWriter,
diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py
index 01e2adac9aab24..4776c7f558a536 100644
--- a/lib/spack/spack/modules/common.py
+++ b/lib/spack/spack/modules/common.py
@@ -36,7 +36,7 @@
import os.path
import re
import string
-from typing import Optional
+from typing import List, Optional
import llnl.util.filesystem
import llnl.util.tty as tty
@@ -51,6 +51,7 @@
import spack.projections as proj
import spack.repo
import spack.schema.environment
+import spack.spec
import spack.store
import spack.tengine as tengine
import spack.util.environment
@@ -62,7 +63,7 @@
#: config section for this file
def configuration(module_set_name):
- config_path = "modules:%s" % module_set_name
+ config_path = f"modules:{module_set_name}"
return spack.config.get(config_path, {})
@@ -96,10 +97,10 @@ def _check_tokens_are_valid(format_string, message):
named_tokens = re.findall(r"{(\w*)}", format_string)
invalid_tokens = [x for x in named_tokens if x.lower() not in _valid_tokens]
if invalid_tokens:
- msg = message
- msg += " [{0}]. ".format(", ".join(invalid_tokens))
- msg += 'Did you check your "modules.yaml" configuration?'
- raise RuntimeError(msg)
+ raise RuntimeError(
+ f"{message} [{', '.join(invalid_tokens)}]. "
+ f"Did you check your 'modules.yaml' configuration?"
+ )
def update_dictionary_extending_lists(target, update):
@@ -219,7 +220,7 @@ def root_path(name, module_set_name):
"""
defaults = {"lmod": "$spack/share/spack/lmod", "tcl": "$spack/share/spack/modules"}
# Root folders where the various module files should be written
- roots = spack.config.get("modules:%s:roots" % module_set_name, {})
+ roots = spack.config.get(f"modules:{module_set_name}:roots", {})
# Merge config values into the defaults so we prefer configured values
roots = spack.config.merge_yaml(defaults, roots)
@@ -262,7 +263,7 @@ def read_module_index(root):
index_path = os.path.join(root, "module-index.yaml")
if not os.path.exists(index_path):
return {}
- with open(index_path, "r") as index_file:
+ with open(index_path) as index_file:
return _read_module_index(index_file)
@@ -310,21 +311,21 @@ def upstream_module(self, spec, module_type):
if db_for_spec in self.upstream_dbs:
db_index = self.upstream_dbs.index(db_for_spec)
elif db_for_spec:
- raise spack.error.SpackError("Unexpected: {0} is installed locally".format(spec))
+ raise spack.error.SpackError(f"Unexpected: {spec} is installed locally")
else:
- raise spack.error.SpackError("Unexpected: no install DB found for {0}".format(spec))
+ raise spack.error.SpackError(f"Unexpected: no install DB found for {spec}")
module_index = self.module_indices[db_index]
module_type_index = module_index.get(module_type, {})
if not module_type_index:
tty.debug(
- "No {0} modules associated with the Spack instance where"
- " {1} is installed".format(module_type, spec)
+ f"No {module_type} modules associated with the Spack instance "
+ f"where {spec} is installed"
)
return None
if spec.dag_hash() in module_type_index:
return module_type_index[spec.dag_hash()]
else:
- tty.debug("No module is available for upstream package {0}".format(spec))
+ tty.debug(f"No module is available for upstream package {spec}")
return None
@@ -396,16 +397,14 @@ class BaseConfiguration:
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
- def __init__(self, spec, module_set_name, explicit=None):
+ def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
# Module where type(self) is defined
- self.module = inspect.getmodule(self)
+ m = inspect.getmodule(self)
+ assert m is not None # make mypy happy
+ self.module = m
# Spec for which we want to generate a module file
self.spec = spec
self.name = module_set_name
- # Software installation has been explicitly asked (get this information from
- # db when querying an existing module, like during a refresh or rm operations)
- if explicit is None:
- explicit = spec._installed_explicitly()
self.explicit = explicit
# Dictionary of configuration options that should be applied
# to the spec
@@ -459,7 +458,11 @@ def suffixes(self):
if constraint in self.spec:
suffixes.append(suffix)
suffixes = list(dedupe(suffixes))
- if self.hash:
+ # For hidden modules we can always add a fixed length hash as suffix, since it guards
+ # against file name clashes, and the module is not exposed to the user anyways.
+ if self.hidden:
+ suffixes.append(self.spec.dag_hash(length=7))
+ elif self.hash:
suffixes.append(self.hash)
return suffixes
@@ -484,37 +487,37 @@ def excluded(self):
spec = self.spec
conf = self.module.configuration(self.name)
- # Compute the list of include rules that match
- include_rules = conf.get("include", [])
- include_matches = [x for x in include_rules if spec.satisfies(x)]
-
- # Compute the list of exclude rules that match
- exclude_rules = conf.get("exclude", [])
- exclude_matches = [x for x in exclude_rules if spec.satisfies(x)]
-
- # Should I exclude the module because it's implicit?
- exclude_implicits = conf.get("exclude_implicits", None)
- excluded_as_implicit = exclude_implicits and not self.explicit
+ # Compute the list of matching include / exclude rules, and whether excluded as implicit
+ include_matches = [x for x in conf.get("include", []) if spec.satisfies(x)]
+ exclude_matches = [x for x in conf.get("exclude", []) if spec.satisfies(x)]
+ excluded_as_implicit = not self.explicit and conf.get("exclude_implicits", False)
def debug_info(line_header, match_list):
if match_list:
- msg = "\t{0} : {1}".format(line_header, spec.cshort_spec)
- tty.debug(msg)
+ tty.debug(f"\t{line_header} : {spec.cshort_spec}")
for rule in match_list:
- tty.debug("\t\tmatches rule: {0}".format(rule))
+ tty.debug(f"\t\tmatches rule: {rule}")
debug_info("INCLUDE", include_matches)
debug_info("EXCLUDE", exclude_matches)
if excluded_as_implicit:
- msg = "\tEXCLUDED_AS_IMPLICIT : {0}".format(spec.cshort_spec)
- tty.debug(msg)
+ tty.debug(f"\tEXCLUDED_AS_IMPLICIT : {spec.cshort_spec}")
+
+ return not include_matches and (exclude_matches or excluded_as_implicit)
+
+ @property
+ def hidden(self):
+ """Returns True if the module has been hidden, False otherwise."""
+
+ conf = self.module.configuration(self.name)
+
+ hidden_as_implicit = not self.explicit and conf.get("hide_implicits", False)
- is_excluded = exclude_matches or excluded_as_implicit
- if not include_matches and is_excluded:
- return True
+ if hidden_as_implicit:
+ tty.debug(f"\tHIDDEN_AS_IMPLICIT : {self.spec.cshort_spec}")
- return False
+ return hidden_as_implicit
@property
def context(self):
@@ -544,8 +547,7 @@ def exclude_env_vars(self):
def _create_list_for(self, what):
include = []
for item in self.conf[what]:
- conf = type(self)(item, self.name)
- if not conf.excluded:
+ if not self.module.make_configuration(item, self.name).excluded:
include.append(item)
return include
@@ -602,7 +604,7 @@ def filename(self):
# Just the name of the file
filename = self.use_name
if self.extension:
- filename = "{0}.{1}".format(self.use_name, self.extension)
+ filename = f"{self.use_name}.{self.extension}"
# Architecture sub-folder
arch_folder_conf = spack.config.get("modules:%s:arch_folder" % self.conf.name, True)
if arch_folder_conf:
@@ -670,7 +672,7 @@ def configure_options(self):
return msg
if os.path.exists(pkg.install_configure_args_path):
- with open(pkg.install_configure_args_path, "r") as args_file:
+ with open(pkg.install_configure_args_path) as args_file:
return spack.util.path.padding_filter(args_file.read())
# Returning a false-like value makes the default templates skip
@@ -724,7 +726,9 @@ def environment_modifications(self):
# for that to work, globals have to be set on the package modules, and the
# whole chain of setup_dependent_package has to be followed from leaf to spec.
# So: just run it here, but don't collect env mods.
- spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals()
+ spack.build_environment.SetupContext(
+ spec, context=Context.RUN
+ ).set_all_package_py_globals()
# Then run setup_dependent_run_environment before setup_run_environment.
for dep in spec.dependencies(deptype=("link", "run")):
@@ -817,8 +821,7 @@ def autoload(self):
def _create_module_list_of(self, what):
m = self.conf.module
name = self.conf.name
- explicit = self.conf.explicit
- return [m.make_layout(x, name, explicit).use_name for x in getattr(self.conf, what)]
+ return [m.make_layout(x, name).use_name for x in getattr(self.conf, what)]
@tengine.context_property
def verbose(self):
@@ -827,12 +830,19 @@ def verbose(self):
class BaseModuleFileWriter:
- def __init__(self, spec, module_set_name, explicit=None):
+ default_template: str
+ hide_cmd_format: str
+ modulerc_header: List[str]
+
+ def __init__(
+ self, spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
+ ) -> None:
self.spec = spec
# This class is meant to be derived. Get the module of the
# actual writer.
self.module = inspect.getmodule(self)
+ assert self.module is not None # make mypy happy
m = self.module
# Create the triplet of configuration/layout/context
@@ -850,6 +860,26 @@ def __init__(self, spec, module_set_name, explicit=None):
name = type(self).__name__
raise DefaultTemplateNotDefined(msg.format(name))
+ # Check if format for module hide command has been defined,
+ # throw if not found
+ try:
+ self.hide_cmd_format
+ except AttributeError:
+ msg = "'{0}' object has no attribute 'hide_cmd_format'\n"
+ msg += "Did you forget to define it in the class?"
+ name = type(self).__name__
+ raise HideCmdFormatNotDefined(msg.format(name))
+
+ # Check if modulerc header content has been defined,
+ # throw if not found
+ try:
+ self.modulerc_header
+ except AttributeError:
+ msg = "'{0}' object has no attribute 'modulerc_header'\n"
+ msg += "Did you forget to define it in the class?"
+ name = type(self).__name__
+ raise ModulercHeaderNotDefined(msg.format(name))
+
def _get_template(self):
"""Gets the template that will be rendered for this spec."""
# Get templates and put them in the order of importance:
@@ -857,7 +887,7 @@ def _get_template(self):
# 2. template specified in a package directly
# 3. default template (must be defined, check in __init__)
module_system_name = str(self.module.__name__).split(".")[-1]
- package_attribute = "{0}_template".format(module_system_name)
+ package_attribute = f"{module_system_name}_template"
choices = [
self.conf.template,
getattr(self.spec.package, package_attribute, None),
@@ -923,7 +953,7 @@ def write(self, overwrite=False):
# Attribute from package
module_name = str(self.module.__name__).split(".")[-1]
- attr_name = "{0}_context".format(module_name)
+ attr_name = f"{module_name}_context"
pkg_update = getattr(self.spec.package, attr_name, {})
context.update(pkg_update)
@@ -945,6 +975,9 @@ def write(self, overwrite=False):
# Symlink defaults if needed
self.update_module_defaults()
+ # record module hiddenness if implicit
+ self.update_module_hiddenness()
+
def update_module_defaults(self):
if any(self.spec.satisfies(default) for default in self.conf.defaults):
# This spec matches a default, it needs to be symlinked to default
@@ -955,6 +988,60 @@ def update_module_defaults(self):
os.symlink(self.layout.filename, default_tmp)
os.rename(default_tmp, default_path)
+ def update_module_hiddenness(self, remove=False):
+ """Update modulerc file corresponding to module to add or remove
+ command that hides module depending on its hidden state.
+
+ Args:
+ remove (bool): if True, hiddenness information for module is
+ removed from modulerc.
+ """
+ modulerc_path = self.layout.modulerc
+ hide_module_cmd = self.hide_cmd_format % self.layout.use_name
+ hidden = self.conf.hidden and not remove
+ modulerc_exists = os.path.exists(modulerc_path)
+ updated = False
+
+ if modulerc_exists:
+ # retrieve modulerc content
+ with open(modulerc_path) as f:
+ content = f.readlines()
+ content = "".join(content).split("\n")
+ # remove last empty item if any
+ if len(content[-1]) == 0:
+ del content[-1]
+ already_hidden = hide_module_cmd in content
+
+ # remove hide command if module not hidden
+ if already_hidden and not hidden:
+ content.remove(hide_module_cmd)
+ updated = True
+
+ # add hide command if module is hidden
+ elif not already_hidden and hidden:
+ if len(content) == 0:
+ content = self.modulerc_header.copy()
+ content.append(hide_module_cmd)
+ updated = True
+ else:
+ content = self.modulerc_header.copy()
+ if hidden:
+ content.append(hide_module_cmd)
+ updated = True
+
+ # no modulerc file change if no content update
+ if updated:
+ is_empty = content == self.modulerc_header or len(content) == 0
+ # remove existing modulerc if empty
+ if modulerc_exists and is_empty:
+ os.remove(modulerc_path)
+ # create or update modulerc
+ elif content != self.modulerc_header:
+ # ensure file ends with a newline character
+ content.append("")
+ with open(modulerc_path, "w") as f:
+ f.write("\n".join(content))
+
def remove(self):
"""Deletes the module file."""
mod_file = self.layout.filename
@@ -962,6 +1049,7 @@ def remove(self):
try:
os.remove(mod_file) # Remove the module file
self.remove_module_defaults() # Remove default targeting module file
+ self.update_module_hiddenness(remove=True) # Remove hide cmd in modulerc
os.removedirs(
os.path.dirname(mod_file)
) # Remove all the empty directories from the leaf up
@@ -1005,5 +1093,17 @@ class DefaultTemplateNotDefined(AttributeError, ModulesError):
"""
+class HideCmdFormatNotDefined(AttributeError, ModulesError):
+ """Raised if the attribute 'hide_cmd_format' has not been specified
+ in the derived classes.
+ """
+
+
+class ModulercHeaderNotDefined(AttributeError, ModulesError):
+ """Raised if the attribute 'modulerc_header' has not been specified
+ in the derived classes.
+ """
+
+
class ModulesTemplateNotFoundError(ModulesError, RuntimeError):
"""Raised if the template for a module file was not found."""
diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py
index d81e07e0bf9449..8f529ba21ceb5a 100644
--- a/lib/spack/spack/modules/lmod.py
+++ b/lib/spack/spack/modules/lmod.py
@@ -6,8 +6,7 @@
import collections
import itertools
import os.path
-import posixpath
-from typing import Any, Dict, List
+from typing import Dict, List, Optional, Tuple
import llnl.util.filesystem as fs
import llnl.util.lang as lang
@@ -24,18 +23,19 @@
#: lmod specific part of the configuration
-def configuration(module_set_name):
- config_path = "modules:%s:lmod" % module_set_name
- config = spack.config.get(config_path, {})
- return config
+def configuration(module_set_name: str) -> dict:
+ return spack.config.get(f"modules:{module_set_name}:lmod", {})
# Caches the configuration {spec_hash: configuration}
-configuration_registry: Dict[str, Any] = {}
+configuration_registry: Dict[Tuple[str, str, bool], BaseConfiguration] = {}
-def make_configuration(spec, module_set_name, explicit):
+def make_configuration(
+ spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
+) -> BaseConfiguration:
"""Returns the lmod configuration for spec"""
+ explicit = bool(spec._installed_explicitly()) if explicit is None else explicit
key = (spec.dag_hash(), module_set_name, explicit)
try:
return configuration_registry[key]
@@ -45,16 +45,18 @@ def make_configuration(spec, module_set_name, explicit):
)
-def make_layout(spec, module_set_name, explicit):
+def make_layout(
+ spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
+) -> BaseFileLayout:
"""Returns the layout information for spec"""
- conf = make_configuration(spec, module_set_name, explicit)
- return LmodFileLayout(conf)
+ return LmodFileLayout(make_configuration(spec, module_set_name, explicit))
-def make_context(spec, module_set_name, explicit):
+def make_context(
+ spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
+) -> BaseContext:
"""Returns the context information for spec"""
- conf = make_configuration(spec, module_set_name, explicit)
- return LmodContext(conf)
+ return LmodContext(make_configuration(spec, module_set_name, explicit))
def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
@@ -97,10 +99,7 @@ def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
class LmodConfiguration(BaseConfiguration):
"""Configuration class for lmod module files."""
- # Note: Posixpath is used here as well as below as opposed to
- # os.path.join due to spack.spec.Spec.format
- # requiring forward slash path seperators at this stage
- default_projections = {"all": posixpath.join("{name}", "{version}")}
+ default_projections = {"all": "{name}/{version}"}
@property
def core_compilers(self) -> List[spack.spec.CompilerSpec]:
@@ -232,6 +231,13 @@ def missing(self):
"""Returns the list of tokens that are not available."""
return [x for x in self.hierarchy_tokens if x not in self.available]
+ @property
+ def hidden(self):
+ # Never hide a module that opens a hierarchy
+ if any(self.spec.package.provides(x) for x in self.hierarchy_tokens):
+ return False
+ return super().hidden
+
class LmodFileLayout(BaseFileLayout):
"""File layout for lmod module files."""
@@ -267,12 +273,16 @@ def filename(self):
hierarchy_name = os.path.join(*parts)
# Compute the absolute path
- fullname = os.path.join(
+ return os.path.join(
self.arch_dirname, # root for lmod files on this architecture
hierarchy_name, # relative path
- ".".join([self.use_name, self.extension]), # file name
+ f"{self.use_name}.{self.extension}", # file name
)
- return fullname
+
+ @property
+ def modulerc(self):
+ """Returns the modulerc file associated with current module file"""
+ return os.path.join(os.path.dirname(self.filename), f".modulerc.{self.extension}")
def token_to_path(self, name, value):
"""Transforms a hierarchy token into the corresponding path part.
@@ -305,9 +315,7 @@ def path_part_fmt(token):
# we need to append a hash to the version to distinguish
# among flavors of the same library (e.g. openblas~openmp vs.
# openblas+openmp)
- path = path_part_fmt(token=value)
- path = "-".join([path, value.dag_hash(length=7)])
- return path
+ return f"{path_part_fmt(token=value)}-{value.dag_hash(length=7)}"
@property
def available_path_parts(self):
@@ -319,8 +327,7 @@ def available_path_parts(self):
# List of services that are part of the hierarchy
hierarchy = self.conf.hierarchy_tokens
# Tokenize each part that is both in the hierarchy and available
- parts = [self.token_to_path(x, available[x]) for x in hierarchy if x in available]
- return parts
+ return [self.token_to_path(x, available[x]) for x in hierarchy if x in available]
@property
@lang.memoized
@@ -438,7 +445,7 @@ def missing(self):
@lang.memoized
def unlocked_paths(self):
"""Returns the list of paths that are unlocked unconditionally."""
- layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
+ layout = make_layout(self.spec, self.conf.name)
return [os.path.join(*parts) for parts in layout.unlocked_paths[None]]
@tengine.context_property
@@ -446,7 +453,7 @@ def conditionally_unlocked_paths(self):
"""Returns the list of paths that are unlocked conditionally.
Each item in the list is a tuple with the structure (condition, path).
"""
- layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
+ layout = make_layout(self.spec, self.conf.name)
value = []
conditional_paths = layout.unlocked_paths
conditional_paths.pop(None)
@@ -468,7 +475,11 @@ def manipulate_path(token):
class LmodModulefileWriter(BaseModuleFileWriter):
"""Writer class for lmod module files."""
- default_template = posixpath.join("modules", "modulefile.lua")
+ default_template = "modules/modulefile.lua"
+
+ modulerc_header = []
+
+ hide_cmd_format = 'hide_version("%s")'
class CoreCompilersNotFoundError(spack.error.SpackError, KeyError):
diff --git a/lib/spack/spack/modules/tcl.py b/lib/spack/spack/modules/tcl.py
index 58b075379294b5..6d7f49b3309f33 100644
--- a/lib/spack/spack/modules/tcl.py
+++ b/lib/spack/spack/modules/tcl.py
@@ -6,28 +6,30 @@
"""This module implements the classes necessary to generate Tcl
non-hierarchical modules.
"""
-import posixpath
-from typing import Any, Dict
+import os.path
+from typing import Dict, Optional, Tuple
import spack.config
+import spack.spec
import spack.tengine as tengine
from .common import BaseConfiguration, BaseContext, BaseFileLayout, BaseModuleFileWriter
#: Tcl specific part of the configuration
-def configuration(module_set_name):
- config_path = "modules:%s:tcl" % module_set_name
- config = spack.config.get(config_path, {})
- return config
+def configuration(module_set_name: str) -> dict:
+ return spack.config.get(f"modules:{module_set_name}:tcl", {})
# Caches the configuration {spec_hash: configuration}
-configuration_registry: Dict[str, Any] = {}
+configuration_registry: Dict[Tuple[str, str, bool], BaseConfiguration] = {}
-def make_configuration(spec, module_set_name, explicit):
+def make_configuration(
+ spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
+) -> BaseConfiguration:
"""Returns the tcl configuration for spec"""
+ explicit = bool(spec._installed_explicitly()) if explicit is None else explicit
key = (spec.dag_hash(), module_set_name, explicit)
try:
return configuration_registry[key]
@@ -37,16 +39,18 @@ def make_configuration(spec, module_set_name, explicit):
)
-def make_layout(spec, module_set_name, explicit):
+def make_layout(
+ spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
+) -> BaseFileLayout:
"""Returns the layout information for spec"""
- conf = make_configuration(spec, module_set_name, explicit)
- return TclFileLayout(conf)
+ return TclFileLayout(make_configuration(spec, module_set_name, explicit))
-def make_context(spec, module_set_name, explicit):
+def make_context(
+ spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
+) -> BaseContext:
"""Returns the context information for spec"""
- conf = make_configuration(spec, module_set_name, explicit)
- return TclContext(conf)
+ return TclContext(make_configuration(spec, module_set_name, explicit))
class TclConfiguration(BaseConfiguration):
@@ -56,6 +60,11 @@ class TclConfiguration(BaseConfiguration):
class TclFileLayout(BaseFileLayout):
"""File layout for tcl module files."""
+ @property
+ def modulerc(self):
+ """Returns the modulerc file associated with current module file"""
+ return os.path.join(os.path.dirname(self.filename), ".modulerc")
+
class TclContext(BaseContext):
"""Context class for tcl module files."""
@@ -69,7 +78,8 @@ def prerequisites(self):
class TclModulefileWriter(BaseModuleFileWriter):
"""Writer class for tcl module files."""
- # Note: Posixpath is used here as opposed to
- # os.path.join due to spack.spec.Spec.format
- # requiring forward slash path seperators at this stage
- default_template = posixpath.join("modules", "modulefile.tcl")
+ default_template = "modules/modulefile.tcl"
+
+ modulerc_header = ["#%Module4.7"]
+
+ hide_cmd_format = "module-hide --soft --hidden-loaded %s"
diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py
index d3453beb794656..0c661172424751 100644
--- a/lib/spack/spack/multimethod.py
+++ b/lib/spack/spack/multimethod.py
@@ -26,6 +26,7 @@
"""
import functools
import inspect
+from contextlib import contextmanager
from llnl.util.lang import caller_locals
@@ -271,6 +272,13 @@ def __exit__(self, exc_type, exc_val, exc_tb):
spack.directives.DirectiveMeta.pop_from_context()
+@contextmanager
+def default_args(**kwargs):
+ spack.directives.DirectiveMeta.push_default_args(kwargs)
+ yield
+ spack.directives.DirectiveMeta.pop_default_args()
+
+
class MultiMethodError(spack.error.SpackError):
"""Superclass for multimethod dispatch errors"""
diff --git a/lib/spack/spack/oci/__init__.py b/lib/spack/spack/oci/__init__.py
new file mode 100644
index 00000000000000..af304aecb70f37
--- /dev/null
+++ b/lib/spack/spack/oci/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
diff --git a/lib/spack/spack/oci/image.py b/lib/spack/spack/oci/image.py
new file mode 100644
index 00000000000000..b61591b7bed0d0
--- /dev/null
+++ b/lib/spack/spack/oci/image.py
@@ -0,0 +1,235 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import re
+import urllib.parse
+from typing import Optional, Union
+
+import spack.spec
+
+# notice: Docker is more strict (no uppercase allowed). We parse image names *with* uppercase
+# and normalize, so: example.com/Organization/Name -> example.com/organization/name. Tags are
+# case sensitive though.
+alphanumeric_with_uppercase = r"[a-zA-Z0-9]+"
+separator = r"(?:[._]|__|[-]+)"
+localhost = r"localhost"
+domainNameComponent = r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])"
+optionalPort = r"(?::[0-9]+)?"
+tag = r"[\w][\w.-]{0,127}"
+digestPat = r"[A-Za-z][A-Za-z0-9]*(?:[-_+.][A-Za-z][A-Za-z0-9]*)*[:][0-9a-fA-F]{32,}"
+ipv6address = r"\[(?:[a-fA-F0-9:]+)\]"
+
+# domain name
+domainName = rf"{domainNameComponent}(?:\.{domainNameComponent})*"
+host = rf"(?:{domainName}|{ipv6address})"
+domainAndPort = rf"{host}{optionalPort}"
+
+# image name
+pathComponent = rf"{alphanumeric_with_uppercase}(?:{separator}{alphanumeric_with_uppercase})*"
+remoteName = rf"{pathComponent}(?:\/{pathComponent})*"
+namePat = rf"(?:{domainAndPort}\/)?{remoteName}"
+
+# Regex for a full image reference, with 3 groups: name, tag, digest
+referencePat = re.compile(rf"^({namePat})(?::({tag}))?(?:@({digestPat}))?$")
+
+# Regex for splitting the name into domain and path components
+anchoredNameRegexp = re.compile(rf"^(?:({domainAndPort})\/)?({remoteName})$")
+
+
+def ensure_sha256_checksum(oci_blob: str):
+    """Validate that the reference is of the format sha256:<checksum>.
+ Return the checksum if valid, raise ValueError otherwise."""
+ if ":" not in oci_blob:
+ raise ValueError(f"Invalid OCI blob format: {oci_blob}")
+ alg, checksum = oci_blob.split(":", 1)
+ if alg != "sha256":
+ raise ValueError(f"Unsupported OCI blob checksum algorithm: {alg}")
+ if len(checksum) != 64:
+ raise ValueError(f"Invalid OCI blob checksum length: {len(checksum)}")
+ return checksum
+
+
+class Digest:
+    """Represents a digest in the format <algorithm>:<digest>.
+ Currently only supports sha256 digests."""
+
+ __slots__ = ["algorithm", "digest"]
+
+ def __init__(self, *, algorithm: str, digest: str) -> None:
+ self.algorithm = algorithm
+ self.digest = digest
+
+ def __eq__(self, __value: object) -> bool:
+ if not isinstance(__value, Digest):
+ return NotImplemented
+ return self.algorithm == __value.algorithm and self.digest == __value.digest
+
+ @classmethod
+ def from_string(cls, string: str) -> "Digest":
+ return cls(algorithm="sha256", digest=ensure_sha256_checksum(string))
+
+ @classmethod
+ def from_sha256(cls, digest: str) -> "Digest":
+ return cls(algorithm="sha256", digest=digest)
+
+ def __str__(self) -> str:
+ return f"{self.algorithm}:{self.digest}"
+
+
+class ImageReference:
+ """A parsed image of the form domain/name:tag[@digest].
+ The digest is optional, and domain and tag are automatically
+ filled out with defaults when parsed from string."""
+
+ __slots__ = ["domain", "name", "tag", "digest"]
+
+ def __init__(
+ self, *, domain: str, name: str, tag: str = "latest", digest: Optional[Digest] = None
+ ):
+ self.domain = domain
+ self.name = name
+ self.tag = tag
+ self.digest = digest
+
+ @classmethod
+ def from_string(cls, string) -> "ImageReference":
+ match = referencePat.match(string)
+ if not match:
+ raise ValueError(f"Invalid image reference: {string}")
+
+ image, tag, digest = match.groups()
+
+ assert isinstance(image, str)
+ assert isinstance(tag, (str, type(None)))
+ assert isinstance(digest, (str, type(None)))
+
+ match = anchoredNameRegexp.match(image)
+
+ # This can never happen, since the regex is implied
+ # by the regex above. It's just here to make mypy happy.
+ assert match, f"Invalid image reference: {string}"
+
+ domain, name = match.groups()
+
+ assert isinstance(domain, (str, type(None)))
+ assert isinstance(name, str)
+
+ # Fill out defaults like docker would do...
+ # Based on github.com/distribution/distribution: allow short names like "ubuntu"
+    # and "user/repo" to be interpreted as "library/ubuntu" and "user/repo:latest".
+ # Not sure if Spack should follow Docker, but it's what people expect...
+ if not domain:
+ domain = "index.docker.io"
+ name = f"library/{name}"
+ elif (
+ "." not in domain
+ and ":" not in domain
+ and domain != "localhost"
+ and domain == domain.lower()
+ ):
+ name = f"{domain}/{name}"
+ domain = "index.docker.io"
+
+ # Lowercase the image name. This is enforced by Docker, although the OCI spec isn't clear?
+ # We do this anyways, cause for example in Github Actions the /
+ # part can have uppercase, and may be interpolated when specifying the relevant OCI image.
+ name = name.lower()
+
+ if not tag:
+ tag = "latest"
+
+ # sha256 is currently the only algorithm that
+ # we implement, even though the spec allows for more
+ if isinstance(digest, str):
+ digest = Digest.from_string(digest)
+
+ return cls(domain=domain, name=name, tag=tag, digest=digest)
+
+ def manifest_url(self) -> str:
+ digest_or_tag = self.digest or self.tag
+ return f"https://{self.domain}/v2/{self.name}/manifests/{digest_or_tag}"
+
+ def blob_url(self, digest: Union[str, Digest]) -> str:
+ if isinstance(digest, str):
+ digest = Digest.from_string(digest)
+ return f"https://{self.domain}/v2/{self.name}/blobs/{digest}"
+
+ def with_digest(self, digest: Union[str, Digest]) -> "ImageReference":
+ if isinstance(digest, str):
+ digest = Digest.from_string(digest)
+ return ImageReference(domain=self.domain, name=self.name, tag=self.tag, digest=digest)
+
+ def with_tag(self, tag: str) -> "ImageReference":
+ return ImageReference(domain=self.domain, name=self.name, tag=tag, digest=self.digest)
+
+ def uploads_url(self, digest: Optional[Digest] = None) -> str:
+ url = f"https://{self.domain}/v2/{self.name}/blobs/uploads/"
+ if digest:
+ url += f"?digest={digest}"
+ return url
+
+ def tags_url(self) -> str:
+ return f"https://{self.domain}/v2/{self.name}/tags/list"
+
+ def endpoint(self, path: str = "") -> str:
+ return urllib.parse.urljoin(f"https://{self.domain}/v2/", path)
+
+ def __str__(self) -> str:
+ s = f"{self.domain}/{self.name}"
+ if self.tag:
+ s += f":{self.tag}"
+ if self.digest:
+ s += f"@{self.digest}"
+ return s
+
+ def __eq__(self, __value: object) -> bool:
+ if not isinstance(__value, ImageReference):
+ return NotImplemented
+ return (
+ self.domain == __value.domain
+ and self.name == __value.name
+ and self.tag == __value.tag
+ and self.digest == __value.digest
+ )
+
+
+def _ensure_valid_tag(tag: str) -> str:
+ """Ensure a tag is valid for an OCI registry."""
+ sanitized = re.sub(r"[^\w.-]", "_", tag)
+ if len(sanitized) > 128:
+ return sanitized[:64] + sanitized[-64:]
+ return sanitized
+
+
+def default_tag(spec: "spack.spec.Spec") -> str:
+ """Return a valid, default image tag for a spec."""
+ return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
+
+
+#: Default OCI index tag
+default_index_tag = "index.spack"
+
+
+def tag_is_spec(tag: str) -> bool:
+ """Check if a tag is likely a Spec"""
+ return tag.endswith(".spack") and tag != default_index_tag
+
+
+def default_config(architecture: str, os: str):
+ return {
+ "architecture": architecture,
+ "os": os,
+ "rootfs": {"type": "layers", "diff_ids": []},
+ "config": {"Env": []},
+ }
+
+
+def default_manifest():
+ return {
+ "mediaType": "application/vnd.oci.image.manifest.v1+json",
+ "schemaVersion": 2,
+ "config": {"mediaType": "application/vnd.oci.image.config.v1+json"},
+ "layers": [],
+ }
diff --git a/lib/spack/spack/oci/oci.py b/lib/spack/spack/oci/oci.py
new file mode 100644
index 00000000000000..4e5e196cd10db9
--- /dev/null
+++ b/lib/spack/spack/oci/oci.py
@@ -0,0 +1,381 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import hashlib
+import json
+import os
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
+from http.client import HTTPResponse
+from typing import NamedTuple, Tuple
+from urllib.request import Request
+
+import llnl.util.tty as tty
+
+import spack.binary_distribution
+import spack.config
+import spack.error
+import spack.fetch_strategy
+import spack.mirror
+import spack.oci.opener
+import spack.repo
+import spack.spec
+import spack.stage
+import spack.traverse
+import spack.util.crypto
+
+from .image import Digest, ImageReference
+
+
+class Blob(NamedTuple):
+ compressed_digest: Digest
+ uncompressed_digest: Digest
+ size: int
+
+
+def create_tarball(spec: spack.spec.Spec, tarfile_path):
+ buildinfo = spack.binary_distribution.get_buildinfo_dict(spec)
+ return spack.binary_distribution._do_create_tarball(tarfile_path, spec.prefix, buildinfo)
+
+
+def _log_upload_progress(digest: Digest, size: int, elapsed: float):
+ elapsed = max(elapsed, 0.001) # guard against division by zero
+ tty.info(f"Uploaded {digest} ({elapsed:.2f}s, {size / elapsed / 1024 / 1024:.2f} MB/s)")
+
+
+def with_query_param(url: str, param: str, value: str) -> str:
+ """Add a query parameter to a URL
+
+ Args:
+ url: The URL to add the parameter to.
+ param: The parameter name.
+ value: The parameter value.
+
+ Returns:
+ The URL with the parameter added.
+ """
+ parsed = urllib.parse.urlparse(url)
+ query = urllib.parse.parse_qs(parsed.query)
+ if param in query:
+ query[param].append(value)
+ else:
+ query[param] = [value]
+ return urllib.parse.urlunparse(
+ parsed._replace(query=urllib.parse.urlencode(query, doseq=True))
+ )
+
+
+def upload_blob(
+ ref: ImageReference,
+ file: str,
+ digest: Digest,
+ force: bool = False,
+ small_file_size: int = 0,
+ _urlopen: spack.oci.opener.MaybeOpen = None,
+) -> bool:
+ """Uploads a blob to an OCI registry
+
+ We only do monolithic uploads, even though it's very simple to do chunked.
+ Observed problems with chunked uploads:
+ (1) it's slow, many sequential requests, (2) some registries set an *unknown*
+ max chunk size, and the spec doesn't say how to obtain it
+
+ Args:
+ ref: The image reference.
+ file: The file to upload.
+ digest: The digest of the file.
+ force: Whether to force upload the blob, even if it already exists.
+ small_file_size: For files at most this size, attempt
+ to do a single POST request instead of POST + PUT.
+            Some registries do not support single requests, and others
+ do not specify what size they support in single POST.
+ For now this feature is disabled by default (0KB)
+
+ Returns:
+ True if the blob was uploaded, False if it already existed.
+ """
+ _urlopen = _urlopen or spack.oci.opener.urlopen
+
+ # Test if the blob already exists, if so, early exit.
+ if not force and blob_exists(ref, digest, _urlopen):
+ return False
+
+ start = time.time()
+
+ with open(file, "rb") as f:
+ file_size = os.fstat(f.fileno()).st_size
+
+ # For small blobs, do a single POST request.
+ # The spec says that registries MAY support this
+ if file_size <= small_file_size:
+ request = Request(
+ url=ref.uploads_url(digest),
+ method="POST",
+ data=f,
+ headers={
+ "Content-Type": "application/octet-stream",
+ "Content-Length": str(file_size),
+ },
+ )
+ else:
+ request = Request(
+ url=ref.uploads_url(), method="POST", headers={"Content-Length": "0"}
+ )
+
+ response = _urlopen(request)
+
+ # Created the blob in one go.
+ if response.status == 201:
+ _log_upload_progress(digest, file_size, time.time() - start)
+ return True
+
+ # Otherwise, do another PUT request.
+ spack.oci.opener.ensure_status(response, 202)
+ assert "Location" in response.headers
+
+ # Can be absolute or relative, joining handles both
+ upload_url = with_query_param(
+ ref.endpoint(response.headers["Location"]), "digest", str(digest)
+ )
+ f.seek(0)
+
+ response = _urlopen(
+ Request(
+ url=upload_url,
+ method="PUT",
+ data=f,
+ headers={
+ "Content-Type": "application/octet-stream",
+ "Content-Length": str(file_size),
+ },
+ )
+ )
+
+ spack.oci.opener.ensure_status(response, 201)
+
+ # print elapsed time and # MB/s
+ _log_upload_progress(digest, file_size, time.time() - start)
+ return True
+
+
+def upload_manifest(
+ ref: ImageReference,
+ oci_manifest: dict,
+ tag: bool = True,
+ _urlopen: spack.oci.opener.MaybeOpen = None,
+):
+ """Uploads a manifest/index to a registry
+
+ Args:
+ ref: The image reference.
+ oci_manifest: The OCI manifest or index.
+ tag: When true, use the tag, otherwise use the digest,
+ this is relevant for multi-arch images, where the
+ tag is an index, referencing the manifests by digest.
+
+ Returns:
+ The digest and size of the uploaded manifest.
+ """
+ _urlopen = _urlopen or spack.oci.opener.urlopen
+
+ data = json.dumps(oci_manifest, separators=(",", ":")).encode()
+ digest = Digest.from_sha256(hashlib.sha256(data).hexdigest())
+ size = len(data)
+
+ if not tag:
+ ref = ref.with_digest(digest)
+
+ response = _urlopen(
+ Request(
+ url=ref.manifest_url(),
+ method="PUT",
+ data=data,
+ headers={"Content-Type": oci_manifest["mediaType"]},
+ )
+ )
+
+ spack.oci.opener.ensure_status(response, 201)
+ return digest, size
+
+
+def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference:
+ """Given an OCI based mirror, extract the URL and image name from it"""
+ url = mirror.push_url
+ if not url.startswith("oci://"):
+ raise ValueError(f"Mirror {mirror} is not an OCI mirror")
+ return ImageReference.from_string(url[6:])
+
+
+def blob_exists(
+ ref: ImageReference, digest: Digest, _urlopen: spack.oci.opener.MaybeOpen = None
+) -> bool:
+ """Checks if a blob exists in an OCI registry"""
+ try:
+ _urlopen = _urlopen or spack.oci.opener.urlopen
+ response = _urlopen(Request(url=ref.blob_url(digest), method="HEAD"))
+ return response.status == 200
+ except urllib.error.HTTPError as e:
+ if e.getcode() == 404:
+ return False
+ raise
+
+
+def copy_missing_layers(
+ src: ImageReference,
+ dst: ImageReference,
+ architecture: str,
+ _urlopen: spack.oci.opener.MaybeOpen = None,
+) -> Tuple[dict, dict]:
+ """Copy image layers from src to dst for given architecture.
+
+ Args:
+ src: The source image reference.
+ dst: The destination image reference.
+ architecture: The architecture (when referencing an index)
+
+ Returns:
+ Tuple of manifest and config of the base image.
+ """
+ _urlopen = _urlopen or spack.oci.opener.urlopen
+ manifest, config = get_manifest_and_config(src, architecture, _urlopen=_urlopen)
+
+ # Get layer digests
+ digests = [Digest.from_string(layer["digest"]) for layer in manifest["layers"]]
+
+    # Filter digests that don't exist in the registry
+ missing_digests = [
+ digest for digest in digests if not blob_exists(dst, digest, _urlopen=_urlopen)
+ ]
+
+ if not missing_digests:
+ return manifest, config
+
+ # Pull missing blobs, push them to the registry
+ with spack.stage.StageComposite.from_iterable(
+ make_stage(url=src.blob_url(digest), digest=digest, _urlopen=_urlopen)
+ for digest in missing_digests
+ ) as stages:
+ stages.fetch()
+ stages.check()
+ stages.cache_local()
+
+ for stage, digest in zip(stages, missing_digests):
+            # No need to check existence again, force=True.
+ upload_blob(
+ dst, file=stage.save_filename, force=True, digest=digest, _urlopen=_urlopen
+ )
+
+ return manifest, config
+
+
+#: OCI manifest content types (including docker type)
+manifest_content_type = [
+ "application/vnd.oci.image.manifest.v1+json",
+ "application/vnd.docker.distribution.manifest.v2+json",
+]
+
+#: OCI index content types (including docker type)
+index_content_type = [
+ "application/vnd.oci.image.index.v1+json",
+ "application/vnd.docker.distribution.manifest.list.v2+json",
+]
+
+#: All OCI manifest / index content types
+all_content_type = manifest_content_type + index_content_type
+
+
+def get_manifest_and_config(
+ ref: ImageReference,
+ architecture="amd64",
+ recurse=3,
+ _urlopen: spack.oci.opener.MaybeOpen = None,
+) -> Tuple[dict, dict]:
+ """Recursively fetch manifest and config for a given image reference
+ with a given architecture.
+
+ Args:
+ ref: The image reference.
+ architecture: The architecture (when referencing an index)
+ recurse: How many levels of index to recurse into.
+
+ Returns:
+ A tuple of (manifest, config)"""
+
+ _urlopen = _urlopen or spack.oci.opener.urlopen
+
+ # Get manifest
+ response: HTTPResponse = _urlopen(
+ Request(url=ref.manifest_url(), headers={"Accept": ", ".join(all_content_type)})
+ )
+
+ # Recurse when we find an index
+ if response.headers["Content-Type"] in index_content_type:
+ if recurse == 0:
+ raise Exception("Maximum recursion depth reached while fetching OCI manifest")
+
+ index = json.load(response)
+ manifest_meta = next(
+ manifest
+ for manifest in index["manifests"]
+ if manifest["platform"]["architecture"] == architecture
+ )
+
+ return get_manifest_and_config(
+ ref.with_digest(manifest_meta["digest"]),
+ architecture=architecture,
+ recurse=recurse - 1,
+ _urlopen=_urlopen,
+ )
+
+ # Otherwise, require a manifest
+ if response.headers["Content-Type"] not in manifest_content_type:
+ raise Exception(f"Unknown content type {response.headers['Content-Type']}")
+
+ manifest = json.load(response)
+
+ # Download, verify and cache config file
+ config_digest = Digest.from_string(manifest["config"]["digest"])
+ with make_stage(ref.blob_url(config_digest), config_digest, _urlopen=_urlopen) as stage:
+ stage.fetch()
+ stage.check()
+ stage.cache_local()
+ with open(stage.save_filename, "rb") as f:
+ config = json.load(f)
+
+ return manifest, config
+
+
+#: Same as upload_manifest, but with retry wrapper
+upload_manifest_with_retry = spack.oci.opener.default_retry(upload_manifest)
+
+#: Same as upload_blob, but with retry wrapper
+upload_blob_with_retry = spack.oci.opener.default_retry(upload_blob)
+
+#: Same as get_manifest_and_config, but with retry wrapper
+get_manifest_and_config_with_retry = spack.oci.opener.default_retry(get_manifest_and_config)
+
+#: Same as copy_missing_layers, but with retry wrapper
+copy_missing_layers_with_retry = spack.oci.opener.default_retry(copy_missing_layers)
+
+
+def make_stage(
+ url: str, digest: Digest, keep: bool = False, _urlopen: spack.oci.opener.MaybeOpen = None
+) -> spack.stage.Stage:
+ _urlopen = _urlopen or spack.oci.opener.urlopen
+ fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
+ url, checksum=digest.digest, _urlopen=_urlopen
+ )
+    # Use blobs/<algorithm>/<digest> as the cache path, which follows
+ # the OCI Image Layout Specification. What's missing though,
+ # is the `oci-layout` and `index.json` files, which are
+ # required by the spec.
+ return spack.stage.Stage(
+ fetch_strategy,
+ mirror_paths=spack.mirror.OCIImageLayout(digest),
+ name=digest.digest,
+ keep=keep,
+ )
diff --git a/lib/spack/spack/oci/opener.py b/lib/spack/spack/oci/opener.py
new file mode 100644
index 00000000000000..792598578d3204
--- /dev/null
+++ b/lib/spack/spack/oci/opener.py
@@ -0,0 +1,442 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""All the logic for OCI fetching and authentication"""
+
+import base64
+import json
+import re
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
+from enum import Enum, auto
+from http.client import HTTPResponse
+from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Tuple
+from urllib.request import Request
+
+import llnl.util.lang
+
+import spack.config
+import spack.mirror
+import spack.parser
+import spack.repo
+import spack.util.web
+
+from .image import ImageReference
+
+
+def _urlopen():
+ opener = create_opener()
+
+ def dispatch_open(fullurl, data=None, timeout=None):
+ timeout = timeout or spack.config.get("config:connect_timeout", 10)
+ return opener.open(fullurl, data, timeout)
+
+ return dispatch_open
+
+
+OpenType = Callable[..., HTTPResponse]
+MaybeOpen = Optional[OpenType]
+
+#: Opener that automatically uses OCI authentication based on mirror config
+urlopen: OpenType = llnl.util.lang.Singleton(_urlopen)
+
+
+SP = r" "
+OWS = r"[ \t]*"
+BWS = OWS
+HTAB = r"\t"
+VCHAR = r"\x21-\x7E"
+tchar = r"[!#$%&'*+\-.^_`|~0-9A-Za-z]"
+token = rf"{tchar}+"
+obs_text = r"\x80-\xFF"
+qdtext = rf"[{HTAB}{SP}\x21\x23-\x5B\x5D-\x7E{obs_text}]"
+quoted_pair = rf"\\([{HTAB}{SP}{VCHAR}{obs_text}])"
+quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"'
+
+
+class TokenType(spack.parser.TokenBase):
+ AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})"
+ # TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)" # todo... support this?
+ TOKEN = rf"{tchar}+"
+ EQUALS = rf"{BWS}={BWS}"
+ COMMA = rf"{OWS},{OWS}"
+ SPACE = r" +"
+ EOF = r"$"
+ ANY = r"."
+
+
+TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
+
+ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
+
+
+class State(Enum):
+ CHALLENGE = auto()
+ AUTH_PARAM_LIST_START = auto()
+ AUTH_PARAM = auto()
+ NEXT_IN_LIST = auto()
+ AUTH_PARAM_OR_SCHEME = auto()
+
+
+def tokenize(input: str):
+ scanner = ALL_TOKENS.scanner(input) # type: ignore[attr-defined]
+
+ for match in iter(scanner.match, None): # type: ignore[var-annotated]
+ yield spack.parser.Token(
+ TokenType.__members__[match.lastgroup], # type: ignore[attr-defined]
+ match.group(), # type: ignore[attr-defined]
+ match.start(), # type: ignore[attr-defined]
+ match.end(), # type: ignore[attr-defined]
+ )
+
+
+class Challenge:
+ __slots__ = ["scheme", "params"]
+
+ def __init__(
+ self, scheme: Optional[str] = None, params: Optional[List[Tuple[str, str]]] = None
+ ) -> None:
+ self.scheme = scheme or ""
+ self.params = params or []
+
+ def __repr__(self) -> str:
+ return f"Challenge({self.scheme}, {self.params})"
+
+ def __eq__(self, other: object) -> bool:
+ return (
+ isinstance(other, Challenge)
+ and self.scheme == other.scheme
+ and self.params == other.params
+ )
+
+
+def parse_www_authenticate(input: str):
+ """Very basic parsing of www-authenticate headers (RFC7235 section 4.1)
+ Notice: this omits token68 support."""
+
+ # auth-scheme = token
+ # auth-param = token BWS "=" BWS ( token / quoted-string )
+ # challenge = auth-scheme [ 1*SP ( token68 / #auth-param ) ]
+ # WWW-Authenticate = 1#challenge
+
+ challenges: List[Challenge] = []
+
+ _unquote = re.compile(quoted_pair).sub
+ unquote = lambda s: _unquote(r"\1", s[1:-1])
+
+ mode: State = State.CHALLENGE
+ tokens = tokenize(input)
+
+ current_challenge = Challenge()
+
+ def extract_auth_param(input: str) -> Tuple[str, str]:
+ key, value = input.split("=", 1)
+ key = key.rstrip()
+ value = value.lstrip()
+ if value.startswith('"'):
+ value = unquote(value)
+ return key, value
+
+ while True:
+ token: spack.parser.Token = next(tokens)
+
+ if mode == State.CHALLENGE:
+ if token.kind == TokenType.EOF:
+ raise ValueError(token)
+ elif token.kind == TokenType.TOKEN:
+ current_challenge.scheme = token.value
+ mode = State.AUTH_PARAM_LIST_START
+ else:
+ raise ValueError(token)
+
+ elif mode == State.AUTH_PARAM_LIST_START:
+ if token.kind == TokenType.EOF:
+ challenges.append(current_challenge)
+ break
+ elif token.kind == TokenType.COMMA:
+ # Challenge without param list, followed by another challenge.
+ challenges.append(current_challenge)
+ current_challenge = Challenge()
+ mode = State.CHALLENGE
+ elif token.kind == TokenType.SPACE:
+ # A space means it must be followed by param list
+ mode = State.AUTH_PARAM
+ else:
+ raise ValueError(token)
+
+ elif mode == State.AUTH_PARAM:
+ if token.kind == TokenType.EOF:
+ raise ValueError(token)
+ elif token.kind == TokenType.AUTH_PARAM:
+ key, value = extract_auth_param(token.value)
+ current_challenge.params.append((key, value))
+ mode = State.NEXT_IN_LIST
+ else:
+ raise ValueError(token)
+
+ elif mode == State.NEXT_IN_LIST:
+ if token.kind == TokenType.EOF:
+ challenges.append(current_challenge)
+ break
+ elif token.kind == TokenType.COMMA:
+ mode = State.AUTH_PARAM_OR_SCHEME
+ else:
+ raise ValueError(token)
+
+ elif mode == State.AUTH_PARAM_OR_SCHEME:
+ if token.kind == TokenType.EOF:
+ raise ValueError(token)
+ elif token.kind == TokenType.TOKEN:
+ challenges.append(current_challenge)
+ current_challenge = Challenge(token.value)
+ mode = State.AUTH_PARAM_LIST_START
+ elif token.kind == TokenType.AUTH_PARAM:
+ key, value = extract_auth_param(token.value)
+ current_challenge.params.append((key, value))
+ mode = State.NEXT_IN_LIST
+
+ return challenges
+
+
+class RealmServiceScope(NamedTuple):
+ realm: str
+ service: str
+ scope: str
+
+
+class UsernamePassword(NamedTuple):
+ username: str
+ password: str
+
+
+def get_bearer_challenge(challenges: List[Challenge]) -> Optional[RealmServiceScope]:
+ # Find a challenge that we can handle (currently only Bearer)
+ challenge = next((c for c in challenges if c.scheme == "Bearer"), None)
+
+ if challenge is None:
+ return None
+
+ # Get realm / service / scope from challenge
+ realm = next((v for k, v in challenge.params if k == "realm"), None)
+ service = next((v for k, v in challenge.params if k == "service"), None)
+ scope = next((v for k, v in challenge.params if k == "scope"), None)
+
+ if realm is None or service is None or scope is None:
+ return None
+
+ return RealmServiceScope(realm, service, scope)
+
+
+class OCIAuthHandler(urllib.request.BaseHandler):
+ def __init__(self, credentials_provider: Callable[[str], Optional[UsernamePassword]]):
+ """
+ Args:
+ credentials_provider: A function that takes a domain and may return a UsernamePassword.
+ """
+ self.credentials_provider = credentials_provider
+
+ # Cached bearer tokens for a given domain.
+ self.cached_tokens: Dict[str, str] = {}
+
+ def obtain_bearer_token(self, registry: str, challenge: RealmServiceScope, timeout) -> str:
+ # See https://docs.docker.com/registry/spec/auth/token/
+
+ query = urllib.parse.urlencode(
+ {"service": challenge.service, "scope": challenge.scope, "client_id": "spack"}
+ )
+
+ parsed = urllib.parse.urlparse(challenge.realm)._replace(
+ query=query, fragment="", params=""
+ )
+
+ # Don't send credentials over insecure transport.
+ if parsed.scheme != "https":
+ raise ValueError(
+ f"Cannot login to {registry} over insecure {parsed.scheme} connection"
+ )
+
+ request = Request(urllib.parse.urlunparse(parsed))
+
+ # Note: the credential pair is deliberately not cached, since the
+ # correct pair may depend on the configuration context of the request.
+ pair = self.credentials_provider(registry)
+
+ if pair is not None:
+ encoded = base64.b64encode(f"{pair.username}:{pair.password}".encode("utf-8")).decode(
+ "utf-8"
+ )
+ request.add_unredirected_header("Authorization", f"Basic {encoded}")
+
+ # Do a GET request.
+ response = self.parent.open(request, timeout=timeout)
+
+ # Read the response and parse the JSON
+ response_json = json.load(response)
+
+ # Get the token from the response
+ token = response_json["token"]
+
+ # Remember the last obtained token for this registry
+ # Note: we should probably take into account realm, service and scope
+ # so we can store multiple tokens for the same registry.
+ self.cached_tokens[registry] = token
+
+ return token
+
+ def https_request(self, req: Request):
+ # Eagerly add the bearer token to the request if no
+ # auth header is set yet, to avoid 401s in multiple
+ # requests to the same registry.
+
+ # Use has_header, not .headers, since there are two
+ # types of headers (redirected and unredirected)
+ if req.has_header("Authorization"):
+ return req
+
+ parsed = urllib.parse.urlparse(req.full_url)
+ token = self.cached_tokens.get(parsed.netloc)
+
+ if not token:
+ return req
+
+ req.add_unredirected_header("Authorization", f"Bearer {token}")
+ return req
+
+ def http_error_401(self, req: Request, fp, code, msg, headers):
+ # Login failed, avoid infinite recursion where we go back and
+ # forth between auth server and registry
+ if hasattr(req, "login_attempted"):
+ raise urllib.error.HTTPError(
+ req.full_url, code, f"Failed to login to {req.full_url}: {msg}", headers, fp
+ )
+
+ # On 401 Unauthorized, parse the WWW-Authenticate header
+ # to determine what authentication is required
+ if "WWW-Authenticate" not in headers:
+ raise urllib.error.HTTPError(
+ req.full_url,
+ code,
+ "Cannot login to registry, missing WWW-Authenticate header",
+ headers,
+ fp,
+ )
+
+ header_value = headers["WWW-Authenticate"]
+
+ try:
+ challenge = get_bearer_challenge(parse_www_authenticate(header_value))
+ except ValueError as e:
+ raise urllib.error.HTTPError(
+ req.full_url,
+ code,
+ f"Cannot login to registry, malformed WWW-Authenticate header: {header_value}",
+ headers,
+ fp,
+ ) from e
+
+ # If there is no bearer challenge, we can't handle it
+ if not challenge:
+ raise urllib.error.HTTPError(
+ req.full_url,
+ code,
+ f"Cannot login to registry, unsupported authentication scheme: {header_value}",
+ headers,
+ fp,
+ )
+
+ # Get the token from the auth handler
+ try:
+ token = self.obtain_bearer_token(
+ registry=urllib.parse.urlparse(req.get_full_url()).netloc,
+ challenge=challenge,
+ timeout=req.timeout,
+ )
+ except ValueError as e:
+ raise urllib.error.HTTPError(
+ req.full_url,
+ code,
+ f"Cannot login to registry, failed to obtain bearer token: {e}",
+ headers,
+ fp,
+ ) from e
+
+ # Add the token to the request
+ req.add_unredirected_header("Authorization", f"Bearer {token}")
+ setattr(req, "login_attempted", True)
+
+ return self.parent.open(req, timeout=req.timeout)
+
+
+def credentials_from_mirrors(
+ domain: str, *, mirrors: Optional[Iterable[spack.mirror.Mirror]] = None
+) -> Optional[UsernamePassword]:
+ """Return OCI registry credentials for ``domain`` found among the mirrors, if any."""
+
+ mirrors = mirrors or spack.mirror.MirrorCollection().values()
+
+ for mirror in mirrors:
+ # Prefer push credentials over fetch. Unlikely that those are different
+ # but our config format allows it.
+ for direction in ("push", "fetch"):
+ pair = mirror.get_access_pair(direction)
+ if pair is None:
+ continue
+ url = mirror.get_url(direction)
+ if not url.startswith("oci://"):
+ continue
+ try:
+ parsed = ImageReference.from_string(url[6:])
+ except ValueError:
+ continue
+ if parsed.domain == domain:
+ return UsernamePassword(*pair)
+ return None
+
+
+def create_opener():
+ """Create an opener that can handle OCI authentication."""
+ opener = urllib.request.OpenerDirector()
+ for handler in [
+ urllib.request.UnknownHandler(),
+ urllib.request.HTTPSHandler(),
+ spack.util.web.SpackHTTPDefaultErrorHandler(),
+ urllib.request.HTTPRedirectHandler(),
+ urllib.request.HTTPErrorProcessor(),
+ OCIAuthHandler(credentials_from_mirrors),
+ ]:
+ opener.add_handler(handler)
+ return opener
+
+
+def ensure_status(response: HTTPResponse, status: int):
+ """Raise an error if the response status is not the expected one."""
+ if response.status == status:
+ return
+
+ raise urllib.error.HTTPError(
+ response.geturl(), response.status, response.reason, response.info(), None
+ )
+
+
+def default_retry(f, retries: int = 3, sleep=None):
+ sleep = sleep or time.sleep
+
+ def wrapper(*args, **kwargs):
+ for i in range(retries):
+ try:
+ return f(*args, **kwargs)
+ except urllib.error.HTTPError as e:
+ # Retry on internal server errors, and rate limit errors
+ # Potentially this could take into account the Retry-After header
+ # if registries support it
+ if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
+ # Exponential backoff
+ sleep(2**i)
+ continue
+ raise
+
+ return wrapper
diff --git a/lib/spack/spack/operating_systems/windows_os.py b/lib/spack/spack/operating_systems/windows_os.py
index 0c3930e99c48f1..fa767d71fb1c22 100755
--- a/lib/spack/spack/operating_systems/windows_os.py
+++ b/lib/spack/spack/operating_systems/windows_os.py
@@ -5,10 +5,12 @@
import glob
import os
+import pathlib
import platform
import subprocess
from spack.error import SpackError
+from spack.util import windows_registry as winreg
from spack.version import Version
from ._operating_system import OperatingSystem
@@ -31,43 +33,6 @@ class WindowsOs(OperatingSystem):
10.
"""
- # Find MSVC directories using vswhere
- comp_search_paths = []
- vs_install_paths = []
- root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
- if root:
- try:
- extra_args = {"encoding": "mbcs", "errors": "strict"}
- paths = subprocess.check_output( # type: ignore[call-overload] # novermin
- [
- os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
- "-prerelease",
- "-requires",
- "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
- "-property",
- "installationPath",
- "-products",
- "*",
- ],
- **extra_args,
- ).strip()
- vs_install_paths = paths.split("\n")
- msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths]
- for p in msvc_paths:
- comp_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
- if os.getenv("ONEAPI_ROOT"):
- comp_search_paths.extend(
- glob.glob(
- os.path.join(
- str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin"
- )
- )
- )
- except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
- pass
- if comp_search_paths:
- compiler_search_paths = comp_search_paths
-
def __init__(self):
plat_ver = windows_version()
if plat_ver < Version("10"):
@@ -76,3 +41,71 @@ def __init__(self):
def __str__(self):
return self.name
+
+ @property
+ def vs_install_paths(self):
+ vs_install_paths = []
+ root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
+ if root:
+ try:
+ extra_args = {"encoding": "mbcs", "errors": "strict"}
+ paths = subprocess.check_output( # type: ignore[call-overload] # novermin
+ [
+ os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
+ "-prerelease",
+ "-requires",
+ "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
+ "-property",
+ "installationPath",
+ "-products",
+ "*",
+ ],
+ **extra_args,
+ ).strip()
+ vs_install_paths = paths.split("\n")
+ except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
+ pass
+ return vs_install_paths
+
+ @property
+ def msvc_paths(self):
+ return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]
+
+ @property
+ def compiler_search_paths(self):
+ # First Strategy: Find MSVC directories using vswhere
+ _compiler_search_paths = []
+ for p in self.msvc_paths:
+ _compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
+ if os.getenv("ONEAPI_ROOT"):
+ _compiler_search_paths.extend(
+ glob.glob(
+ os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
+ )
+ )
+ # Second strategy: Find MSVC via the registry
+ msft = winreg.WindowsRegistryView(
+ "SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE
+ )
+ vs_entries = msft.find_subkeys(r"VisualStudio_.*")
+ vs_paths = []
+
+ def clean_vs_path(path):
+ path = path.split(",")[0].lstrip("@")
+ return str((pathlib.Path(path).parent / "..\\..").resolve())
+
+ for entry in vs_entries:
+ try:
+ val = entry.get_subkey("Capabilities").get_value("ApplicationDescription").value
+ vs_paths.append(clean_vs_path(val))
+ except FileNotFoundError as e:
+ if hasattr(e, "winerror"):
+ if e.winerror == 2:
+ pass
+ else:
+ raise
+ else:
+ raise
+
+ _compiler_search_paths.extend(vs_paths)
+ return _compiler_search_paths
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 9bf01be5d4eddd..8113d363dd7cf1 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -32,14 +32,17 @@
from spack.build_systems.bundle import BundlePackage
from spack.build_systems.cached_cmake import (
CachedCMakePackage,
+ cmake_cache_filepath,
cmake_cache_option,
cmake_cache_path,
cmake_cache_string,
)
+from spack.build_systems.cargo import CargoPackage
from spack.build_systems.cmake import CMakePackage, generator
from spack.build_systems.cuda import CudaPackage
from spack.build_systems.generic import Package
from spack.build_systems.gnu import GNUMirrorPackage
+from spack.build_systems.go import GoPackage
from spack.build_systems.intel import IntelPackage
from spack.build_systems.lua import LuaPackage
from spack.build_systems.makefile import MakefilePackage
@@ -49,7 +52,9 @@
from spack.build_systems.nmake import NMakePackage
from spack.build_systems.octave import OctavePackage
from spack.build_systems.oneapi import (
+ INTEL_MATH_LIBRARIES,
IntelOneApiLibraryPackage,
+ IntelOneApiLibraryPackageWithSdk,
IntelOneApiPackage,
IntelOneApiStaticLibraryList,
)
@@ -85,7 +90,7 @@
UpstreamPackageError,
)
from spack.mixins import filter_compiler_wrappers
-from spack.multimethod import when
+from spack.multimethod import default_args, when
from spack.package_base import (
DependencyConflictError,
build_system_flags,
diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py
index 7e3532e9488ea2..c69918b41905b5 100644
--- a/lib/spack/spack/parser.py
+++ b/lib/spack/spack/parser.py
@@ -6,7 +6,7 @@
Here is the EBNF grammar for a spec::
- spec = [name] [node_options] { ^ node } |
+ spec = [name] [node_options] { ^[edge_properties] node } |
[name] [node_options] hash |
filename
@@ -14,7 +14,8 @@
[name] [node_options] hash |
filename
- node_options = [@(version_list|version_pair)] [%compiler] { variant }
+ node_options = [@(version_list|version_pair)] [%compiler] { variant }
+ edge_properties = [ { bool_variant | key_value } ]
hash = / id
filename = (.|/|[a-zA-Z0-9-_]*/)([a-zA-Z0-9-_./]*)(.json|.yaml)
@@ -64,9 +65,9 @@
from llnl.util.tty import color
+import spack.deptypes
import spack.error
import spack.spec
-import spack.variant
import spack.version
IS_WINDOWS = sys.platform == "win32"
@@ -97,9 +98,9 @@
VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
QUOTED_VALUE = r"[\"']+(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"
-VERSION = r"=?([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
-VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)"
-VERSION_LIST = rf"({VERSION_RANGE}|{VERSION})(\s*[,]\s*({VERSION_RANGE}|{VERSION}))*"
+VERSION = r"=?(?:[a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
+VERSION_RANGE = rf"(?:(?:{VERSION})?:(?:{VERSION}(?!\s*=))?)"
+VERSION_LIST = rf"(?:{VERSION_RANGE}|{VERSION})(?:\s*,\s*(?:{VERSION_RANGE}|{VERSION}))*"
class TokenBase(enum.Enum):
@@ -127,6 +128,8 @@ class TokenType(TokenBase):
"""
# Dependency
+ START_EDGE_PROPERTIES = r"(?:\^\[)"
+ END_EDGE_PROPERTIES = r"(?:\])"
DEPENDENCY = r"(?:\^)"
# Version
VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))"
@@ -164,7 +167,7 @@ class Token:
__slots__ = "kind", "value", "start", "end"
def __init__(
- self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None
+ self, kind: TokenBase, value: str, start: Optional[int] = None, end: Optional[int] = None
):
self.kind = kind
self.value = value
@@ -264,8 +267,8 @@ def tokens(self) -> List[Token]:
return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))
def next_spec(
- self, initial_spec: Optional[spack.spec.Spec] = None
- ) -> Optional[spack.spec.Spec]:
+ self, initial_spec: Optional["spack.spec.Spec"] = None
+ ) -> Optional["spack.spec.Spec"]:
"""Return the next spec parsed from text.
Args:
@@ -281,16 +284,15 @@ def next_spec(
initial_spec = initial_spec or spack.spec.Spec()
root_spec = SpecNodeParser(self.ctx).parse(initial_spec)
while True:
- if self.ctx.accept(TokenType.DEPENDENCY):
- dependency = SpecNodeParser(self.ctx).parse()
-
- if dependency is None:
- msg = (
- "this dependency sigil needs to be followed by a package name "
- "or a node attribute (version, variant, etc.)"
- )
- raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
-
+ if self.ctx.accept(TokenType.START_EDGE_PROPERTIES):
+ edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
+ edge_properties.setdefault("depflag", 0)
+ edge_properties.setdefault("virtuals", ())
+ dependency = self._parse_node(root_spec)
+ root_spec._add_dependency(dependency, **edge_properties)
+
+ elif self.ctx.accept(TokenType.DEPENDENCY):
+ dependency = self._parse_node(root_spec)
root_spec._add_dependency(dependency, depflag=0, virtuals=())
else:
@@ -298,7 +300,19 @@ def next_spec(
return root_spec
- def all_specs(self) -> List[spack.spec.Spec]:
+ def _parse_node(self, root_spec):
+ dependency = SpecNodeParser(self.ctx).parse()
+ if dependency is None:
+ msg = (
+ "the dependency sigil and any optional edge attributes must be followed by a "
+ "package name or a node attribute (version, variant, etc.)"
+ )
+ raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
+ if root_spec.concrete:
+ raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
+ return dependency
+
+ def all_specs(self) -> List["spack.spec.Spec"]:
"""Return all the specs that remain to be parsed"""
return list(iter(self.next_spec, None))
@@ -313,7 +327,9 @@ def __init__(self, ctx):
self.has_compiler = False
self.has_version = False
- def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
+ def parse(
+ self, initial_spec: Optional["spack.spec.Spec"] = None
+ ) -> Optional["spack.spec.Spec"]:
"""Parse a single spec node from a stream of tokens
Args:
@@ -414,7 +430,7 @@ class FileParser:
def __init__(self, ctx):
self.ctx = ctx
- def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
+ def parse(self, initial_spec: "spack.spec.Spec") -> "spack.spec.Spec":
"""Parse a spec tree from a specfile.
Args:
@@ -437,7 +453,42 @@ def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
return initial_spec
-def parse(text: str) -> List[spack.spec.Spec]:
+class EdgeAttributeParser:
+ __slots__ = "ctx", "literal_str"
+
+ def __init__(self, ctx, literal_str):
+ self.ctx = ctx
+ self.literal_str = literal_str
+
+ def parse(self):
+ attributes = {}
+ while True:
+ if self.ctx.accept(TokenType.KEY_VALUE_PAIR):
+ name, value = self.ctx.current_token.value.split("=", maxsplit=1)
+ name = name.strip("'\" ")
+ value = value.strip("'\" ").split(",")
+ attributes[name] = value
+ if name not in ("deptypes", "virtuals"):
+ msg = (
+ "the only edge attributes that are currently accepted "
+ 'are "deptypes" and "virtuals"'
+ )
+ raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
+ # TODO: Add code to accept bool variants here as soon as use variants are implemented
+ elif self.ctx.accept(TokenType.END_EDGE_PROPERTIES):
+ break
+ else:
+ msg = "unexpected token in edge attributes"
+ raise SpecParsingError(msg, self.ctx.next_token, self.literal_str)
+
+ # Turn deptypes=... to depflag representation
+ if "deptypes" in attributes:
+ deptype_string = attributes.pop("deptypes")
+ attributes["depflag"] = spack.deptypes.canonicalize(deptype_string)
+ return attributes
+
+
+def parse(text: str) -> List["spack.spec.Spec"]:
"""Parse text into a list of strings
Args:
@@ -450,8 +501,8 @@ def parse(text: str) -> List[spack.spec.Spec]:
def parse_one_or_raise(
- text: str, initial_spec: Optional[spack.spec.Spec] = None
-) -> spack.spec.Spec:
+ text: str, initial_spec: Optional["spack.spec.Spec"] = None
+) -> "spack.spec.Spec":
"""Parse exactly one spec from text and return it, or raise
Args:
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index 8b094a7642d634..7e2fcaff103ef3 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -7,6 +7,7 @@
import inspect
import os
import os.path
+import pathlib
import sys
import llnl.util.filesystem
@@ -36,10 +37,12 @@ def apply_patch(stage, patch_path, level=1, working_dir="."):
"""
git_utils_path = os.environ.get("PATH", "")
if sys.platform == "win32":
- git = which_string("git", required=True)
- git_root = git.split("\\")[:-2]
- git_root.extend(["usr", "bin"])
- git_utils_path = os.sep.join(git_root)
+ git = which_string("git")
+ if git:
+ git = pathlib.Path(git)
+ git_root = git.parent.parent
+ git_root = git_root / "usr" / "bin"
+ git_utils_path = os.pathsep.join([str(git_root), git_utils_path])
# TODO: Decouple Spack's patch support on Windows from Git
# for Windows, and instead have Spack directly fetch, install, and
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py
index 2624de56acd88e..32ace00a1669e7 100644
--- a/lib/spack/spack/provider_index.py
+++ b/lib/spack/spack/provider_index.py
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes and functions to manage providers of virtual dependencies"""
-import itertools
from typing import Dict, List, Optional, Set
import spack.error
@@ -11,33 +10,6 @@
import spack.util.spack_json as sjson
-def _cross_provider_maps(lmap, rmap):
- """Return a dictionary that combines constraint requests from both input.
-
- Args:
- lmap: main provider map
- rmap: provider map with additional constraints
- """
- # TODO: this is pretty darned nasty, and inefficient, but there
- # TODO: are not that many vdeps in most specs.
- result = {}
- for lspec, rspec in itertools.product(lmap, rmap):
- try:
- constrained = lspec.constrained(rspec)
- except spack.error.UnsatisfiableSpecError:
- continue
-
- # lp and rp are left and right provider specs.
- for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
- if lp_spec.name == rp_spec.name:
- try:
- const = lp_spec.constrained(rp_spec, deps=False)
- result.setdefault(constrained, set()).add(const)
- except spack.error.UnsatisfiableSpecError:
- continue
- return result
-
-
class _IndexBase:
#: This is a dict of dicts used for finding providers of particular
#: virtual dependencies. The dict of dicts looks like:
@@ -81,29 +53,6 @@ def providers_for(self, virtual_spec):
def __contains__(self, name):
return name in self.providers
- def satisfies(self, other):
- """Determine if the providers of virtual specs are compatible.
-
- Args:
- other: another provider index
-
- Returns:
- True if the providers are compatible, False otherwise.
- """
- common = set(self.providers) & set(other.providers)
- if not common:
- return True
-
- # This ensures that some provider in other COULD satisfy the
- # vpkg constraints on self.
- result = {}
- for name in common:
- crossed = _cross_provider_maps(self.providers[name], other.providers[name])
- if crossed:
- result[name] = crossed
-
- return all(c in result for c in common)
-
def __eq__(self, other):
return self.providers == other.providers
diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py
index a89b5dd407d536..5918454005df85 100644
--- a/lib/spack/spack/repo.py
+++ b/lib/spack/spack/repo.py
@@ -6,6 +6,7 @@
import abc
import collections.abc
import contextlib
+import difflib
import errno
import functools
import importlib
@@ -1516,7 +1517,18 @@ def __init__(self, name, repo=None):
long_msg = "Did you mean to specify a filename with './{0}'?"
long_msg = long_msg.format(name)
else:
- long_msg = "You may need to run 'spack clean -m'."
+ long_msg = "Use 'spack create' to create a new package."
+
+ if not repo:
+ repo = spack.repo.PATH
+
+ # We need to compare the base package name
+ pkg_name = name.rsplit(".", 1)[-1]
+ similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
+
+ if 1 <= len(similar) <= 5:
+ long_msg += "\n\nDid you mean one of the following packages?\n "
+ long_msg += "\n ".join(similar)
super().__init__(msg, long_msg)
self.name = name
diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py
index f99f47a455e42c..bdb1a272d03754 100644
--- a/lib/spack/spack/schema/__init__.py
+++ b/lib/spack/spack/schema/__init__.py
@@ -62,3 +62,25 @@ def _deprecated_properties(validator, deprecated, instance, schema):
Validator = llnl.util.lang.Singleton(_make_validator)
+
+spec_list_schema = {
+ "type": "array",
+ "default": [],
+ "items": {
+ "anyOf": [
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "matrix": {
+ "type": "array",
+ "items": {"type": "array", "items": {"type": "string"}},
+ },
+ "exclude": {"type": "array", "items": {"type": "string"}},
+ },
+ },
+ {"type": "string"},
+ {"type": "null"},
+ ]
+ },
+}
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
index 6c30f0aab9869e..6818cd78f39079 100644
--- a/lib/spack/spack/schema/config.py
+++ b/lib/spack/spack/schema/config.py
@@ -92,6 +92,7 @@
"url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]},
"additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
"binary_index_ttl": {"type": "integer", "minimum": 0},
+ "aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},
},
"deprecatedProperties": {
"properties": ["terminal_title"],
diff --git a/lib/spack/spack/schema/definitions.py b/lib/spack/spack/schema/definitions.py
new file mode 100644
index 00000000000000..470eb7e8989ce4
--- /dev/null
+++ b/lib/spack/spack/schema/definitions.py
@@ -0,0 +1,34 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for definitions
+
+.. literalinclude:: _spack_root/lib/spack/spack/schema/definitions.py
+ :lines: 13-
+"""
+
+import spack.schema
+
+#: Properties for inclusion in other schemas
+properties = {
+ "definitions": {
+ "type": "array",
+ "default": [],
+ "items": {
+ "type": "object",
+ "properties": {"when": {"type": "string"}},
+ "patternProperties": {r"^(?!when$)\w*": spack.schema.spec_list_schema},
+ },
+ }
+}
+
+#: Full schema with metadata
+schema = {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack definitions configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
+}
diff --git a/lib/spack/spack/schema/env.py b/lib/spack/spack/schema/env.py
index 6548ca4b2b400d..463c6680f0d47e 100644
--- a/lib/spack/spack/schema/env.py
+++ b/lib/spack/spack/schema/env.py
@@ -12,34 +12,11 @@
import spack.schema.gitlab_ci # DEPRECATED
import spack.schema.merged
-import spack.schema.packages
import spack.schema.projections
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"
-spec_list_schema = {
- "type": "array",
- "default": [],
- "items": {
- "anyOf": [
- {
- "type": "object",
- "additionalProperties": False,
- "properties": {
- "matrix": {
- "type": "array",
- "items": {"type": "array", "items": {"type": "string"}},
- },
- "exclude": {"type": "array", "items": {"type": "string"}},
- },
- },
- {"type": "string"},
- {"type": "null"},
- ]
- },
-}
-
projections_scheme = spack.schema.projections.properties["projections"]
schema = {
@@ -75,16 +52,7 @@
}
},
},
- "definitions": {
- "type": "array",
- "default": [],
- "items": {
- "type": "object",
- "properties": {"when": {"type": "string"}},
- "patternProperties": {r"^(?!when$)\w*": spec_list_schema},
- },
- },
- "specs": spec_list_schema,
+ "specs": spack.schema.spec_list_schema,
"view": {
"anyOf": [
{"type": "boolean"},
diff --git a/lib/spack/spack/schema/merged.py b/lib/spack/spack/schema/merged.py
index b20700a03cebfb..7ceb6494108d0e 100644
--- a/lib/spack/spack/schema/merged.py
+++ b/lib/spack/spack/schema/merged.py
@@ -17,6 +17,7 @@
import spack.schema.concretizer
import spack.schema.config
import spack.schema.container
+import spack.schema.definitions
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
@@ -32,6 +33,7 @@
spack.schema.config.properties,
spack.schema.container.properties,
spack.schema.ci.properties,
+ spack.schema.definitions.properties,
spack.schema.mirrors.properties,
spack.schema.modules.properties,
spack.schema.packages.properties,
diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py
index 2c62db0b98c9c2..a2a5bb67842ab0 100644
--- a/lib/spack/spack/schema/modules.py
+++ b/lib/spack/spack/schema/modules.py
@@ -17,10 +17,8 @@
#: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
#: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
spec_regex = (
- r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|"
- r"whitelist|blacklist|" # DEPRECATED: remove in 0.20.
- r"include|exclude|" # use these more inclusive/consistent options
- r"projections|naming_scheme|core_compilers|all)(^\w[\w-]*)"
+ r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|"
+ r"include|exclude|projections|naming_scheme|core_compilers|all)(^\w[\w-]*)"
)
#: Matches a valid name for a module set
@@ -46,14 +44,7 @@
"default": {},
"additionalProperties": False,
"properties": {
- # DEPRECATED: remove in 0.20.
- "environment_blacklist": {
- "type": "array",
- "default": [],
- "items": {"type": "string"},
- },
- # use exclude_env_vars instead
- "exclude_env_vars": {"type": "array", "default": [], "items": {"type": "string"}},
+ "exclude_env_vars": {"type": "array", "default": [], "items": {"type": "string"}}
},
},
"template": {"type": "string"},
@@ -80,15 +71,11 @@
"properties": {
"verbose": {"type": "boolean", "default": False},
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
- # DEPRECATED: remove in 0.20.
- "whitelist": array_of_strings,
- "blacklist": array_of_strings,
- "blacklist_implicits": {"type": "boolean", "default": False},
- # whitelist/blacklist have been replaced with include/exclude
"include": array_of_strings,
"exclude": array_of_strings,
"exclude_implicits": {"type": "boolean", "default": False},
"defaults": array_of_strings,
+ "hide_implicits": {"type": "boolean", "default": False},
"naming_scheme": {"type": "string"}, # Can we be more specific here?
"projections": projections_scheme,
"all": module_file_configuration,
diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py
index 2cc4534d0711fa..2802f8952947aa 100644
--- a/lib/spack/spack/schema/packages.py
+++ b/lib/spack/spack/schema/packages.py
@@ -8,6 +8,68 @@
:lines: 13-
"""
+permissions = {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "read": {"type": "string", "enum": ["user", "group", "world"]},
+ "write": {"type": "string", "enum": ["user", "group", "world"]},
+ "group": {"type": "string"},
+ },
+}
+
+variants = {"oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]}
+
+requirements = {
+ "oneOf": [
+ # 'require' can be a list of requirement_groups.
+ # each requirement group is a list of one or more
+ # specs. Either at least one or exactly one spec
+ # in the group must be satisfied (depending on
+ # whether you use "any_of" or "one_of",
+ # respectively)
+ {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "one_of": {"type": "array", "items": {"type": "string"}},
+ "any_of": {"type": "array", "items": {"type": "string"}},
+ "spec": {"type": "string"},
+ "message": {"type": "string"},
+ "when": {"type": "string"},
+ },
+ },
+ {"type": "string"},
+ ]
+ },
+ },
+ # Shorthand for a single requirement group with
+ # one member
+ {"type": "string"},
+ ]
+}
+
+permissions = {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "read": {"type": "string", "enum": ["user", "group", "world"]},
+ "write": {"type": "string", "enum": ["user", "group", "world"]},
+ "group": {"type": "string"},
+ },
+}
+
+package_attributes = {
+ "type": "object",
+ "additionalProperties": False,
+ "patternProperties": {r"\w+": {}},
+}
+
+REQUIREMENT_URL = "https://spack.readthedocs.io/en/latest/packages_yaml.html#package-requirements"
#: Properties for inclusion in other schemas
properties = {
@@ -15,57 +77,14 @@
"type": "object",
"default": {},
"additionalProperties": False,
- "patternProperties": {
- r"\w[\w-]*": { # package name
+ "properties": {
+ "all": { # package name
"type": "object",
"default": {},
"additionalProperties": False,
"properties": {
- "require": {
- "oneOf": [
- # 'require' can be a list of requirement_groups.
- # each requirement group is a list of one or more
- # specs. Either at least one or exactly one spec
- # in the group must be satisfied (depending on
- # whether you use "any_of" or "one_of",
- # repectively)
- {
- "type": "array",
- "items": {
- "oneOf": [
- {
- "type": "object",
- "additionalProperties": False,
- "properties": {
- "one_of": {
- "type": "array",
- "items": {"type": "string"},
- },
- "any_of": {
- "type": "array",
- "items": {"type": "string"},
- },
- "spec": {"type": "string"},
- "message": {"type": "string"},
- "when": {"type": "string"},
- },
- },
- {"type": "string"},
- ]
- },
- },
- # Shorthand for a single requirement group with
- # one member
- {"type": "string"},
- ]
- },
- "version": {
- "type": "array",
- "default": [],
- # version strings (type should be string, number is still possible
- # but deprecated. this is to avoid issues with e.g. 3.10 -> 3.1)
- "items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
- },
+ "require": requirements,
+ "version": {}, # Here only to warn users on ignored properties
"target": {
"type": "array",
"default": [],
@@ -78,22 +97,10 @@
"items": {"type": "string"},
}, # compiler specs
"buildable": {"type": "boolean", "default": True},
- "permissions": {
- "type": "object",
- "additionalProperties": False,
- "properties": {
- "read": {"type": "string", "enum": ["user", "group", "world"]},
- "write": {"type": "string", "enum": ["user", "group", "world"]},
- "group": {"type": "string"},
- },
- },
+ "permissions": permissions,
# If 'get_full_repo' is promoted to a Package-level
# attribute, it could be useful to set it here
- "package_attributes": {
- "type": "object",
- "additionalProperties": False,
- "patternProperties": {r"\w+": {}},
- },
+ "package_attributes": package_attributes,
"providers": {
"type": "object",
"default": {},
@@ -106,12 +113,40 @@
}
},
},
- "variants": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}},
- ]
+ "variants": variants,
+ },
+ "deprecatedProperties": {
+ "properties": ["version"],
+ "message": "setting version preferences in the 'all' section of packages.yaml "
+ "is deprecated and will be removed in v0.22\n\n\tThese preferences "
+ "will be ignored by Spack. You can set them only in package-specific sections "
+ "of the same file.\n",
+ "error": False,
+ },
+ }
+ },
+ "patternProperties": {
+ r"(?!^all$)(^\w[\w-]*)": { # package name
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": {
+ "require": requirements,
+ "version": {
+ "type": "array",
+ "default": [],
+ # version strings
+ "items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
},
+ "target": {}, # Here only to warn users on ignored properties
+ "compiler": {}, # Here only to warn users on ignored properties
+ "buildable": {"type": "boolean", "default": True},
+ "permissions": permissions,
+ # If 'get_full_repo' is promoted to a Package-level
+ # attribute, it could be useful to set it here
+ "package_attributes": package_attributes,
+ "providers": {}, # Here only to warn users on ignored properties
+ "variants": variants,
"externals": {
"type": "array",
"items": {
@@ -127,6 +162,18 @@
},
},
},
+ "deprecatedProperties": {
+ "properties": ["target", "compiler", "providers"],
+ "message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
+ "a package-specific section of packages.yaml is deprecated, and will be "
+ "removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and "
+ "can be set only in the 'all' section of the same file. "
+ "You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
+ "including files:lines where the deprecated attributes are used.\n\n"
+ "\tUse requirements to enforce conditions on specific packages: "
+ f"{REQUIREMENT_URL}\n",
+ "error": False,
+ },
}
},
}
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index f44cc5b06b4e66..d6ac5e8675a915 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -13,7 +13,7 @@
import re
import types
import warnings
-from typing import Dict, List, NamedTuple, Optional, Sequence, Tuple, Union
+from typing import Callable, Dict, List, NamedTuple, Optional, Sequence, Set, Tuple, Union
import archspec.cpu
@@ -338,6 +338,13 @@ def __getattr__(self, name):
fn = AspFunctionBuilder()
+TransformFunction = Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]
+
+
+def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
+ """Transformation that removes all "node" and "virtual_node" from the input list of facts."""
+ return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
+
def _create_counter(specs, tests):
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
@@ -685,7 +692,7 @@ def extract_args(model, predicate_name):
class ErrorHandler:
def __init__(self, model):
self.model = model
- self.error_args = extract_args(model, "error")
+ self.full_model = None
def multiple_values_error(self, attribute, pkg):
return f'Cannot select a single "{attribute}" for package "{pkg}"'
@@ -693,6 +700,48 @@ def multiple_values_error(self, attribute, pkg):
def no_value_error(self, attribute, pkg):
return f'Cannot select a single "{attribute}" for package "{pkg}"'
+ def _get_cause_tree(
+ self,
+ cause: Tuple[str, str],
+ conditions: Dict[str, str],
+ condition_causes: List[Tuple[Tuple[str, str], Tuple[str, str]]],
+ seen: Set,
+ indent: str = " ",
+ ) -> List[str]:
+ """
+ Implementation of recursion for self.get_cause_tree. Much of this operates on tuples
+ (condition_id, set_id) in which the latter id means that the condition represented by
+ the former held in the condition set represented by the latter.
+ """
+ seen.add(cause)
+ parents = [c for e, c in condition_causes if e == cause and c not in seen]
+ local = "required because %s " % conditions[cause[0]]
+
+ return [indent + local] + [
+ c
+ for parent in parents
+ for c in self._get_cause_tree(
+ parent, conditions, condition_causes, seen, indent=indent + " "
+ )
+ ]
+
+ def get_cause_tree(self, cause: Tuple[str, str]) -> List[str]:
+ """
+ Get the cause tree associated with the given cause.
+
+ Arguments:
+ cause: The root cause of the tree (final condition)
+
+ Returns:
+ A list of strings describing the causes, formatted to display tree structure.
+ """
+ conditions: Dict[str, str] = dict(extract_args(self.full_model, "condition_reason"))
+ condition_causes: List[Tuple[Tuple[str, str], Tuple[str, str]]] = list(
+ ((Effect, EID), (Cause, CID))
+ for Effect, EID, Cause, CID in extract_args(self.full_model, "condition_cause")
+ )
+ return self._get_cause_tree(cause, conditions, condition_causes, set())
+
def handle_error(self, msg, *args):
"""Handle an error state derived by the solver."""
if msg == "multiple_values_error":
@@ -701,14 +750,31 @@ def handle_error(self, msg, *args):
if msg == "no_value_error":
return self.no_value_error(*args)
+ try:
+ idx = args.index("startcauses")
+ except ValueError:
+ msg_args = args
+ causes = []
+ else:
+ msg_args = args[:idx]
+ cause_args = args[idx + 1 :]
+ cause_args_conditions = cause_args[::2]
+ cause_args_ids = cause_args[1::2]
+ causes = list(zip(cause_args_conditions, cause_args_ids))
+
+ msg = msg.format(*msg_args)
+
# For variant formatting, we sometimes have to construct specs
# to format values properly. Find/replace all occurances of
# Spec(...) with the string representation of the spec mentioned
- msg = msg.format(*args)
specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
for spec_str in specs_to_construct:
msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))
+ for cause in set(causes):
+ for c in self.get_cause_tree(cause):
+ msg += f"\n{c}"
+
return msg
def message(self, errors) -> str:
@@ -720,13 +786,40 @@ def message(self, errors) -> str:
return "\n".join([header] + messages)
def raise_if_errors(self):
- if not self.error_args:
+ initial_error_args = extract_args(self.model, "error")
+ if not initial_error_args:
return
+ error_causation = clingo.Control()
+
+ parent_dir = pathlib.Path(__file__).parent
+ errors_lp = parent_dir / "error_messages.lp"
+
+ def on_model(model):
+ self.full_model = model.symbols(shown=True, terms=True)
+
+ with error_causation.backend() as backend:
+ for atom in self.model:
+ atom_id = backend.add_atom(atom)
+ backend.add_rule([atom_id], [], choice=False)
+
+ error_causation.load(str(errors_lp))
+ error_causation.ground([("base", []), ("error_messages", [])])
+ _ = error_causation.solve(on_model=on_model)
+
+ # No choices so there will be only one model
+ error_args = extract_args(self.full_model, "error")
errors = sorted(
- [(int(priority), msg, args) for priority, msg, *args in self.error_args], reverse=True
+ [(int(priority), msg, args) for priority, msg, *args in error_args], reverse=True
)
- msg = self.message(errors)
+ try:
+ msg = self.message(errors)
+ except Exception as e:
+ msg = (
+ f"unexpected error during concretization [{str(e)}]. "
+ f"Please report a bug at https://github.com/spack/spack/issues"
+ )
+ raise spack.error.SpackError(msg)
raise UnsatisfiableSpecError(msg)
@@ -889,14 +982,6 @@ def on_model(model):
timer.start("solve")
solve_result = self.control.solve(**solve_kwargs)
-
- if solve_result.satisfiable and self._model_has_cycles(models):
- tty.debug(f"cycles detected, falling back to slower algorithm [specs={specs}]")
- self.control.load(os.path.join(parent_dir, "cycle_detection.lp"))
- self.control.ground([("no_cycle", [])])
- models.clear()
- solve_result = self.control.solve(**solve_kwargs)
-
timer.stop("solve")
# once done, construct the solve result
@@ -928,14 +1013,6 @@ def on_model(model):
# record the possible dependencies in the solve
result.possible_dependencies = setup.pkgs
- # print any unknown functions in the model
- for sym in best_model:
- if sym.name not in ("attr", "error", "opt_criterion"):
- tty.debug(
- "UNKNOWN SYMBOL: %s(%s)"
- % (sym.name, ", ".join(intermediate_repr(sym.arguments)))
- )
-
elif cores:
result.control = self.control
result.cores.extend(cores)
@@ -950,26 +1027,6 @@ def on_model(model):
return result, timer, self.control.statistics
- def _model_has_cycles(self, models):
- """Returns true if the best model has cycles in it"""
- cycle_detection = clingo.Control()
- parent_dir = pathlib.Path(__file__).parent
- lp_file = parent_dir / "cycle_detection.lp"
-
- min_cost, best_model = min(models)
- with cycle_detection.backend() as backend:
- for atom in best_model:
- if atom.name == "attr" and str(atom.arguments[0]) == '"depends_on"':
- symbol = fn.depends_on(atom.arguments[1], atom.arguments[2])
- atom_id = backend.add_atom(symbol.symbol())
- backend.add_rule([atom_id], [], choice=False)
-
- cycle_detection.load(str(lp_file))
- cycle_detection.ground([("base", []), ("no_cycle", [])])
- cycle_result = cycle_detection.solve()
-
- return cycle_result.unsatisfiable
-
class ConcreteSpecsByHash(collections.abc.Mapping):
"""Mapping containing concrete specs keyed by DAG hash.
@@ -1060,11 +1117,8 @@ def __init__(self, tests=False):
self.reusable_and_possible = ConcreteSpecsByHash()
- # id for dummy variables
- self._condition_id_counter = itertools.count()
- self._trigger_id_counter = itertools.count()
+ self._id_counter = itertools.count()
self._trigger_cache = collections.defaultdict(dict)
- self._effect_id_counter = itertools.count()
self._effect_cache = collections.defaultdict(dict)
# Caches to optimize the setup phase of the solver
@@ -1145,7 +1199,7 @@ def conflict_rules(self, pkg):
default_msg = "{0}: '{1}' conflicts with '{2}'"
no_constraint_msg = "{0}: conflicts with '{1}'"
for trigger, constraints in pkg.conflicts.items():
- trigger_msg = "conflict trigger %s" % str(trigger)
+ trigger_msg = f"conflict is triggered when {str(trigger)}"
trigger_spec = spack.spec.Spec(trigger)
trigger_id = self.condition(
trigger_spec, name=trigger_spec.name or pkg.name, msg=trigger_msg
@@ -1157,7 +1211,11 @@ def conflict_rules(self, pkg):
conflict_msg = no_constraint_msg.format(pkg.name, trigger)
else:
conflict_msg = default_msg.format(pkg.name, trigger, constraint)
- constraint_msg = "conflict constraint %s" % str(constraint)
+
+ spec_for_msg = (
+ spack.spec.Spec(pkg.name) if constraint == spack.spec.Spec() else constraint
+ )
+ constraint_msg = f"conflict applies to spec {str(spec_for_msg)}"
constraint_id = self.condition(constraint, name=pkg.name, msg=constraint_msg)
self.gen.fact(
fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, conflict_msg))
@@ -1196,32 +1254,9 @@ def compiler_facts(self):
matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))
for weight, (compiler_id, cspec) in enumerate(matches):
- f = fn.default_compiler_preference(compiler_id, weight)
+ f = fn.compiler_weight(compiler_id, weight)
self.gen.fact(f)
- def package_compiler_defaults(self, pkg):
- """Facts about packages' compiler prefs."""
-
- packages = spack.config.get("packages")
- pkg_prefs = packages.get(pkg.name)
- if not pkg_prefs or "compiler" not in pkg_prefs:
- return
-
- compiler_list = self.possible_compilers
- compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
- ppk = spack.package_prefs.PackagePrefs(pkg.name, "compiler", all=False)
- matches = sorted(compiler_list, key=lambda x: ppk(x.spec))
-
- for i, compiler in enumerate(reversed(matches)):
- self.gen.fact(
- fn.pkg_fact(
- pkg.name,
- fn.node_compiler_preference(
- compiler.spec.name, compiler.spec.version, -i * 100
- ),
- )
- )
-
def package_requirement_rules(self, pkg):
rules = self.requirement_rules_from_package_py(pkg)
rules.extend(self.requirement_rules_from_packages_yaml(pkg))
@@ -1253,52 +1288,70 @@ def requirement_rules_from_packages_yaml(self, pkg):
kind = RequirementKind.DEFAULT
return self._rules_from_requirements(pkg_name, requirements, kind=kind)
- def _rules_from_requirements(self, pkg_name: str, requirements, *, kind: RequirementKind):
+ def _rules_from_requirements(
+ self, pkg_name: str, requirements, *, kind: RequirementKind
+ ) -> List[RequirementRule]:
"""Manipulate requirements from packages.yaml, and return a list of tuples
with a uniform structure (name, policy, requirements).
"""
if isinstance(requirements, str):
- rules = [self._rule_from_str(pkg_name, requirements, kind)]
- else:
- rules = []
- for requirement in requirements:
- if isinstance(requirement, str):
- # A string represents a spec that must be satisfied. It is
- # equivalent to a one_of group with a single element
- rules.append(self._rule_from_str(pkg_name, requirement, kind))
- else:
- for policy in ("spec", "one_of", "any_of"):
- if policy in requirement:
- constraints = requirement[policy]
-
- # "spec" is for specifying a single spec
- if policy == "spec":
- constraints = [constraints]
- policy = "one_of"
-
- rules.append(
- RequirementRule(
- pkg_name=pkg_name,
- policy=policy,
- requirements=constraints,
- kind=kind,
- message=requirement.get("message"),
- condition=requirement.get("when"),
- )
- )
+ requirements = [requirements]
+
+ rules = []
+ for requirement in requirements:
+ # A string is equivalent to a one_of group with a single element
+ if isinstance(requirement, str):
+ requirement = {"one_of": [requirement]}
+
+ for policy in ("spec", "one_of", "any_of"):
+ if policy not in requirement:
+ continue
+
+ constraints = requirement[policy]
+ # "spec" is for specifying a single spec
+ if policy == "spec":
+ constraints = [constraints]
+ policy = "one_of"
+
+ constraints = [
+ x
+ for x in constraints
+ if not self.reject_requirement_constraint(pkg_name, constraint=x, kind=kind)
+ ]
+ if not constraints:
+ continue
+
+ rules.append(
+ RequirementRule(
+ pkg_name=pkg_name,
+ policy=policy,
+ requirements=constraints,
+ kind=kind,
+ message=requirement.get("message"),
+ condition=requirement.get("when"),
+ )
+ )
return rules
- def _rule_from_str(
- self, pkg_name: str, requirements: str, kind: RequirementKind
- ) -> RequirementRule:
- return RequirementRule(
- pkg_name=pkg_name,
- policy="one_of",
- requirements=[requirements],
- kind=kind,
- condition=None,
- message=None,
- )
+ def reject_requirement_constraint(
+ self, pkg_name: str, *, constraint: str, kind: RequirementKind
+ ) -> bool:
+ """Returns True if a requirement constraint should be rejected"""
+ if kind == RequirementKind.DEFAULT:
+ # Requirements under all: are applied only if they are satisfiable considering only
+ # package rules, so e.g. variants must exist etc. Otherwise, they are rejected.
+ try:
+ s = spack.spec.Spec(pkg_name)
+ s.constrain(constraint)
+ s.validate_or_raise()
+ except spack.error.SpackError as e:
+ tty.debug(
+ f"[SETUP] Rejecting the default '{constraint}' requirement "
+ f"on '{pkg_name}': {str(e)}",
+ level=2,
+ )
+ return True
+ return False
def pkg_rules(self, pkg, tests):
pkg = packagize(pkg)
@@ -1313,9 +1366,6 @@ def pkg_rules(self, pkg, tests):
# conflicts
self.conflict_rules(pkg)
- # default compilers for this package
- self.package_compiler_defaults(pkg)
-
# virtuals
self.package_provider_rules(pkg)
@@ -1339,7 +1389,7 @@ def trigger_rules(self):
self.gen.h2("Trigger conditions")
for name in self._trigger_cache:
cache = self._trigger_cache[name]
- for spec_str, (trigger_id, requirements) in cache.items():
+ for (spec_str, _), (trigger_id, requirements) in cache.items():
self.gen.fact(fn.pkg_fact(name, fn.trigger_id(trigger_id)))
self.gen.fact(fn.pkg_fact(name, fn.trigger_msg(spec_str)))
for predicate in requirements:
@@ -1352,7 +1402,7 @@ def effect_rules(self):
self.gen.h2("Imposed requirements")
for name in self._effect_cache:
cache = self._effect_cache[name]
- for spec_str, (effect_id, requirements) in cache.items():
+ for (spec_str, _), (effect_id, requirements) in cache.items():
self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
self.gen.fact(fn.pkg_fact(name, fn.effect_msg(spec_str)))
for predicate in requirements:
@@ -1451,18 +1501,26 @@ def variant_rules(self, pkg):
self.gen.newline()
- def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False):
+ def condition(
+ self,
+ required_spec: spack.spec.Spec,
+ imposed_spec: Optional[spack.spec.Spec] = None,
+ name: Optional[str] = None,
+ msg: Optional[str] = None,
+ transform_required: Optional[TransformFunction] = None,
+ transform_imposed: Optional[TransformFunction] = remove_node,
+ ):
"""Generate facts for a dependency or virtual provider condition.
Arguments:
- required_spec (spack.spec.Spec): the spec that triggers this condition
- imposed_spec (spack.spec.Spec or None): the spec with constraints that
- are imposed when this condition is triggered
- name (str or None): name for `required_spec` (required if
- required_spec is anonymous, ignored if not)
- msg (str or None): description of the condition
- node (bool): if False does not emit "node" or "virtual_node" requirements
- from the imposed spec
+ required_spec: the constraints that triggers this condition
+ imposed_spec: the constraints that are imposed when this condition is triggered
+ name: name for `required_spec` (required if required_spec is anonymous, ignored if not)
+ msg: description of the condition
+ transform_required: transformation applied to facts from the required spec. Defaults
+ to leave facts as they are.
+ transform_imposed: transformation applied to facts from the imposed spec. Defaults
+ to removing "node" and "virtual_node" facts.
Returns:
int: id of the condition created by this function
"""
@@ -1474,16 +1532,20 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=
# In this way, if a condition can't be emitted but the exception is handled in the caller,
# we won't emit partial facts.
- condition_id = next(self._condition_id_counter)
+ condition_id = next(self._id_counter)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
cache = self._trigger_cache[named_cond.name]
- named_cond_key = str(named_cond)
+ named_cond_key = (str(named_cond), transform_required)
if named_cond_key not in cache:
- trigger_id = next(self._trigger_id_counter)
+ trigger_id = next(self._id_counter)
requirements = self.spec_clauses(named_cond, body=True, required_from=name)
+
+ if transform_required:
+ requirements = transform_required(named_cond, requirements)
+
cache[named_cond_key] = (trigger_id, requirements)
trigger_id, requirements = cache[named_cond_key]
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
@@ -1492,14 +1554,14 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=
return condition_id
cache = self._effect_cache[named_cond.name]
- imposed_spec_key = str(imposed_spec)
+ imposed_spec_key = (str(imposed_spec), transform_imposed)
if imposed_spec_key not in cache:
- effect_id = next(self._effect_id_counter)
+ effect_id = next(self._id_counter)
requirements = self.spec_clauses(imposed_spec, body=False, required_from=name)
- if not node:
- requirements = list(
- filter(lambda x: x.args[0] not in ("node", "virtual_node"), requirements)
- )
+
+ if transform_imposed:
+ requirements = transform_imposed(imposed_spec, requirements)
+
cache[imposed_spec_key] = (effect_id, requirements)
effect_id, requirements = cache[imposed_spec_key]
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
@@ -1530,6 +1592,17 @@ def package_provider_rules(self, pkg):
)
self.gen.newline()
+ for when, sets_of_virtuals in pkg.provided_together.items():
+ condition_id = self.condition(
+ when, name=pkg.name, msg="Virtuals are provided together"
+ )
+ for set_id, virtuals_together in enumerate(sets_of_virtuals):
+ for name in virtuals_together:
+ self.gen.fact(
+ fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name))
+ )
+ self.gen.newline()
+
def package_dependencies_rules(self, pkg):
"""Translate 'depends_on' directives into ASP logic."""
for _, conditions in sorted(pkg.dependencies.items()):
@@ -1548,21 +1621,32 @@ def package_dependencies_rules(self, pkg):
if not depflag:
continue
- msg = "%s depends on %s" % (pkg.name, dep.spec.name)
+ msg = f"{pkg.name} depends on {dep.spec}"
if cond != spack.spec.Spec():
- msg += " when %s" % cond
+ msg += f" when {cond}"
else:
pass
- condition_id = self.condition(cond, dep.spec, pkg.name, msg)
- self.gen.fact(
- fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
- )
+ def track_dependencies(input_spec, requirements):
+ return requirements + [fn.attr("track_dependencies", input_spec.name)]
+
+ def dependency_holds(input_spec, requirements):
+ return remove_node(input_spec, requirements) + [
+ fn.attr(
+ "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
+ )
+ for t in dt.ALL_FLAGS
+ if t & depflag
+ ]
- for t in dt.ALL_FLAGS:
- if t & depflag:
- # there is a declared dependency of type t
- self.gen.fact(fn.dependency_type(condition_id, dt.flag_to_string(t)))
+ self.condition(
+ cond,
+ dep.spec,
+ name=pkg.name,
+ msg=msg,
+ transform_required=track_dependencies,
+ transform_imposed=dependency_holds,
+ )
self.gen.newline()
@@ -1577,6 +1661,7 @@ def virtual_preferences(self, pkg_name, func):
for i, provider in enumerate(providers):
provider_name = spack.spec.Spec(provider).name
func(vspec, provider_name, i)
+ self.gen.newline()
def provider_defaults(self):
self.gen.h2("Default virtual providers")
@@ -1657,8 +1742,17 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
when_spec = spack.spec.Spec(pkg_name)
try:
+ # With virtual we want to emit "node" and "virtual_node" in imposed specs
+ transform: Optional[TransformFunction] = remove_node
+ if virtual:
+ transform = None
+
member_id = self.condition(
- required_spec=when_spec, imposed_spec=spec, name=pkg_name, node=virtual
+ required_spec=when_spec,
+ imposed_spec=spec,
+ name=pkg_name,
+ transform_imposed=transform,
+ msg=f"{spec_str} is a requirement for package {pkg_name}",
)
except Exception as e:
# Do not raise if the rule comes from the 'all' subsection, since usability
@@ -1721,8 +1815,16 @@ def external_packages(self):
# Declare external conditions with a local index into packages.yaml
for local_idx, spec in enumerate(external_specs):
msg = "%s available as external when satisfying %s" % (spec.name, spec)
- condition_id = self.condition(spec, msg=msg)
- self.gen.fact(fn.pkg_fact(pkg_name, fn.possible_external(condition_id, local_idx)))
+
+ def external_imposition(input_spec, _):
+ return [fn.attr("external_conditions_hold", input_spec.name, local_idx)]
+
+ self.condition(
+ spec,
+ spack.spec.Spec(spec.name),
+ msg=msg,
+ transform_imposed=external_imposition,
+ )
self.possible_versions[spec.name].add(spec.version)
self.gen.newline()
@@ -1744,7 +1846,13 @@ def preferred_variants(self, pkg_name):
# perform validation of the variant and values
spec = spack.spec.Spec(pkg_name)
- spec.update_variant_validate(variant_name, values)
+ try:
+ spec.update_variant_validate(variant_name, values)
+ except (spack.variant.InvalidVariantValueError, KeyError, ValueError) as e:
+ tty.debug(
+ f"[SETUP]: rejected {str(variant)} as a preference for {pkg_name}: {str(e)}"
+ )
+ continue
for value in values:
self.variant_values_from_specs.add((pkg_name, variant.name, value))
@@ -1752,8 +1860,8 @@ def preferred_variants(self, pkg_name):
fn.variant_default_value_from_packages_yaml(pkg_name, variant.name, value)
)
- def target_preferences(self, pkg_name):
- key_fn = spack.package_prefs.PackagePrefs(pkg_name, "target")
+ def target_preferences(self):
+ key_fn = spack.package_prefs.PackagePrefs("all", "target")
if not self.target_specs_cache:
self.target_specs_cache = [
@@ -1763,17 +1871,25 @@ def target_preferences(self, pkg_name):
package_targets = self.target_specs_cache[:]
package_targets.sort(key=key_fn)
-
- offset = 0
- best_default = self.default_targets[0][1]
for i, preferred in enumerate(package_targets):
- if str(preferred.architecture.target) == best_default and i != 0:
- offset = 100
- self.gen.fact(
- fn.pkg_fact(
- pkg_name, fn.target_weight(str(preferred.architecture.target), i + offset)
- )
- )
+ self.gen.fact(fn.target_weight(str(preferred.architecture.target), i))
+
+ def flag_defaults(self):
+ self.gen.h2("Compiler flag defaults")
+
+ # types of flags that can be on specs
+ for flag in spack.spec.FlagMap.valid_compiler_flags():
+ self.gen.fact(fn.flag_type(flag))
+ self.gen.newline()
+
+ # flags from compilers.yaml
+ compilers = all_compilers_in_config()
+ for compiler in compilers:
+ for name, flags in compiler.flags.items():
+ for flag in flags:
+ self.gen.fact(
+ fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
+ )
def spec_clauses(self, *args, **kwargs):
"""Wrap a call to `_spec_clauses()` into a try/except block that
@@ -1826,7 +1942,7 @@ class Head:
node_flag = fn.attr("node_flag_set")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagate = fn.attr("node_flag_propagate")
- variant_propagate = fn.attr("variant_propagate")
+ variant_propagation_candidate = fn.attr("variant_propagation_candidate")
class Body:
node = fn.attr("node")
@@ -1840,7 +1956,7 @@ class Body:
node_flag = fn.attr("node_flag")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagate = fn.attr("node_flag_propagate")
- variant_propagate = fn.attr("variant_propagate")
+ variant_propagation_candidate = fn.attr("variant_propagation_candidate")
f = Body if body else Head
@@ -1889,7 +2005,9 @@ class Body:
clauses.append(f.variant_value(spec.name, vname, value))
if variant.propagate:
- clauses.append(f.variant_propagate(spec.name, vname, value, spec.name))
+ clauses.append(
+ f.variant_propagation_candidate(spec.name, vname, value, spec.name)
+ )
# Tell the concretizer that this is a possible value for the
# variant, to account for things like int/str values where we
@@ -1931,6 +2049,16 @@ class Body:
clauses.append(fn.attr("package_hash", spec.name, spec._package_hash))
clauses.append(fn.attr("hash", spec.name, spec.dag_hash()))
+ edges = spec.edges_from_dependents()
+ virtuals = [x for x in itertools.chain.from_iterable([edge.virtuals for edge in edges])]
+ if not body:
+ for virtual in virtuals:
+ clauses.append(fn.attr("provider_set", spec.name, virtual))
+ clauses.append(fn.attr("virtual_node", virtual))
+ else:
+ for virtual in virtuals:
+ clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual))
+
# add all clauses from dependencies
if transitive:
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
@@ -2217,6 +2345,8 @@ def target_defaults(self, specs):
self.default_targets = list(sorted(set(self.default_targets)))
+ self.target_preferences()
+
def virtual_providers(self):
self.gen.h2("Virtual providers")
msg = (
@@ -2440,12 +2570,8 @@ def setup(
reuse: list of concrete specs that can be reused
allow_deprecated: if True adds deprecated versions into the solve
"""
- self._condition_id_counter = itertools.count()
-
- # preliminary checks
check_packages_exist(specs)
- # get list of all possible dependencies
self.possible_virtuals = set(x.name for x in specs if x.virtual)
node_counter = _create_counter(specs, tests=self.tests)
@@ -2538,7 +2664,6 @@ def setup(
self.pkg_rules(pkg, tests=self.tests)
self.gen.h2("Package preferences: %s" % pkg)
self.preferred_variants(pkg)
- self.target_preferences(pkg)
self.gen.h1("Develop specs")
# Inject dev_path from environment
@@ -2564,20 +2689,45 @@ def setup(
self.define_target_constraints()
def literal_specs(self, specs):
- for idx, spec in enumerate(specs):
+ for spec in specs:
self.gen.h2("Spec: %s" % str(spec))
- self.gen.fact(fn.literal(idx))
+ condition_id = next(self._id_counter)
+ trigger_id = next(self._id_counter)
- self.gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name))
- for clause in self.spec_clauses(spec):
- self.gen.fact(fn.literal(idx, *clause.args))
- if clause.args[0] == "variant_set":
- self.gen.fact(
- fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
+ # Special condition triggered by "literal_solved"
+ self.gen.fact(fn.literal(trigger_id))
+ self.gen.fact(fn.pkg_fact(spec.name, fn.condition_trigger(condition_id, trigger_id)))
+ self.gen.fact(fn.condition_reason(condition_id, f"{spec} requested from CLI"))
+
+ # Effect imposes the spec
+ imposed_spec_key = str(spec), None
+ cache = self._effect_cache[spec.name]
+ msg = (
+ "literal specs have different requirements. clear cache before computing literals"
+ )
+ assert imposed_spec_key not in cache, msg
+ effect_id = next(self._id_counter)
+ requirements = self.spec_clauses(spec)
+ root_name = spec.name
+ for clause in requirements:
+ clause_name = clause.args[0]
+ if clause_name == "variant_set":
+ requirements.append(
+ fn.attr("variant_default_value_from_cli", *clause.args[1:])
)
+ elif clause_name in ("node", "virtual_node", "hash"):
+ # These facts are needed to compute the "condition_set" of the root
+ pkg_name = clause.args[1]
+ self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
+
+ requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name))
+ cache[imposed_spec_key] = (effect_id, requirements)
+ self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
if self.concretize_everything:
- self.gen.fact(fn.solve_literal(idx))
+ self.gen.fact(fn.solve_literal(trigger_id))
+
+ self.effect_rules()
def validate_and_define_versions_from_requirements(
self, *, allow_deprecated: bool, require_checksum: bool
@@ -2658,9 +2808,11 @@ class SpecBuilder:
r"^.*_propagate$",
r"^.*_satisfies$",
r"^.*_set$",
+ r"^dependency_holds$",
r"^node_compiler$",
r"^package_hash$",
r"^root$",
+ r"^track_dependencies$",
r"^variant_default_value_from_cli$",
r"^virtual_node$",
r"^virtual_root$",
@@ -3194,10 +3346,11 @@ def __init__(self, provided, conflicts):
msg = (
"Spack concretizer internal error. Please submit a bug report and include the "
"command, environment if applicable and the following error message."
- f"\n {provided} is unsatisfiable, errors are:"
+ f"\n {provided} is unsatisfiable"
)
- msg += "".join([f"\n {conflict}" for conflict in conflicts])
+ if conflicts:
+ msg += ", errors are:" + "".join([f"\n {conflict}" for conflict in conflicts])
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp
index efca3bfed2a32e..d5f24ddc3b33f0 100644
--- a/lib/spack/spack/solver/concretize.lp
+++ b/lib/spack/spack/solver/concretize.lp
@@ -10,9 +10,8 @@
% ID of the nodes in the "root" link-run sub-DAG
#const min_dupe_id = 0.
-#const link_run = 0.
-#const direct_link_run =1.
-#const direct_build = 2.
+#const direct_link_run = 0.
+#const direct_build = 1.
% Allow clingo to create nodes
{ attr("node", node(0..X-1, Package)) } :- max_dupes(Package, X), not virtual(Package).
@@ -30,23 +29,21 @@
:- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode).
:- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode).
:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode).
-:- attr("node_flag_source", PackageNode, _, _), not attr("node", PackageNode).
:- attr("no_flags", PackageNode, _), not attr("node", PackageNode).
:- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode).
:- attr("depends_on", ParentNode, _, _), not attr("node", ParentNode).
:- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode).
:- attr("node_flag_source", ParentNode, _, _), not attr("node", ParentNode).
:- attr("node_flag_source", _, _, ChildNode), not attr("node", ChildNode).
+:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode), internal_error("virtual node with no provider").
+:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode), internal_error("provider with no virtual node").
+:- provider(PackageNode, _), not attr("node", PackageNode), internal_error("provider with no real node").
-:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode).
-:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode).
-:- provider(PackageNode, _), not attr("node", PackageNode).
-
-:- attr("root", node(ID, PackageNode)), ID > min_dupe_id.
+:- attr("root", node(ID, PackageNode)), ID > min_dupe_id, internal_error("root with a non-minimal duplicate ID").
% Nodes in the "root" unification set cannot depend on non-root nodes if the dependency is "link" or "run"
-:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)).
-:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)).
+:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("link dependency out of the root unification set").
+:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("run dependency out of the root unification set").
% Rules on "unification sets", i.e. on sets of nodes allowing a single configuration of any given package
unify(SetID, PackageName) :- unification_set(SetID, node(_, PackageName)).
@@ -86,22 +83,24 @@ unification_set(SetID, VirtualNode)
%----
% In the "root" unification set only ID = 0 are allowed
-:- unification_set("root", node(ID, _)), ID != 0.
+:- unification_set("root", node(ID, _)), ID != 0, internal_error("root unification set has node with non-zero unification set ID").
% In the "root" unification set we allow only packages from the link-run possible subDAG
-:- unification_set("root", node(_, Package)), not possible_in_link_run(Package), not virtual(Package).
+:- unification_set("root", node(_, Package)), not possible_in_link_run(Package), not virtual(Package), internal_error("package outside possible link/run graph in root unification set").
% Each node must belong to at least one unification set
-:- attr("node", PackageNode), not unification_set(_, PackageNode).
+:- attr("node", PackageNode), not unification_set(_, PackageNode), internal_error("node belongs to no unification set").
% Cannot have a node with an ID, if lower ID of the same package are not used
:- attr("node", node(ID1, Package)),
not attr("node", node(ID2, Package)),
- max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1.
+ max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1,
+ internal_error("node skipped id number").
:- attr("virtual_node", node(ID1, Package)),
not attr("virtual_node", node(ID2, Package)),
- max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1.
+ max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1,
+ internal_error("virtual node skipped id number").
%-----------------------------------------------------------------------------
% Map literal input specs to facts that drive the solve
@@ -113,26 +112,30 @@ unification_set(SetID, VirtualNode)
multiple_nodes_attribute("node_flag_source").
multiple_nodes_attribute("depends_on").
multiple_nodes_attribute("virtual_on_edge").
+multiple_nodes_attribute("provider_set").
+
+trigger_condition_holds(TriggerID, node(min_dupe_id, Package)) :-
+ solve_literal(TriggerID),
+ pkg_fact(Package, condition_trigger(_, TriggerID)),
+ literal(TriggerID).
-% Map constraint on the literal ID to facts on the node
-attr(Name, node(min_dupe_id, A1)) :- literal(LiteralID, Name, A1), solve_literal(LiteralID).
-attr(Name, node(min_dupe_id, A1), A2) :- literal(LiteralID, Name, A1, A2), solve_literal(LiteralID).
-attr(Name, node(min_dupe_id, A1), A2, A3) :- literal(LiteralID, Name, A1, A2, A3), solve_literal(LiteralID), not multiple_nodes_attribute(Name).
-attr(Name, node(min_dupe_id, A1), A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), solve_literal(LiteralID).
+trigger_node(TriggerID, Node, Node) :-
+ trigger_condition_holds(TriggerID, Node),
+ literal(TriggerID).
-% Special cases where nodes occur in arguments other than A1
-attr("node_flag_source", node(min_dupe_id, A1), A2, node(min_dupe_id, A3)) :- literal(LiteralID, "node_flag_source", A1, A2, A3), solve_literal(LiteralID).
-attr("depends_on", node(min_dupe_id, A1), node(min_dupe_id, A2), A3) :- literal(LiteralID, "depends_on", A1, A2, A3), solve_literal(LiteralID).
+% Since we trigger the existence of literal nodes from a condition, we need to construct
+% the condition_set/2 manually below
+mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
+condition_set(node(min_dupe_id, Root), node(min_dupe_id, Mentioned)) :- mentioned_in_literal(Root, Mentioned).
% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
-explicitly_requested_root(node(min_dupe_id, A1)) :- literal(LiteralID, "root", A1), solve_literal(LiteralID).
+explicitly_requested_root(node(min_dupe_id, Package)) :-
+ solve_literal(TriggerID),
+ trigger_and_effect(Package, TriggerID, EffectID),
+ imposed_constraint(EffectID, "root", Package).
#defined concretize_everything/0.
#defined literal/1.
-#defined literal/3.
-#defined literal/4.
-#defined literal/5.
-#defined literal/6.
% Attributes for node packages which must have a single value
attr_single_value("version").
@@ -230,7 +233,8 @@ possible_version_weight(node(ID, Package), Weight)
1 { version_weight(node(ID, Package), Weight) : pkg_fact(Package, version_declared(Version, Weight)) } 1
:- attr("version", node(ID, Package), Version),
- attr("node", node(ID, Package)).
+ attr("node", node(ID, Package)),
+ internal_error("version weights must exist and be unique").
% node_version_satisfies implies that exactly one of the satisfying versions
% is the package's version, and vice versa.
@@ -244,7 +248,8 @@ possible_version_weight(node(ID, Package), Weight)
% bound on the choice rule to avoid false positives with the error below
1 { attr("version", node(ID, Package), Version) : pkg_fact(Package, version_satisfies(Constraint, Version)) }
:- attr("node_version_satisfies", node(ID, Package), Constraint),
- pkg_fact(Package, version_satisfies(Constraint, _)).
+ pkg_fact(Package, version_satisfies(Constraint, _)),
+ internal_error("must choose a single version to satisfy version constraints").
% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
@@ -357,7 +362,7 @@ imposed_nodes(ConditionID, PackageNode, node(X, A1))
% Conditions that hold impose may impose constraints on other specs
attr(Name, node(X, A1)) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1), imposed_nodes(ID, PackageNode, node(X, A1)).
-attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)).
+attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
attr(Name, node(X, A1), A2, A3, A4) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3, A4), imposed_nodes(ID, PackageNode, node(X, A1)).
@@ -368,6 +373,16 @@ attr("node_flag_source", node(X, A1), A2, node(Y, A3))
imposed_constraint(ID, "node_flag_source", A1, A2, A3),
condition_set(node(Y, A3), node(X, A1)).
+% Provider set is relevant only for literals, since it's the only place where `^[virtuals=foo] bar`
+% might appear in the HEAD of a rule
+attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual))
+ :- solve_literal(TriggerID),
+ trigger_and_effect(_, TriggerID, EffectID),
+ impose(EffectID, _),
+ imposed_constraint(EffectID, "provider_set", Provider, Virtual).
+
+provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, VirtualNode).
+
% Here we can't use the condition set because it's a recursive definition, that doesn't define the
% node index, and leads to unsatisfiability. Hence we say that one and only one node index must
% satisfy the dependency.
@@ -427,24 +442,11 @@ depends_on(PackageNode, DependencyNode) :- attr("depends_on", PackageNode, Depen
% concrete. We chop off dependencies for externals, and dependencies of
% concrete specs don't need to be resolved -- they arise from the concrete
% specs themselves.
-dependency_holds(node(NodeID, Package), Dependency, Type) :-
- pkg_fact(Package, dependency_condition(ID, Dependency)),
- dependency_type(ID, Type),
- build(node(NodeID, Package)),
- not external(node(NodeID, Package)),
- condition_holds(ID, node(NodeID, Package)).
-
-% We cut off dependencies of externals (as we don't really know them).
-% Don't impose constraints on dependencies that don't exist.
-do_not_impose(EffectID, node(NodeID, Package)) :-
- not dependency_holds(node(NodeID, Package), Dependency, _),
- attr("node", node(NodeID, Package)),
- pkg_fact(Package, dependency_condition(ID, Dependency)),
- pkg_fact(Package, condition_effect(ID, EffectID)).
+attr("track_dependencies", Node) :- build(Node), not external(Node).
% If a dependency holds on a package node, there must be one and only one dependency node satisfying it
1 { attr("depends_on", PackageNode, node(0..Y-1, Dependency), Type) : max_dupes(Dependency, Y) } 1
- :- dependency_holds(PackageNode, Dependency, Type),
+ :- attr("dependency_holds", PackageNode, Dependency, Type),
not virtual(Dependency).
% all nodes in the graph must be reachable from some root
@@ -476,10 +478,25 @@ error(1, Msg)
% Virtual dependencies
%-----------------------------------------------------------------------------
+% If the provider is set from the command line, its weight is 0
+possible_provider_weight(ProviderNode, VirtualNode, 0, "Set on the command line")
+ :- attr("provider_set", ProviderNode, VirtualNode).
+
+% Enforces all virtuals to be provided, if multiple of them are provided together
+error(100, "Package '{0}' needs to provide both '{1}' and '{2}' together, but provides only '{1}'", Package, Virtual1, Virtual2)
+:- condition_holds(ID, node(X, Package)),
+ pkg_fact(Package, provided_together(ID, SetID, Virtual1)),
+ pkg_fact(Package, provided_together(ID, SetID, Virtual2)),
+ Virtual1 != Virtual2,
+ attr("virtual_on_incoming_edges", node(X, Package), Virtual1),
+ not attr("virtual_on_incoming_edges", node(X, Package), Virtual2),
+ attr("virtual_node", node(_, Virtual1)),
+ attr("virtual_node", node(_, Virtual2)).
+
% if a package depends on a virtual, it's not external and we have a
% provider for that virtual then it depends on the provider
node_depends_on_virtual(PackageNode, Virtual, Type)
- :- dependency_holds(PackageNode, Virtual, Type),
+ :- attr("dependency_holds", PackageNode, Virtual, Type),
virtual(Virtual),
not external(PackageNode).
@@ -489,11 +506,14 @@ node_depends_on_virtual(PackageNode, Virtual) :- node_depends_on_virtual(Package
:- node_depends_on_virtual(PackageNode, Virtual, Type).
attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
- :- dependency_holds(PackageNode, Virtual, Type),
+ :- attr("dependency_holds", PackageNode, Virtual, Type),
attr("depends_on", PackageNode, ProviderNode, Type),
provider(ProviderNode, node(_, Virtual)),
not external(PackageNode).
+attr("virtual_on_incoming_edges", ProviderNode, Virtual)
+ :- attr("virtual_on_edge", _, ProviderNode, Virtual).
+
% dependencies on virtuals also imply that the virtual is a virtual node
1 { attr("virtual_node", node(0..X-1, Virtual)) : max_dupes(Virtual, X) }
:- node_depends_on_virtual(PackageNode, Virtual).
@@ -501,6 +521,10 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
% If there's a virtual node, we must select one and only one provider.
% The provider must be selected among the possible providers.
+error(100, "'{0}' cannot be a provider for the '{1}' virtual", Package, Virtual)
+ :- attr("provider_set", node(min_dupe_id, Package), node(min_dupe_id, Virtual)),
+ not virtual_condition_holds( node(min_dupe_id, Package), Virtual).
+
error(100, "Cannot find valid provider for virtual {0}", Virtual)
:- attr("virtual_node", node(X, Virtual)),
not provider(_, node(X, Virtual)).
@@ -521,20 +545,6 @@ attr("root", PackageNode) :- attr("virtual_root", VirtualNode), provider(Package
attr("node", PackageNode), virtual_condition_holds(PackageNode, Virtual) } 1
:- attr("virtual_node", node(X, Virtual)).
-% If a spec is selected as a provider, it is for all the virtual it could provide
-:- provider(PackageNode, node(X, Virtual1)),
- virtual_condition_holds(PackageNode, Virtual2),
- Virtual2 != Virtual1,
- unification_set(SetID, PackageNode),
- unification_set(SetID, node(X, Virtual2)),
- not provider(PackageNode, node(X, Virtual2)).
-
-% If a spec is a dependency, and could provide a needed virtual, it must be a provider
-:- node_depends_on_virtual(PackageNode, Virtual),
- depends_on(PackageNode, PossibleProviderNode),
- virtual_condition_holds(PossibleProviderNode, Virtual),
- not attr("virtual_on_edge", PackageNode, PossibleProviderNode, Virtual).
-
% The provider provides the virtual if some provider condition holds.
virtual_condition_holds(node(ProviderID, Provider), Virtual) :- virtual_condition_holds(ID, node(ProviderID, Provider), Virtual).
virtual_condition_holds(ID, node(ProviderID, Provider), Virtual) :-
@@ -561,6 +571,8 @@ do_not_impose(EffectID, node(X, Package))
not virtual_condition_holds(PackageNode, Virtual),
internal_error("Virtual when provides not respected").
+#defined provided_together/4.
+
%-----------------------------------------------------------------------------
% Virtual dependency weights
%-----------------------------------------------------------------------------
@@ -577,21 +589,15 @@ possible_provider_weight(DependencyNode, VirtualNode, 0, "external")
:- provider(DependencyNode, VirtualNode),
external(DependencyNode).
-% A provider mentioned in packages.yaml can use a weight
-% according to its priority in the list of providers
-possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "packages_yaml")
- :- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)),
- depends_on(node(ID, Package), node(DependencyID, Dependency)),
- pkg_fact(Package, provider_preference(Virtual, Dependency, Weight)).
-
% A provider mentioned in the default configuration can use a weight
% according to its priority in the list of providers
-possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "default")
- :- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)),
- default_provider_preference(Virtual, Dependency, Weight).
+possible_provider_weight(node(ProviderID, Provider), node(VirtualID, Virtual), Weight, "default")
+ :- provider(node(ProviderID, Provider), node(VirtualID, Virtual)),
+ default_provider_preference(Virtual, Provider, Weight).
% Any provider can use 100 as a weight, which is very high and discourage its use
-possible_provider_weight(node(DependencyID, Dependency), VirtualNode, 100, "fallback") :- provider(node(DependencyID, Dependency), VirtualNode).
+possible_provider_weight(node(ProviderID, Provider), VirtualNode, 100, "fallback")
+ :- provider(node(ProviderID, Provider), VirtualNode).
% do not warn if generated program contains none of these.
#defined virtual/1.
@@ -609,11 +615,11 @@ possible_provider_weight(node(DependencyID, Dependency), VirtualNode, 100, "fall
pkg_fact(Package, version_declared(Version, Weight, "external")) }
:- external(node(ID, Package)).
-error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec version", Package)
:- external(node(ID, Package)),
not external_version(node(ID, Package), _, _).
-error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+error(100, "Attempted to use external for '{0}' which does not satisfy a unique configured external spec version", Package)
:- external(node(ID, Package)),
2 { external_version(node(ID, Package), Version, Weight) }.
@@ -642,18 +648,15 @@ external(PackageNode) :- attr("external_spec_selected", PackageNode, _).
% determine if an external spec has been selected
attr("external_spec_selected", node(ID, Package), LocalIndex) :-
- external_conditions_hold(node(ID, Package), LocalIndex),
+ attr("external_conditions_hold", node(ID, Package), LocalIndex),
attr("node", node(ID, Package)),
not attr("hash", node(ID, Package), _).
-external_conditions_hold(node(PackageID, Package), LocalIndex) :-
- pkg_fact(Package, possible_external(ID, LocalIndex)), condition_holds(ID, node(PackageID, Package)).
-
% it cannot happen that a spec is external, but none of the external specs
% conditions hold.
error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
:- external(node(ID, Package)),
- not external_conditions_hold(node(ID, Package), _).
+ not attr("external_conditions_hold", node(ID, Package), _).
%-----------------------------------------------------------------------------
% Config required semantics
@@ -696,15 +699,18 @@ requirement_group_satisfied(node(ID, Package), X) :-
% flags if their only source is from a requirement. This is overly-specific
% and should use a more-generic approach like in https://github.com/spack/spack/pull/37180
-{ attr("node_flag", node(ID, A1), A2, A3) } :-
- requirement_group_member(Y, Package, X),
- activate_requirement(node(ID, Package), X),
- imposed_constraint(Y,"node_flag_set", A1, A2, A3).
+{ attr("node_flag", node(ID, Package), FlagType, FlagValue) } :-
+ requirement_group_member(ConditionID, Package, RequirementID),
+ activate_requirement(node(ID, Package), RequirementID),
+ pkg_fact(Package, condition_effect(ConditionID, EffectID)),
+ imposed_constraint(EffectID, "node_flag_set", Package, FlagType, FlagValue).
-{ attr("node_flag_source", node(ID, A1), A2, node(ID, A3)) } :-
- requirement_group_member(Y, Package, X),
- activate_requirement(node(ID, Package), X),
- imposed_constraint(Y,"node_flag_source", A1, A2, A3).
+{ attr("node_flag_source", node(NodeID1, Package1), FlagType, node(NodeID2, Package2)) } :-
+ requirement_group_member(ConditionID, Package1, RequirementID),
+ activate_requirement(node(NodeID1, Package1), RequirementID),
+ pkg_fact(Package1, condition_effect(ConditionID, EffectID)),
+ imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2),
+ imposed_nodes(EffectID, node(NodeID2, Package2), node(NodeID1, Package1)).
requirement_weight(node(ID, Package), Group, W) :-
W = #min {
@@ -751,23 +757,36 @@ node_has_variant(node(ID, Package), Variant) :-
pkg_fact(Package, variant(Variant)),
attr("node", node(ID, Package)).
-attr("variant_propagate", PackageNode, Variant, Value, Source) :-
+% Variant propagation is forwarded to dependencies
+attr("variant_propagation_candidate", PackageNode, Variant, Value, Source) :-
attr("node", PackageNode),
depends_on(ParentNode, PackageNode),
- attr("variant_propagate", ParentNode, Variant, Value, Source),
- not attr("variant_set", PackageNode, Variant).
+ attr("variant_value", node(_, Source), Variant, Value),
+ attr("variant_propagation_candidate", ParentNode, Variant, _, Source).
-attr("variant_value", node(ID, Package), Variant, Value) :-
- attr("node", node(ID, Package)),
+% If the node is a candidate, and it has the variant and value,
+% then that variant and value should be propagated
+attr("variant_propagate", node(ID, Package), Variant, Value, Source) :-
+ attr("variant_propagation_candidate", node(ID, Package), Variant, Value, Source),
node_has_variant(node(ID, Package), Variant),
- attr("variant_propagate", node(ID, Package), Variant, Value, _),
- pkg_fact(Package, variant_possible_value(Variant, Value)).
+ pkg_fact(Package, variant_possible_value(Variant, Value)),
+ not attr("variant_set", node(ID, Package), Variant).
+
+% Propagate the value, if there is the corresponding attribute
+attr("variant_value", PackageNode, Variant, Value) :- attr("variant_propagate", PackageNode, Variant, Value, _).
+% If a variant is propagated, we cannot have extraneous values (this is for multi valued variants)
+variant_is_propagated(PackageNode, Variant) :- attr("variant_propagate", PackageNode, Variant, _, _).
+:- variant_is_propagated(PackageNode, Variant),
+ attr("variant_value", PackageNode, Variant, Value),
+ not attr("variant_propagate", PackageNode, Variant, Value, _).
+
+% Cannot receive different values from different sources on the same variant
error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
attr("variant_propagate", node(X, Package), Variant, Value1, Source1),
attr("variant_propagate", node(X, Package), Variant, Value2, Source2),
node_has_variant(node(X, Package), Variant),
- Value1 < Value2.
+ Value1 < Value2, Source1 < Source2.
% a variant cannot be set if it is not a variant on the package
error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
@@ -863,13 +882,15 @@ variant_default_not_used(node(ID, Package), Variant, Value)
:- variant_default_value(Package, Variant, Value),
node_has_variant(node(ID, Package), Variant),
not attr("variant_value", node(ID, Package), Variant, Value),
+ not attr("variant_propagate", node(ID, Package), Variant, _, _),
attr("node", node(ID, Package)).
% The variant is set in an external spec
external_with_variant_set(node(NodeID, Package), Variant, Value)
:- attr("variant_value", node(NodeID, Package), Variant, Value),
- condition_requirement(ID, "variant_value", Package, Variant, Value),
- pkg_fact(Package, possible_external(ID, _)),
+ condition_requirement(TriggerID, "variant_value", Package, Variant, Value),
+ trigger_and_effect(Package, TriggerID, EffectID),
+ imposed_constraint(EffectID, "external_conditions_hold", Package, _),
external(node(NodeID, Package)),
attr("node", node(NodeID, Package)).
@@ -1045,7 +1066,7 @@ attr("node_target", PackageNode, Target)
node_target_weight(node(ID, Package), Weight)
:- attr("node", node(ID, Package)),
attr("node_target", node(ID, Package), Target),
- pkg_fact(Package, target_weight(Target, Weight)).
+ target_weight(Target, Weight).
% compatibility rules for targets among nodes
node_target_match(ParentNode, DependencyNode)
@@ -1167,23 +1188,17 @@ compiler_mismatch_required(PackageNode, DependencyNode)
#defined allow_compiler/2.
% compilers weighted by preference according to packages.yaml
-compiler_weight(node(ID, Package), Weight)
- :- node_compiler(node(ID, Package), CompilerID),
- compiler_name(CompilerID, Compiler),
- compiler_version(CompilerID, V),
- pkg_fact(Package, node_compiler_preference(Compiler, V, Weight)).
-compiler_weight(node(ID, Package), Weight)
+node_compiler_weight(node(ID, Package), Weight)
:- node_compiler(node(ID, Package), CompilerID),
compiler_name(CompilerID, Compiler),
compiler_version(CompilerID, V),
- not pkg_fact(Package, node_compiler_preference(Compiler, V, _)),
- default_compiler_preference(CompilerID, Weight).
-compiler_weight(node(ID, Package), 100)
+ compiler_weight(CompilerID, Weight).
+
+node_compiler_weight(node(ID, Package), 100)
:- node_compiler(node(ID, Package), CompilerID),
compiler_name(CompilerID, Compiler),
compiler_version(CompilerID, V),
- not pkg_fact(Package, node_compiler_preference(Compiler, V, _)),
- not default_compiler_preference(CompilerID, _).
+ not compiler_weight(CompilerID, _).
% For the time being, be strict and reuse only if the compiler match one we have on the system
error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
@@ -1191,7 +1206,7 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss
not node_compiler(node(ID, Package), _).
#defined node_compiler_preference/4.
-#defined default_compiler_preference/3.
+#defined compiler_weight/3.
%-----------------------------------------------------------------------------
% Compiler flags
@@ -1325,6 +1340,10 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package
#defined installed_hash/2.
+% This statement, which is a hidden feature of clingo, lets us avoid cycles in the DAG
+#edge (A, B) : depends_on(A, B).
+
+
%-----------------------------------------------------------------
% Optimization to avoid errors
%-----------------------------------------------------------------
@@ -1511,7 +1530,7 @@ opt_criterion(15, "non-preferred compilers").
#minimize{ 0@15: #true }.
#minimize{
Weight@15+Priority,PackageNode
- : compiler_weight(PackageNode, Weight),
+ : node_compiler_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
}.
diff --git a/lib/spack/spack/solver/cycle_detection.lp b/lib/spack/spack/solver/cycle_detection.lp
deleted file mode 100644
index 310c543623d153..00000000000000
--- a/lib/spack/spack/solver/cycle_detection.lp
+++ /dev/null
@@ -1,21 +0,0 @@
-% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-% Spack Project Developers. See the top-level COPYRIGHT file for details.
-%
-% SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-%=============================================================================
-% Avoid cycles in the DAG
-%
-% Some combinations of conditional dependencies can result in cycles;
-% this ensures that we solve around them. Note that these rules are quite
-% demanding on both grounding and solving, since they need to compute and
-% consider all possible paths between pair of nodes.
-%=============================================================================
-
-
-#program no_cycle.
-path(Parent, Child) :- depends_on(Parent, Child).
-path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
-:- path(A, A).
-
-#defined depends_on/2.
diff --git a/lib/spack/spack/solver/display.lp b/lib/spack/spack/solver/display.lp
index fffffb2c0430bd..58d04d42ea3002 100644
--- a/lib/spack/spack/solver/display.lp
+++ b/lib/spack/spack/solver/display.lp
@@ -24,4 +24,29 @@
#show error/5.
#show error/6.
+% for error causation
+#show condition_reason/2.
+
+% For error messages to use later
+#show pkg_fact/2.
+#show condition_holds/2.
+#show imposed_constraint/3.
+#show imposed_constraint/4.
+#show imposed_constraint/5.
+#show imposed_constraint/6.
+#show condition_requirement/3.
+#show condition_requirement/4.
+#show condition_requirement/5.
+#show condition_requirement/6.
+#show node_has_variant/2.
+#show build/1.
+#show external/1.
+#show external_version/3.
+#show trigger_and_effect/3.
+#show unification_set/2.
+#show provider/2.
+#show condition_nodes/3.
+#show trigger_node/3.
+#show imposed_nodes/3.
+
% debug
diff --git a/lib/spack/spack/solver/error_messages.lp b/lib/spack/spack/solver/error_messages.lp
new file mode 100644
index 00000000000000..7eb383860d8c75
--- /dev/null
+++ b/lib/spack/spack/solver/error_messages.lp
@@ -0,0 +1,239 @@
+% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+% Spack Project Developers. See the top-level COPYRIGHT file for details.
+%
+% SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+%=============================================================================
+% This logic program adds detailed error messages to Spack's concretizer
+%=============================================================================
+
+#program error_messages.
+
+% Create a causal tree between trigger conditions by locating the effect conditions
+% that are triggers for another condition. Condition2 is caused by Condition1
+condition_cause(Condition2, ID2, Condition1, ID1) :-
+ condition_holds(Condition2, node(ID2, Package2)),
+ pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
+ condition_requirement(Trigger, Name, Package),
+ condition_nodes(Trigger, TriggerNode, node(ID, Package)),
+ trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
+ attr(Name, node(ID, Package)),
+ condition_holds(Condition1, node(ID1, Package1)),
+ pkg_fact(Package1, condition_effect(Condition1, Effect)),
+ imposed_constraint(Effect, Name, Package),
+ imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).
+
+condition_cause(Condition2, ID2, Condition1, ID1) :-
+ condition_holds(Condition2, node(ID2, Package2)),
+ pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
+ condition_requirement(Trigger, Name, Package, A1),
+ condition_nodes(Trigger, TriggerNode, node(ID, Package)),
+ trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
+ attr(Name, node(ID, Package), A1),
+ condition_holds(Condition1, node(ID1, Package1)),
+ pkg_fact(Package1, condition_effect(Condition1, Effect)),
+ imposed_constraint(Effect, Name, Package, A1),
+ imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).
+
+condition_cause(Condition2, ID2, Condition1, ID1) :-
+ condition_holds(Condition2, node(ID2, Package2)),
+ pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
+ condition_requirement(Trigger, Name, Package, A1, A2),
+ condition_nodes(Trigger, TriggerNode, node(ID, Package)),
+ trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
+ attr(Name, node(ID, Package), A1, A2),
+ condition_holds(Condition1, node(ID1, Package1)),
+ pkg_fact(Package1, condition_effect(Condition1, Effect)),
+ imposed_constraint(Effect, Name, Package, A1, A2),
+ imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).
+
+condition_cause(Condition2, ID2, Condition1, ID1) :-
+ condition_holds(Condition2, node(ID2, Package2)),
+ pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
+ condition_requirement(Trigger, Name, Package, A1, A2, A3),
+ condition_nodes(Trigger, TriggerNode, node(ID, Package)),
+ trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
+ attr(Name, node(ID, Package), A1, A2, A3),
+ condition_holds(Condition1, node(ID1, Package1)),
+ pkg_fact(Package1, condition_effect(Condition1, Effect)),
+ imposed_constraint(Effect, Name, Package, A1, A2, A3),
+ imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).
+
+% special condition cause for dependency conditions
+% we can't simply impose the existence of the node for dependency conditions
+% because we need to allow for the choice of which dupe ID the node gets
+condition_cause(Condition2, ID2, Condition1, ID1) :-
+ condition_holds(Condition2, node(ID2, Package2)),
+ pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
+ condition_requirement(Trigger, "node", Package),
+ condition_nodes(Trigger, TriggerNode, node(ID, Package)),
+ trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
+ attr("node", node(ID, Package)),
+ condition_holds(Condition1, node(ID1, Package1)),
+ pkg_fact(Package1, condition_effect(Condition1, Effect)),
+ imposed_constraint(Effect, "dependency_holds", Parent, Package, Type),
+ imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)),
+ attr("depends_on", node(X, Parent), node(ID, Package), Type).
+
+% The literal startcauses is used to separate the variables that are part of the error from the
+% ones describing the causal tree of the error. After startcauses, each successive pair must be
+% a condition and a condition_set id for which it holds.
+
+% More specific error message if the version cannot satisfy some constraint
+% Otherwise covered by `no_version_error` and `versions_conflict_error`.
+error(1, "Cannot satisfy '{0}@{1}'", Package, Constraint, startcauses, ConstraintCause, CauseID)
+ :- attr("node_version_satisfies", node(ID, Package), Constraint),
+ pkg_fact(TriggerPkg, condition_effect(ConstraintCause, EffectID)),
+ imposed_constraint(EffectID, "node_version_satisfies", Package, Constraint),
+ condition_holds(ConstraintCause, node(CauseID, TriggerPkg)),
+ attr("version", node(ID, Package), Version),
+ not pkg_fact(Package, version_satisfies(Constraint, Version)).
+
+error(0, "Cannot satisfy '{0}@{1}' and '{0}@{2}'", Package, Constraint1, Constraint2, startcauses, Cause1, C1ID, Cause2, C2ID)
+ :- attr("node_version_satisfies", node(ID, Package), Constraint1),
+ pkg_fact(TriggerPkg1, condition_effect(Cause1, EffectID1)),
+ imposed_constraint(EffectID1, "node_version_satisfies", Package, Constraint1),
+ condition_holds(Cause1, node(C1ID, TriggerPkg1)),
+ % two constraints
+ attr("node_version_satisfies", node(ID, Package), Constraint2),
+ pkg_fact(TriggerPkg2, condition_effect(Cause2, EffectID2)),
+ imposed_constraint(EffectID2, "node_version_satisfies", Package, Constraint2),
+ condition_holds(Cause2, node(C2ID, TriggerPkg2)),
+ % version chosen
+ attr("version", node(ID, Package), Version),
+ % version satisfies one but not the other
+ pkg_fact(Package, version_satisfies(Constraint1, Version)),
+ not pkg_fact(Package, version_satisfies(Constraint2, Version)).
+
+% causation tracking error for no or multiple virtual providers
+error(0, "Cannot find a valid provider for virtual {0}", Virtual, startcauses, Cause, CID)
+ :- attr("virtual_node", node(X, Virtual)),
+ not provider(_, node(X, Virtual)),
+ imposed_constraint(EID, "dependency_holds", Parent, Virtual, Type),
+ pkg_fact(TriggerPkg, condition_effect(Cause, EID)),
+ condition_holds(Cause, node(CID, TriggerPkg)).
+
+
+% At most one variant value for single-valued variants
+error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
+ :- attr("node", node(X, Package)),
+ node_has_variant(node(X, Package), Variant),
+ pkg_fact(Package, variant_single_value(Variant)),
+ build(node(X, Package)),
+ attr("variant_value", node(X, Package), Variant, Value1),
+ imposed_constraint(EID1, "variant_set", Package, Variant, Value1),
+ pkg_fact(TriggerPkg1, condition_effect(Cause1, EID1)),
+ condition_holds(Cause1, node(X, TriggerPkg1)),
+ attr("variant_value", node(X, Package), Variant, Value2),
+ imposed_constraint(EID2, "variant_set", Package, Variant, Value2),
+ pkg_fact(TriggerPkg2, condition_effect(Cause2, EID2)),
+ condition_holds(Cause2, node(X, TriggerPkg2)),
+ Value1 < Value2. % see[1] in concretize.lp
+
+% Externals have to specify external conditions
+error(0, "Attempted to use external for {0} which does not satisfy any configured external spec version", Package, startcauses, ExternalCause, CID)
+ :- external(node(ID, Package)),
+ attr("external_spec_selected", node(ID, Package), Index),
+ imposed_constraint(EID, "external_conditions_hold", Package, Index),
+ pkg_fact(TriggerPkg, condition_effect(ExternalCause, EID)),
+ condition_holds(ExternalCause, node(CID, TriggerPkg)),
+ not external_version(node(ID, Package), _, _).
+
+error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}') is an external constraint for {0} which was not satisfied", Package, Name, A1)
+ :- external(node(ID, Package)),
+ not attr("external_conditions_hold", node(ID, Package), _),
+ imposed_constraint(EID, "external_conditions_hold", Package, _),
+ trigger_and_effect(Package, TID, EID),
+ condition_requirement(TID, Name, A1),
+ not attr(Name, node(_, A1)).
+
+error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}', '{3}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2)
+ :- external(node(ID, Package)),
+ not attr("external_conditions_hold", node(ID, Package), _),
+ imposed_constraint(EID, "external_conditions_hold", Package, _),
+ trigger_and_effect(Package, TID, EID),
+ condition_requirement(TID, Name, A1, A2),
+ not attr(Name, node(_, A1), A2).
+
+error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}', '{3}', '{4}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2, A3)
+ :- external(node(ID, Package)),
+ not attr("external_conditions_hold", node(ID, Package), _),
+ imposed_constraint(EID, "external_conditions_hold", Package, _),
+ trigger_and_effect(Package, TID, EID),
+ condition_requirement(TID, Name, A1, A2, A3),
+ not attr(Name, node(_, A1), A2, A3).
+
+error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n 'Spec({0} {1}={2})' is an external constraint for {0} which was not satisfied\n 'Spec({0} {1}={3})' required", Package, Variant, Value, OtherValue, startcauses, OtherValueCause, CID)
+ :- external(node(ID, Package)),
+ not attr("external_conditions_hold", node(ID, Package), _),
+ imposed_constraint(EID, "external_conditions_hold", Package, _),
+ trigger_and_effect(Package, TID, EID),
+ condition_requirement(TID, "variant_value", Package, Variant, Value),
+ not attr("variant_value", node(ID, Package), Variant, Value),
+ attr("variant_value", node(ID, Package), Variant, OtherValue),
+ imposed_constraint(EID2, "variant_set", Package, Variant, OtherValue),
+ pkg_fact(TriggerPkg, condition_effect(OtherValueCause, EID2)),
+ condition_holds(OtherValueCause, node(CID, TriggerPkg)).
+
+error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}', '{3}', '{4}', '{5}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2, A3, A4)
+ :- external(node(ID, Package)),
+ not attr("external_conditions_hold", node(ID, Package), _),
+ imposed_constraint(EID, "external_conditions_hold", Package, _),
+ trigger_and_effect(Package, TID, EID),
+ condition_requirement(TID, Name, A1, A2, A3, A4),
+ not attr(Name, node(_, A1), A2, A3, A4).
+
+% error message with causes for conflicts
+error(0, Msg, startcauses, TriggerID, ID1, ConstraintID, ID2)
+ :- attr("node", node(ID, Package)),
+ pkg_fact(Package, conflict(TriggerID, ConstraintID, Msg)),
+ % node(ID1, TriggerPackage) is node(ID2, Package) in most, but not all, cases
+ condition_holds(TriggerID, node(ID1, TriggerPackage)),
+ condition_holds(ConstraintID, node(ID2, Package)),
+ unification_set(X, node(ID2, Package)),
+ unification_set(X, node(ID1, TriggerPackage)),
+ not external(node(ID, Package)), % ignore conflicts for externals
+ not attr("hash", node(ID, Package), _). % ignore conflicts for installed packages
+
+% variables to show
+#show error/2.
+#show error/3.
+#show error/4.
+#show error/5.
+#show error/6.
+#show error/7.
+#show error/8.
+#show error/9.
+#show error/10.
+#show error/11.
+
+#show condition_cause/4.
+#show condition_reason/2.
+
+% Define all variables used to avoid warnings at runtime when the model doesn't happen to have one
+#defined error/2.
+#defined error/3.
+#defined error/4.
+#defined error/5.
+#defined error/6.
+#defined attr/2.
+#defined attr/3.
+#defined attr/4.
+#defined attr/5.
+#defined pkg_fact/2.
+#defined imposed_constraint/3.
+#defined imposed_constraint/4.
+#defined imposed_constraint/5.
+#defined imposed_constraint/6.
+#defined condition_requirement/3.
+#defined condition_requirement/4.
+#defined condition_requirement/5.
+#defined condition_requirement/6.
+#defined condition_holds/2.
+#defined unification_set/2.
+#defined external/1.
+#defined trigger_and_effect/3.
+#defined build/1.
+#defined node_has_variant/2.
+#defined provider/2.
+#defined external_version/3.
diff --git a/lib/spack/spack/solver/heuristic.lp b/lib/spack/spack/solver/heuristic.lp
index 69f925180f59d8..cc87207047d438 100644
--- a/lib/spack/spack/solver/heuristic.lp
+++ b/lib/spack/spack/solver/heuristic.lp
@@ -11,19 +11,14 @@
%-----------------
% Domain heuristic
%-----------------
-#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init]
-#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). [45, true]
-#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true]
-#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true]
% Root node
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
-#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true]
+#heuristic attr("node_target", node(0, Package), Target) : target_weight(Target, 0), attr("root", node(0, Package)). [35, true]
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
-#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("root", node(0, Package)). [35, true]
+#heuristic node_compiler(node(0, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("root", node(0, Package)). [35, true]
% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
-
diff --git a/lib/spack/spack/solver/heuristic_separate.lp b/lib/spack/spack/solver/heuristic_separate.lp
index cb4345f3be3245..caa47aa09d84d1 100644
--- a/lib/spack/spack/solver/heuristic_separate.lp
+++ b/lib/spack/spack/solver/heuristic_separate.lp
@@ -13,7 +13,7 @@
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
-#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
+#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
@@ -21,4 +21,4 @@
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
-#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
+#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 51b62b2693e595..428cc28f9a54bc 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -59,7 +59,7 @@
import re
import socket
import warnings
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
import llnl.path
import llnl.string
@@ -75,6 +75,7 @@
import spack.deptypes as dt
import spack.error
import spack.hash_types as ht
+import spack.parser
import spack.patch
import spack.paths
import spack.platforms
@@ -1319,8 +1320,6 @@ def __init__(
self.external_path = external_path
self.external_module = external_module
"""
- import spack.parser
-
# Copy if spec_like is a Spec.
if isinstance(spec_like, Spec):
self._dup(spec_like)
@@ -1466,6 +1465,26 @@ def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
"""
return [d for d in self._dependencies.select(child=name, depflag=depflag)]
+ @property
+ def edge_attributes(self) -> str:
+ """Helper method to print edge attributes in spec literals"""
+ edges = self.edges_from_dependents()
+ if not edges:
+ return ""
+
+ union = DependencySpec(parent=Spec(), spec=self, depflag=0, virtuals=())
+ for edge in edges:
+ union.update_deptypes(edge.depflag)
+ union.update_virtuals(edge.virtuals)
+ deptypes_str = (
+ f"deptypes={','.join(dt.flag_to_tuple(union.depflag))}" if union.depflag else ""
+ )
+ virtuals_str = f"virtuals={','.join(union.virtuals)}" if union.virtuals else ""
+ if not deptypes_str and not virtuals_str:
+ return ""
+ result = f"{deptypes_str} {virtuals_str}".strip()
+ return f"[{result}]"
+
def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
"""Return a list of direct dependencies (nodes in the DAG).
@@ -3697,8 +3716,15 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
if other.concrete and self.concrete:
return self.dag_hash() == other.dag_hash()
- self_hash = self.dag_hash() if self.concrete else self.abstract_hash
- other_hash = other.dag_hash() if other.concrete else other.abstract_hash
+ elif self.concrete:
+ return self.satisfies(other)
+
+ elif other.concrete:
+ return other.satisfies(self)
+
+ # From here we know both self and other are not concrete
+ self_hash = self.abstract_hash
+ other_hash = other.abstract_hash
if (
self_hash
@@ -3787,10 +3813,6 @@ def _intersects_dependencies(self, other):
repository=spack.repo.PATH, specs=other.traverse(), restrict=True
)
- # This handles cases where there are already providers for both vpkgs
- if not self_index.satisfies(other_index):
- return False
-
# These two loops handle cases where there is an overly restrictive
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not
# compatible with mpich2)
@@ -3888,7 +3910,46 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return False
# If we arrived here, then rhs is abstract. At the moment we don't care about the edge
- # structure of an abstract DAG - hence the deps=False parameter.
+ # structure of an abstract DAG, so we check if any edge could satisfy the properties
+ # we ask for.
+ lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
+ for rhs_edge in other.traverse_edges(root=False, cover="edges"):
+ # If we are checking for ^mpi we need to verify if there is any edge
+ if rhs_edge.spec.virtual:
+ rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
+
+ if not rhs_edge.virtuals:
+ continue
+
+ if not lhs_edges:
+ # Construct a map of the link/run subDAG + direct "build" edges,
+ # keyed by dependency name
+ for lhs_edge in self.traverse_edges(
+ root=False, cover="edges", deptype=("link", "run")
+ ):
+ lhs_edges[lhs_edge.spec.name].add(lhs_edge)
+ for virtual_name in lhs_edge.virtuals:
+ lhs_edges[virtual_name].add(lhs_edge)
+
+ build_edges = self.edges_to_dependencies(depflag=dt.BUILD)
+ for lhs_edge in build_edges:
+ lhs_edges[lhs_edge.spec.name].add(lhs_edge)
+ for virtual_name in lhs_edge.virtuals:
+ lhs_edges[virtual_name].add(lhs_edge)
+
+ # We don't have edges to this dependency
+ current_dependency_name = rhs_edge.spec.name
+ if current_dependency_name not in lhs_edges:
+ return False
+
+ for virtual in rhs_edge.virtuals:
+ has_virtual = any(
+ virtual in edge.virtuals for edge in lhs_edges[current_dependency_name]
+ )
+ if not has_virtual:
+ return False
+
+ # Edges have been checked above already, hence deps=False
return all(
any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
for rhs in other.traverse(root=False)
@@ -4090,9 +4151,7 @@ def __getitem__(self, name):
"""
query_parameters = name.split(":")
if len(query_parameters) > 2:
- msg = "key has more than one ':' symbol."
- msg += " At most one is admitted."
- raise KeyError(msg)
+ raise KeyError("key has more than one ':' symbol. At most one is admitted.")
name, query_parameters = query_parameters[0], query_parameters[1:]
if query_parameters:
@@ -4117,11 +4176,17 @@ def __getitem__(self, name):
itertools.chain(
# Regular specs
(x for x in order() if x.name == name),
+ (
+ x
+ for x in order()
+ if (not x.virtual)
+ and any(name in edge.virtuals for edge in x.edges_from_dependents())
+ ),
(x for x in order() if (not x.virtual) and x.package.provides(name)),
)
)
except StopIteration:
- raise KeyError("No spec with name %s in %s" % (name, self))
+ raise KeyError(f"No spec with name {name} in {self}")
if self._concrete:
return SpecBuildInterface(value, name, query_parameters)
@@ -4499,10 +4564,26 @@ def format_path(
return str(path_ctor(*output_path_components))
def __str__(self):
- sorted_nodes = [self] + sorted(
- self.traverse(root=False), key=lambda x: x.name or x.abstract_hash
+ root_str = [self.format()]
+ sorted_dependencies = sorted(
+ self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
)
- spec_str = " ^".join(d.format() for d in sorted_nodes)
+ sorted_dependencies = [
+ d.format("{edge_attributes} " + DEFAULT_FORMAT) for d in sorted_dependencies
+ ]
+ spec_str = " ^".join(root_str + sorted_dependencies)
+ return spec_str.strip()
+
+ @property
+ def colored_str(self):
+ root_str = [self.cformat()]
+ sorted_dependencies = sorted(
+ self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
+ )
+ sorted_dependencies = [
+ d.cformat("{edge_attributes} " + DISPLAY_FORMAT) for d in sorted_dependencies
+ ]
+ spec_str = " ^".join(root_str + sorted_dependencies)
return spec_str.strip()
def install_status(self):
diff --git a/lib/spack/spack/spec_list.py b/lib/spack/spack/spec_list.py
index 3f60d5724922c2..6bb1ba8d047e9a 100644
--- a/lib/spack/spack/spec_list.py
+++ b/lib/spack/spack/spec_list.py
@@ -93,8 +93,8 @@ def remove(self, spec):
if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
]
if not remove:
- msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name)
- msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
+ msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
+ msg += f"Either {spec} is not in {self.name} or {spec} is "
msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)
@@ -133,9 +133,8 @@ def _parse_reference(self, name):
# Make sure the reference is valid
if name not in self._reference:
- msg = "SpecList %s refers to " % self.name
- msg += "named list %s " % name
- msg += "which does not appear in its reference dict"
+ msg = f"SpecList '{self.name}' refers to named list '{name}'"
+ msg += " which does not appear in its reference dict."
raise UndefinedReferenceError(msg)
return (name, sigil)
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index 1c7ebdec5c50df..7418b5a44ee694 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -37,6 +37,7 @@
import spack.fetch_strategy as fs
import spack.mirror
import spack.paths
+import spack.resource
import spack.spec
import spack.stage
import spack.util.lock
@@ -455,6 +456,7 @@ def fetch(self, mirror_only=False, err_msg=None):
mirror_urls = [
url_util.join(mirror.fetch_url, rel_path)
for mirror in spack.mirror.MirrorCollection(source=True).values()
+ if not mirror.fetch_url.startswith("oci://")
for rel_path in self.mirror_paths
]
@@ -658,8 +660,14 @@ def destroy(self):
class ResourceStage(Stage):
- def __init__(self, url_or_fetch_strategy, root, resource, **kwargs):
- super().__init__(url_or_fetch_strategy, **kwargs)
+ def __init__(
+ self,
+ fetch_strategy: fs.FetchStrategy,
+ root: Stage,
+ resource: spack.resource.Resource,
+ **kwargs,
+ ):
+ super().__init__(fetch_strategy, **kwargs)
self.root_stage = root
self.resource = resource
@@ -870,6 +878,7 @@ def interactive_version_filter(
url_dict: Dict[StandardVersion, str],
known_versions: Iterable[StandardVersion] = (),
*,
+ initial_verion_filter: Optional[VersionList] = None,
url_changes: Set[StandardVersion] = set(),
input: Callable[..., str] = input,
) -> Optional[Dict[StandardVersion, str]]:
@@ -883,9 +892,10 @@ def interactive_version_filter(
Filtered dictionary of versions to URLs or None if the user wants to quit
"""
# Find length of longest string in the list for padding
- sorted_and_filtered = sorted(url_dict.keys(), reverse=True)
- version_filter = VersionList([":"])
- max_len = max(len(str(v)) for v in sorted_and_filtered)
+ version_filter = initial_verion_filter or VersionList([":"])
+ max_len = max(len(str(v)) for v in url_dict) if url_dict else 0
+ sorted_and_filtered = [v for v in url_dict if v.satisfies(version_filter)]
+ sorted_and_filtered.sort(reverse=True)
orig_url_dict = url_dict # only copy when using editor to modify
print_header = True
VERSION_COLOR = spack.spec.VERSION_COLOR
@@ -893,21 +903,20 @@ def interactive_version_filter(
if print_header:
has_filter = version_filter != VersionList([":"])
header = []
- if not sorted_and_filtered:
- header.append("No versions selected")
- elif len(sorted_and_filtered) == len(orig_url_dict):
+ if len(orig_url_dict) > 0 and len(sorted_and_filtered) == len(orig_url_dict):
header.append(
f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}"
)
else:
header.append(
- f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions"
+ f"Selected {len(sorted_and_filtered)} of "
+ f"{llnl.string.plural(len(orig_url_dict), 'version')}"
)
if sorted_and_filtered and known_versions:
num_new = sum(1 for v in sorted_and_filtered if v not in known_versions)
header.append(f"{llnl.string.plural(num_new, 'new version')}")
if has_filter:
- header.append(colorize(f"Filtered by {VERSION_COLOR}{version_filter}@."))
+ header.append(colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@."))
version_with_url = [
colorize(
diff --git a/lib/spack/spack/test/audit.py b/lib/spack/spack/test/audit.py
index 2efc2bbd88913a..a3d4bb8e3fbaf1 100644
--- a/lib/spack/spack/test/audit.py
+++ b/lib/spack/spack/test/audit.py
@@ -21,6 +21,10 @@
(["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has a GitHub patch URL without full_index=1
(["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
+ # This package has invalid GitLab patch URLs
+ (["invalid-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
+ # This package has invalid self-hosted GitLab patch URLs
+ (["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has a stand-alone 'test*' method in build-time callbacks
(["fail-test-audit"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has no issues
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index 20802bbdd80e91..ea9caf7fc031ee 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -4,7 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import filecmp
import glob
+import gzip
import io
+import json
import os
import platform
import sys
@@ -1112,3 +1114,77 @@ def test_tarfile_of_spec_prefix(tmpdir):
assert tar.getmember(f"{expected_prefix}/b_directory/file").isreg()
assert tar.getmember(f"{expected_prefix}/c_directory").isdir()
assert tar.getmember(f"{expected_prefix}/c_directory/file").isreg()
+
+
+@pytest.mark.parametrize("layout,expect_success", [(None, True), (1, True), (2, False)])
+def test_get_valid_spec_file(tmp_path, layout, expect_success):
+ # Test reading a spec.json file that does not specify a layout version.
+ spec_dict = Spec("example").to_dict()
+ path = tmp_path / "spec.json"
+ effective_layout = layout or 0 # If not specified it should be 0
+
+ # Add a layout version
+ if layout is not None:
+ spec_dict["buildcache_layout_version"] = layout
+
+ # Save to file
+ with open(path, "w") as f:
+ json.dump(spec_dict, f)
+
+ try:
+ spec_dict_disk, layout_disk = bindist._get_valid_spec_file(
+ str(path), max_supported_layout=1
+ )
+ assert expect_success
+ assert spec_dict_disk == spec_dict
+ assert layout_disk == effective_layout
+ except bindist.InvalidMetadataFile:
+ assert not expect_success
+
+
+def test_get_valid_spec_file_doesnt_exist(tmp_path):
+ with pytest.raises(bindist.InvalidMetadataFile, match="No such file"):
+ bindist._get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1)
+
+
+def test_get_valid_spec_file_gzipped(tmp_path):
+ # Create a gzipped file, contents don't matter
+ path = tmp_path / "spec.json.gz"
+ with gzip.open(path, "wb") as f:
+ f.write(b"hello")
+ with pytest.raises(
+ bindist.InvalidMetadataFile, match="Compressed spec files are not supported"
+ ):
+ bindist._get_valid_spec_file(str(path), max_supported_layout=1)
+
+
+@pytest.mark.parametrize("filename", ["spec.json", "spec.json.sig"])
+def test_get_valid_spec_file_no_json(tmp_path, filename):
+ tmp_path.joinpath(filename).write_text("not json")
+ with pytest.raises(bindist.InvalidMetadataFile):
+ bindist._get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1)
+
+
+def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config, capsys):
+ layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1
+ spec = Spec("gmake@4.4.1%gcc@13.1.0 arch=linux-ubuntu23.04-zen2")
+ spec._mark_concrete()
+ spec_dict = spec.to_dict()
+ spec_dict["buildcache_layout_version"] = layout_version
+
+ # Setup a basic local build cache structure
+ path = (
+ tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json")
+ )
+ path.parent.mkdir(parents=True)
+ with open(path, "w") as f:
+ json.dump(spec_dict, f)
+
+ # Configure as a mirror.
+ mirror_cmd("add", "test-mirror", str(tmp_path))
+
+ # Shouldn't be able to "download" this.
+ assert bindist.download_tarball(spec, unsigned=True) is None
+
+ # And there should be a warning about an unsupported layout version.
+ assert f"Layout version {layout_version} is too new" in capsys.readouterr().err
diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py
index 0893b76a98a2f3..cbccbc429eac6f 100644
--- a/lib/spack/spack/test/build_environment.py
+++ b/lib/spack/spack/test/build_environment.py
@@ -642,3 +642,28 @@ def test_effective_deptype_run_environment(default_mock_concretization):
for spec, effective_type in spack.build_environment.effective_deptypes(s, context=Context.RUN):
assert effective_type & expected_flags.pop(spec.name) == effective_type
assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes"
+
+
+def test_monkey_patching_works_across_virtual(default_mock_concretization):
+ """Assert that a monkeypatched attribute is found regardless of whether we access it
+ through the real name or the virtual name.
+ """
+ s = default_mock_concretization("mpileaks ^mpich")
+ s["mpich"].foo = "foo"
+ assert s["mpich"].foo == "foo"
+ assert s["mpi"].foo == "foo"
+
+
+def test_clear_compiler_related_runtime_variables_of_build_deps(default_mock_concretization):
+ """Verify that Spack drops CC, CXX, FC and F77 from the dependency-related build environment
+ variable changes if they are set in setup_run_environment. Spack manages those variables
+ elsewhere."""
+ s = default_mock_concretization("build-env-compiler-var-a")
+ ctx = spack.build_environment.SetupContext(s, context=Context.BUILD)
+ result = {}
+ ctx.get_env_modifications().apply_modifications(result)
+ assert "CC" not in result
+ assert "CXX" not in result
+ assert "FC" not in result
+ assert "F77" not in result
+ assert result["ANOTHER_VAR"] == "this-should-be-present"
diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py
index 6c9b8c4cf552cb..55ec605913b3f4 100644
--- a/lib/spack/spack/test/cmd/buildcache.py
+++ b/lib/spack/spack/test/cmd/buildcache.py
@@ -326,4 +326,8 @@ def fake_push(node, push_url, options):
buildcache(*buildcache_create_args)
- assert packages_to_push == expected
+ # Order is not guaranteed, so we can't just compare lists
+ assert set(packages_to_push) == set(expected)
+
+ # Ensure no duplicates
+ assert len(set(packages_to_push)) == len(packages_to_push)
diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py
index b2fc9d5f6ce11c..0dbaa88053070a 100644
--- a/lib/spack/spack/test/cmd/checksum.py
+++ b/lib/spack/spack/test/cmd/checksum.py
@@ -8,6 +8,7 @@
import pytest
import spack.cmd.checksum
+import spack.parser
import spack.repo
import spack.spec
from spack.main import SpackCommand
@@ -254,17 +255,10 @@ def test_checksum_deprecated_version(mock_packages, mock_clone_repo, mock_fetch,
assert "Added 0 new versions to" not in output
-def test_checksum_at(mock_packages):
- pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
- versions = [str(v) for v in pkg_cls.versions]
- output = spack_checksum(f"zlib@{versions[0]}")
- assert "Found 1 version" in output
-
-
def test_checksum_url(mock_packages):
pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
- output = spack_checksum(f"{pkg_cls.url}", fail_on_error=False)
- assert "accepts package names" in output
+ with pytest.raises(spack.parser.SpecSyntaxError):
+ spack_checksum(f"{pkg_cls.url}")
def test_checksum_verification_fails(install_mockery, capsys):
diff --git a/lib/spack/spack/test/cmd/commands.py b/lib/spack/spack/test/cmd/commands.py
index 99faac72b9ef11..3288b092d4d3c0 100644
--- a/lib/spack/spack/test/cmd/commands.py
+++ b/lib/spack/spack/test/cmd/commands.py
@@ -58,6 +58,24 @@ def test_subcommands():
assert "spack compiler add" in out2
+@pytest.mark.not_on_windows("subprocess not supported on Windows")
+def test_override_alias():
+ """Test that spack commands cannot be overridden by aliases."""
+
+ install = spack.main.SpackCommand("install", subprocess=True)
+ instal = spack.main.SpackCommand("instal", subprocess=True)
+
+ out = install(fail_on_error=False, global_args=["-c", "config:aliases:install:find"])
+ assert "install requires a package argument or active environment" in out
+ assert "Alias 'install' (mapping to 'find') attempts to override built-in command" in out
+
+ out = install(fail_on_error=False, global_args=["-c", "config:aliases:foo bar:find"])
+ assert "Alias 'foo bar' (mapping to 'find') contains a space, which is not supported" in out
+
+ out = instal(fail_on_error=False, global_args=["-c", "config:aliases:instal:find"])
+ assert "install requires a package argument or active environment" not in out
+
+
def test_rst():
"""Do some simple sanity checks of the rst writer."""
out1 = commands("--format=rst")
diff --git a/lib/spack/spack/test/cmd/compiler.py b/lib/spack/spack/test/cmd/compiler.py
index 9bc2049fdfbe9f..1cea72d3b25ad6 100644
--- a/lib/spack/spack/test/cmd/compiler.py
+++ b/lib/spack/spack/test/cmd/compiler.py
@@ -4,12 +4,14 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
-import sys
import pytest
+import spack.cmd.compiler
import spack.compilers
import spack.main
+import spack.spec
+import spack.util.pattern
import spack.version
compiler = spack.main.SpackCommand("compiler")
@@ -146,7 +148,7 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable):
compilers_before_find = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
- all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
+ all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None, mixed_toolchain=False
)
spack.cmd.compiler.compiler_find(args)
compilers_after_find = set(spack.compilers.all_compiler_specs())
@@ -159,10 +161,15 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable):
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
-def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
+@pytest.mark.parametrize("mixed_toolchain", [True, False])
+def test_compiler_find_mixed_suffixes(
+ mixed_toolchain, no_compilers_yaml, working_env, compilers_dir
+):
"""Ensure that we'll mix compilers with different suffixes when necessary."""
os.environ["PATH"] = str(compilers_dir)
- output = compiler("find", "--scope=site")
+ output = compiler(
+ "find", "--scope=site", "--mixed-toolchain" if mixed_toolchain else "--no-mixed-toolchain"
+ )
assert "clang@11.0.0" in output
assert "gcc@8.4.0" in output
@@ -176,9 +183,8 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_
assert clang["paths"] == {
"cc": str(compilers_dir / "clang"),
"cxx": str(compilers_dir / "clang++"),
- # we only auto-detect mixed clang on macos
- "f77": gfortran_path if sys.platform == "darwin" else None,
- "fc": gfortran_path if sys.platform == "darwin" else None,
+ "f77": gfortran_path if mixed_toolchain else None,
+ "fc": gfortran_path if mixed_toolchain else None,
}
assert gcc["paths"] == {
diff --git a/lib/spack/spack/test/cmd/config.py b/lib/spack/spack/test/cmd/config.py
index 4f3d5afe770e0c..7247ce97531dcd 100644
--- a/lib/spack/spack/test/cmd/config.py
+++ b/lib/spack/spack/test/cmd/config.py
@@ -215,10 +215,10 @@ def test_config_add_override_leaf(mutable_empty_config):
def test_config_add_update_dict(mutable_empty_config):
- config("add", "packages:all:version:[1.0.0]")
+ config("add", "packages:hdf5:version:[1.0.0]")
output = config("get", "packages")
- expected = "packages:\n all:\n version: [1.0.0]\n"
+ expected = "packages:\n hdf5:\n version: [1.0.0]\n"
assert output == expected
@@ -352,8 +352,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
contents = """spack:
packages:
all:
- version:
- - 1.0.0
+ target: [x86_64]
"""
# create temp file and add it to config
@@ -368,8 +367,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
# added config comes before prior config
expected = """packages:
all:
- version:
- - 1.0.0
+ target: [x86_64]
compiler: [gcc]
"""
@@ -381,7 +379,7 @@ def test_config_add_invalid_file_fails(tmpdir):
# invalid because version requires a list
contents = """spack:
packages:
- all:
+ hdf5:
version: 1.0.0
"""
@@ -631,14 +629,11 @@ def test_config_prefer_upstream(
packages = syaml.load(open(cfg_file))["packages"]
# Make sure only the non-default variants are set.
- assert packages["boost"] == {
- "compiler": ["gcc@=10.2.1"],
- "variants": "+debug +graph",
- "version": ["1.63.0"],
- }
- assert packages["dependency-install"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.0"]}
+ assert packages["all"] == {"compiler": ["gcc@=10.2.1"]}
+ assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]}
+ assert packages["dependency-install"] == {"version": ["2.0"]}
# Ensure that neither variant gets listed for hdf5, since they conflict
- assert packages["hdf5"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.3"]}
+ assert packages["hdf5"] == {"version": ["2.3"]}
# Make sure a message about the conflicting hdf5's was given.
assert "- hdf5" in output
diff --git a/lib/spack/spack/test/cmd/deconcretize.py b/lib/spack/spack/test/cmd/deconcretize.py
new file mode 100644
index 00000000000000..30e39604bf4d4d
--- /dev/null
+++ b/lib/spack/spack/test/cmd/deconcretize.py
@@ -0,0 +1,78 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import pytest
+
+import spack.environment as ev
+from spack.main import SpackCommand, SpackCommandError
+
+deconcretize = SpackCommand("deconcretize")
+
+
+@pytest.fixture(scope="function")
+def test_env(mutable_mock_env_path, config, mock_packages):
+ ev.create("test")
+ with ev.read("test") as e:
+ e.add("a@2.0 foobar=bar ^b@1.0")
+ e.add("a@1.0 foobar=bar ^b@0.9")
+ e.concretize()
+ e.write()
+
+
+def test_deconcretize_dep(test_env):
+ with ev.read("test") as e:
+ deconcretize("-y", "b@1.0")
+ specs = [s for s, _ in e.concretized_specs()]
+
+ assert len(specs) == 1
+ assert specs[0].satisfies("a@1.0")
+
+
+def test_deconcretize_all_dep(test_env):
+ with ev.read("test") as e:
+ with pytest.raises(SpackCommandError):
+ deconcretize("-y", "b")
+ deconcretize("-y", "--all", "b")
+ specs = [s for s, _ in e.concretized_specs()]
+
+ assert len(specs) == 0
+
+
+def test_deconcretize_root(test_env):
+ with ev.read("test") as e:
+ output = deconcretize("-y", "--root", "b@1.0")
+ assert "No matching specs to deconcretize" in output
+ assert len(e.concretized_order) == 2
+
+ deconcretize("-y", "--root", "a@2.0")
+ specs = [s for s, _ in e.concretized_specs()]
+
+ assert len(specs) == 1
+ assert specs[0].satisfies("a@1.0")
+
+
+def test_deconcretize_all_root(test_env):
+ with ev.read("test") as e:
+ with pytest.raises(SpackCommandError):
+ deconcretize("-y", "--root", "a")
+
+ output = deconcretize("-y", "--root", "--all", "b")
+ assert "No matching specs to deconcretize" in output
+ assert len(e.concretized_order) == 2
+
+ deconcretize("-y", "--root", "--all", "a")
+ specs = [s for s, _ in e.concretized_specs()]
+
+ assert len(specs) == 0
+
+
+def test_deconcretize_all(test_env):
+ with ev.read("test") as e:
+ with pytest.raises(SpackCommandError):
+ deconcretize()
+ deconcretize("-y", "--all")
+ specs = [s for s, _ in e.concretized_specs()]
+
+ assert len(specs) == 0
diff --git a/lib/spack/spack/test/cmd/dependencies.py b/lib/spack/spack/test/cmd/dependencies.py
index f61c19a7f1f942..bc615c7a3a10d7 100644
--- a/lib/spack/spack/test/cmd/dependencies.py
+++ b/lib/spack/spack/test/cmd/dependencies.py
@@ -14,7 +14,14 @@
dependencies = SpackCommand("dependencies")
-mpis = ["low-priority-provider", "mpich", "mpich2", "multi-provider-mpi", "zmpi"]
+mpis = [
+ "intel-parallel-studio",
+ "low-priority-provider",
+ "mpich",
+ "mpich2",
+ "multi-provider-mpi",
+ "zmpi",
+]
mpi_deps = ["fake"]
diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py
index c5a7b5c3bb801a..85199eddd66da2 100644
--- a/lib/spack/spack/test/cmd/dev_build.py
+++ b/lib/spack/spack/test/cmd/dev_build.py
@@ -163,8 +163,15 @@ def test_dev_build_fails_multiple_specs(mock_packages):
def test_dev_build_fails_nonexistent_package_name(mock_packages):
- output = dev_build("no_such_package", fail_on_error=False)
- assert "No package for 'no_such_package' was found" in output
+ output = ""
+
+ try:
+ dev_build("no_such_package")
+ assert False, "no exception was raised!"
+ except spack.repo.UnknownPackageError as e:
+ output = e.message
+
+ assert "Package 'no_such_package' not found" in output
def test_dev_build_fails_no_version(mock_packages):
diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py
index 7d0eb37951b862..3fd40867eb7133 100644
--- a/lib/spack/spack/test/cmd/env.py
+++ b/lib/spack/spack/test/cmd/env.py
@@ -14,6 +14,7 @@
import llnl.util.filesystem as fs
import llnl.util.link_tree
+import llnl.util.tty as tty
import spack.cmd.env
import spack.config
@@ -52,6 +53,7 @@
stage = SpackCommand("stage")
uninstall = SpackCommand("uninstall")
find = SpackCommand("find")
+module = SpackCommand("module")
sep = os.sep
@@ -283,7 +285,7 @@ def setup_error(pkg, env):
_, err = capfd.readouterr()
assert "cmake-client had issues!" in err
- assert "Warning: couldn't load runtime environment" in err
+ assert "Warning: could not load runtime environment" in err
def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch):
@@ -501,12 +503,12 @@ def test_env_activate_broken_view(
# test that Spack detects the missing package and fails gracefully
with spack.repo.use_repositories(mock_custom_repository):
wrong_repo = env("activate", "--sh", "test")
- assert "Warning: couldn't load runtime environment" in wrong_repo
+ assert "Warning: could not load runtime environment" in wrong_repo
assert "Unknown namespace: builtin.mock" in wrong_repo
# test replacing repo fixes it
normal_repo = env("activate", "--sh", "test")
- assert "Warning: couldn't load runtime environment" not in normal_repo
+ assert "Warning: could not load runtime environment" not in normal_repo
assert "Unknown namespace: builtin.mock" not in normal_repo
@@ -631,7 +633,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
manifest_dir.mkdir(parents=True, exist_ok=False)
manifest_file = manifest_dir / ev.manifest_name
manifest_file.write_text(
- """
+ """\
spack:
specs:
- a
@@ -719,38 +721,25 @@ def test_env_with_config(environment_from_manifest):
def test_with_config_bad_include(environment_from_manifest):
"""Confirm missing include paths raise expected exception and error."""
- e = environment_from_manifest(
- """
+ with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"):
+ e = environment_from_manifest(
+ """
spack:
include:
- /no/such/directory
- no/such/file.yaml
"""
- )
- with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"):
+ )
with e:
e.concretize()
assert ev.active_environment() is None
-def test_env_with_include_config_files_same_basename(environment_from_manifest):
- e = environment_from_manifest(
- """
-spack:
- include:
- - ./path/to/included-config.yaml
- - ./second/path/to/include-config.yaml
- specs:
- - libelf
- - mpileaks
-"""
- )
-
- e = ev.read("test")
-
- fs.mkdirp(os.path.join(e.path, "path", "to"))
- with open(os.path.join(e.path, "./path/to/included-config.yaml"), "w") as f:
+def test_env_with_include_config_files_same_basename(tmp_path, environment_from_manifest):
+ file1 = fs.join_path(tmp_path, "path", "to", "included-config.yaml")
+ fs.mkdirp(os.path.dirname(file1))
+ with open(file1, "w") as f:
f.write(
"""\
packages:
@@ -759,8 +748,9 @@ def test_env_with_include_config_files_same_basename(environment_from_manifest):
"""
)
- fs.mkdirp(os.path.join(e.path, "second", "path", "to"))
- with open(os.path.join(e.path, "./second/path/to/include-config.yaml"), "w") as f:
+ file2 = fs.join_path(tmp_path, "second", "path", "included-config.yaml")
+ fs.mkdirp(os.path.dirname(file2))
+ with open(file2, "w") as f:
f.write(
"""\
packages:
@@ -769,6 +759,18 @@ def test_env_with_include_config_files_same_basename(environment_from_manifest):
"""
)
+ e = environment_from_manifest(
+ f"""
+spack:
+ include:
+ - {file1}
+ - {file2}
+ specs:
+ - libelf
+ - mpileaks
+"""
+ )
+
with e:
e.concretize()
@@ -805,12 +807,18 @@ def mpileaks_env_config(include_path):
)
-def test_env_with_included_config_file(environment_from_manifest, packages_file):
+def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
"""Test inclusion of a relative packages configuration file added to an
existing environment.
"""
+ env_root = mutable_mock_env_path
+ fs.mkdirp(env_root)
include_filename = "included-config.yaml"
- e = environment_from_manifest(
+ included_path = env_root / include_filename
+ shutil.move(packages_file.strpath, included_path)
+
+ spack_yaml = env_root / ev.manifest_name
+ spack_yaml.write_text(
f"""\
spack:
include:
@@ -820,9 +828,7 @@ def test_env_with_included_config_file(environment_from_manifest, packages_file)
"""
)
- included_path = os.path.join(e.path, include_filename)
- shutil.move(packages_file.strpath, included_path)
-
+ e = ev.Environment(env_root)
with e:
e.concretize()
@@ -855,68 +861,67 @@ def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config):
with spack_yaml.open("w") as f:
f.write("spack:\n include:\n - {0}\n".format(missing_file.strpath))
- env = ev.Environment(tmpdir.strpath)
with pytest.raises(spack.config.ConfigError, match="missing include path"):
- ev.activate(env)
+ ev.Environment(tmpdir.strpath)
-def test_env_with_included_config_scope(environment_from_manifest, packages_file):
+def test_env_with_included_config_scope(mutable_mock_env_path, packages_file):
"""Test inclusion of a package file from the environment's configuration
stage directory. This test is intended to represent a case where a remote
file has already been staged."""
- config_scope_path = os.path.join(ev.root("test"), "config")
-
- # Configure the environment to include file(s) from the environment's
- # remote configuration stage directory.
- e = environment_from_manifest(mpileaks_env_config(config_scope_path))
+ env_root = mutable_mock_env_path
+ config_scope_path = env_root / "config"
# Copy the packages.yaml file to the environment configuration
# directory, so it is picked up during concretization. (Using
# copy instead of rename in case the fixture scope changes.)
fs.mkdirp(config_scope_path)
include_filename = os.path.basename(packages_file.strpath)
- included_path = os.path.join(config_scope_path, include_filename)
+ included_path = config_scope_path / include_filename
fs.copy(packages_file.strpath, included_path)
+ # Configure the environment to include file(s) from the environment's
+ # remote configuration stage directory.
+ spack_yaml = env_root / ev.manifest_name
+ spack_yaml.write_text(mpileaks_env_config(config_scope_path))
+
# Ensure the concretized environment reflects contents of the
# packages.yaml file.
+ e = ev.Environment(env_root)
with e:
e.concretize()
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
-def test_env_with_included_config_var_path(environment_from_manifest, packages_file):
+def test_env_with_included_config_var_path(tmpdir, packages_file):
"""Test inclusion of a package configuration file with path variables
"staged" in the environment's configuration stage directory."""
- config_var_path = os.path.join("$tempdir", "included-config.yaml")
- e = environment_from_manifest(mpileaks_env_config(config_var_path))
+ included_file = packages_file.strpath
+ env_path = pathlib.PosixPath(tmpdir)
+ config_var_path = os.path.join("$tempdir", "included-packages.yaml")
+
+ spack_yaml = env_path / ev.manifest_name
+ spack_yaml.write_text(mpileaks_env_config(config_var_path))
config_real_path = substitute_path_variables(config_var_path)
- fs.mkdirp(os.path.dirname(config_real_path))
- shutil.move(packages_file.strpath, config_real_path)
+ shutil.move(included_file, config_real_path)
assert os.path.exists(config_real_path)
+ e = ev.Environment(env_path)
with e:
e.concretize()
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
-def test_env_config_precedence(environment_from_manifest):
- e = environment_from_manifest(
- """
-spack:
- packages:
- libelf:
- version: ["0.8.12"]
- include:
- - ./included-config.yaml
- specs:
- - mpileaks
-"""
- )
- with open(os.path.join(e.path, "included-config.yaml"), "w") as f:
+def test_env_with_included_config_precedence(tmp_path):
+ """Test included scope and manifest precedence when including a package
+ configuration file."""
+
+ included_file = "included-packages.yaml"
+ included_path = tmp_path / included_file
+ with open(included_path, "w") as f:
f.write(
"""\
packages:
@@ -927,29 +932,50 @@ def test_env_config_precedence(environment_from_manifest):
"""
)
+ spack_yaml = tmp_path / ev.manifest_name
+ spack_yaml.write_text(
+ f"""\
+spack:
+ packages:
+ libelf:
+ version: ["0.8.12"]
+ include:
+ - {os.path.join(".", included_file)}
+ specs:
+ - mpileaks
+"""
+ )
+
+ e = ev.Environment(tmp_path)
with e:
e.concretize()
+ specs = e._get_environment_specs()
# ensure included scope took effect
- assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
+ assert any(x.satisfies("mpileaks@2.2") for x in specs)
# ensure env file takes precedence
- assert any(x.satisfies("libelf@0.8.12") for x in e._get_environment_specs())
+ assert any(x.satisfies("libelf@0.8.12") for x in specs)
-def test_included_config_precedence(environment_from_manifest):
- e = environment_from_manifest(
- """
+def test_env_with_included_configs_precedence(tmp_path):
+ """Test precedence of multiple included configuration files."""
+ file1 = "high-config.yaml"
+ file2 = "low-config.yaml"
+
+ spack_yaml = tmp_path / ev.manifest_name
+ spack_yaml.write_text(
+ f"""\
spack:
include:
- - ./high-config.yaml # this one should take precedence
- - ./low-config.yaml
+ - {os.path.join(".", file1)} # this one should take precedence
+ - {os.path.join(".", file2)}
specs:
- mpileaks
"""
)
- with open(os.path.join(e.path, "high-config.yaml"), "w") as f:
+ with open(tmp_path / file1, "w") as f:
f.write(
"""\
packages:
@@ -958,7 +984,7 @@ def test_included_config_precedence(environment_from_manifest):
"""
)
- with open(os.path.join(e.path, "low-config.yaml"), "w") as f:
+ with open(tmp_path / file2, "w") as f:
f.write(
"""\
packages:
@@ -969,39 +995,125 @@ def test_included_config_precedence(environment_from_manifest):
"""
)
+ e = ev.Environment(tmp_path)
with e:
e.concretize()
+ specs = e._get_environment_specs()
- assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
+ # ensure included package spec took precedence over manifest spec
+ assert any(x.satisfies("mpileaks@2.2") for x in specs)
- assert any([x.satisfies("libelf@0.8.10") for x in e._get_environment_specs()])
+ # ensure first included package spec took precedence over one from second
+ assert any(x.satisfies("libelf@0.8.10") for x in specs)
-def test_bad_env_yaml_format(tmpdir):
- filename = str(tmpdir.join("spack.yaml"))
+@pytest.mark.regression("39248")
+def test_bad_env_yaml_format_remove(mutable_mock_env_path):
+ badenv = "badenv"
+ env("create", badenv)
+ filename = mutable_mock_env_path / "spack.yaml"
with open(filename, "w") as f:
f.write(
"""\
-spack:
- spacks:
- mpileaks
"""
)
- with tmpdir.as_cwd():
- with pytest.raises(spack.config.ConfigFormatError) as e:
- env("create", "test", "./spack.yaml")
- assert "spack.yaml:2" in str(e)
- assert "'spacks' was unexpected" in str(e)
+ assert badenv in env("list")
+ env("remove", "-y", badenv)
+ assert badenv not in env("list")
-def test_env_loads(install_mockery, mock_fetch):
+@pytest.mark.regression("39248")
+@pytest.mark.parametrize(
+ "error,message,contents",
+ [
+ (
+ spack.config.ConfigFormatError,
+ "not of type",
+ """\
+spack:
+ specs: mpi@2.0
+""",
+ ),
+ (
+ ev.SpackEnvironmentConfigError,
+ "duplicate key",
+ """\
+spack:
+ packages:
+ all:
+ providers:
+ mpi: [mvapich2]
+ mpi: [mpich]
+""",
+ ),
+ (
+ spack.config.ConfigFormatError,
+ "'specks' was unexpected",
+ """\
+spack:
+ specks:
+ - libdwarf
+""",
+ ),
+ ],
+)
+def test_bad_env_yaml_create_fails(tmp_path, mutable_mock_env_path, error, message, contents):
+ """Ensure creation with invalid yaml does NOT create or leave the environment."""
+ filename = tmp_path / ev.manifest_name
+ filename.write_text(contents)
+ env_name = "bad_env"
+ with pytest.raises(error, match=message):
+ env("create", env_name, str(filename))
+
+ assert env_name not in env("list")
+ manifest = mutable_mock_env_path / env_name / ev.manifest_name
+ assert not os.path.exists(str(manifest))
+
+
+@pytest.mark.regression("39248")
+@pytest.mark.parametrize("answer", ["-y", ""])
+def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer):
+ """Test removal (or not) of a valid and invalid environment"""
+ remove_environment = answer == "-y"
+ monkeypatch.setattr(tty, "get_yes_or_no", lambda prompt, default: remove_environment)
+
+ environments = ["goodenv", "badenv"]
+ for e in environments:
+ env("create", e)
+
+ # Ensure the bad environment contains invalid yaml
+ filename = mutable_mock_env_path / environments[1] / ev.manifest_name
+ filename.write_text(
+ """\
+ - libdwarf
+"""
+ )
+
+ assert all(e in env("list") for e in environments)
+
+ args = [answer] if answer else []
+ args.extend(environments)
+ output = env("remove", *args, fail_on_error=False)
+
+ if remove_environment is True:
+ # Successfully removed (and reported removal) of *both* environments
+ assert not all(e in env("list") for e in environments)
+ assert output.count("Successfully removed") == len(environments)
+ else:
+ # Not removing any of the environments
+ assert all(e in env("list") for e in environments)
+
+
+def test_env_loads(install_mockery, mock_fetch, mock_modules_root):
env("create", "test")
with ev.read("test"):
add("mpileaks")
concretize()
install("--fake")
+ module("tcl", "refresh", "-y")
with ev.read("test"):
env("loads")
@@ -1531,11 +1643,10 @@ def test_stack_yaml_remove_from_list(tmpdir):
assert Spec("callpath") in test.user_specs
-def test_stack_yaml_remove_from_list_force(tmpdir):
- filename = str(tmpdir.join("spack.yaml"))
- with open(filename, "w") as f:
- f.write(
- """\
+def test_stack_yaml_remove_from_list_force(tmp_path):
+ spack_yaml = tmp_path / ev.manifest_name
+ spack_yaml.write_text(
+ """\
spack:
definitions:
- packages: [mpileaks, callpath]
@@ -1544,20 +1655,20 @@ def test_stack_yaml_remove_from_list_force(tmpdir):
- [$packages]
- [^mpich, ^zmpi]
"""
- )
- with tmpdir.as_cwd():
- env("create", "test", "./spack.yaml")
- with ev.read("test"):
- concretize()
- remove("-f", "-l", "packages", "mpileaks")
- find_output = find("-c")
+ )
- assert "mpileaks" not in find_output
+ env("create", "test", str(spack_yaml))
+ with ev.read("test"):
+ concretize()
+ remove("-f", "-l", "packages", "mpileaks")
+ find_output = find("-c")
- test = ev.read("test")
- assert len(test.user_specs) == 2
- assert Spec("callpath ^zmpi") in test.user_specs
- assert Spec("callpath ^mpich") in test.user_specs
+ assert "mpileaks" not in find_output
+
+ test = ev.read("test")
+ assert len(test.user_specs) == 2
+ assert Spec("callpath ^zmpi") in test.user_specs
+ assert Spec("callpath ^mpich") in test.user_specs
def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):
@@ -1603,7 +1714,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):
with tmpdir.as_cwd():
env("create", "test", "./spack.yaml")
with ev.read("test") as e:
- concretize()
+ e.concretize()
before_user = e.user_specs.specs
before_conc = e.concretized_user_specs
@@ -2443,8 +2554,12 @@ def test_concretize_user_specs_together():
e.remove("mpich")
e.add("mpich2")
+ exc_cls = spack.error.SpackError
+ if spack.config.get("config:concretizer") == "clingo":
+ exc_cls = spack.error.UnsatisfiableSpecError
+
# Concretizing without invalidating the concrete spec for mpileaks fails
- with pytest.raises(spack.error.UnsatisfiableSpecError):
+ with pytest.raises(exc_cls):
e.concretize()
e.concretize(force=True)
@@ -2476,9 +2591,12 @@ def test_duplicate_packages_raise_when_concretizing_together():
e.add("mpileaks~opt")
e.add("mpich")
- with pytest.raises(
- spack.error.UnsatisfiableSpecError, match=r"You could consider setting `concretizer:unify`"
- ):
+ exc_cls, match = spack.error.SpackError, None
+ if spack.config.get("config:concretizer") == "clingo":
+ exc_cls = spack.error.UnsatisfiableSpecError
+ match = r"You could consider setting `concretizer:unify`"
+
+ with pytest.raises(exc_cls, match=match):
e.concretize()
@@ -2505,7 +2623,7 @@ def test_env_write_only_non_default_nested(tmpdir):
- matrix:
- [mpileaks]
packages:
- mpileaks:
+ all:
compiler: [gcc]
view: true
"""
@@ -2843,6 +2961,25 @@ def test_activate_temp(monkeypatch, tmpdir):
assert ev.is_env_dir(str(tmpdir))
+def test_activate_default(monkeypatch):
+ """Tests whether `spack env activate` creates / activates the default
+ environment"""
+ assert not ev.exists("default")
+
+ # Activating it the first time should create it
+ env("activate", "--sh")
+ env("deactivate", "--sh")
+ assert ev.exists("default")
+
+ # Activating it while it already exists should work
+ env("activate", "--sh")
+ env("deactivate", "--sh")
+ assert ev.exists("default")
+
+ env("remove", "-y", "default")
+ assert not ev.exists("default")
+
+
def test_env_view_fail_if_symlink_points_elsewhere(tmpdir, install_mockery, mock_fetch):
view = str(tmpdir.join("view"))
# Put a symlink to an actual directory in view
@@ -3328,6 +3465,20 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
assert "post-install: {}".format(s.dag_hash()) in out
+def test_depfile_empty_does_not_error(tmp_path):
+ # For empty environments Spack should create a depfile that does nothing
+ make = Executable("make")
+ makefile = str(tmp_path / "Makefile")
+
+ env("create", "test")
+ with ev.read("test"):
+ env("depfile", "-o", makefile)
+
+ make("-f", makefile)
+
+ assert make.returncode == 0
+
+
def test_unify_when_possible_works_around_conflicts():
e = ev.create("coconcretization")
e.unify = "when_possible"
diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py
index e94d6efe5c4d4d..e9a387aac03e66 100644
--- a/lib/spack/spack/test/cmd/external.py
+++ b/lib/spack/spack/test/cmd/external.py
@@ -28,21 +28,12 @@ def _mock_search(path_hints=None):
return _factory
-@pytest.fixture
-def _platform_executables(monkeypatch):
- def _win_exe_ext():
- return ".bat"
-
- monkeypatch.setattr(spack.util.path, "win_exe_ext", _win_exe_ext)
-
-
def define_plat_exe(exe):
if sys.platform == "win32":
exe += ".bat"
return exe
-@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
def test_find_external_single_package(mock_executable):
cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
search_dir = cmake_path.parent.parent
@@ -54,7 +45,7 @@ def test_find_external_single_package(mock_executable):
assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo")
-def test_find_external_two_instances_same_package(mock_executable, _platform_executables):
+def test_find_external_two_instances_same_package(mock_executable):
# Each of these cmake instances is created in a different prefix
# In Windows, quoted strings are echo'd with quotes includes
# we need to avoid that for proper regex.
@@ -236,32 +227,7 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo):
assert external.returncode == 0
-@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
-def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables):
- # Prepare an environment to detect a fake gcc
- gcc_exe = mock_executable("gcc", output="echo 4.2.1")
- prefix = os.path.dirname(gcc_exe)
- monkeypatch.setenv("PATH", prefix)
-
- # Find the external spec
- external("find", "gcc")
-
- # Check entries in 'packages.yaml'
- packages_yaml = spack.config.get("packages")
- assert "gcc" in packages_yaml
- assert "externals" in packages_yaml["gcc"]
- externals = packages_yaml["gcc"]["externals"]
- assert len(externals) == 1
- external_gcc = externals[0]
- assert external_gcc["spec"] == "gcc@4.2.1 languages=c"
- assert external_gcc["prefix"] == os.path.dirname(prefix)
- assert "extra_attributes" in external_gcc
- extra_attributes = external_gcc["extra_attributes"]
- assert "prefix" not in extra_attributes
- assert extra_attributes["compilers"]["c"] == str(gcc_exe)
-
-
-def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
+def test_overriding_prefix(mock_executable, mutable_config, monkeypatch):
gcc_exe = mock_executable("gcc", output="echo 4.2.1")
search_dir = gcc_exe.parent
@@ -282,10 +248,7 @@ def _determine_variants(cls, exes, version_str):
assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin")
-@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
-def test_new_entries_are_reported_correctly(
- mock_executable, mutable_config, monkeypatch, _platform_executables
-):
+def test_new_entries_are_reported_correctly(mock_executable, mutable_config, monkeypatch):
# Prepare an environment to detect a fake gcc
gcc_exe = mock_executable("gcc", output="echo 4.2.1")
prefix = os.path.dirname(gcc_exe)
diff --git a/lib/spack/spack/test/cmd/gpg.py b/lib/spack/spack/test/cmd/gpg.py
index 78a2a9ece9f952..08749022cab360 100644
--- a/lib/spack/spack/test/cmd/gpg.py
+++ b/lib/spack/spack/test/cmd/gpg.py
@@ -43,7 +43,7 @@ def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch):
f.write(TEMPLATE.format(version=version))
fs.set_executable(fname)
- monkeypatch.setitem(os.environ, "PATH", str(tmpdir))
+ monkeypatch.setenv("PATH", str(tmpdir))
if version == "undetectable" or version.endswith("1.3.4"):
with pytest.raises(spack.util.gpg.SpackGPGError):
spack.util.gpg.init(force=True)
@@ -54,7 +54,7 @@ def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch):
def test_no_gpg_in_path(tmpdir, mock_gnupghome, monkeypatch, mutable_config):
- monkeypatch.setitem(os.environ, "PATH", str(tmpdir))
+ monkeypatch.setenv("PATH", str(tmpdir))
bootstrap("disable")
with pytest.raises(RuntimeError):
spack.util.gpg.init(force=True)
diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py
index c4528f9852e284..9493e1d17fef8b 100644
--- a/lib/spack/spack/test/cmd/info.py
+++ b/lib/spack/spack/test/cmd/info.py
@@ -25,7 +25,7 @@ def parser():
def print_buffer(monkeypatch):
buffer = []
- def _print(*args):
+ def _print(*args, **kwargs):
buffer.extend(args)
monkeypatch.setattr(spack.cmd.info.color, "cprint", _print, raising=False)
@@ -33,10 +33,11 @@ def _print(*args):
@pytest.mark.parametrize(
- "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk"] # a BundlePackage
+ "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk", "gasnet", "warpx"]
)
-def test_it_just_runs(pkg):
- info(pkg)
+@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
+def test_it_just_runs(pkg, extra_args):
+ info(pkg, *extra_args)
def test_info_noversion(mock_packages, print_buffer):
@@ -78,7 +79,8 @@ def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
"gcc", # This should ensure --test's c_names processing loop covered
],
)
-def test_info_fields(pkg_query, parser, print_buffer):
+@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
+def test_info_fields(pkg_query, extra_args, parser, print_buffer):
expected_fields = (
"Description:",
"Homepage:",
@@ -91,7 +93,7 @@ def test_info_fields(pkg_query, parser, print_buffer):
"Licenses:",
)
- args = parser.parse_args(["--all", pkg_query])
+ args = parser.parse_args(["--all", pkg_query] + extra_args)
spack.cmd.info.info(parser, args)
for text in expected_fields:
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index ef9d19d77884a8..8eb3bfedb815c0 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -904,13 +904,12 @@ def test_install_help_cdash():
@pytest.mark.disable_clean_stage_check
-def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, capfd):
+def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, monkeypatch, capfd):
# capfd interferes with Spack's capturing
- with tmpdir.as_cwd():
- with capfd.disabled():
- os.environ["SPACK_CDASH_AUTH_TOKEN"] = "asdf"
- out = install("-v", "--log-file=cdash_reports", "--log-format=cdash", "a")
- assert "Using CDash auth token from environment" in out
+ with tmpdir.as_cwd(), capfd.disabled():
+ monkeypatch.setenv("SPACK_CDASH_AUTH_TOKEN", "asdf")
+ out = install("-v", "--log-file=cdash_reports", "--log-format=cdash", "a")
+ assert "Using CDash auth token from environment" in out
@pytest.mark.not_on_windows("Windows log_output logs phase header out of order")
diff --git a/lib/spack/spack/test/cmd/spec.py b/lib/spack/spack/test/cmd/spec.py
index 66dfce93089718..763d83bf0ad17a 100644
--- a/lib/spack/spack/test/cmd/spec.py
+++ b/lib/spack/spack/test/cmd/spec.py
@@ -104,8 +104,7 @@ def test_spec_parse_unquoted_flags_report():
spec("gcc cflags=-Os -pipe")
cm = str(cm.value)
assert cm.startswith(
- 'trying to set variant "pipe" in package "gcc", but the package has no such '
- 'variant [happened during concretization of gcc cflags="-Os" ~pipe]'
+ 'trying to set variant "pipe" in package "gcc", but the package has no such variant'
)
assert cm.endswith('(1) cflags=-Os -pipe => cflags="-Os -pipe"')
diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py
index 512defb195370f..910c9e87d9335f 100644
--- a/lib/spack/spack/test/compilers/basics.py
+++ b/lib/spack/spack/test/compilers/basics.py
@@ -422,7 +422,6 @@ def test_clang_flags():
"-gdwarf-5",
"-gline-tables-only",
"-gmodules",
- "-gz",
"-g",
],
"clang@3.3",
@@ -445,7 +444,6 @@ def test_aocc_flags():
"-gdwarf-5",
"-gline-tables-only",
"-gmodules",
- "-gz",
"-g",
],
"aocc@2.2.0",
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 04959a19b34b51..eba86d14fcf7f0 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -349,6 +349,9 @@ def test_compiler_flags_differ_identical_compilers(self):
spec.concretize()
assert spec.satisfies("cflags=-O2")
+ @pytest.mark.only_clingo(
+ "Optional compiler propagation isn't deprecated for original concretizer"
+ )
def test_concretize_compiler_flag_propagate(self):
spec = Spec("hypre cflags=='-g' ^openblas")
spec.concretize()
@@ -458,19 +461,66 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
)
- def test_concretize_propagate_disabled_variant(self):
+ @pytest.mark.parametrize(
+ "spec_str,expected_propagation",
+ [
+ ("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]),
+ # Propagates past a node that doesn't have the variant
+ ("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]),
+ (
+ "ascent~~shared +adios2",
+ [("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
+ ),
+ # Propagates below a node that uses the other value explicitly
+ (
+ "ascent~~shared +adios2 ^adios2+shared",
+ [("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")],
+ ),
+ (
+ "ascent++shared +adios2 ^adios2~shared",
+ [("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")],
+ ),
+ ],
+ )
+ def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagation):
+ """Tests various patterns of boolean variant propagation"""
+ spec = Spec(spec_str).concretized()
+ for key, expected_satisfies in expected_propagation:
+ spec[key].satisfies(expected_satisfies)
+
+ @pytest.mark.only_clingo(
+ "Optional compiler propagation isn't deprecated for original concretizer"
+ )
+ def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
"""Test a package variant value was passed from its parent."""
- spec = Spec("hypre~~shared ^openblas")
+ spec = Spec("ascent~~shared +adios2 ^adios2+shared")
spec.concretize()
- assert spec.satisfies("^openblas~shared")
+ assert spec.satisfies("^adios2+shared")
+ assert spec.satisfies("^bzip2~shared")
- def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
- """Test a package variant value was passed from its parent."""
- spec = Spec("hypre~~shared ^openblas+shared")
+ @pytest.mark.only_clingo(
+ "Optional compiler propagation isn't deprecated for original concretizer"
+ )
+ def test_concretize_propagate_specified_variant(self):
+ """Test that only the specified variant is propagated to the dependencies"""
+ spec = Spec("parent-foo-bar ~~foo")
spec.concretize()
- assert spec.satisfies("^openblas+shared")
+ assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo")
+ assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar")
+
+ @pytest.mark.only_clingo("Original concretizer is allowed to forego variant propagation")
+ def test_concretize_propagate_multivalue_variant(self):
+ """Test that multivalue variants are propagating the specified value(s)
+ to their dependecies. The dependencies should not have the default value"""
+ spec = Spec("multivalue-variant foo==baz,fee")
+ spec.concretize()
+
+ assert spec.satisfies("^a foo=baz,fee")
+ assert spec.satisfies("^b foo=baz,fee")
+ assert not spec.satisfies("^a foo=bar")
+ assert not spec.satisfies("^b foo=bar")
def test_no_matching_compiler_specs(self, mock_low_high_config):
# only relevant when not building compilers as needed
@@ -1838,7 +1888,8 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch
# If we concretize with --reuse it is not, since "mpich~debug" was already installed
with spack.config.override("concretizer:reuse", True):
s = Spec("mpich").concretized()
- assert s.satisfies("~debug")
+ assert s.installed
+ assert s.satisfies("~debug"), s
@pytest.mark.regression("32471")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@@ -2132,14 +2183,16 @@ def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database):
@pytest.fixture()
def duplicates_test_repository():
- builder_test_path = os.path.join(spack.paths.repos_path, "duplicates.test")
- with spack.repo.use_repositories(builder_test_path) as mock_repo:
+ repository_path = os.path.join(spack.paths.repos_path, "duplicates.test")
+ with spack.repo.use_repositories(repository_path) as mock_repo:
yield mock_repo
@pytest.mark.usefixtures("mutable_config", "duplicates_test_repository")
@pytest.mark.only_clingo("Not supported by the original concretizer")
class TestConcretizeSeparately:
+ """Collects test on separate concretization"""
+
@pytest.mark.parametrize("strategy", ["minimal", "full"])
def test_two_gmake(self, strategy):
"""Tests that we can concretize a spec with nodes using the same build
@@ -2320,3 +2373,53 @@ def test_adding_specs(self, input_specs, default_mock_concretization):
assert node == container[node.dag_hash()]
assert node.dag_hash() in container
assert node is not container[node.dag_hash()]
+
+
+@pytest.fixture()
+def edges_test_repository():
+ repository_path = os.path.join(spack.paths.repos_path, "edges.test")
+ with spack.repo.use_repositories(repository_path) as mock_repo:
+ yield mock_repo
+
+
+@pytest.mark.usefixtures("mutable_config", "edges_test_repository")
+@pytest.mark.only_clingo("Edge properties not supported by the original concretizer")
+class TestConcretizeEdges:
+ """Collects tests on edge properties"""
+
+ @pytest.mark.parametrize(
+ "spec_str,expected_satisfies,expected_not_satisfies",
+ [
+ ("conditional-edge", ["^zlib@2.0"], ["^zlib-api"]),
+ ("conditional-edge~foo", ["^zlib@2.0"], ["^zlib-api"]),
+ (
+ "conditional-edge+foo",
+ ["^zlib@1.0", "^zlib-api", "^[virtuals=zlib-api] zlib"],
+ ["^[virtuals=mpi] zlib"],
+ ),
+ ],
+ )
+ def test_condition_triggered_by_edge_property(
+ self, spec_str, expected_satisfies, expected_not_satisfies
+ ):
+ """Tests that we can enforce constraints based on edge attributes"""
+ s = Spec(spec_str).concretized()
+
+ for expected in expected_satisfies:
+ assert s.satisfies(expected), str(expected)
+
+ for not_expected in expected_not_satisfies:
+ assert not s.satisfies(not_expected), str(not_expected)
+
+ def test_virtuals_provided_together_but_only_one_required_in_dag(self):
+ """Tests that we can use a provider that provides more than one virtual together,
+ and is providing only one, iff the others are not needed in the DAG.
+
+ o blas-only-client
+ | [virtual=blas]
+ o openblas (provides blas and lapack together)
+
+ """
+ s = Spec("blas-only-client ^openblas").concretized()
+ assert s.satisfies("^[virtuals=blas] openblas")
+ assert not s.satisfies("^[virtuals=blas,lapack] openblas")
diff --git a/lib/spack/spack/test/concretize_errors.py b/lib/spack/spack/test/concretize_errors.py
new file mode 100644
index 00000000000000..2a8be3e0457a46
--- /dev/null
+++ b/lib/spack/spack/test/concretize_errors.py
@@ -0,0 +1,68 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import pytest
+
+import spack.solver.asp
+import spack.spec
+
+pytestmark = [
+ pytest.mark.not_on_windows("Windows uses old concretizer"),
+ pytest.mark.only_clingo("Original concretizer does not support configuration requirements"),
+]
+
+version_error_messages = [
+ "Cannot satisfy 'fftw@:1.0' and 'fftw@1.1:",
+ " required because quantum-espresso depends on fftw@:1.0",
+ " required because quantum-espresso ^fftw@1.1: requested from CLI",
+ " required because quantum-espresso ^fftw@1.1: requested from CLI",
+]
+
+external_error_messages = [
+ (
+ "Attempted to build package quantum-espresso which is not buildable and does not have"
+ " a satisfying external"
+ ),
+ (
+ " 'quantum-espresso~veritas' is an external constraint for quantum-espresso"
+ " which was not satisfied"
+ ),
+ " 'quantum-espresso+veritas' required",
+ " required because quantum-espresso+veritas requested from CLI",
+]
+
+variant_error_messages = [
+ "'fftw' required multiple values for single-valued variant 'mpi'",
+ " Requested '~mpi' and '+mpi'",
+ " required because quantum-espresso depends on fftw+mpi when +invino",
+ " required because quantum-espresso+invino ^fftw~mpi requested from CLI",
+ " required because quantum-espresso+invino ^fftw~mpi requested from CLI",
+]
+
+external_config = {
+ "packages:quantum-espresso": {
+ "buildable": False,
+ "externals": [{"spec": "quantum-espresso@1.0~veritas", "prefix": "/path/to/qe"}],
+ }
+}
+
+
+@pytest.mark.parametrize(
+ "error_messages,config_set,spec",
+ [
+ (version_error_messages, {}, "quantum-espresso^fftw@1.1:"),
+ (external_error_messages, external_config, "quantum-espresso+veritas"),
+ (variant_error_messages, {}, "quantum-espresso+invino^fftw~mpi"),
+ ],
+)
+def test_error_messages(error_messages, config_set, spec, mock_packages, mutable_config):
+ for path, conf in config_set.items():
+ spack.config.set(path, conf)
+
+ with pytest.raises(spack.solver.asp.UnsatisfiableSpecError) as e:
+ _ = spack.spec.Spec(spec).concretized()
+
+ for em in error_messages:
+ assert em in str(e.value)
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
index 20d0e1ae91ad84..929ae0a9ec27a5 100644
--- a/lib/spack/spack/test/concretize_preferences.py
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -105,17 +105,13 @@ def test_preferred_variants_from_wildcard(self):
@pytest.mark.parametrize(
"compiler_str,spec_str",
- [("gcc@4.5.0", "mpileaks"), ("clang@12.0.0", "mpileaks"), ("gcc@4.5.0", "openmpi")],
+ [("gcc@=4.5.0", "mpileaks"), ("clang@=12.0.0", "mpileaks"), ("gcc@=4.5.0", "openmpi")],
)
def test_preferred_compilers(self, compiler_str, spec_str):
"""Test preferred compilers are applied correctly"""
- spec = Spec(spec_str)
- update_packages(spec.name, "compiler", [compiler_str])
- spec.concretize()
- # note: lhs has concrete compiler version, rhs still abstract.
- # Could be made more strict by checking for equality with `gcc@=4.5.0`
- # etc.
- assert spec.compiler.satisfies(CompilerSpec(compiler_str))
+ update_packages("all", "compiler", [compiler_str])
+ spec = spack.spec.Spec(spec_str).concretized()
+ assert spec.compiler == CompilerSpec(compiler_str)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_preferred_target(self, mutable_mock_repo):
@@ -124,7 +120,7 @@ def test_preferred_target(self, mutable_mock_repo):
default = str(spec.target)
preferred = str(spec.target.family)
- update_packages("mpich", "target", [preferred])
+ update_packages("all", "target", [preferred])
spec = concretize("mpich")
assert str(spec.target) == preferred
@@ -132,7 +128,7 @@ def test_preferred_target(self, mutable_mock_repo):
assert str(spec["mpileaks"].target) == preferred
assert str(spec["mpich"].target) == preferred
- update_packages("mpileaks", "target", [default])
+ update_packages("all", "target", [default])
spec = concretize("mpileaks")
assert str(spec["mpileaks"].target) == default
assert str(spec["mpich"].target) == default
@@ -508,3 +504,13 @@ def test_sticky_variant_accounts_for_packages_yaml(self):
with spack.config.override("packages:sticky-variant", {"variants": "+allow-gcc"}):
s = Spec("sticky-variant %gcc").concretized()
assert s.satisfies("%gcc") and s.satisfies("+allow-gcc")
+
+ @pytest.mark.regression("41134")
+ @pytest.mark.only_clingo("Not backporting the fix to the old concretizer")
+ def test_default_preference_variant_different_type_does_not_error(self):
+ """Tests that a different type for an existing variant in the 'all:' section of
+ packages.yaml doesn't fail with an error.
+ """
+ with spack.config.override("packages:all", {"variants": "+foo"}):
+ s = Spec("a").concretized()
+ assert s.satisfies("foo=bar")
diff --git a/lib/spack/spack/test/concretize_requirements.py b/lib/spack/spack/test/concretize_requirements.py
index cd51006088403b..529d481b2f9211 100644
--- a/lib/spack/spack/test/concretize_requirements.py
+++ b/lib/spack/spack/test/concretize_requirements.py
@@ -469,16 +469,22 @@ def test_one_package_multiple_oneof_groups(concretize_scope, test_repo):
@pytest.mark.regression("34241")
-def test_require_cflags(concretize_scope, test_repo):
+def test_require_cflags(concretize_scope, mock_packages):
"""Ensures that flags can be required from configuration."""
conf_str = """\
packages:
- y:
+ mpich2:
require: cflags="-g"
+ mpi:
+ require: mpich cflags="-O1"
"""
update_packages_config(conf_str)
- spec = Spec("y").concretized()
- assert spec.satisfies("cflags=-g")
+
+ spec_mpich2 = Spec("mpich2").concretized()
+ assert spec_mpich2.satisfies("cflags=-g")
+
+ spec_mpi = Spec("mpi").concretized()
+ assert spec_mpi.satisfies("mpich cflags=-O1")
def test_requirements_for_package_that_is_not_needed(concretize_scope, test_repo):
@@ -890,3 +896,134 @@ def test_requires_directive(concretize_scope, mock_packages):
# This package can only be compiled with clang
with pytest.raises(spack.error.SpackError, match="can only be compiled with Clang"):
Spec("requires_clang").concretized()
+
+
+@pytest.mark.parametrize(
+ "packages_yaml",
+ [
+ # Simple string
+ """
+ packages:
+ all:
+ require: "+shared"
+ """,
+ # List of strings
+ """
+ packages:
+ all:
+ require:
+ - "+shared"
+ """,
+ # Objects with attributes
+ """
+ packages:
+ all:
+ require:
+ - spec: "+shared"
+ """,
+ """
+ packages:
+ all:
+ require:
+ - one_of: ["+shared"]
+ """,
+ ],
+)
+def test_default_requirements_semantic(packages_yaml, concretize_scope, mock_packages):
+ """Tests that requirements under 'all:' are by default applied only if the variant/property
+ required exists, but are strict otherwise.
+
+ For example:
+
+ packages:
+ all:
+ require: "+shared"
+
+ should enforce the value of "+shared" when a Boolean variant named "shared" exists. This is
+ not overridable from the command line, so with the configuration above:
+
+ > spack spec zlib~shared
+
+ is unsatisfiable.
+ """
+ update_packages_config(packages_yaml)
+
+    # Regular zlib concretizes to +shared
+ s = Spec("zlib").concretized()
+ assert s.satisfies("+shared")
+
+ # If we specify the variant we can concretize only the one matching the constraint
+ s = Spec("zlib +shared").concretized()
+ assert s.satisfies("+shared")
+ with pytest.raises(UnsatisfiableSpecError):
+ Spec("zlib ~shared").concretized()
+
+    # A spec without the shared variant still concretizes
+ s = Spec("a").concretized()
+ assert not s.satisfies("a +shared")
+ assert not s.satisfies("a ~shared")
+
+
+@pytest.mark.parametrize(
+ "packages_yaml,spec_str,expected,not_expected",
+ [
+ # The package has a 'libs' mv variant defaulting to 'libs=shared'
+ (
+ """
+ packages:
+ all:
+ require: "+libs"
+ """,
+ "multivalue-variant",
+ ["libs=shared"],
+ ["libs=static", "+libs"],
+ ),
+ (
+ """
+ packages:
+ all:
+ require: "libs=foo"
+ """,
+ "multivalue-variant",
+ ["libs=shared"],
+ ["libs=static", "libs=foo"],
+ ),
+ (
+ # (TODO): revisit this case when we'll have exact value semantic for mv variants
+ """
+ packages:
+ all:
+ require: "libs=static"
+ """,
+ "multivalue-variant",
+ ["libs=static", "libs=shared"],
+ [],
+ ),
+ (
+            # Constraints apply as a whole, so having a non-existing variant
+            # invalidates the entire constraint
+ """
+ packages:
+ all:
+ require: "libs=static +feefoo"
+ """,
+ "multivalue-variant",
+ ["libs=shared"],
+ ["libs=static"],
+ ),
+ ],
+)
+def test_default_requirements_semantic_with_mv_variants(
+ packages_yaml, spec_str, expected, not_expected, concretize_scope, mock_packages
+):
+ """Tests that requirements under 'all:' are behaving correctly under cases that could stem
+ from MV variants.
+ """
+ update_packages_config(packages_yaml)
+ s = Spec(spec_str).concretized()
+
+ for constraint in expected:
+ assert s.satisfies(constraint), constraint
+
+ for constraint in not_expected:
+ assert not s.satisfies(constraint), constraint
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index f7bf7d75698cd1..5f544a31296a68 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -78,7 +78,7 @@ def env_yaml(tmpdir):
verify_ssl: False
dirty: False
packages:
- libelf:
+ all:
compiler: [ 'gcc@4.5.3' ]
repos:
- /x/y/z
@@ -942,7 +942,7 @@ def test_single_file_scope(config, env_yaml):
# from the single-file config
assert spack.config.get("config:verify_ssl") is False
assert spack.config.get("config:dirty") is False
- assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"]
+ assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"]
# from the lower config scopes
assert spack.config.get("config:checksum") is True
@@ -965,7 +965,7 @@ def test_single_file_scope_section_override(tmpdir, config):
config:
verify_ssl: False
packages::
- libelf:
+ all:
compiler: [ 'gcc@4.5.3' ]
repos:
- /x/y/z
@@ -977,7 +977,7 @@ def test_single_file_scope_section_override(tmpdir, config):
with spack.config.override(scope):
# from the single-file config
assert spack.config.get("config:verify_ssl") is False
- assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"]
+ assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"]
# from the lower config scopes
assert spack.config.get("config:checksum") is True
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index c4b3df92edf17f..fb7608a56bd377 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -6,6 +6,7 @@
import collections
import datetime
import errno
+import functools
import inspect
import itertools
import json
@@ -31,6 +32,7 @@
import spack.binary_distribution
import spack.caches
+import spack.cmd.buildcache
import spack.compilers
import spack.config
import spack.database
@@ -494,7 +496,7 @@ def mock_binary_index(monkeypatch, tmpdir_factory):
tmpdir = tmpdir_factory.mktemp("mock_binary_index")
index_path = tmpdir.join("binary_index").strpath
mock_index = spack.binary_distribution.BinaryCacheIndex(index_path)
- monkeypatch.setattr(spack.binary_distribution, "binary_index", mock_index)
+ monkeypatch.setattr(spack.binary_distribution, "BINARY_INDEX", mock_index)
yield
@@ -1709,8 +1711,8 @@ def inode_cache():
@pytest.fixture(autouse=True)
def brand_new_binary_cache():
yield
- spack.binary_distribution.binary_index = llnl.util.lang.Singleton(
- spack.binary_distribution._binary_index
+ spack.binary_distribution.BINARY_INDEX = llnl.util.lang.Singleton(
+ spack.binary_distribution.BinaryCacheIndex
)
@@ -1948,3 +1950,32 @@ def pytest_runtest_setup(item):
not_on_windows_marker = item.get_closest_marker(name="not_on_windows")
if not_on_windows_marker and sys.platform == "win32":
pytest.skip(*not_on_windows_marker.args)
+
+
+@pytest.fixture(scope="function")
+def disable_parallel_buildcache_push(monkeypatch):
+ class MockPool:
+ def map(self, func, args):
+ return [func(a) for a in args]
+
+ def starmap(self, func, args):
+ return [func(*a) for a in args]
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ pass
+
+ monkeypatch.setattr(spack.cmd.buildcache, "_make_pool", MockPool)
+
+
+def _root_path(x, y, *, path):
+ return path
+
+
+@pytest.fixture
+def mock_modules_root(tmp_path, monkeypatch):
+ """Sets the modules root to a temporary directory, to avoid polluting configuration scopes."""
+ fn = functools.partial(_root_path, path=str(tmp_path))
+ monkeypatch.setattr(spack.modules.common, "root_path", fn)
diff --git a/lib/spack/spack/test/data/config/modules.yaml b/lib/spack/spack/test/data/config/modules.yaml
index 28e2ec91b3d962..f217dd7eaf3379 100644
--- a/lib/spack/spack/test/data/config/modules.yaml
+++ b/lib/spack/spack/test/data/config/modules.yaml
@@ -14,12 +14,7 @@
# ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules:
- default:
- enable:
- - tcl
- roots:
- tcl: $user_cache_path/tcl
- lmod: $user_cache_path/lmod
+ default: {}
prefix_inspections:
bin:
- PATH
diff --git a/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml b/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml
deleted file mode 100644
index 997501e08ba454..00000000000000
--- a/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-# DEPRECATED: remove this in v0.20
-# See `alter_environment.yaml` for the new syntax
-enable:
- - lmod
-lmod:
- core_compilers:
- - 'clang@3.3'
-
- hierarchy:
- - mpi
-
- all:
- autoload: none
- filter:
- environment_blacklist:
- - CMAKE_PREFIX_PATH
- environment:
- set:
- '{name}_ROOT': '{prefix}'
-
- 'platform=test target=x86_64':
- environment:
- set:
- FOO: 'foo'
- unset:
- - BAR
-
- 'platform=test target=core2':
- load:
- - 'foo/bar'
diff --git a/lib/spack/spack/test/data/modules/lmod/blacklist.yaml b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml
similarity index 51%
rename from lib/spack/spack/test/data/modules/lmod/blacklist.yaml
rename to lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml
index 8c88214380f28b..e9326ab42c4661 100644
--- a/lib/spack/spack/test/data/modules/lmod/blacklist.yaml
+++ b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml
@@ -1,14 +1,12 @@
-# DEPRECATED: remove this in v0.20
-# See `exclude.yaml` for the new syntax
enable:
- lmod
lmod:
+ hide_implicits: true
+ hash_length: 0
core_compilers:
- 'clang@3.3'
hierarchy:
- mpi
- blacklist:
- - callpath
all:
autoload: direct
diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist.yaml
deleted file mode 100644
index 4ffeb135e95ebe..00000000000000
--- a/lib/spack/spack/test/data/modules/tcl/blacklist.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-# DEPRECATED: remove this in v0.20
-# See `exclude.yaml` for the new syntax
-enable:
- - tcl
-tcl:
- whitelist:
- - zmpi
- blacklist:
- - callpath
- - mpi
- all:
- autoload: direct
diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml
deleted file mode 100644
index 128200d6ec6f87..00000000000000
--- a/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-# DEPRECATED: remove this in v0.20
-# See `alter_environment.yaml` for the new syntax
-enable:
- - tcl
-tcl:
- all:
- autoload: none
- filter:
- environment_blacklist:
- - CMAKE_PREFIX_PATH
- environment:
- set:
- '{name}_ROOT': '{prefix}'
-
- 'platform=test target=x86_64':
- environment:
- set:
- FOO: 'foo'
- OMPI_MCA_mpi_leave_pinned: '1'
- unset:
- - BAR
-
- 'platform=test target=core2':
- load:
- - 'foo/bar'
diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml
deleted file mode 100644
index b49bc80b5e82a7..00000000000000
--- a/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-# DEPRECATED: remove this in v0.20
-# See `exclude_implicits.yaml` for the new syntax
-enable:
- - tcl
-tcl:
- blacklist_implicits: true
- all:
- autoload: direct
diff --git a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml
index 2d892c43513a51..4835b4ecd93f33 100644
--- a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml
+++ b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml
@@ -1,6 +1,9 @@
+# DEPRECATED: remove this in ?
+# See `hide_implicits.yaml` for the new syntax
enable:
- tcl
tcl:
exclude_implicits: true
+ hash_length: 0
all:
autoload: direct
diff --git a/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml
new file mode 100644
index 00000000000000..136c42f3c7cb50
--- /dev/null
+++ b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml
@@ -0,0 +1,7 @@
+enable:
+ - tcl
+tcl:
+ hide_implicits: true
+ hash_length: 0
+ all:
+ autoload: direct
diff --git a/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml b/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml
index b03f966c7c1509..75b4cd09d2ec42 100644
--- a/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml
+++ b/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml
@@ -4,7 +4,7 @@ tcl:
all:
autoload: none
filter:
- environment_blacklist:
+ exclude_env_vars:
- CMAKE_PREFIX_PATH
environment:
set:
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 3033370ac6f191..ee3e5da81ef679 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -803,6 +803,14 @@ def test_query_spec_with_non_conditional_virtual_dependency(database):
assert len(results) == 1
+def test_query_virtual_spec(database):
+ """Make sure we can query for virtuals in the DB"""
+ results = spack.store.STORE.db.query_local("mpi")
+ assert len(results) == 3
+ names = [s.name for s in results]
+ assert all(name in names for name in ["mpich", "mpich2", "zmpi"])
+
+
def test_failed_spec_path_error(database):
"""Ensure spec not concrete check is covered."""
s = spack.spec.Spec("a")
diff --git a/lib/spack/spack/test/detection.py b/lib/spack/spack/test/detection.py
new file mode 100644
index 00000000000000..6218bc87578f7c
--- /dev/null
+++ b/lib/spack/spack/test/detection.py
@@ -0,0 +1,30 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
+
+import spack.detection
+import spack.spec
+
+
+def test_detection_update_config(mutable_config):
+ # mock detected package
+ detected_packages = collections.defaultdict(list)
+ detected_packages["cmake"] = [
+ spack.detection.common.DetectedPackage(
+ spec=spack.spec.Spec("cmake@3.27.5"), prefix="/usr/bin"
+ )
+ ]
+
+ # update config for new package
+ spack.detection.common.update_configuration(detected_packages)
+ # Check entries in 'packages.yaml'
+ packages_yaml = spack.config.get("packages")
+ assert "cmake" in packages_yaml
+ assert "externals" in packages_yaml["cmake"]
+ externals = packages_yaml["cmake"]["externals"]
+ assert len(externals) == 1
+ external_gcc = externals[0]
+ assert external_gcc["spec"] == "cmake@3.27.5"
+ assert external_gcc["prefix"] == "/usr/bin"
diff --git a/lib/spack/spack/test/env.py b/lib/spack/spack/test/env.py
index e88af08761979f..7490a6e0b26204 100644
--- a/lib/spack/spack/test/env.py
+++ b/lib/spack/spack/test/env.py
@@ -18,6 +18,7 @@
SpackEnvironmentViewError,
_error_on_nonempty_view_dir,
)
+from spack.spec_list import UndefinedReferenceError
pytestmark = pytest.mark.not_on_windows("Envs are not supported on windows")
@@ -690,3 +691,90 @@ def test_removing_spec_from_manifest_with_exact_duplicates(
assert "zlib" in manifest.read_text()
with ev.Environment(tmp_path) as env:
assert len(env.user_specs) == 1
+
+
+@pytest.mark.regression("35298")
+@pytest.mark.only_clingo("Propagation not supported in the original concretizer")
+def test_variant_propagation_with_unify_false(tmp_path, mock_packages):
+ """Spack distributes concretizations to different processes, when unify:false is selected and
+ the number of roots is 2 or more. When that happens, the specs to be concretized need to be
+ properly reconstructed on the worker process, if variant propagation was requested.
+ """
+ manifest = tmp_path / "spack.yaml"
+ manifest.write_text(
+ """
+ spack:
+ specs:
+ - parent-foo ++foo
+ - c
+ concretizer:
+ unify: false
+ """
+ )
+ with ev.Environment(tmp_path) as env:
+ env.concretize()
+
+ root = env.matching_spec("parent-foo")
+ for node in root.traverse():
+ assert node.satisfies("+foo")
+
+
+def test_env_with_include_defs(mutable_mock_env_path, mock_packages):
+ """Test environment with included definitions file."""
+ env_path = mutable_mock_env_path
+ env_path.mkdir()
+ defs_file = env_path / "definitions.yaml"
+ defs_file.write_text(
+ """definitions:
+- core_specs: [libdwarf, libelf]
+- compilers: ['%gcc']
+"""
+ )
+
+ spack_yaml = env_path / ev.manifest_name
+ spack_yaml.write_text(
+ f"""spack:
+ include:
+ - file://{defs_file}
+
+ definitions:
+ - my_packages: [zlib]
+
+ specs:
+ - matrix:
+ - [$core_specs]
+ - [$compilers]
+ - $my_packages
+"""
+ )
+
+ e = ev.Environment(env_path)
+ with e:
+ e.concretize()
+
+
+def test_env_with_include_def_missing(mutable_mock_env_path, mock_packages):
+ """Test environment with included definitions file that is missing a definition."""
+ env_path = mutable_mock_env_path
+ env_path.mkdir()
+ filename = "missing-def.yaml"
+ defs_file = env_path / filename
+ defs_file.write_text("definitions:\n- my_compilers: ['%gcc']\n")
+
+ spack_yaml = env_path / ev.manifest_name
+ spack_yaml.write_text(
+ f"""spack:
+ include:
+ - file://{defs_file}
+
+ specs:
+ - matrix:
+ - [$core_specs]
+ - [$my_compilers]
+"""
+ )
+
+ e = ev.Environment(env_path)
+ with e:
+ with pytest.raises(UndefinedReferenceError, match=r"which does not appear"):
+ e.concretize()
diff --git a/lib/spack/spack/test/llnl/util/lock.py b/lib/spack/spack/test/llnl/util/lock.py
index 9e7f3a3bde31b5..a17e5c94d9a5b8 100644
--- a/lib/spack/spack/test/llnl/util/lock.py
+++ b/lib/spack/spack/test/llnl/util/lock.py
@@ -18,7 +18,7 @@
mpirun -n 7 spack test lock
And it will test locking correctness among MPI processes. Ideally, you
-want the MPI processes to span across multiple nodes, so, e.g., for SLURM
+want the MPI processes to span across multiple nodes, so, e.g., for Slurm
you might do this::
srun -N 7 -n 7 -m cyclic spack test lock
diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py
index b333ae58fea006..5716ca5a48dea8 100644
--- a/lib/spack/spack/test/make_executable.py
+++ b/lib/spack/spack/test/make_executable.py
@@ -9,10 +9,7 @@
This just tests whether the right args are getting passed to make.
"""
import os
-import shutil
import sys
-import tempfile
-import unittest
import pytest
@@ -20,110 +17,104 @@
from spack.util.environment import path_put_first
pytestmark = pytest.mark.skipif(
- sys.platform == "win32",
- reason="MakeExecutable \
- not supported on Windows",
+ sys.platform == "win32", reason="MakeExecutable not supported on Windows"
)
-class MakeExecutableTest(unittest.TestCase):
- def setUp(self):
- self.tmpdir = tempfile.mkdtemp()
-
- make_exe = os.path.join(self.tmpdir, "make")
- with open(make_exe, "w") as f:
- f.write("#!/bin/sh\n")
- f.write('echo "$@"')
- os.chmod(make_exe, 0o700)
-
- path_put_first("PATH", [self.tmpdir])
-
- def tearDown(self):
- shutil.rmtree(self.tmpdir)
-
- def test_make_normal(self):
- make = MakeExecutable("make", 8)
- self.assertEqual(make(output=str).strip(), "-j8")
- self.assertEqual(make("install", output=str).strip(), "-j8 install")
-
- def test_make_explicit(self):
- make = MakeExecutable("make", 8)
- self.assertEqual(make(parallel=True, output=str).strip(), "-j8")
- self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install")
-
- def test_make_one_job(self):
- make = MakeExecutable("make", 1)
- self.assertEqual(make(output=str).strip(), "-j1")
- self.assertEqual(make("install", output=str).strip(), "-j1 install")
-
- def test_make_parallel_false(self):
- make = MakeExecutable("make", 8)
- self.assertEqual(make(parallel=False, output=str).strip(), "-j1")
- self.assertEqual(make("install", parallel=False, output=str).strip(), "-j1 install")
-
- def test_make_parallel_disabled(self):
- make = MakeExecutable("make", 8)
-
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "true"
- self.assertEqual(make(output=str).strip(), "-j1")
- self.assertEqual(make("install", output=str).strip(), "-j1 install")
-
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "1"
- self.assertEqual(make(output=str).strip(), "-j1")
- self.assertEqual(make("install", output=str).strip(), "-j1 install")
-
- # These don't disable (false and random string)
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "false"
- self.assertEqual(make(output=str).strip(), "-j8")
- self.assertEqual(make("install", output=str).strip(), "-j8 install")
-
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "foobar"
- self.assertEqual(make(output=str).strip(), "-j8")
- self.assertEqual(make("install", output=str).strip(), "-j8 install")
-
- del os.environ["SPACK_NO_PARALLEL_MAKE"]
-
- def test_make_parallel_precedence(self):
- make = MakeExecutable("make", 8)
-
- # These should work
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "true"
- self.assertEqual(make(parallel=True, output=str).strip(), "-j1")
- self.assertEqual(make("install", parallel=True, output=str).strip(), "-j1 install")
-
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "1"
- self.assertEqual(make(parallel=True, output=str).strip(), "-j1")
- self.assertEqual(make("install", parallel=True, output=str).strip(), "-j1 install")
-
- # These don't disable (false and random string)
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "false"
- self.assertEqual(make(parallel=True, output=str).strip(), "-j8")
- self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install")
-
- os.environ["SPACK_NO_PARALLEL_MAKE"] = "foobar"
- self.assertEqual(make(parallel=True, output=str).strip(), "-j8")
- self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install")
-
- del os.environ["SPACK_NO_PARALLEL_MAKE"]
-
- def test_make_jobs_env(self):
- make = MakeExecutable("make", 8)
- dump_env = {}
- self.assertEqual(
- make(output=str, jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip(), "-j8"
- )
- self.assertEqual(dump_env["MAKE_PARALLELISM"], "8")
-
- def test_make_jobserver(self):
- make = MakeExecutable("make", 8)
- os.environ["MAKEFLAGS"] = "--jobserver-auth=X,Y"
- self.assertEqual(make(output=str).strip(), "")
- self.assertEqual(make(parallel=False, output=str).strip(), "-j1")
- del os.environ["MAKEFLAGS"]
-
- def test_make_jobserver_not_supported(self):
- make = MakeExecutable("make", 8, supports_jobserver=False)
- os.environ["MAKEFLAGS"] = "--jobserver-auth=X,Y"
- # Currently fallback on default job count, Maybe it should force -j1 ?
- self.assertEqual(make(output=str).strip(), "-j8")
- del os.environ["MAKEFLAGS"]
+@pytest.fixture(autouse=True)
+def make_executable(tmp_path, working_env):
+ make_exe = tmp_path / "make"
+ with open(make_exe, "w") as f:
+ f.write("#!/bin/sh\n")
+ f.write('echo "$@"')
+ os.chmod(make_exe, 0o700)
+
+ path_put_first("PATH", [tmp_path])
+
+
+def test_make_normal():
+ make = MakeExecutable("make", 8)
+ assert make(output=str).strip() == "-j8"
+ assert make("install", output=str).strip() == "-j8 install"
+
+
+def test_make_explicit():
+ make = MakeExecutable("make", 8)
+ assert make(parallel=True, output=str).strip() == "-j8"
+ assert make("install", parallel=True, output=str).strip() == "-j8 install"
+
+
+def test_make_one_job():
+ make = MakeExecutable("make", 1)
+ assert make(output=str).strip() == "-j1"
+ assert make("install", output=str).strip() == "-j1 install"
+
+
+def test_make_parallel_false():
+ make = MakeExecutable("make", 8)
+ assert make(parallel=False, output=str).strip() == "-j1"
+ assert make("install", parallel=False, output=str).strip() == "-j1 install"
+
+
+def test_make_parallel_disabled(monkeypatch):
+ make = MakeExecutable("make", 8)
+
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true")
+ assert make(output=str).strip() == "-j1"
+ assert make("install", output=str).strip() == "-j1 install"
+
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "1")
+ assert make(output=str).strip() == "-j1"
+ assert make("install", output=str).strip() == "-j1 install"
+
+ # These don't disable (false and random string)
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "false")
+ assert make(output=str).strip() == "-j8"
+ assert make("install", output=str).strip() == "-j8 install"
+
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "foobar")
+ assert make(output=str).strip() == "-j8"
+ assert make("install", output=str).strip() == "-j8 install"
+
+
+def test_make_parallel_precedence(monkeypatch):
+ make = MakeExecutable("make", 8)
+
+ # These should work
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true")
+ assert make(parallel=True, output=str).strip() == "-j1"
+ assert make("install", parallel=True, output=str).strip() == "-j1 install"
+
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "1")
+ assert make(parallel=True, output=str).strip() == "-j1"
+ assert make("install", parallel=True, output=str).strip() == "-j1 install"
+
+ # These don't disable (false and random string)
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "false")
+ assert make(parallel=True, output=str).strip() == "-j8"
+ assert make("install", parallel=True, output=str).strip() == "-j8 install"
+
+ monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "foobar")
+ assert make(parallel=True, output=str).strip() == "-j8"
+ assert make("install", parallel=True, output=str).strip() == "-j8 install"
+
+
+def test_make_jobs_env():
+ make = MakeExecutable("make", 8)
+ dump_env = {}
+ assert make(output=str, jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip() == "-j8"
+ assert dump_env["MAKE_PARALLELISM"] == "8"
+
+
+def test_make_jobserver(monkeypatch):
+ make = MakeExecutable("make", 8)
+ monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y")
+ assert make(output=str).strip() == ""
+ assert make(parallel=False, output=str).strip() == "-j1"
+
+
+def test_make_jobserver_not_supported(monkeypatch):
+ make = MakeExecutable("make", 8, supports_jobserver=False)
+ monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y")
+ # Currently fallback on default job count, Maybe it should force -j1 ?
+ assert make(output=str).strip() == "-j8"
diff --git a/lib/spack/spack/test/modules/common.py b/lib/spack/spack/test/modules/common.py
index 5b4a2d16ce8dd2..06edb71374d1b7 100644
--- a/lib/spack/spack/test/modules/common.py
+++ b/lib/spack/spack/test/modules/common.py
@@ -17,7 +17,10 @@
from spack.modules.common import UpstreamModuleIndex
from spack.spec import Spec
-pytestmark = pytest.mark.not_on_windows("does not run on windows")
+pytestmark = [
+ pytest.mark.not_on_windows("does not run on windows"),
+ pytest.mark.usefixtures("mock_modules_root"),
+]
def test_update_dictionary_extending_list():
@@ -174,6 +177,7 @@ def test_load_installed_package_not_in_repo(install_mockery, mock_fetch, monkeyp
"""Test that installed packages that have been removed are still loadable"""
spec = Spec("trivial-install-test-package").concretized()
spec.package.do_install()
+ spack.modules.module_types["tcl"](spec, "default", True).write()
def find_nothing(*args):
raise spack.repo.UnknownPackageError("Repo package access is disabled for test")
@@ -194,7 +198,6 @@ def find_nothing(*args):
def test_check_module_set_name(mutable_config):
"""Tests that modules set name are validated correctly and an error is reported if the
name we require does not exist or is reserved by the configuration."""
-
# Minimal modules.yaml config.
spack.config.set(
"modules",
diff --git a/lib/spack/spack/test/modules/conftest.py b/lib/spack/spack/test/modules/conftest.py
index 210a88a65f8ad3..12ee5c1fcd9443 100644
--- a/lib/spack/spack/test/modules/conftest.py
+++ b/lib/spack/spack/test/modules/conftest.py
@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import pathlib
+
import pytest
import spack.config
@@ -13,26 +15,15 @@
@pytest.fixture()
def modulefile_content(request):
- """Returns a function that generates the content of a module file
- as a list of lines.
- """
-
+ """Returns a function that generates the content of a module file as a list of lines."""
writer_cls = getattr(request.module, "writer_cls")
def _impl(spec_str, module_set_name="default", explicit=True):
- # Write the module file
- spec = spack.spec.Spec(spec_str)
- spec.concretize()
+ spec = spack.spec.Spec(spec_str).concretized()
generator = writer_cls(spec, module_set_name, explicit)
generator.write(overwrite=True)
-
- # Get its filename
- filename = generator.layout.filename
-
- # Retrieve the content
- with open(filename) as f:
- content = f.readlines()
- content = "".join(content).split("\n")
+ written_module = pathlib.Path(generator.layout.filename)
+ content = written_module.read_text().splitlines()
generator.remove()
return content
@@ -40,27 +31,21 @@ def _impl(spec_str, module_set_name="default", explicit=True):
@pytest.fixture()
-def factory(request):
- """Function that, given a spec string, returns an instance of the writer
- and the corresponding spec.
- """
-
- # Class of the module file writer
+def factory(request, mock_modules_root):
+ """Given a spec string, returns an instance of the writer and the corresponding spec."""
writer_cls = getattr(request.module, "writer_cls")
def _mock(spec_string, module_set_name="default", explicit=True):
- spec = spack.spec.Spec(spec_string)
- spec.concretize()
+ spec = spack.spec.Spec(spec_string).concretized()
return writer_cls(spec, module_set_name, explicit), spec
return _mock
@pytest.fixture()
-def mock_module_filename(monkeypatch, tmpdir):
- filename = str(tmpdir.join("module"))
+def mock_module_filename(monkeypatch, tmp_path):
+ filename = tmp_path / "module"
# Set for both module types so we can test both
- monkeypatch.setattr(spack.modules.lmod.LmodFileLayout, "filename", filename)
- monkeypatch.setattr(spack.modules.tcl.TclFileLayout, "filename", filename)
-
- yield filename
+ monkeypatch.setattr(spack.modules.lmod.LmodFileLayout, "filename", str(filename))
+ monkeypatch.setattr(spack.modules.tcl.TclFileLayout, "filename", str(filename))
+ yield str(filename)
diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py
index fcea6b0e794eaf..35c3f3cd97a5db 100644
--- a/lib/spack/spack/test/modules/lmod.py
+++ b/lib/spack/spack/test/modules/lmod.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
import pytest
@@ -20,7 +21,10 @@
#: Class of the writer tested in this module
writer_cls = spack.modules.lmod.LmodModulefileWriter
-pytestmark = pytest.mark.not_on_windows("does not run on windows")
+pytestmark = [
+ pytest.mark.not_on_windows("does not run on windows"),
+ pytest.mark.usefixtures("mock_modules_root"),
+]
@pytest.fixture(params=["clang@=12.0.0", "gcc@=10.2.1"])
@@ -433,3 +437,84 @@ def test_modules_no_arch(self, factory, module_configuration):
path = module.layout.filename
assert str(spec.os) not in path
+
+ def test_hide_implicits(self, module_configuration, temporary_store):
+ """Tests the addition and removal of hide command in modulerc."""
+ module_configuration("hide_implicits")
+
+ spec = spack.spec.Spec("mpileaks@2.3").concretized()
+
+ # mpileaks is defined as implicit, thus hide command should appear in modulerc
+ writer = writer_cls(spec, "default", False)
+ writer.write()
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ hide_implicit_mpileaks = f'hide_version("{writer.layout.use_name}")'
+ assert len([x for x in content if hide_implicit_mpileaks == x]) == 1
+
+ # The direct dependencies are all implicitly installed, and they should all be hidden,
+ # except for mpich, which is provider for mpi, which is in the hierarchy, and therefore
+ # can't be hidden. All other hidden modules should have a 7 character hash (the config
+ # hash_length = 0 only applies to exposed modules).
+ with open(writer.layout.filename) as f:
+ depends_statements = [line.strip() for line in f.readlines() if "depends_on" in line]
+ for dep in spec.dependencies(deptype=("link", "run")):
+ if dep.satisfies("mpi"):
+ assert not any(dep.dag_hash(7) in line for line in depends_statements)
+ else:
+ assert any(dep.dag_hash(7) in line for line in depends_statements)
+
+ # when mpileaks becomes explicit, its file name changes (hash_length = 0), meaning an
+ # extra module file is created; the old one still exists and remains hidden.
+ writer = writer_cls(spec, "default", True)
+ writer.write()
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ assert hide_implicit_mpileaks in content # old, implicit mpileaks is still hidden
+ assert f'hide_version("{writer.layout.use_name}")' not in content
+
+ # after removing both the implicit and explicit module, the modulerc file would be empty
+ # and should be removed.
+ writer_cls(spec, "default", False).remove()
+ writer_cls(spec, "default", True).remove()
+ assert not os.path.exists(writer.layout.modulerc)
+ assert not os.path.exists(writer.layout.filename)
+
+ # implicit module is removed
+ writer = writer_cls(spec, "default", False)
+ writer.write()
+ assert os.path.exists(writer.layout.filename)
+ assert os.path.exists(writer.layout.modulerc)
+ writer.remove()
+ assert not os.path.exists(writer.layout.modulerc)
+ assert not os.path.exists(writer.layout.filename)
+
+ # three versions of mpileaks are implicit
+ writer = writer_cls(spec, "default", False)
+ writer.write(overwrite=True)
+ spec_alt1 = spack.spec.Spec("mpileaks@2.2").concretized()
+ spec_alt2 = spack.spec.Spec("mpileaks@2.1").concretized()
+ writer_alt1 = writer_cls(spec_alt1, "default", False)
+ writer_alt1.write(overwrite=True)
+ writer_alt2 = writer_cls(spec_alt2, "default", False)
+ writer_alt2.write(overwrite=True)
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ hide_cmd = f'hide_version("{writer.layout.use_name}")'
+ hide_cmd_alt1 = f'hide_version("{writer_alt1.layout.use_name}")'
+ hide_cmd_alt2 = f'hide_version("{writer_alt2.layout.use_name}")'
+ assert len([x for x in content if hide_cmd == x]) == 1
+ assert len([x for x in content if hide_cmd_alt1 == x]) == 1
+ assert len([x for x in content if hide_cmd_alt2 == x]) == 1
+
+ # one version is removed
+ writer_alt1.remove()
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ assert len([x for x in content if hide_cmd == x]) == 1
+ assert len([x for x in content if hide_cmd_alt1 == x]) == 0
+ assert len([x for x in content if hide_cmd_alt2 == x]) == 1
diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py
index 3c5bb01b81035a..e2f1235db0083b 100644
--- a/lib/spack/spack/test/modules/tcl.py
+++ b/lib/spack/spack/test/modules/tcl.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
import pytest
@@ -17,7 +18,10 @@
#: Class of the writer tested in this module
writer_cls = spack.modules.tcl.TclModulefileWriter
-pytestmark = pytest.mark.not_on_windows("does not run on windows")
+pytestmark = [
+ pytest.mark.not_on_windows("does not run on windows"),
+ pytest.mark.usefixtures("mock_modules_root"),
+]
@pytest.mark.usefixtures("config", "mock_packages", "mock_module_filename")
@@ -132,9 +136,9 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
module_configuration("module_path_separator")
content = modulefile_content("module-path-separator")
- assert len([x for x in content if "append-path --delim {:} COLON {foo}" in x]) == 1
- assert len([x for x in content if "prepend-path --delim {:} COLON {foo}" in x]) == 1
- assert len([x for x in content if "remove-path --delim {:} COLON {foo}" in x]) == 1
+ assert len([x for x in content if "append-path COLON {foo}" in x]) == 1
+ assert len([x for x in content if "prepend-path COLON {foo}" in x]) == 1
+ assert len([x for x in content if "remove-path COLON {foo}" in x]) == 1
assert len([x for x in content if "append-path --delim {;} SEMICOLON {bar}" in x]) == 1
assert len([x for x in content if "prepend-path --delim {;} SEMICOLON {bar}" in x]) == 1
assert len([x for x in content if "remove-path --delim {;} SEMICOLON {bar}" in x]) == 1
@@ -149,37 +153,23 @@ def test_manpath_setup(self, modulefile_content, module_configuration):
# no manpath set by module
content = modulefile_content("mpileaks")
- assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0
+ assert len([x for x in content if "append-path MANPATH {}" in x]) == 0
# manpath set by module with prepend-path
content = modulefile_content("module-manpath-prepend")
- assert (
- len([x for x in content if "prepend-path --delim {:} MANPATH {/path/to/man}" in x])
- == 1
- )
- assert (
- len(
- [
- x
- for x in content
- if "prepend-path --delim {:} MANPATH {/path/to/share/man}" in x
- ]
- )
- == 1
- )
- assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1
+ assert len([x for x in content if "prepend-path MANPATH {/path/to/man}" in x]) == 1
+ assert len([x for x in content if "prepend-path MANPATH {/path/to/share/man}" in x]) == 1
+ assert len([x for x in content if "append-path MANPATH {}" in x]) == 1
# manpath set by module with append-path
content = modulefile_content("module-manpath-append")
- assert (
- len([x for x in content if "append-path --delim {:} MANPATH {/path/to/man}" in x]) == 1
- )
- assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1
+ assert len([x for x in content if "append-path MANPATH {/path/to/man}" in x]) == 1
+ assert len([x for x in content if "append-path MANPATH {}" in x]) == 1
# manpath set by module with setenv
content = modulefile_content("module-manpath-setenv")
assert len([x for x in content if "setenv MANPATH {/path/to/man}" in x]) == 1
- assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0
+ assert len([x for x in content if "append-path MANPATH {}" in x]) == 0
@pytest.mark.regression("29578")
def test_setenv_raw_value(self, modulefile_content, module_configuration):
@@ -292,7 +282,7 @@ def test_projections_all(self, factory, module_configuration):
projection = writer.spec.format(writer.conf.projections["all"])
assert projection in writer.layout.use_name
- def test_invalid_naming_scheme(self, factory, module_configuration, mock_module_filename):
+ def test_invalid_naming_scheme(self, factory, module_configuration):
"""Tests the evaluation of an invalid naming scheme."""
module_configuration("invalid_naming_scheme")
@@ -303,7 +293,7 @@ def test_invalid_naming_scheme(self, factory, module_configuration, mock_module_
with pytest.raises(RuntimeError):
writer.layout.use_name
- def test_invalid_token_in_env_name(self, factory, module_configuration, mock_module_filename):
+ def test_invalid_token_in_env_name(self, factory, module_configuration):
"""Tests setting environment variables with an invalid name."""
module_configuration("invalid_token_in_env_var_name")
@@ -438,7 +428,7 @@ def test_extend_context(self, modulefile_content, module_configuration):
@pytest.mark.regression("4400")
@pytest.mark.db
- def test_exclude_implicits(self, module_configuration, database):
+ def test_hide_implicits_no_arg(self, module_configuration, database):
module_configuration("exclude_implicits")
# mpileaks has been installed explicitly when setting up
@@ -456,7 +446,7 @@ def test_exclude_implicits(self, module_configuration, database):
assert writer.conf.excluded
@pytest.mark.regression("12105")
- def test_exclude_implicits_with_arg(self, module_configuration):
+ def test_hide_implicits_with_arg(self, module_configuration):
module_configuration("exclude_implicits")
# mpileaks is defined as explicit with explicit argument set on writer
@@ -498,3 +488,79 @@ def test_modules_no_arch(self, factory, module_configuration):
path = module.layout.filename
assert str(spec.os) not in path
+
+ def test_hide_implicits(self, module_configuration, temporary_store):
+ """Tests the addition and removal of hide command in modulerc."""
+ module_configuration("hide_implicits")
+
+ spec = spack.spec.Spec("mpileaks@2.3").concretized()
+
+ # mpileaks is defined as implicit, thus hide command should appear in modulerc
+ writer = writer_cls(spec, "default", False)
+ writer.write()
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ hide_implicit_mpileaks = f"module-hide --soft --hidden-loaded {writer.layout.use_name}"
+ assert len([x for x in content if hide_implicit_mpileaks == x]) == 1
+
+ # The direct dependencies are all implicit, and they should have depends-on with fixed
+ # 7 character hash, even though the config is set to hash_length = 0.
+ with open(writer.layout.filename) as f:
+ depends_statements = [line.strip() for line in f.readlines() if "depends-on" in line]
+ for dep in spec.dependencies(deptype=("link", "run")):
+ assert any(dep.dag_hash(7) in line for line in depends_statements)
+
+ # when mpileaks becomes explicit, its file name changes (hash_length = 0), meaning an
+ # extra module file is created; the old one still exists and remains hidden.
+ writer = writer_cls(spec, "default", True)
+ writer.write()
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ assert hide_implicit_mpileaks in content # old, implicit mpileaks is still hidden
+ assert f"module-hide --soft --hidden-loaded {writer.layout.use_name}" not in content
+
+ # after removing both the implicit and explicit module, the modulerc file would be empty
+ # and should be removed.
+ writer_cls(spec, "default", False).remove()
+ writer_cls(spec, "default", True).remove()
+ assert not os.path.exists(writer.layout.modulerc)
+ assert not os.path.exists(writer.layout.filename)
+
+ # implicit module is removed
+ writer = writer_cls(spec, "default", False)
+ writer.write()
+ assert os.path.exists(writer.layout.filename)
+ assert os.path.exists(writer.layout.modulerc)
+ writer.remove()
+ assert not os.path.exists(writer.layout.modulerc)
+ assert not os.path.exists(writer.layout.filename)
+
+ # three versions of mpileaks are implicit
+ writer = writer_cls(spec, "default", False)
+ writer.write(overwrite=True)
+ spec_alt1 = spack.spec.Spec("mpileaks@2.2").concretized()
+ spec_alt2 = spack.spec.Spec("mpileaks@2.1").concretized()
+ writer_alt1 = writer_cls(spec_alt1, "default", False)
+ writer_alt1.write(overwrite=True)
+ writer_alt2 = writer_cls(spec_alt2, "default", False)
+ writer_alt2.write(overwrite=True)
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ hide_cmd = f"module-hide --soft --hidden-loaded {writer.layout.use_name}"
+ hide_cmd_alt1 = f"module-hide --soft --hidden-loaded {writer_alt1.layout.use_name}"
+ hide_cmd_alt2 = f"module-hide --soft --hidden-loaded {writer_alt2.layout.use_name}"
+ assert len([x for x in content if hide_cmd == x]) == 1
+ assert len([x for x in content if hide_cmd_alt1 == x]) == 1
+ assert len([x for x in content if hide_cmd_alt2 == x]) == 1
+
+ # one version is removed
+ writer_alt1.remove()
+ assert os.path.exists(writer.layout.modulerc)
+ with open(writer.layout.modulerc) as f:
+ content = [line.strip() for line in f.readlines()]
+ assert len([x for x in content if hide_cmd == x]) == 1
+ assert len([x for x in content if hide_cmd_alt1 == x]) == 0
+ assert len([x for x in content if hide_cmd_alt2 == x]) == 1
diff --git a/lib/spack/spack/test/oci/image.py b/lib/spack/spack/test/oci/image.py
new file mode 100644
index 00000000000000..b074cc679af0a6
--- /dev/null
+++ b/lib/spack/spack/test/oci/image.py
@@ -0,0 +1,105 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import re
+
+import pytest
+
+import spack.spec
+from spack.oci.image import Digest, ImageReference, default_tag, tag
+
+
+@pytest.mark.parametrize(
+ "image_ref, expected",
+ [
+ (
+ f"example.com:1234/a/b/c:tag@sha256:{'a'*64}",
+ ("example.com:1234", "a/b/c", "tag", Digest.from_sha256("a" * 64)),
+ ),
+ ("example.com:1234/a/b/c:tag", ("example.com:1234", "a/b/c", "tag", None)),
+ ("example.com:1234/a/b/c", ("example.com:1234", "a/b/c", "latest", None)),
+ (
+ f"example.com:1234/a/b/c@sha256:{'a'*64}",
+ ("example.com:1234", "a/b/c", "latest", Digest.from_sha256("a" * 64)),
+ ),
+ # ipv4
+ ("1.2.3.4:1234/a/b/c:tag", ("1.2.3.4:1234", "a/b/c", "tag", None)),
+ # ipv6
+ ("[2001:db8::1]:1234/a/b/c:tag", ("[2001:db8::1]:1234", "a/b/c", "tag", None)),
+ # Follow docker rules for parsing
+ ("ubuntu:22.04", ("index.docker.io", "library/ubuntu", "22.04", None)),
+ ("myname/myimage:abc", ("index.docker.io", "myname/myimage", "abc", None)),
+ ("myname:1234/myimage:abc", ("myname:1234", "myimage", "abc", None)),
+ ("localhost/myimage:abc", ("localhost", "myimage", "abc", None)),
+ ("localhost:1234/myimage:abc", ("localhost:1234", "myimage", "abc", None)),
+ (
+ "example.com/UPPERCASE/lowercase:AbC",
+ ("example.com", "uppercase/lowercase", "AbC", None),
+ ),
+ ],
+)
+def test_name_parsing(image_ref, expected):
+ x = ImageReference.from_string(image_ref)
+ assert (x.domain, x.name, x.tag, x.digest) == expected
+
+
+@pytest.mark.parametrize(
+ "image_ref",
+ [
+ # wrong order of tag and sha
+ f"example.com:1234/a/b/c@sha256:{'a'*64}:tag",
+ # double tag
+ "example.com:1234/a/b/c:tag:tag",
+ # empty tag
+ "example.com:1234/a/b/c:",
+ # empty digest
+ "example.com:1234/a/b/c@sha256:",
+ # unsupported digest algorithm
+ f"example.com:1234/a/b/c@sha512:{'a'*128}",
+ # invalid digest length
+ f"example.com:1234/a/b/c@sha256:{'a'*63}",
+ # whitespace
+ "example.com:1234/a/b/c :tag",
+ "example.com:1234/a/b/c: tag",
+ "example.com:1234/a/b/c:tag ",
+ " example.com:1234/a/b/c:tag",
+ # broken ipv4
+ "1.2..3:1234/a/b/c:tag",
+ ],
+)
+def test_parsing_failure(image_ref):
+ with pytest.raises(ValueError):
+ ImageReference.from_string(image_ref)
+
+
+def test_digest():
+ valid_digest = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"
+
+ # Test string roundtrip
+ assert str(Digest.from_string(f"sha256:{valid_digest}")) == f"sha256:{valid_digest}"
+
+ # Invalid digest length
+ with pytest.raises(ValueError):
+ Digest.from_string("sha256:abcdef")
+
+ # Missing algorithm
+ with pytest.raises(ValueError):
+ Digest.from_string(valid_digest)
+
+
+@pytest.mark.parametrize(
+ "spec",
+ [
+ # Standard case
+ "short-name@=1.2.3",
+ # Unsupported characters in git version
+ f"git-version@{1:040x}=develop",
+ # Too long of a name
+ f"{'too-long':x<256}@=1.2.3",
+ ],
+)
+def test_default_tag(spec: str):
+ """Make sure that computed image tags are valid."""
+ assert re.fullmatch(tag, default_tag(spack.spec.Spec(spec)))
diff --git a/lib/spack/spack/test/oci/integration_test.py b/lib/spack/spack/test/oci/integration_test.py
new file mode 100644
index 00000000000000..b2f9366c3a5fbf
--- /dev/null
+++ b/lib/spack/spack/test/oci/integration_test.py
@@ -0,0 +1,148 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# These are slow integration tests that do concretization, install, tarballing
+# and compression. They still use an in-memory OCI registry.
+
+import hashlib
+import json
+import os
+from contextlib import contextmanager
+
+import spack.oci.opener
+from spack.binary_distribution import gzip_compressed_tarfile
+from spack.main import SpackCommand
+from spack.oci.image import Digest, ImageReference, default_config, default_manifest
+from spack.oci.oci import blob_exists, get_manifest_and_config, upload_blob, upload_manifest
+from spack.test.oci.mock_registry import DummyServer, InMemoryOCIRegistry, create_opener
+
+buildcache = SpackCommand("buildcache")
+mirror = SpackCommand("mirror")
+
+
+@contextmanager
+def oci_servers(*servers: DummyServer):
+ old_opener = spack.oci.opener.urlopen
+ spack.oci.opener.urlopen = create_opener(*servers).open
+ yield
+ spack.oci.opener.urlopen = old_opener
+
+
+def test_buildcache_push_command(mutable_database, disable_parallel_buildcache_push):
+ with oci_servers(InMemoryOCIRegistry("example.com")):
+ mirror("add", "oci-test", "oci://example.com/image")
+
+ # Push the package(s) to the OCI registry
+ buildcache("push", "--update-index", "oci-test", "mpileaks^mpich")
+
+ # Remove mpileaks from the database
+ matches = mutable_database.query_local("mpileaks^mpich")
+ assert len(matches) == 1
+ spec = matches[0]
+ spec.package.do_uninstall()
+
+ # Reinstall mpileaks from the OCI registry
+ buildcache("install", "--unsigned", "mpileaks^mpich")
+
+ # Now it should be installed again
+ assert spec.installed
+
+ # And let's check that the bin/mpileaks executable is there
+ assert os.path.exists(os.path.join(spec.prefix, "bin", "mpileaks"))
+
+
+def test_buildcache_push_with_base_image_command(
+ mutable_database, tmpdir, disable_parallel_buildcache_push
+):
+ """Test that we can push a package with a base image to an OCI registry.
+
+ This test is a bit involved, cause we have to create a small base image."""
+
+ registry_src = InMemoryOCIRegistry("src.example.com")
+ registry_dst = InMemoryOCIRegistry("dst.example.com")
+
+ base_image = ImageReference.from_string("src.example.com/my-base-image:latest")
+
+ with oci_servers(registry_src, registry_dst):
+ mirror("add", "oci-test", "oci://dst.example.com/image")
+
+ # TODO: simplify creation of images...
+ # We create a rootfs.tar.gz, a config file and a manifest file,
+ # and upload those.
+
+ config, manifest = default_config(architecture="amd64", os="linux"), default_manifest()
+
+ # Create a small rootfs
+ rootfs = tmpdir.join("rootfs")
+ rootfs.ensure(dir=True)
+ rootfs.join("bin").ensure(dir=True)
+ rootfs.join("bin", "sh").ensure(file=True)
+
+ # Create a tarball of it.
+ tarball = tmpdir.join("base.tar.gz")
+ with gzip_compressed_tarfile(tarball) as (tar, tar_gz_checksum, tar_checksum):
+ tar.add(rootfs, arcname=".")
+
+ tar_gz_digest = Digest.from_sha256(tar_gz_checksum.hexdigest())
+ tar_digest = Digest.from_sha256(tar_checksum.hexdigest())
+
+ # Save the config file
+ config["rootfs"]["diff_ids"] = [str(tar_digest)]
+ config_file = tmpdir.join("config.json")
+ with open(config_file, "w") as f:
+ f.write(json.dumps(config))
+
+ config_digest = Digest.from_sha256(
+ hashlib.sha256(open(config_file, "rb").read()).hexdigest()
+ )
+
+ # Register the layer in the manifest
+ manifest["layers"].append(
+ {
+ "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
+ "digest": str(tar_gz_digest),
+ "size": tarball.size(),
+ }
+ )
+ manifest["config"]["digest"] = str(config_digest)
+ manifest["config"]["size"] = config_file.size()
+
+ # Upload the layer and config file
+ upload_blob(base_image, tarball, tar_gz_digest)
+ upload_blob(base_image, config_file, config_digest)
+
+ # Upload the manifest
+ upload_manifest(base_image, manifest)
+
+ # END TODO
+
+ # Finally... use it as a base image
+ buildcache("push", "--base-image", str(base_image), "oci-test", "mpileaks^mpich")
+
+ # Figure out what tag was produced
+ tag = next(tag for _, tag in registry_dst.manifests.keys() if tag.startswith("mpileaks-"))
+ assert tag is not None
+
+ # Fetch the manifest and config
+ dst_image = ImageReference.from_string(f"dst.example.com/image:{tag}")
+ retrieved_manifest, retrieved_config = get_manifest_and_config(dst_image)
+
+ # Check that the base image layer is first.
+ assert retrieved_manifest["layers"][0]["digest"] == str(tar_gz_digest)
+ assert retrieved_config["rootfs"]["diff_ids"][0] == str(tar_digest)
+
+ # And also check that we have layers for each link-run dependency
+ matches = mutable_database.query_local("mpileaks^mpich")
+ assert len(matches) == 1
+ spec = matches[0]
+
+ num_runtime_deps = len(list(spec.traverse(root=True, deptype=("link", "run"))))
+
+ # One base layer + num_runtime_deps
+ assert len(retrieved_manifest["layers"]) == 1 + num_runtime_deps
+
+ # And verify that all layers including the base layer are present
+ for layer in retrieved_manifest["layers"]:
+ assert blob_exists(dst_image, digest=Digest.from_string(layer["digest"]))
diff --git a/lib/spack/spack/test/oci/mock_registry.py b/lib/spack/spack/test/oci/mock_registry.py
new file mode 100644
index 00000000000000..ec3e85c333ab7e
--- /dev/null
+++ b/lib/spack/spack/test/oci/mock_registry.py
@@ -0,0 +1,410 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+import base64
+import email.message
+import hashlib
+import io
+import json
+import re
+import urllib.error
+import urllib.parse
+import urllib.request
+import uuid
+from typing import Callable, Dict, List, Optional, Pattern, Tuple
+from urllib.request import Request
+
+from spack.oci.image import Digest
+from spack.oci.opener import OCIAuthHandler
+
+
+class MockHTTPResponse(io.IOBase):
+ """This is a mock HTTP response, which implements part of http.client.HTTPResponse"""
+
+ def __init__(self, status, reason, headers=None, body=None):
+ self.msg = None
+ self.version = 11
+ self.url = None
+ self.headers = email.message.EmailMessage()
+ self.status = status
+ self.code = status
+ self.reason = reason
+ self.debuglevel = 0
+ self._body = body
+
+ if headers is not None:
+ for key, value in headers.items():
+ self.headers[key] = value
+
+ @classmethod
+ def with_json(cls, status, reason, headers=None, body=None):
+ """Create a mock HTTP response with JSON string as body"""
+ body = io.BytesIO(json.dumps(body).encode("utf-8"))
+ return cls(status, reason, headers, body)
+
+ def read(self, *args, **kwargs):
+ return self._body.read(*args, **kwargs)
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
+
+ def getheaders(self):
+ return self.headers.items()
+
+ def fileno(self):
+ return 0
+
+ def getcode(self):
+ return self.status
+
+ def info(self):
+ return self.headers
+
+
+class MiddlewareError(Exception):
+ """Thrown in a handler to return a response early."""
+
+ def __init__(self, response: MockHTTPResponse):
+ self.response = response
+
+
+class Router:
+ """This class is a small router for requests to the OCI registry.
+
+ It is used to dispatch requests to a handler, and middleware can be
+ used to transform requests, as well as return responses early
+ (e.g. for authentication)."""
+
+ def __init__(self) -> None:
+ self.routes: List[Tuple[str, Pattern, Callable]] = []
+ self.middleware: List[Callable[[Request], Request]] = []
+
+ def handle(self, req: Request) -> MockHTTPResponse:
+ """Dispatch a request to a handler."""
+ result = urllib.parse.urlparse(req.full_url)
+
+ # Apply middleware
+ try:
+ for handler in self.middleware:
+ req = handler(req)
+ except MiddlewareError as e:
+ return e.response
+
+ for method, path_regex, handler in self.routes:
+ if method != req.get_method():
+ continue
+ match = re.fullmatch(path_regex, result.path)
+ if not match:
+ continue
+
+ return handler(req, **match.groupdict())
+
+ return MockHTTPResponse(404, "Not found")
+
+ def register(self, method, path: str, handler: Callable):
+ self.routes.append((method, re.compile(path), handler))
+
+ def add_middleware(self, handler: Callable[[Request], Request]):
+ self.middleware.append(handler)
+
+
+class DummyServer:
+ def __init__(self, domain: str) -> None:
+ # The domain of the server, e.g. "registry.example.com"
+ self.domain = domain
+
+ # List of (method, url) tuples
+ self.requests: List[Tuple[str, str]] = []
+
+ # Dispatches requests to handlers
+ self.router = Router()
+
+ # Always install a request logger
+ self.router.add_middleware(self.log_request)
+
+ def handle(self, req: Request) -> MockHTTPResponse:
+ return self.router.handle(req)
+
+ def log_request(self, req: Request):
+ path = urllib.parse.urlparse(req.full_url).path
+ self.requests.append((req.get_method(), path))
+ return req
+
+ def clear_log(self):
+ self.requests = []
+
+
+class InMemoryOCIRegistry(DummyServer):
+ """This implements the basic OCI registry API, but in memory.
+
+ It supports two types of blob uploads:
+ 1. POST + PUT: the client first starts a session with POST, then does a large PUT request
+ 2. POST: the client does a single POST request with the whole blob
+
+ Option 2 is not supported by all registries, so we allow disabling it,
+ with allow_single_post=False.
+
+ A third option is to use the chunked upload, but this is not implemented here, because
+ it's typically a major performance hit in upload speed, so we're not using it in Spack."""
+
+ def __init__(self, domain: str, allow_single_post: bool = True) -> None:
+ super().__init__(domain)
+ self.router.register("GET", r"/v2/", self.index)
+ self.router.register("HEAD", r"/v2/(?P<name>.+)/blobs/(?P<digest>.+)", self.head_blob)
+ self.router.register("POST", r"/v2/(?P<name>.+)/blobs/uploads/", self.start_session)
+ self.router.register("PUT", r"/upload", self.put_session)
+ self.router.register("PUT", r"/v2/(?P<name>.+)/manifests/(?P<ref>.+)", self.put_manifest)
+ self.router.register("GET", r"/v2/(?P<name>.+)/manifests/(?P<ref>.+)", self.get_manifest)
+ self.router.register("GET", r"/v2/(?P<name>.+)/blobs/(?P<digest>.+)", self.get_blob)
+ self.router.register("GET", r"/v2/(?P<name>.+)/tags/list", self.list_tags)
+
+ # If True, allow single POST upload, not all registries support this
+ self.allow_single_post = allow_single_post
+
+ # Used for POST + PUT upload. This is a map from session ID to image name
+ self.sessions: Dict[str, str] = {}
+
+ # Set of sha256:... digests that are known to the registry
+ self.blobs: Dict[str, bytes] = {}
+
+ # Map from (name, tag) to manifest
+ self.manifests: Dict[Tuple[str, str], Dict] = {}
+
+ def index(self, req: Request):
+ return MockHTTPResponse.with_json(200, "OK", body={})
+
+ def head_blob(self, req: Request, name: str, digest: str):
+ if digest in self.blobs:
+ return MockHTTPResponse(200, "OK", headers={"Content-Length": "1234"})
+ return MockHTTPResponse(404, "Not found")
+
+ def get_blob(self, req: Request, name: str, digest: str):
+ if digest in self.blobs:
+ return MockHTTPResponse(200, "OK", body=io.BytesIO(self.blobs[digest]))
+ return MockHTTPResponse(404, "Not found")
+
+ def start_session(self, req: Request, name: str):
+ id = str(uuid.uuid4())
+ self.sessions[id] = name
+
+ # Check if digest is present (single monolithic upload)
+ result = urllib.parse.urlparse(req.full_url)
+ query = urllib.parse.parse_qs(result.query)
+
+ if self.allow_single_post and "digest" in query:
+ return self.handle_upload(
+ req, name=name, digest=Digest.from_string(query["digest"][0])
+ )
+
+ return MockHTTPResponse(202, "Accepted", headers={"Location": f"/upload?uuid={id}"})
+
+ def put_session(self, req: Request):
+ # Do the upload.
+ result = urllib.parse.urlparse(req.full_url)
+ query = urllib.parse.parse_qs(result.query)
+
+ # uuid param should be preserved, and digest should be present
+ assert "uuid" in query and len(query["uuid"]) == 1
+ assert "digest" in query and len(query["digest"]) == 1
+
+ id = query["uuid"][0]
+ assert id in self.sessions
+
+ name, digest = self.sessions[id], Digest.from_string(query["digest"][0])
+
+ response = self.handle_upload(req, name=name, digest=digest)
+
+ # End the session
+ del self.sessions[id]
+
+ return response
+
+ def put_manifest(self, req: Request, name: str, ref: str):
+ # In requests, Python runs header.capitalize().
+ content_type = req.get_header("Content-type")
+ assert content_type in (
+ "application/vnd.oci.image.manifest.v1+json",
+ "application/vnd.oci.image.index.v1+json",
+ )
+
+ index_or_manifest = json.loads(self._require_data(req))
+
+ # Verify that we have all blobs (layers for manifest, manifests for index)
+ if content_type == "application/vnd.oci.image.manifest.v1+json":
+ for layer in index_or_manifest["layers"]:
+ assert layer["digest"] in self.blobs, "Missing blob while uploading manifest"
+
+ else:
+ for manifest in index_or_manifest["manifests"]:
+ assert (
+ name,
+ manifest["digest"],
+ ) in self.manifests, "Missing manifest while uploading index"
+
+ self.manifests[(name, ref)] = index_or_manifest
+
+ return MockHTTPResponse(
+ 201, "Created", headers={"Location": f"/v2/{name}/manifests/{ref}"}
+ )
+
+ def get_manifest(self, req: Request, name: str, ref: str):
+ if (name, ref) not in self.manifests:
+ return MockHTTPResponse(404, "Not found")
+
+ manifest_or_index = self.manifests[(name, ref)]
+
+ return MockHTTPResponse.with_json(
+ 200,
+ "OK",
+ headers={"Content-type": manifest_or_index["mediaType"]},
+ body=manifest_or_index,
+ )
+
+ def _require_data(self, req: Request) -> bytes:
+ """Extract request.data; its type remains a mystery"""
+ assert req.data is not None
+
+ if hasattr(req.data, "read"):
+ return req.data.read()
+ elif isinstance(req.data, bytes):
+ return req.data
+
+ raise ValueError("req.data should be bytes or have a read() method")
+
+ def handle_upload(self, req: Request, name: str, digest: Digest):
+ """Verify the digest, save the blob, return created status"""
+ data = self._require_data(req)
+ assert hashlib.sha256(data).hexdigest() == digest.digest
+ self.blobs[str(digest)] = data
+ return MockHTTPResponse(201, "Created", headers={"Location": f"/v2/{name}/blobs/{digest}"})
+
+ def list_tags(self, req: Request, name: str):
+ # List all tags, exclude digests.
+ tags = [_tag for _name, _tag in self.manifests.keys() if _name == name and ":" not in _tag]
+ tags.sort()
+ return MockHTTPResponse.with_json(200, "OK", body={"tags": tags})
+
+
+class DummyServerUrllibHandler(urllib.request.BaseHandler):
+ """Glue between urllib and DummyServer, routing requests to
+ the correct mock server for a given domain."""
+
+ def __init__(self) -> None:
+ self.servers: Dict[str, DummyServer] = {}
+
+ def add_server(self, domain: str, api: DummyServer):
+ self.servers[domain] = api
+ return self
+
+ def https_open(self, req: Request):
+ domain = urllib.parse.urlparse(req.full_url).netloc
+
+ if domain not in self.servers:
+ return MockHTTPResponse(404, "Not found")
+
+ return self.servers[domain].handle(req)
+
+
+class InMemoryOCIRegistryWithAuth(InMemoryOCIRegistry):
+ """This is another in-memory OCI registry, but it requires authentication."""
+
+ def __init__(
+ self, domain, token: Optional[str], realm: str, allow_single_post: bool = True
+ ) -> None:
+ super().__init__(domain, allow_single_post)
+ self.token = token # token to accept
+ self.realm = realm # url to the authorization server
+ self.router.add_middleware(self.authenticate)
+
+ def authenticate(self, req: Request):
+ # Any request needs an Authorization header
+ authorization = req.get_header("Authorization")
+
+ if authorization is None:
+ raise MiddlewareError(self.unauthorized())
+
+ # Ensure that the token is correct
+ assert authorization.startswith("Bearer ")
+ token = authorization[7:]
+
+ if token != self.token:
+ raise MiddlewareError(self.unauthorized())
+
+ return req
+
+ def unauthorized(self):
+ return MockHTTPResponse(
+ 401,
+ "Unauthorized",
+ {
+ "www-authenticate": f'Bearer realm="{self.realm}",'
+ f'service="{self.domain}",'
+ 'scope="repository:spack-registry:pull,push"'
+ },
+ )
+
+
+class MockBearerTokenServer(DummyServer):
+ """Simulates a basic server that hands out bearer tokens
+ at the /login endpoint for the following services:
+ public.example.com, which doesn't require Basic Auth
+ private.example.com, which requires Basic Auth, with user:pass
+ """
+
+ def __init__(self, domain: str) -> None:
+ super().__init__(domain)
+ self.router.register("GET", "/login", self.login)
+
+ def login(self, req: Request):
+ url = urllib.parse.urlparse(req.full_url)
+ query_params = urllib.parse.parse_qs(url.query)
+
+ # Verify query params, from the www-authenticate header
+ assert query_params["client_id"] == ["spack"]
+ assert len(query_params["service"]) == 1
+ assert query_params["scope"] == ["repository:spack-registry:pull,push"]
+
+ service = query_params["service"][0]
+
+ if service == "public.example.com":
+ return self.public_auth(req)
+ elif service == "private.example.com":
+ return self.private_auth(req)
+
+ return MockHTTPResponse(404, "Not found")
+
+ def public_auth(self, req: Request):
+ # No need to login with username and password for the public registry
+ assert req.get_header("Authorization") is None
+ return MockHTTPResponse.with_json(200, "OK", body={"token": "public_token"})
+
+ def private_auth(self, req: Request):
+ # For the private registry we need to login with username and password
+ auth_value = req.get_header("Authorization")
+
+ if (
+ auth_value is None
+ or not auth_value.startswith("Basic ")
+ or base64.b64decode(auth_value[6:]) != b"user:pass"
+ ):
+ return MockHTTPResponse(401, "Unauthorized")
+
+ return MockHTTPResponse.with_json(200, "OK", body={"token": "private_token"})
+
+
+def create_opener(*servers: DummyServer, credentials_provider=None):
+ """Creates a mock opener, that can be used to fake requests to a list
+ of servers."""
+ opener = urllib.request.OpenerDirector()
+ handler = DummyServerUrllibHandler()
+ for server in servers:
+ handler.add_server(server.domain, server)
+ opener.add_handler(handler)
+ opener.add_handler(urllib.request.HTTPDefaultErrorHandler())
+ opener.add_handler(urllib.request.HTTPErrorProcessor())
+ if credentials_provider is not None:
+ opener.add_handler(OCIAuthHandler(credentials_provider))
+ return opener
diff --git a/lib/spack/spack/test/oci/urlopen.py b/lib/spack/spack/test/oci/urlopen.py
new file mode 100644
index 00000000000000..16efdfe12d9673
--- /dev/null
+++ b/lib/spack/spack/test/oci/urlopen.py
@@ -0,0 +1,672 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+import hashlib
+import json
+import urllib.error
+import urllib.parse
+import urllib.request
+from urllib.request import Request
+
+import pytest
+
+import spack.mirror
+from spack.oci.image import Digest, ImageReference, default_config, default_manifest
+from spack.oci.oci import (
+ copy_missing_layers,
+ get_manifest_and_config,
+ image_from_mirror,
+ upload_blob,
+ upload_manifest,
+)
+from spack.oci.opener import (
+ Challenge,
+ RealmServiceScope,
+ UsernamePassword,
+ credentials_from_mirrors,
+ default_retry,
+ get_bearer_challenge,
+ parse_www_authenticate,
+)
+from spack.test.oci.mock_registry import (
+ DummyServer,
+ DummyServerUrllibHandler,
+ InMemoryOCIRegistry,
+ InMemoryOCIRegistryWithAuth,
+ MiddlewareError,
+ MockBearerTokenServer,
+ MockHTTPResponse,
+ create_opener,
+)
+
+
+def test_parse_www_authenticate():
+ """Test parsing of valid WWW-Authenticate header, check whether it's
+ decomposed into a list of challenges with correct scheme and parameters
+ according to RFC 7235 section 4.1"""
+ www_authenticate = 'Bearer realm="https://spack.io/authenticate",service="spack-registry",scope="repository:spack-registry:pull,push"'
+ assert parse_www_authenticate(www_authenticate) == [
+ Challenge(
+ "Bearer",
+ [
+ ("realm", "https://spack.io/authenticate"),
+ ("service", "spack-registry"),
+ ("scope", "repository:spack-registry:pull,push"),
+ ],
+ )
+ ]
+
+ assert parse_www_authenticate("Bearer") == [Challenge("Bearer")]
+ assert parse_www_authenticate("MethodA, MethodB,MethodC") == [
+ Challenge("MethodA"),
+ Challenge("MethodB"),
+ Challenge("MethodC"),
+ ]
+
+ assert parse_www_authenticate(
+ 'Digest realm="Digest Realm", nonce="1234567890", algorithm=MD5, qop="auth"'
+ ) == [
+ Challenge(
+ "Digest",
+ [
+ ("realm", "Digest Realm"),
+ ("nonce", "1234567890"),
+ ("algorithm", "MD5"),
+ ("qop", "auth"),
+ ],
+ )
+ ]
+
+ assert parse_www_authenticate(
+ r'Newauth realm="apps", type=1, title="Login to \"apps\"", Basic realm="simple"'
+ ) == [
+ Challenge("Newauth", [("realm", "apps"), ("type", "1"), ("title", 'Login to "apps"')]),
+ Challenge("Basic", [("realm", "simple")]),
+ ]
+
+
+@pytest.mark.parametrize(
+ "invalid_str",
+ [
+ # Not comma separated
+ "SchemeA SchemeB SchemeC",
+ # Unexpected eof
+ "SchemeA, SchemeB, SchemeC, ",
+ # Invalid auth param or scheme
+ r"Scheme x=y, ",
+ # Unexpected eof
+ "Scheme key=",
+ # Invalid token
+ r'"Bearer"',
+ # Invalid token
+ r'Scheme"xyz"',
+ # No auth param
+ r"Scheme ",
+ ],
+)
+def test_invalid_www_authenticate(invalid_str):
+ with pytest.raises(ValueError):
+ parse_www_authenticate(invalid_str)
+
+
+def test_get_bearer_challenge():
+ """Test extracting Bearer challenge from a list of challenges"""
+
+ # Only an incomplete bearer challenge, missing service and scope, not usable.
+ assert (
+ get_bearer_challenge(
+ [
+ Challenge("Bearer", [("realm", "https://spack.io/authenticate")]),
+ Challenge("Basic", [("realm", "simple")]),
+ Challenge(
+ "Digest",
+ [
+ ("realm", "Digest Realm"),
+ ("nonce", "1234567890"),
+ ("algorithm", "MD5"),
+ ("qop", "auth"),
+ ],
+ ),
+ ]
+ )
+ is None
+ )
+
+ # Multiple challenges, should pick the bearer one.
+ assert get_bearer_challenge(
+ [
+ Challenge(
+ "Dummy",
+ [("realm", "https://example.com/"), ("service", "service"), ("scope", "scope")],
+ ),
+ Challenge(
+ "Bearer",
+ [
+ ("realm", "https://spack.io/authenticate"),
+ ("service", "spack-registry"),
+ ("scope", "repository:spack-registry:pull,push"),
+ ],
+ ),
+ ]
+ ) == RealmServiceScope(
+ "https://spack.io/authenticate", "spack-registry", "repository:spack-registry:pull,push"
+ )
+
+
+@pytest.mark.parametrize(
+ "image_ref,token",
+ [
+ ("public.example.com/spack-registry:latest", "public_token"),
+ ("private.example.com/spack-registry:latest", "private_token"),
+ ],
+)
+def test_automatic_oci_authentication(image_ref, token):
+ image = ImageReference.from_string(image_ref)
+
+ def credentials_provider(domain: str):
+ return UsernamePassword("user", "pass") if domain == "private.example.com" else None
+
+ opener = create_opener(
+ InMemoryOCIRegistryWithAuth(
+ image.domain, token=token, realm="https://auth.example.com/login"
+ ),
+ MockBearerTokenServer("auth.example.com"),
+ credentials_provider=credentials_provider,
+ )
+
+ # Run this twice, as it triggers a code path that caches the bearer token
+ assert opener.open(image.endpoint()).status == 200
+ assert opener.open(image.endpoint()).status == 200
+
+
+def test_wrong_credentials():
+ """Test that when wrong credentials are rejected by the auth server, we
+ get a 401 error."""
+ credentials_provider = lambda domain: UsernamePassword("wrong", "wrong")
+ image = ImageReference.from_string("private.example.com/image")
+ opener = create_opener(
+ InMemoryOCIRegistryWithAuth(
+ image.domain, token="something", realm="https://auth.example.com/login"
+ ),
+ MockBearerTokenServer("auth.example.com"),
+ credentials_provider=credentials_provider,
+ )
+
+ with pytest.raises(urllib.error.HTTPError) as e:
+ opener.open(image.endpoint())
+
+ assert e.value.getcode() == 401
+
+
+def test_wrong_bearer_token_returned_by_auth_server():
+ """When the auth server returns a wrong bearer token, we should get a 401 error
+ when the request we attempt fails. We shouldn't go in circles getting a 401 from
+ the registry, then a non-working token from the auth server, then a 401 from the
+ registry, etc."""
+ image = ImageReference.from_string("private.example.com/image")
+ opener = create_opener(
+ InMemoryOCIRegistryWithAuth(
+ image.domain,
+ token="other_token_than_token_server_provides",
+ realm="https://auth.example.com/login",
+ ),
+ MockBearerTokenServer("auth.example.com"),
+ credentials_provider=lambda domain: UsernamePassword("user", "pass"),
+ )
+
+ with pytest.raises(urllib.error.HTTPError) as e:
+ opener.open(image.endpoint())
+
+ assert e.value.getcode() == 401
+
+
+class TrivialAuthServer(DummyServer):
+ """A trivial auth server that hands out a bearer token at GET /login."""
+
+ def __init__(self, domain: str, token: str) -> None:
+ super().__init__(domain)
+ self.router.register("GET", "/login", self.login)
+ self.token = token
+
+ def login(self, req: Request):
+ return MockHTTPResponse.with_json(200, "OK", body={"token": self.token})
+
+
+def test_registry_with_short_lived_bearer_tokens():
+ """An issued bearer token is mostly opaque to the client, but typically
+ it embeds a short-lived expiration date. To speed up requests to a registry,
+ it's good not to authenticate on every request, but to cache the bearer token,
+ however: we have to deal with the case of an expired bearer token.
+
+ Here we test that when the bearer token expires, we authenticate again, and
+ when the token is still valid, we don't re-authenticate."""
+
+ image = ImageReference.from_string("private.example.com/image")
+ credentials_provider = lambda domain: UsernamePassword("user", "pass")
+
+ auth_server = TrivialAuthServer("auth.example.com", token="token")
+ registry_server = InMemoryOCIRegistryWithAuth(
+ image.domain, token="token", realm="https://auth.example.com/login"
+ )
+ urlopen = create_opener(
+ registry_server, auth_server, credentials_provider=credentials_provider
+ ).open
+
+ # First request, should work with token "token"
+ assert urlopen(image.endpoint()).status == 200
+
+ # Invalidate the token on the registry
+ registry_server.token = "new_token"
+ auth_server.token = "new_token"
+
+ # Second request: reusing the cached token should fail
+ # but in the background we will get a new token from the auth server
+ assert urlopen(image.endpoint()).status == 200
+
+ # Subsequent requests should work with the same token, let's do two more
+ assert urlopen(image.endpoint()).status == 200
+ assert urlopen(image.endpoint()).status == 200
+
+ # And finally, we should see that we've issued exactly two requests to the auth server
+ assert auth_server.requests == [("GET", "/login"), ("GET", "/login")]
+
+ # Whereas we've done more requests to the registry
+ assert registry_server.requests == [
+ ("GET", "/v2/"), # 1: without bearer token
+ ("GET", "/v2/"), # 2: retry with bearer token
+ ("GET", "/v2/"), # 3: with incorrect bearer token
+ ("GET", "/v2/"), # 4: retry with new bearer token
+ ("GET", "/v2/"), # 5: with recycled correct bearer token
+ ("GET", "/v2/"), # 6: with recycled correct bearer token
+ ]
+
+
+class InMemoryRegistryWithUnsupportedAuth(InMemoryOCIRegistry):
+ """A registry that does set a WWW-Authenticate header, but
+ with a challenge we don't support."""
+
+ def __init__(self, domain: str, allow_single_post: bool = True, www_authenticate=None) -> None:
+ self.www_authenticate = www_authenticate
+ super().__init__(domain, allow_single_post)
+ self.router.add_middleware(self.unsupported_auth_method)
+
+ def unsupported_auth_method(self, req: Request):
+ headers = {}
+ if self.www_authenticate:
+ headers["WWW-Authenticate"] = self.www_authenticate
+ raise MiddlewareError(MockHTTPResponse(401, "Unauthorized", headers=headers))
+
+
+@pytest.mark.parametrize(
+ "www_authenticate,error_message",
+ [
+ # missing service and scope
+ ('Bearer realm="https://auth.example.com/login"', "unsupported authentication scheme"),
+ # we don't do basic auth
+ ('Basic realm="https://auth.example.com/login"', "unsupported authentication scheme"),
+ # multiple unsupported challenges
+ (
+ "CustomChallenge method=unsupported, OtherChallenge method=x,param=y",
+ "unsupported authentication scheme",
+ ),
+ # no challenge
+ (None, "missing WWW-Authenticate header"),
+ # malformed challenge, missing quotes
+ ("Bearer realm=https://auth.example.com", "malformed WWW-Authenticate header"),
+ # http instead of https
+ ('Bearer realm="http://auth.example.com",scope=x,service=y', "insecure http connection"),
+ ],
+)
+def test_auth_method_we_cannot_handle_is_error(www_authenticate, error_message):
+ # We can only handle WWW-Authenticate with a Bearer challenge
+ image = ImageReference.from_string("private.example.com/image")
+ urlopen = create_opener(
+ InMemoryRegistryWithUnsupportedAuth(image.domain, www_authenticate=www_authenticate),
+ TrivialAuthServer("auth.example.com", token="token"),
+ credentials_provider=lambda domain: UsernamePassword("user", "pass"),
+ ).open
+
+ with pytest.raises(urllib.error.HTTPError, match=error_message) as e:
+ urlopen(image.endpoint())
+ assert e.value.getcode() == 401
+
+
+# Parametrize over single POST vs POST + PUT.
+@pytest.mark.parametrize("client_single_request", [True, False])
+@pytest.mark.parametrize("server_single_request", [True, False])
+def test_oci_registry_upload(tmpdir, client_single_request, server_single_request):
+ opener = urllib.request.OpenerDirector()
+ opener.add_handler(
+ DummyServerUrllibHandler().add_server(
+ "example.com", InMemoryOCIRegistry(server_single_request)
+ )
+ )
+ opener.add_handler(urllib.request.HTTPDefaultErrorHandler())
+ opener.add_handler(urllib.request.HTTPErrorProcessor())
+
+ # Create a small blob
+ blob = tmpdir.join("blob")
+ blob.write("Hello world!")
+
+ image = ImageReference.from_string("example.com/image:latest")
+ digest = Digest.from_sha256(hashlib.sha256(blob.read_binary()).hexdigest())
+
+ # Set small file size larger than the blob iff we're doing single request
+ small_file_size = 1024 if client_single_request else 0
+
+ # Upload once, should actually upload
+ assert upload_blob(
+ ref=image,
+ file=blob.strpath,
+ digest=digest,
+ small_file_size=small_file_size,
+ _urlopen=opener.open,
+ )
+
+ # Second time should exit as it exists
+ assert not upload_blob(
+ ref=image,
+ file=blob.strpath,
+ digest=digest,
+ small_file_size=small_file_size,
+ _urlopen=opener.open,
+ )
+
+ # Force upload should upload again
+ assert upload_blob(
+ ref=image,
+ file=blob.strpath,
+ digest=digest,
+ force=True,
+ small_file_size=small_file_size,
+ _urlopen=opener.open,
+ )
+
+
+def test_copy_missing_layers(tmpdir, config):
+ """Test copying layers from one registry to another.
+ Creates 3 blobs, 1 config and 1 manifest in registry A
+ and copies layers to registry B. Then checks that all
+ layers are present in registry B. Finally it runs the copy
+ again and checks that no new layers are uploaded."""
+
+ # NOTE: config fixture is used to disable default source mirrors
+ # which are used in Stage(...). Otherwise this test doesn't really
+ # rely on globals.
+
+ src = ImageReference.from_string("a.example.com/image:x")
+ dst = ImageReference.from_string("b.example.com/image:y")
+
+ src_registry = InMemoryOCIRegistry(src.domain)
+ dst_registry = InMemoryOCIRegistry(dst.domain)
+
+ urlopen = create_opener(src_registry, dst_registry).open
+
+ # TODO: make it a bit easier to create bunch of blobs + config + manifest?
+
+ # Create a few blobs and a config file
+ blobs = [tmpdir.join(f"blob{i}") for i in range(3)]
+
+ for i, blob in enumerate(blobs):
+ blob.write(f"Blob {i}")
+
+ digests = [
+ Digest.from_sha256(hashlib.sha256(blob.read_binary()).hexdigest()) for blob in blobs
+ ]
+
+ config = default_config(architecture="amd64", os="linux")
+ configfile = tmpdir.join("config.json")
+ configfile.write(json.dumps(config))
+ config_digest = Digest.from_sha256(hashlib.sha256(configfile.read_binary()).hexdigest())
+
+ for blob, digest in zip(blobs, digests):
+ upload_blob(src, blob.strpath, digest, _urlopen=urlopen)
+ upload_blob(src, configfile.strpath, config_digest, _urlopen=urlopen)
+
+ # Then create a manifest referencing them
+ manifest = default_manifest()
+
+ for blob, digest in zip(blobs, digests):
+ manifest["layers"].append(
+ {
+ "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
+ "digest": str(digest),
+ "size": blob.size(),
+ }
+ )
+
+ manifest["config"] = {
+ "mediaType": "application/vnd.oci.image.config.v1+json",
+ "digest": str(config_digest),
+ "size": configfile.size(),
+ }
+
+ upload_manifest(src, manifest, _urlopen=urlopen)
+
+ # Finally, copy the image from src to dst
+ copy_missing_layers(src, dst, architecture="amd64", _urlopen=urlopen)
+
+ # Check that all layers (not config) were copied and identical
+ assert len(dst_registry.blobs) == len(blobs)
+ for blob, digest in zip(blobs, digests):
+ assert dst_registry.blobs.get(str(digest)) == blob.read_binary()
+
+ is_upload = lambda method, path: method == "POST" and path == "/v2/image/blobs/uploads/"
+ is_exists = lambda method, path: method == "HEAD" and path.startswith("/v2/image/blobs/")
+
+ # Check that exactly 3 uploads were initiated, and that we don't do
+ # double existence checks when uploading.
+ assert sum(is_upload(method, path) for method, path in dst_registry.requests) == 3
+ assert sum(is_exists(method, path) for method, path in dst_registry.requests) == 3
+
+ # Check that re-uploading skips existing layers.
+ dst_registry.clear_log()
+ copy_missing_layers(src, dst, architecture="amd64", _urlopen=urlopen)
+
+ # Check that no uploads were initiated, only existence checks were done.
+ assert sum(is_upload(method, path) for method, path in dst_registry.requests) == 0
+ assert sum(is_exists(method, path) for method, path in dst_registry.requests) == 3
+
+
+def test_image_from_mirror():
+ mirror = spack.mirror.Mirror("oci://example.com/image")
+ assert image_from_mirror(mirror) == ImageReference.from_string("example.com/image")
+
+
+def test_image_reference_str():
+ """Test that with_digest() works with Digest and str."""
+ digest_str = f"sha256:{1234:064x}"
+ digest = Digest.from_string(digest_str)
+
+ img = ImageReference.from_string("example.com/image")
+
+ assert str(img.with_digest(digest)) == f"example.com/image:latest@{digest}"
+ assert str(img.with_digest(digest_str)) == f"example.com/image:latest@{digest}"
+ assert str(img.with_tag("hello")) == "example.com/image:hello"
+ assert str(img.with_tag("hello").with_digest(digest)) == f"example.com/image:hello@{digest}"
+
+
+@pytest.mark.parametrize(
+ "image",
+ [
+ # white space issue
+ " example.com/image",
+ # not alpha-numeric
+ "hello#world:latest",
+ ],
+)
+def test_image_reference_invalid(image):
+ with pytest.raises(ValueError, match="Invalid image reference"):
+ ImageReference.from_string(image)
+
+
+def test_default_credentials_provider():
+ """The default credentials provider uses a collection of configured
+ mirrors."""
+
+ mirrors = [
+ # OCI mirror with push credentials
+ spack.mirror.Mirror(
+ {"url": "oci://a.example.com/image", "push": {"access_pair": ["user.a", "pass.a"]}}
+ ),
+ # Not an OCI mirror
+ spack.mirror.Mirror(
+ {"url": "https://b.example.com/image", "access_pair": ["user.b", "pass.b"]}
+ ),
+ # No credentials
+ spack.mirror.Mirror("oci://c.example.com/image"),
+ # Top-level credentials
+ spack.mirror.Mirror(
+ {"url": "oci://d.example.com/image", "access_pair": ["user.d", "pass.d"]}
+ ),
+ # Dockerhub short reference
+ spack.mirror.Mirror(
+ {"url": "oci://user/image", "access_pair": ["dockerhub_user", "dockerhub_pass"]}
+ ),
+ # Localhost (not a dockerhub short reference)
+ spack.mirror.Mirror(
+ {"url": "oci://localhost/image", "access_pair": ["user.localhost", "pass.localhost"]}
+ ),
+ ]
+
+ assert credentials_from_mirrors("a.example.com", mirrors=mirrors) == UsernamePassword(
+ "user.a", "pass.a"
+ )
+ assert credentials_from_mirrors("b.example.com", mirrors=mirrors) is None
+ assert credentials_from_mirrors("c.example.com", mirrors=mirrors) is None
+ assert credentials_from_mirrors("d.example.com", mirrors=mirrors) == UsernamePassword(
+ "user.d", "pass.d"
+ )
+ assert credentials_from_mirrors("index.docker.io", mirrors=mirrors) == UsernamePassword(
+ "dockerhub_user", "dockerhub_pass"
+ )
+ assert credentials_from_mirrors("localhost", mirrors=mirrors) == UsernamePassword(
+ "user.localhost", "pass.localhost"
+ )
+
+
+def test_manifest_index(tmpdir):
+ """Test obtaining manifest + config from a registry
+ that has an index"""
+ urlopen = create_opener(InMemoryOCIRegistry("registry.example.com")).open
+
+ img = ImageReference.from_string("registry.example.com/image")
+
+ # Create two config files and manifests, for different architectures
+ manifest_descriptors = []
+ manifest_and_config = {}
+ for arch in ("amd64", "arm64"):
+ file = tmpdir.join(f"config_{arch}.json")
+ config = default_config(architecture=arch, os="linux")
+ file.write(json.dumps(config))
+ config_digest = Digest.from_sha256(hashlib.sha256(file.read_binary()).hexdigest())
+ assert upload_blob(img, file, config_digest, _urlopen=urlopen)
+ manifest = {
+ "schemaVersion": 2,
+ "mediaType": "application/vnd.oci.image.manifest.v1+json",
+ "config": {
+ "mediaType": "application/vnd.oci.image.config.v1+json",
+ "digest": str(config_digest),
+ "size": file.size(),
+ },
+ "layers": [],
+ }
+ manifest_digest, manifest_size = upload_manifest(
+ img, manifest, tag=False, _urlopen=urlopen
+ )
+
+ manifest_descriptors.append(
+ {
+ "mediaType": "application/vnd.oci.image.manifest.v1+json",
+ "platform": {"architecture": arch, "os": "linux"},
+ "digest": str(manifest_digest),
+ "size": manifest_size,
+ }
+ )
+
+ manifest_and_config[arch] = (manifest, config)
+
+ # And a single index.
+ index = {
+ "schemaVersion": 2,
+ "mediaType": "application/vnd.oci.image.index.v1+json",
+ "manifests": manifest_descriptors,
+ }
+
+ upload_manifest(img, index, tag=True, _urlopen=urlopen)
+
+ # Check that we fetch the correct manifest and config for each architecture
+ for arch in ("amd64", "arm64"):
+ assert (
+ get_manifest_and_config(img, architecture=arch, _urlopen=urlopen)
+ == manifest_and_config[arch]
+ )
+
+ # Also test max recursion
+ with pytest.raises(Exception, match="Maximum recursion depth reached"):
+ get_manifest_and_config(img, architecture="amd64", recurse=0, _urlopen=urlopen)
+
+
+class BrokenServer(DummyServer):
+ """Dummy server that returns 500 and 429 errors twice before succeeding"""
+
+ def __init__(self, domain: str) -> None:
+ super().__init__(domain)
+ self.router.register("GET", r"/internal-server-error/", self.internal_server_error_twice)
+ self.router.register("GET", r"/rate-limit/", self.rate_limit_twice)
+ self.router.register("GET", r"/not-found/", self.not_found)
+ self.count_500 = 0
+ self.count_429 = 0
+
+ def internal_server_error_twice(self, request: Request):
+ self.count_500 += 1
+ if self.count_500 < 3:
+ return MockHTTPResponse(500, "Internal Server Error")
+ else:
+ return MockHTTPResponse(200, "OK")
+
+ def rate_limit_twice(self, request: Request):
+ self.count_429 += 1
+ if self.count_429 < 3:
+ return MockHTTPResponse(429, "Rate Limit Exceeded")
+ else:
+ return MockHTTPResponse(200, "OK")
+
+ def not_found(self, request: Request):
+ return MockHTTPResponse(404, "Not Found")
+
+
+@pytest.mark.parametrize(
+ "url,max_retries,expect_failure,expect_requests",
+ [
+ # 500s should be retried
+ ("https://example.com/internal-server-error/", 2, True, 2),
+ ("https://example.com/internal-server-error/", 5, False, 3),
+ # 429s should be retried
+ ("https://example.com/rate-limit/", 2, True, 2),
+ ("https://example.com/rate-limit/", 5, False, 3),
+ # 404s shouldn't be retried
+ ("https://example.com/not-found/", 3, True, 1),
+ ],
+)
+def test_retry(url, max_retries, expect_failure, expect_requests):
+ server = BrokenServer("example.com")
+ urlopen = create_opener(server).open
+ sleep_time = []
+ dont_sleep = lambda t: sleep_time.append(t) # keep track of sleep times
+
+ try:
+ response = default_retry(urlopen, retries=max_retries, sleep=dont_sleep)(url)
+ except urllib.error.HTTPError as e:
+ if not expect_failure:
+ assert False, f"Unexpected HTTPError: {e}"
+ else:
+ if expect_failure:
+ assert False, "Expected HTTPError, but none was raised"
+ assert response.status == 200
+
+ assert len(server.requests) == expect_requests
+ assert sleep_time == [2**i for i in range(expect_requests - 1)]
diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py
index d0126af230c9ef..279693a529b81b 100644
--- a/lib/spack/spack/test/package_class.py
+++ b/lib/spack/spack/test/package_class.py
@@ -37,6 +37,7 @@ def mpileaks_possible_deps(mock_packages, mpi_names):
"low-priority-provider": set(),
"dyninst": set(["libdwarf", "libelf"]),
"fake": set(),
+ "intel-parallel-studio": set(),
"libdwarf": set(["libelf"]),
"libelf": set(),
"mpich": set(),
diff --git a/lib/spack/spack/test/schema.py b/lib/spack/spack/test/schema.py
index d7f4e524ffadfc..916e61cf26c821 100644
--- a/lib/spack/spack/test/schema.py
+++ b/lib/spack/spack/test/schema.py
@@ -80,7 +80,17 @@ def test_module_suffixes(module_suffixes_schema):
@pytest.mark.regression("10246")
@pytest.mark.parametrize(
"config_name",
- ["compilers", "config", "env", "merged", "mirrors", "modules", "packages", "repos"],
+ [
+ "compilers",
+ "config",
+ "definitions",
+ "env",
+ "merged",
+ "mirrors",
+ "modules",
+ "packages",
+ "repos",
+ ],
)
def test_schema_validation(meta_schema, config_name):
import importlib
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index be646b1e03c23a..3a9c0350ae48a9 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -532,6 +532,7 @@ def test_normalize_mpileaks(self):
assert not spec.eq_dag(expected_normalized, deptypes=True)
assert not spec.eq_dag(non_unique_nodes, deptypes=True)
+ @pytest.mark.xfail(reason="String representation changed")
def test_normalize_with_virtual_package(self):
spec = Spec("mpileaks ^mpi ^libelf@1.8.11 ^libdwarf")
spec.normalize()
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 579ba4486c8a36..87ed1e4b3f994e 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -294,13 +294,10 @@ def test_concrete_specs_which_satisfies_abstract(self, lhs, rhs, default_mock_co
("foo@4.0%pgi@4.5", "@1:3%pgi@4.4:4.6"),
("builtin.mock.mpich", "builtin.mpich"),
("mpileaks ^builtin.mock.mpich", "^builtin.mpich"),
- ("mpileaks^mpich", "^zmpi"),
- ("mpileaks^zmpi", "^mpich"),
("mpileaks^mpich@1.2", "^mpich@2.0"),
("mpileaks^mpich@4.0^callpath@1.5", "^mpich@1:3^callpath@1.4:1.6"),
("mpileaks^mpich@2.0^callpath@1.7", "^mpich@1:3^callpath@1.4:1.6"),
("mpileaks^mpich@4.0^callpath@1.7", "^mpich@1:3^callpath@1.4:1.6"),
- ("mpileaks^mpich", "^zmpi"),
("mpileaks^mpi@3", "^mpi@1.2:1.6"),
("mpileaks^mpi@3:", "^mpich2@1.4"),
("mpileaks^mpi@3:", "^mpich2"),
@@ -338,30 +335,30 @@ def test_constraining_abstract_specs_with_empty_intersection(self, lhs, rhs):
rhs.constrain(lhs)
@pytest.mark.parametrize(
- "lhs,rhs,intersection_expected",
+ "lhs,rhs",
[
- ("mpich", "mpich +foo", True),
- ("mpich", "mpich~foo", True),
- ("mpich", "mpich foo=1", True),
- ("mpich", "mpich++foo", True),
- ("mpich", "mpich~~foo", True),
- ("mpich", "mpich foo==1", True),
+ ("mpich", "mpich +foo"),
+ ("mpich", "mpich~foo"),
+ ("mpich", "mpich foo=1"),
+ ("mpich", "mpich++foo"),
+ ("mpich", "mpich~~foo"),
+ ("mpich", "mpich foo==1"),
# Flags semantics is currently different from other variant
- ("mpich", 'mpich cflags="-O3"', True),
- ("mpich cflags=-O3", 'mpich cflags="-O3 -Ofast"', False),
- ("mpich cflags=-O2", 'mpich cflags="-O3"', False),
- ("multivalue-variant foo=bar", "multivalue-variant +foo", False),
- ("multivalue-variant foo=bar", "multivalue-variant ~foo", False),
- ("multivalue-variant fee=bar", "multivalue-variant fee=baz", False),
+ ("mpich", 'mpich cflags="-O3"'),
+ ("mpich cflags=-O3", 'mpich cflags="-O3 -Ofast"'),
+ ("mpich cflags=-O2", 'mpich cflags="-O3"'),
+ ("multivalue-variant foo=bar", "multivalue-variant +foo"),
+ ("multivalue-variant foo=bar", "multivalue-variant ~foo"),
+ ("multivalue-variant fee=bar", "multivalue-variant fee=baz"),
],
)
def test_concrete_specs_which_do_not_satisfy_abstract(
- self, lhs, rhs, intersection_expected, default_mock_concretization
+ self, lhs, rhs, default_mock_concretization
):
lhs, rhs = default_mock_concretization(lhs), Spec(rhs)
- assert lhs.intersects(rhs) is intersection_expected
- assert rhs.intersects(lhs) is intersection_expected
+ assert lhs.intersects(rhs) is False
+ assert rhs.intersects(lhs) is False
assert not lhs.satisfies(rhs)
assert not rhs.satisfies(lhs)
@@ -483,10 +480,14 @@ def test_intersects_virtual(self):
assert Spec("mpich2").intersects(Spec("mpi"))
assert Spec("zmpi").intersects(Spec("mpi"))
- def test_intersects_virtual_dep_with_virtual_constraint(self):
+ def test_intersects_virtual_providers(self):
+ """Tests that we can always intersect virtual providers from abstract specs.
+ Concretization will give meaning to virtuals, and eventually forbid certain
+ configurations.
+ """
assert Spec("netlib-lapack ^openblas").intersects("netlib-lapack ^openblas")
- assert not Spec("netlib-lapack ^netlib-blas").intersects("netlib-lapack ^openblas")
- assert not Spec("netlib-lapack ^openblas").intersects("netlib-lapack ^netlib-blas")
+ assert Spec("netlib-lapack ^netlib-blas").intersects("netlib-lapack ^openblas")
+ assert Spec("netlib-lapack ^openblas").intersects("netlib-lapack ^netlib-blas")
assert Spec("netlib-lapack ^netlib-blas").intersects("netlib-lapack ^netlib-blas")
def test_intersectable_concrete_specs_must_have_the_same_hash(self):
@@ -1006,6 +1007,103 @@ def test_spec_override(self):
assert new_spec.compiler_flags["cflags"] == ["-O2"]
assert new_spec.compiler_flags["cxxflags"] == ["-O1"]
+ @pytest.mark.parametrize(
+ "spec_str,specs_in_dag",
+ [
+ ("hdf5 ^[virtuals=mpi] mpich", [("mpich", "mpich"), ("mpi", "mpich")]),
+ # Try different combinations with packages that provides a
+ # disjoint set of virtual dependencies
+ (
+ "netlib-scalapack ^mpich ^openblas-with-lapack",
+ [
+ ("mpi", "mpich"),
+ ("lapack", "openblas-with-lapack"),
+ ("blas", "openblas-with-lapack"),
+ ],
+ ),
+ (
+ "netlib-scalapack ^[virtuals=mpi] mpich ^openblas-with-lapack",
+ [
+ ("mpi", "mpich"),
+ ("lapack", "openblas-with-lapack"),
+ ("blas", "openblas-with-lapack"),
+ ],
+ ),
+ (
+ "netlib-scalapack ^mpich ^[virtuals=lapack] openblas-with-lapack",
+ [
+ ("mpi", "mpich"),
+ ("lapack", "openblas-with-lapack"),
+ ("blas", "openblas-with-lapack"),
+ ],
+ ),
+ (
+ "netlib-scalapack ^[virtuals=mpi] mpich ^[virtuals=lapack] openblas-with-lapack",
+ [
+ ("mpi", "mpich"),
+ ("lapack", "openblas-with-lapack"),
+ ("blas", "openblas-with-lapack"),
+ ],
+ ),
+ # Test that we can mix dependencies that provide an overlapping
+ # sets of virtual dependencies
+ (
+ "netlib-scalapack ^[virtuals=mpi] intel-parallel-studio "
+ "^[virtuals=lapack] openblas-with-lapack",
+ [
+ ("mpi", "intel-parallel-studio"),
+ ("lapack", "openblas-with-lapack"),
+ ("blas", "openblas-with-lapack"),
+ ],
+ ),
+ (
+ "netlib-scalapack ^[virtuals=mpi] intel-parallel-studio ^openblas-with-lapack",
+ [
+ ("mpi", "intel-parallel-studio"),
+ ("lapack", "openblas-with-lapack"),
+ ("blas", "openblas-with-lapack"),
+ ],
+ ),
+ (
+ "netlib-scalapack ^intel-parallel-studio ^[virtuals=lapack] openblas-with-lapack",
+ [
+ ("mpi", "intel-parallel-studio"),
+ ("lapack", "openblas-with-lapack"),
+ ("blas", "openblas-with-lapack"),
+ ],
+ ),
+ # Test that we can bind more than one virtual to the same provider
+ (
+ "netlib-scalapack ^[virtuals=lapack,blas] openblas-with-lapack",
+ [("lapack", "openblas-with-lapack"), ("blas", "openblas-with-lapack")],
+ ),
+ ],
+ )
+ def test_virtual_deps_bindings(self, default_mock_concretization, spec_str, specs_in_dag):
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Use case not supported by the original concretizer")
+
+ s = default_mock_concretization(spec_str)
+ for label, expected in specs_in_dag:
+ assert label in s
+ assert s[label].satisfies(expected), label
+
+ @pytest.mark.parametrize(
+ "spec_str",
+ [
+ # openblas-with-lapack needs to provide blas and lapack together
+ "netlib-scalapack ^[virtuals=blas] intel-parallel-studio ^openblas-with-lapack",
+ # intel-* provides blas and lapack together, openblas can provide blas only
+ "netlib-scalapack ^[virtuals=lapack] intel-parallel-studio ^openblas",
+ ],
+ )
+ def test_unsatisfiable_virtual_deps_bindings(self, spec_str):
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Use case not supported by the original concretizer")
+
+ with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
+ Spec(spec_str).concretized()
+
@pytest.mark.parametrize(
"spec_str,format_str,expected",
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index d731fcd31c1ac5..3cbb59e69f0af2 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -472,33 +472,46 @@ def _specfile_for(spec_str, filename):
[Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, value='cflags=="-O3 -g"')],
'cflags=="-O3 -g"',
),
- # Way too many spaces
+ # Whitespace is allowed in version lists
+ ("@1.2:1.4 , 1.6 ", [Token(TokenType.VERSION, value="@1.2:1.4 , 1.6")], "@1.2:1.4,1.6"),
+ # But not in ranges. `a@1:` and `b` are separate specs, not a single `a@1:b`.
(
- "@1.2 : 1.4 , 1.6 ",
- [Token(TokenType.VERSION, value="@1.2 : 1.4 , 1.6")],
- "@1.2:1.4,1.6",
+ "a@1: b",
+ [
+ Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="a"),
+ Token(TokenType.VERSION, value="@1:"),
+ Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="b"),
+ ],
+ "a@1:",
),
- ("@1.2 : develop", [Token(TokenType.VERSION, value="@1.2 : develop")], "@1.2:develop"),
(
- "@1.2 : develop = foo",
+ "@1.2: develop = foo",
[
- Token(TokenType.VERSION, value="@1.2 :"),
+ Token(TokenType.VERSION, value="@1.2:"),
Token(TokenType.KEY_VALUE_PAIR, value="develop = foo"),
],
"@1.2: develop=foo",
),
(
- "% intel @ 12.1 : 12.6 + debug",
+ "@1.2:develop = foo",
[
- Token(TokenType.COMPILER_AND_VERSION, value="% intel @ 12.1 : 12.6"),
+ Token(TokenType.VERSION, value="@1.2:"),
+ Token(TokenType.KEY_VALUE_PAIR, value="develop = foo"),
+ ],
+ "@1.2: develop=foo",
+ ),
+ (
+ "% intel @ 12.1:12.6 + debug",
+ [
+ Token(TokenType.COMPILER_AND_VERSION, value="% intel @ 12.1:12.6"),
Token(TokenType.BOOL_VARIANT, value="+ debug"),
],
"%intel@12.1:12.6+debug",
),
(
- "@ 12.1 : 12.6 + debug - qt_4",
+ "@ 12.1:12.6 + debug - qt_4",
[
- Token(TokenType.VERSION, value="@ 12.1 : 12.6"),
+ Token(TokenType.VERSION, value="@ 12.1:12.6"),
Token(TokenType.BOOL_VARIANT, value="+ debug"),
Token(TokenType.BOOL_VARIANT, value="- qt_4"),
],
@@ -517,6 +530,26 @@ def _specfile_for(spec_str, filename):
[Token(TokenType.VERSION, value="@:0.4"), Token(TokenType.COMPILER, value="% nvhpc")],
"@:0.4%nvhpc",
),
+ (
+ "^[virtuals=mpi] openmpi",
+ [
+ Token(TokenType.START_EDGE_PROPERTIES, value="^["),
+ Token(TokenType.KEY_VALUE_PAIR, value="virtuals=mpi"),
+ Token(TokenType.END_EDGE_PROPERTIES, value="]"),
+ Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
+ ],
+ "^[virtuals=mpi] openmpi",
+ ),
+ (
+ "^[deptypes=link,build] zlib",
+ [
+ Token(TokenType.START_EDGE_PROPERTIES, value="^["),
+ Token(TokenType.KEY_VALUE_PAIR, value="deptypes=link,build"),
+ Token(TokenType.END_EDGE_PROPERTIES, value="]"),
+ Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
+ ],
+ "^[deptypes=build,link] zlib",
+ ),
(
"zlib@git.foo/bar",
[
@@ -525,6 +558,31 @@ def _specfile_for(spec_str, filename):
],
"zlib@git.foo/bar",
),
+ # Variant propagation
+ (
+ "zlib ++foo",
+ [
+ Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
+ Token(TokenType.PROPAGATED_BOOL_VARIANT, "++foo"),
+ ],
+ "zlib++foo",
+ ),
+ (
+ "zlib ~~foo",
+ [
+ Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
+ Token(TokenType.PROPAGATED_BOOL_VARIANT, "~~foo"),
+ ],
+ "zlib~~foo",
+ ),
+ (
+ "zlib foo==bar",
+ [
+ Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
+ Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, "foo==bar"),
+ ],
+ "zlib foo==bar",
+ ),
],
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip):
@@ -885,6 +943,9 @@ def test_disambiguate_hash_by_spec(spec1, spec2, constraint, mock_packages, monk
("x platform=test platform=test", spack.spec.DuplicateArchitectureError),
("x os=fe platform=test target=fe os=fe", spack.spec.DuplicateArchitectureError),
("x target=be platform=test os=be os=fe", spack.spec.DuplicateArchitectureError),
+ ("^[@foo] zlib", spack.parser.SpecParsingError),
+ # TODO: Remove this as soon as use variants are added and we can parse custom attributes
+ ("^[foo=bar] zlib", spack.parser.SpecParsingError),
],
)
def test_error_conditions(text, exc_cls):
diff --git a/lib/spack/spack/test/util/elf.py b/lib/spack/spack/test/util/elf.py
index 6380bb7910b5d8..db826df1730fab 100644
--- a/lib/spack/spack/test/util/elf.py
+++ b/lib/spack/spack/test/util/elf.py
@@ -120,6 +120,21 @@ def test_parser_doesnt_deal_with_nonzero_offset():
elf.parse_elf(elf_at_offset_one)
+def test_only_header():
+ # When passing only_header=True parsing a file that is literally just a header
+ # without any sections/segments should not error.
+
+ # 32 bit
+ elf_32 = elf.parse_elf(io.BytesIO(b"\x7fELF\x01\x01" + b"\x00" * 46), only_header=True)
+ assert not elf_32.is_64_bit
+ assert elf_32.is_little_endian
+
+ # 64 bit
+ elf_64 = elf.parse_elf(io.BytesIO(b"\x7fELF\x02\x01" + b"\x00" * 58), only_header=True)
+ assert elf_64.is_64_bit
+ assert elf_64.is_little_endian
+
+
@pytest.mark.requires_executables("gcc")
@skip_unless_linux
def test_elf_get_and_replace_rpaths(binary_with_rpaths):
diff --git a/lib/spack/spack/test/util/executable.py b/lib/spack/spack/test/util/executable.py
index 839cf04bfba6f5..9c49445aab2fcc 100644
--- a/lib/spack/spack/test/util/executable.py
+++ b/lib/spack/spack/test/util/executable.py
@@ -89,8 +89,8 @@ def test_which_with_slash_ignores_path(tmpdir, working_env):
assert exe.path == path
-def test_which(tmpdir):
- os.environ["PATH"] = str(tmpdir)
+def test_which(tmpdir, monkeypatch):
+ monkeypatch.setenv("PATH", str(tmpdir))
assert ex.which("spack-test-exe") is None
with pytest.raises(ex.CommandNotFoundError):
diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py
index 5d1561a8eaedb7..6e1c798e51dc1c 100644
--- a/lib/spack/spack/user_environment.py
+++ b/lib/spack/spack/user_environment.py
@@ -11,6 +11,7 @@
import spack.build_environment
import spack.config
+import spack.error
import spack.spec
import spack.util.environment as environment
import spack.util.prefix as prefix
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index 8eebcc92bc383a..2765a6042e26f4 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -4,10 +4,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import hashlib
-from typing import Any, Callable, Dict # novm
+from typing import BinaryIO, Callable, Dict, Optional
import llnl.util.tty as tty
+HashFactory = Callable[[], "hashlib._Hash"]
+
#: Set of hash algorithms that Spack can use, mapped to digest size in bytes
hashes = {"sha256": 32, "md5": 16, "sha1": 20, "sha224": 28, "sha384": 48, "sha512": 64}
# Note: keys are ordered by popularity for earliest return in ``hash_key in version_dict`` checks.
@@ -23,7 +25,7 @@
#: cache of hash functions generated
-_hash_functions: Dict[str, Callable[[], Any]] = {}
+_hash_functions: Dict[str, HashFactory] = {}
class DeprecatedHash:
@@ -44,55 +46,57 @@ def __call__(self, disable_alert=False):
return hashlib.new(self.hash_alg)
-def hash_fun_for_algo(algo):
+def hash_fun_for_algo(algo: str) -> HashFactory:
"""Get a function that can perform the specified hash algorithm."""
- hash_gen = _hash_functions.get(algo)
- if hash_gen is None:
- if algo in _deprecated_hash_algorithms:
- try:
- hash_gen = DeprecatedHash(algo, tty.debug, disable_security_check=False)
-
- # call once to get a ValueError if usedforsecurity is needed
- hash_gen(disable_alert=True)
- except ValueError:
- # Some systems may support the 'usedforsecurity' option
- # so try with that (but display a warning when it is used)
- hash_gen = DeprecatedHash(algo, tty.warn, disable_security_check=True)
- else:
- hash_gen = getattr(hashlib, algo)
- _hash_functions[algo] = hash_gen
-
- return hash_gen
-
-
-def hash_algo_for_digest(hexdigest):
+ fun = _hash_functions.get(algo)
+ if fun:
+ return fun
+ elif algo not in _deprecated_hash_algorithms:
+ _hash_functions[algo] = getattr(hashlib, algo)
+ else:
+ try:
+ deprecated_fun = DeprecatedHash(algo, tty.debug, disable_security_check=False)
+
+ # call once to get a ValueError if usedforsecurity is needed
+ deprecated_fun(disable_alert=True)
+ except ValueError:
+ # Some systems may support the 'usedforsecurity' option
+ # so try with that (but display a warning when it is used)
+ deprecated_fun = DeprecatedHash(algo, tty.warn, disable_security_check=True)
+ _hash_functions[algo] = deprecated_fun
+ return _hash_functions[algo]
+
+
+def hash_algo_for_digest(hexdigest: str) -> str:
"""Gets name of the hash algorithm for a hex digest."""
- bytes = len(hexdigest) / 2
- if bytes not in _size_to_hash:
- raise ValueError("Spack knows no hash algorithm for this digest: %s" % hexdigest)
- return _size_to_hash[bytes]
+ algo = _size_to_hash.get(len(hexdigest) // 2)
+ if algo is None:
+ raise ValueError(f"Spack knows no hash algorithm for this digest: {hexdigest}")
+ return algo
-def hash_fun_for_digest(hexdigest):
+def hash_fun_for_digest(hexdigest: str) -> HashFactory:
"""Gets a hash function corresponding to a hex digest."""
return hash_fun_for_algo(hash_algo_for_digest(hexdigest))
-def checksum(hashlib_algo, filename, **kwargs):
- """Returns a hex digest of the filename generated using an
- algorithm from hashlib.
- """
- block_size = kwargs.get("block_size", 2**20)
+def checksum_stream(hashlib_algo: HashFactory, fp: BinaryIO, *, block_size: int = 2**20) -> str:
+ """Returns a hex digest of the stream generated using given algorithm from hashlib."""
hasher = hashlib_algo()
- with open(filename, "rb") as file:
- while True:
- data = file.read(block_size)
- if not data:
- break
- hasher.update(data)
+ while True:
+ data = fp.read(block_size)
+ if not data:
+ break
+ hasher.update(data)
return hasher.hexdigest()
+def checksum(hashlib_algo: HashFactory, filename: str, *, block_size: int = 2**20) -> str:
+ """Returns a hex digest of the filename generated using an algorithm from hashlib."""
+ with open(filename, "rb") as f:
+ return checksum_stream(hashlib_algo, f, block_size=block_size)
+
+
class Checker:
"""A checker checks files against one particular hex digest.
It will automatically determine what hashing algorithm
@@ -115,18 +119,18 @@ class Checker:
a 1MB (2**20 bytes) buffer.
"""
- def __init__(self, hexdigest, **kwargs):
+ def __init__(self, hexdigest: str, **kwargs) -> None:
self.block_size = kwargs.get("block_size", 2**20)
self.hexdigest = hexdigest
- self.sum = None
+ self.sum: Optional[str] = None
self.hash_fun = hash_fun_for_digest(hexdigest)
@property
- def hash_name(self):
+ def hash_name(self) -> str:
"""Get the name of the hash function this Checker is using."""
return self.hash_fun().name.lower()
- def check(self, filename):
+ def check(self, filename: str) -> bool:
"""Read the file with the specified name and check its checksum
against self.hexdigest. Return True if they match, False
otherwise. Actual checksum is stored in self.sum.
diff --git a/lib/spack/spack/util/elf.py b/lib/spack/spack/util/elf.py
index cab1db0b03dff8..6d0881f4946429 100644
--- a/lib/spack/spack/util/elf.py
+++ b/lib/spack/spack/util/elf.py
@@ -377,7 +377,7 @@ def parse_header(f, elf):
elf.elf_hdr = ElfHeader._make(unpack(elf_header_fmt, data))
-def _do_parse_elf(f, interpreter=True, dynamic_section=True):
+def _do_parse_elf(f, interpreter=True, dynamic_section=True, only_header=False):
# We don't (yet?) allow parsing ELF files at a nonzero offset, we just
# jump to absolute offsets as they are specified in the ELF file.
if f.tell() != 0:
@@ -386,6 +386,9 @@ def _do_parse_elf(f, interpreter=True, dynamic_section=True):
elf = ElfFile()
parse_header(f, elf)
+ if only_header:
+ return elf
+
# We don't handle anything but executables and shared libraries now.
if elf.elf_hdr.e_type not in (ELF_CONSTANTS.ET_EXEC, ELF_CONSTANTS.ET_DYN):
raise ElfParsingError("Not an ET_DYN or ET_EXEC type")
@@ -403,11 +406,11 @@ def _do_parse_elf(f, interpreter=True, dynamic_section=True):
return elf
-def parse_elf(f, interpreter=False, dynamic_section=False):
+def parse_elf(f, interpreter=False, dynamic_section=False, only_header=False):
"""Given a file handle f for an ELF file opened in binary mode, return an ElfFile
object that is stores data about rpaths"""
try:
- return _do_parse_elf(f, interpreter, dynamic_section)
+ return _do_parse_elf(f, interpreter, dynamic_section, only_header)
except (DeprecationWarning, struct.error):
# According to the docs old versions of Python can throw DeprecationWarning
# instead of struct.error.
diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py
index fdd731df6445f7..7e8e626ddeebf9 100644
--- a/lib/spack/spack/util/environment.py
+++ b/lib/spack/spack/util/environment.py
@@ -662,6 +662,14 @@ def group_by_name(self) -> Dict[str, ModificationList]:
modifications[item.name].append(item)
return modifications
+ def drop(self, *name) -> bool:
+ """Drop all modifications to the variable with the given name."""
+ old_mods = self.env_modifications
+ new_mods = [x for x in self.env_modifications if x.name not in name]
+ self.env_modifications = new_mods
+
+ return len(old_mods) != len(new_mods)
+
def is_unset(self, variable_name: str) -> bool:
"""Returns True if the last modification to a variable is to unset it, False otherwise."""
modifications = self.group_by_name()
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index 48dca0ffa35e0b..36c7e73e0638ae 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -35,9 +35,9 @@ def __init__(self, name):
if not self.exe:
raise ProcessError("Cannot construct executable for '%s'" % name)
- def add_default_arg(self, arg):
- """Add a default argument to the command."""
- self.exe.append(arg)
+ def add_default_arg(self, *args):
+ """Add default argument(s) to the command."""
+ self.exe.extend(args)
def add_default_env(self, key, value):
"""Set an environment variable when the command is run.
@@ -330,8 +330,11 @@ def add_extra_search_paths(paths):
for candidate_item in candidate_items:
for directory in search_paths:
exe = directory / candidate_item
- if exe.is_file() and os.access(str(exe), os.X_OK):
- return str(exe)
+ try:
+ if exe.is_file() and os.access(str(exe), os.X_OK):
+ return str(exe)
+ except OSError:
+ pass
if required:
raise CommandNotFoundError("spack requires '%s'. Make sure it is in your path." % args[0])
diff --git a/lib/spack/spack/util/git.py b/lib/spack/spack/util/git.py
index ceb0013412028c..39efdda9c3c3bb 100644
--- a/lib/spack/spack/util/git.py
+++ b/lib/spack/spack/util/git.py
@@ -24,7 +24,6 @@ def git(required: bool = False):
# If we're running under pytest, add this to ignore the fix for CVE-2022-39253 in
# git 2.38.1+. Do this in one place; we need git to do this in all parts of Spack.
if git and "pytest" in sys.modules:
- git.add_default_arg("-c")
- git.add_default_arg("protocol.file.allow=always")
+ git.add_default_arg("-c", "protocol.file.allow=always")
return git
diff --git a/lib/spack/spack/util/parallel.py b/lib/spack/spack/util/parallel.py
index 683835641ae17e..c8e6ef7907f584 100644
--- a/lib/spack/spack/util/parallel.py
+++ b/lib/spack/spack/util/parallel.py
@@ -6,6 +6,7 @@
import os
import sys
import traceback
+from typing import Optional
class ErrorFromWorker:
@@ -53,7 +54,9 @@ def __call__(self, *args, **kwargs):
return value
-def imap_unordered(f, list_of_args, *, processes: int, debug=False):
+def imap_unordered(
+ f, list_of_args, *, processes: int, maxtaskperchild: Optional[int] = None, debug=False
+):
"""Wrapper around multiprocessing.Pool.imap_unordered.
Args:
@@ -62,6 +65,8 @@ def imap_unordered(f, list_of_args, *, processes: int, debug=False):
processes: maximum number of processes allowed
debug: if False, raise an exception containing just the error messages
from workers, if True an exception with complete stacktraces
+ maxtaskperchild: number of tasks to be executed by a child before being
+ killed and substituted
Raises:
RuntimeError: if any error occurred in the worker processes
@@ -70,7 +75,7 @@ def imap_unordered(f, list_of_args, *, processes: int, debug=False):
yield from map(f, list_of_args)
return
- with multiprocessing.Pool(processes) as p:
+ with multiprocessing.Pool(processes, maxtasksperchild=maxtaskperchild) as p:
for result in p.imap_unordered(Task(f), list_of_args):
if isinstance(result, ErrorFromWorker):
raise RuntimeError(result.stacktrace if debug else str(result))
diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py
index a46443c0831189..e2aee48df1e2cd 100644
--- a/lib/spack/spack/util/path.py
+++ b/lib/spack/spack/util/path.py
@@ -98,7 +98,7 @@ def replacements():
def win_exe_ext():
- return ".exe"
+ return r"(?:\.bat|\.exe)"
def sanitize_filename(filename: str) -> str:
diff --git a/lib/spack/spack/util/windows_registry.py b/lib/spack/spack/util/windows_registry.py
index 5cc0edd8bf5271..cfc16724563287 100644
--- a/lib/spack/spack/util/windows_registry.py
+++ b/lib/spack/spack/util/windows_registry.py
@@ -8,6 +8,7 @@
"""
import os
+import re
import sys
from contextlib import contextmanager
@@ -68,8 +69,19 @@ def _gather_subkey_info(self):
sub_keys, _, _ = winreg.QueryInfoKey(self.hkey)
for i in range(sub_keys):
sub_name = winreg.EnumKey(self.hkey, i)
- sub_handle = winreg.OpenKeyEx(self.hkey, sub_name, access=winreg.KEY_READ)
- self._keys.append(RegistryKey(os.path.join(self.path, sub_name), sub_handle))
+ try:
+ sub_handle = winreg.OpenKeyEx(self.hkey, sub_name, access=winreg.KEY_READ)
+ self._keys.append(RegistryKey(os.path.join(self.path, sub_name), sub_handle))
+ except OSError as e:
+ if hasattr(e, "winerror"):
+ if e.winerror == 5:
+ # This is a permission error, we can't read this key
+ # move on
+ pass
+ else:
+ raise
+ else:
+ raise
def _gather_value_info(self):
"""Compose all values for this key into a dict of form value name: RegistryValue Object"""
@@ -161,6 +173,15 @@ def __init__(self, key, root_key=HKEY.HKEY_CURRENT_USER):
self.root = root_key
self._reg = None
+ class KeyMatchConditions:
+ @staticmethod
+ def regex_matcher(subkey_name):
+ return lambda x: re.match(subkey_name, x.name)
+
+ @staticmethod
+ def name_matcher(subkey_name):
+ return lambda x: subkey_name == x.name
+
@contextmanager
def invalid_reg_ref_error_handler(self):
try:
@@ -193,6 +214,10 @@ def _valid_reg_check(self):
return False
return True
+ def _regex_match_subkeys(self, subkey):
+ r_subkey = re.compile(subkey)
+ return [key for key in self.get_subkeys() if r_subkey.match(key.name)]
+
@property
def reg(self):
if not self._reg:
@@ -218,51 +243,106 @@ def get_subkeys(self):
with self.invalid_reg_ref_error_handler():
return self.reg.subkeys
+ def get_matching_subkeys(self, subkey_name):
+ """Return all direct subkeys whose names regex-match subkey_name
+
+ Note: this method obtains only direct subkeys of the given key and does not
+ descend to transitive subkeys. For that behavior, see `find_subkeys`"""
+ return self._regex_match_subkeys(subkey_name)
+
def get_values(self):
if not self._valid_reg_check():
raise RegistryError("Cannot query values from invalid key %s" % self.key)
with self.invalid_reg_ref_error_handler():
return self.reg.values
- def _traverse_subkeys(self, stop_condition):
+ def _traverse_subkeys(self, stop_condition, collect_all_matching=False):
"""Perform simple BFS of subkeys, returning the key
that successfully triggers the stop condition.
Args:
stop_condition: lambda or function pointer that takes a single argument
a key and returns a boolean value based on that key
+ collect_all_matching: boolean value, if True, the traversal collects and returns
+ all keys meeting stop condition. If false, once stop
+ condition is met, the key that triggered the condition
+ is returned.
Return:
the key if stop_condition is triggered, or None if not
"""
+ collection = []
if not self._valid_reg_check():
raise RegistryError("Cannot query values from invalid key %s" % self.key)
with self.invalid_reg_ref_error_handler():
queue = self.reg.subkeys
for key in queue:
if stop_condition(key):
- return key
+ if collect_all_matching:
+ collection.append(key)
+ else:
+ return key
queue.extend(key.subkeys)
- return None
+ return collection if collection else None
+
+ def _find_subkey_s(self, search_key, collect_all_matching=False):
+ """Retrieve one or more keys regex matching `search_key`.
+ One key will be returned unless `collect_all_matching` is enabled,
+ in which case all matches are returned.
+
+ Args:
+ search_key: predicate taking a key and returning True when that
+ key's name matches the requested subkey name
+ collect_all_matching (bool): if True, every matching key is
+ collected and returned instead of only the first match
+ Return:
+ the desired subkey as a RegistryKey object (or a list of matches
+ """
+ return self._traverse_subkeys(search_key, collect_all_matching=collect_all_matching)
- def find_subkey(self, subkey_name, recursive=True):
- """If non recursive, this method is the same as get subkey with error handling
- Otherwise perform a BFS of subkeys until desired key is found
+ def find_subkey(self, subkey_name):
+ """Perform a BFS of subkeys until desired key is found
Returns None or RegistryKey object corresponding to requested key name
Args:
- subkey_name (str): string representing subkey to be searched for
- recursive (bool): optional argument, if True, subkey need not be a direct
- sub key of this registry entry, and this method will
- search all subkeys recursively.
- Default is True
+ subkey_name (str)
Return:
the desired subkey as a RegistryKey object, or none
+
+ For more details, see the WindowsRegistryView._find_subkey_s method docstring
"""
+ return self._find_subkey_s(
+ WindowsRegistryView.KeyMatchConditions.name_matcher(subkey_name)
+ )
- if not recursive:
- return self.get_subkey(subkey_name)
+ def find_matching_subkey(self, subkey_name):
+ """Perform a BFS of subkeys until a key matching subkey name regex is found
+ Returns None or the first RegistryKey object corresponding to requested key name
- else:
- return self._traverse_subkeys(lambda x: x.name == subkey_name)
+ Args:
+ subkey_name (str)
+ Return:
+ the desired subkey as a RegistryKey object, or None
+
+ For more details, see the WindowsRegistryView._find_subkey_s method docstring
+ """
+ return self._find_subkey_s(
+ WindowsRegistryView.KeyMatchConditions.regex_matcher(subkey_name)
+ )
+
+ def find_subkeys(self, subkey_name):
+ """Exactly the same as find_subkey, except this function tries to match
+ a regex to multiple keys
+
+ Args:
+ subkey_name (str)
+ Return:
+ the desired subkeys as a list of RegistryKey objects, or None
+
+ For more details, see the WindowsRegistryView._find_subkey_s method docstring
+ """
+ kwargs = {"collect_all_matching": True}
+ return self._find_subkey_s(
+ WindowsRegistryView.KeyMatchConditions.regex_matcher(subkey_name), **kwargs
+ )
def find_value(self, val_name, recursive=True):
"""
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index 7b045d62628022..9bea903aac7289 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -916,7 +916,7 @@ def __init__(self, spec, variants):
variant_str = "variant" if len(variants) == 1 else "variants"
msg = (
'trying to set {0} "{1}" in package "{2}", but the package'
- " has no such {0} [happened during concretization of {3}]"
+ " has no such {0} [happened when validating '{3}']"
)
msg = msg.format(variant_str, comma_or(variants), spec.name, spec.root)
super().__init__(msg)
diff --git a/share/spack/bash/spack-completion.bash b/share/spack/bash/spack-completion.bash
index 49c691be4c349b..9a5b367be7a49a 100755
--- a/share/spack/bash/spack-completion.bash
+++ b/share/spack/bash/spack-completion.bash
@@ -370,7 +370,7 @@ _spack_compress_aliases() {
# If there are zero or one completions, don't do anything
# If this isn't the first argument, bail because aliases currently only apply
# to top-level commands.
- if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD" != "1" ]; then
+ if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD_NO_FLAGS" != "1" ]; then
return
fi
diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml
index 245bb51933ccf0..0c3991efeda561 100644
--- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml
+++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml
@@ -141,10 +141,16 @@ default:
- cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
- spack env activate --without-view .
- export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs"
+ - spack
+ --config-scope "${SPACK_CI_CONFIG_ROOT}"
+ --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
+ --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
+ ${CI_STACK_CONFIG_SCOPES}
+ compiler find
- spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))"
< "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
- spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
- - spack -v
+ - spack -v --color=always
--config-scope "${SPACK_CI_CONFIG_ROOT}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
@@ -197,7 +203,7 @@ default:
- spack --version
- cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
- spack env activate --without-view .
- - spack -v
+ - spack -v --color=always
ci generate --check-index-only
--buildcache-destination "${PUSH_BUILDCACHE_DEPRECATED}"
--artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
@@ -364,25 +370,25 @@ e4s-rocm-external-build:
########################################
# GPU Testing Pipeline
########################################
-.gpu-tests:
- extends: [ ".linux_x86_64_v3" ]
- variables:
- SPACK_CI_STACK_NAME: gpu-tests
+# .gpu-tests:
+# extends: [ ".linux_x86_64_v3" ]
+# variables:
+# SPACK_CI_STACK_NAME: gpu-tests
-gpu-tests-generate:
- extends: [ ".gpu-tests", ".generate-x86_64"]
- image: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
+# gpu-tests-generate:
+# extends: [ ".gpu-tests", ".generate-x86_64"]
+# image: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
-gpu-tests-build:
- extends: [ ".gpu-tests", ".build" ]
- trigger:
- include:
- - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
- job: gpu-tests-generate
- strategy: depend
- needs:
- - artifacts: True
- job: gpu-tests-generate
+# gpu-tests-build:
+# extends: [ ".gpu-tests", ".build" ]
+# trigger:
+# include:
+# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
+# job: gpu-tests-generate
+# strategy: depend
+# needs:
+# - artifacts: True
+# job: gpu-tests-generate
########################################
# E4S OneAPI Pipeline
@@ -615,7 +621,7 @@ aws-isc-aarch64-build:
tutorial-generate:
extends: [ ".tutorial", ".generate-x86_64"]
- image: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-05-07
+ image: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-10-30
tutorial-build:
extends: [ ".tutorial", ".build" ]
@@ -706,7 +712,7 @@ ml-linux-x86_64-rocm-build:
SPACK_CI_STACK_NAME: ml-darwin-aarch64-mps
ml-darwin-aarch64-mps-generate:
- tags: [ "macos-ventura", "apple-clang-14", "aarch64-macos" ]
+ tags: [ "macos-ventura", "apple-clang-15", "aarch64-macos" ]
extends: [ ".ml-darwin-aarch64-mps", ".generate-base"]
ml-darwin-aarch64-mps-build:
@@ -888,16 +894,16 @@ e4s-cray-rhel-build:
variables:
SPACK_CI_STACK_NAME: e4s-cray-sles
-# e4s-cray-sles-generate:
-# extends: [ ".generate-cray-sles", ".e4s-cray-sles" ]
+e4s-cray-sles-generate:
+ extends: [ ".generate-cray-sles", ".e4s-cray-sles" ]
-# e4s-cray-sles-build:
-# extends: [ ".build", ".e4s-cray-sles" ]
-# trigger:
-# include:
-# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
-# job: e4s-cray-sles-generate
-# strategy: depend
-# needs:
-# - artifacts: True
-# job: e4s-cray-sles-generate
+e4s-cray-sles-build:
+ extends: [ ".build", ".e4s-cray-sles" ]
+ trigger:
+ include:
+ - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
+ job: e4s-cray-sles-generate
+ strategy: depend
+ needs:
+ - artifacts: True
+ job: e4s-cray-sles-generate
diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml
index 9aad850b5df065..f1f77c20b0286b 100644
--- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml
+++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml
@@ -4,6 +4,7 @@ ci:
broken-tests-packages:
- gptune
- superlu-dist # srun -n 4 hangs
+ - papyrus
broken-specs-url: "https://dummy.io" # s3://spack-binaries/broken-specs"
@@ -12,7 +13,7 @@ ci:
before_script-:
- - spack list --count # ensure that spack's cache is populated
- - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
- - spack compiler find
+ - spack compiler list
- if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi
- - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
# AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification)
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml
index d42e5f1fcade20..82a1f07c8d4186 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml
@@ -54,21 +54,6 @@ spack:
cuda:
version: [11.8.0]
- compilers:
- - compiler:
- spec: gcc@11.4.0
- paths:
- cc: /usr/bin/gcc
- cxx: /usr/bin/g++
- f77: /usr/bin/gfortran
- fc: /usr/bin/gfortran
- flags: {}
- operating_system: ubuntu20.04
- target: aarch64
- modules: []
- environment: {}
- extra_rpaths: []
-
specs:
# CPU
- adios
@@ -165,7 +150,7 @@ spack:
- swig@4.0.2-fortran
- sz3
- tasmanian
- - tau +mpi +python
+ - tau +mpi +python +syscall
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
@@ -201,7 +186,7 @@ spack:
- flux-core +cuda
- hpctoolkit +cuda
- papi +cuda
- - tau +mpi +cuda
+ - tau +mpi +cuda +syscall
# --
# - bricks +cuda # not respecting target=aarch64?
# - legion +cuda # legion: needs NVIDIA driver
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml
index d170b0a272772c..eae204527ce5a6 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml
@@ -5,34 +5,6 @@ spack:
reuse: false
unify: false
- compilers:
- - compiler:
- spec: oneapi@2023.2.1
- paths:
- cc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icx
- cxx: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icpx
- f77: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx
- fc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx
- flags: {}
- operating_system: ubuntu20.04
- target: x86_64
- modules: []
- environment: {}
- extra_rpaths: []
- - compiler:
- spec: gcc@=11.4.0
- paths:
- cc: /usr/bin/gcc
- cxx: /usr/bin/g++
- f77: /usr/bin/gfortran
- fc: /usr/bin/gfortran
- flags: {}
- operating_system: ubuntu20.04
- target: x86_64
- modules: []
- environment: {}
- extra_rpaths: []
-
packages:
all:
require: '%oneapi target=x86_64_v3'
@@ -92,6 +64,9 @@ spack:
require: "%gcc"
bison:
require: '%gcc'
+ # sycl abi change means you need 2024 compiler to use 2024 mkl
+ intel-oneapi-mkl:
+ require: "@2023"
specs:
# CPU
@@ -181,7 +156,7 @@ spack:
- superlu-dist
- sz3
- tasmanian
- - tau +mpi +python
+ - tau +mpi +python +syscall
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
@@ -225,14 +200,18 @@ spack:
- amrex +sycl
- arborx +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples
- cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples
+ - ginkgo +sycl
+ - heffte +sycl
- kokkos +sycl +openmp cxxstd=17 +tests +examples
- kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples
- - tau +mpi +opencl +level_zero ~pdt # tau: requires libdrm.so to be installed
+ - petsc +sycl
- slate +sycl
+ - sundials +sycl cxxstd=17 +examples-install
+ - tau +mpi +opencl +level_zero ~pdt +syscall # tau: requires libdrm.so to be installed
+ - upcxx +level_zero
# --
- # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler.
# - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc
- # - sundials +sycl cxxstd=17 # sundials: include/sunmemory/sunmemory_sycl.h:20:10: fatal error: 'CL/sycl.hpp' file not found
+ # - warpx compute=sycl # warpx: spack-build-wzp6vvo/_deps/fetchedamrex-src/Src/Base/AMReX_RandomEngine.H:18:10: fatal error: 'oneapi/mkl/rng/device.hpp' file not found
- py-scipy
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml
index 10bf4bc57d99f7..511f48e7459408 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml
@@ -5,21 +5,6 @@ spack:
reuse: false
unify: false
- compilers:
- - compiler:
- spec: gcc@9.4.0
- paths:
- cc: /usr/bin/gcc
- cxx: /usr/bin/g++
- f77: /usr/bin/gfortran
- fc: /usr/bin/gfortran
- flags: {}
- operating_system: ubuntu20.04
- target: ppc64le
- modules: []
- environment: {}
- extra_rpaths: []
-
packages:
all:
require: "%gcc@9.4.0 target=ppc64le"
@@ -165,7 +150,7 @@ spack:
- swig@4.0.2-fortran
- sz3
- tasmanian
- - tau +mpi +python # tau: has issue with `spack env depfile` build
+ - tau +mpi +python # +syscall fails: https://github.com/spack/spack/pull/40830#issuecomment-1790799772; tau: has issue with `spack env depfile` build
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
@@ -214,6 +199,7 @@ spack:
- caliper +cuda cuda_arch=70
- chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared
- ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp ~paraview +cuda cuda_arch=70
+ - exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=70 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=70 #^raja@0.14.0
- flecsi +cuda cuda_arch=70
- ginkgo +cuda cuda_arch=70
- heffte +cuda cuda_arch=70
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml
index 885dbb538b0476..8f902aa6a8d4f4 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml
@@ -5,21 +5,6 @@ spack:
reuse: false
unify: false
- compilers:
- - compiler:
- spec: gcc@=11.4.0
- paths:
- cc: /usr/bin/gcc
- cxx: /usr/bin/g++
- f77: /usr/bin/gfortran
- fc: /usr/bin/gfortran
- flags: {}
- operating_system: ubuntu20.04
- target: x86_64
- modules: []
- environment: {}
- extra_rpaths: []
-
packages:
all:
require: '%gcc target=x86_64_v3'
@@ -255,15 +240,17 @@ spack:
specs:
# ROCM NOARCH
- hpctoolkit +rocm
- - tau +mpi +rocm # tau: has issue with `spack env depfile` build
+ - tau +mpi +rocm +syscall # tau: has issue with `spack env depfile` build
# ROCM 908
+ - adios2 +kokkos +rocm amdgpu_target=gfx908
- amrex +rocm amdgpu_target=gfx908
- arborx +rocm amdgpu_target=gfx908
- cabana +rocm amdgpu_target=gfx908
- caliper +rocm amdgpu_target=gfx908
- chai ~benchmarks +rocm amdgpu_target=gfx908
- ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908
+ - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908
- gasnet +rocm amdgpu_target=gfx908
- ginkgo +rocm amdgpu_target=gfx908
- heffte +rocm amdgpu_target=gfx908
@@ -297,12 +284,14 @@ spack:
# - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898
# ROCM 90a
+ - adios2 +kokkos +rocm amdgpu_target=gfx90a
- amrex +rocm amdgpu_target=gfx90a
- arborx +rocm amdgpu_target=gfx90a
- cabana +rocm amdgpu_target=gfx90a
- caliper +rocm amdgpu_target=gfx90a
- chai ~benchmarks +rocm amdgpu_target=gfx90a
- ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a
+ - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a
- gasnet +rocm amdgpu_target=gfx90a
- ginkgo +rocm amdgpu_target=gfx90a
- heffte +rocm amdgpu_target=gfx90a
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml
index 86eab1d4074d3a..11396a768f7cb1 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml
@@ -5,21 +5,6 @@ spack:
reuse: false
unify: false
- compilers:
- - compiler:
- spec: gcc@=11.4.0
- paths:
- cc: /usr/bin/gcc
- cxx: /usr/bin/g++
- f77: /usr/bin/gfortran
- fc: /usr/bin/gfortran
- flags: {}
- operating_system: ubuntu20.04
- target: x86_64
- modules: []
- environment: {}
- extra_rpaths: []
-
packages:
all:
require: '%gcc target=x86_64_v3'
@@ -66,6 +51,8 @@ spack:
require: "@3.4.4"
vtk-m:
require: "+examples"
+ visit:
+ require: "~gui"
cuda:
version: [11.8.0]
paraview:
@@ -172,7 +159,7 @@ spack:
- swig@4.0.2-fortran
- sz3
- tasmanian
- - tau +mpi +python
+ - tau +mpi +python +syscall
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
@@ -207,7 +194,7 @@ spack:
- flux-core +cuda
- hpctoolkit +cuda
- papi +cuda
- - tau +mpi +cuda
+ - tau +mpi +cuda +syscall
# --
# - legion +cuda # legion: needs NVIDIA driver
@@ -220,6 +207,7 @@ spack:
- cusz +cuda cuda_arch=80
- dealii +cuda cuda_arch=80
- ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # +ascent fails because fides fetch error
+ - exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=80 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=80 #^raja@0.14.0
- flecsi +cuda cuda_arch=80
- ginkgo +cuda cuda_arch=80
- heffte +cuda cuda_arch=80
@@ -303,9 +291,10 @@ spack:
# ROCM NOARCH
- hpctoolkit +rocm
- - tau +mpi +rocm # tau: has issue with `spack env depfile` build
+ - tau +mpi +rocm +syscall # tau: has issue with `spack env depfile` build
# ROCM 908
+ - adios2 +kokkos +rocm amdgpu_target=gfx908
- amrex +rocm amdgpu_target=gfx908
- arborx +rocm amdgpu_target=gfx908
- cabana +rocm amdgpu_target=gfx908
@@ -341,10 +330,12 @@ spack:
- paraview +rocm amdgpu_target=gfx908
# - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268
# --
+ # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package)
# - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807
# - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898
# ROCM 90a
+ - adios2 +kokkos +rocm amdgpu_target=gfx90a
- amrex +rocm amdgpu_target=gfx90a
- arborx +rocm amdgpu_target=gfx90a
- cabana +rocm amdgpu_target=gfx90a
@@ -380,6 +371,7 @@ spack:
- paraview +rocm amdgpu_target=gfx90a
# - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268
# --
+ # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package)
# - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807
# - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml
index 6d8a0b7491f9a3..0905305113f083 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml
@@ -89,7 +89,7 @@ spack:
- build-job:
variables:
CI_GPG_KEY_ROOT: /etc/protected-runner
- tags: [ "macos-ventura", "apple-clang-14", "aarch64-macos" ]
+ tags: [ "macos-ventura", "apple-clang-15", "aarch64-macos" ]
cdash:
build-group: Machine Learning MPS
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml
index 0bc36ce8e44447..c320442cbe0f84 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml
@@ -1,9 +1,4 @@
spack:
- config:
- # allow deprecated versions in concretizations
- # required for zlib
- deprecated: true
-
view: false
packages:
all:
@@ -13,36 +8,37 @@ spack:
definitions:
- gcc_system_packages:
- matrix:
- - - zlib
- - zlib@1.2.8
- - zlib@1.2.8 cflags=-O3
+ - - zlib-ng
+ - zlib-ng@2.0.7
+ - zlib-ng@2.0.7 cflags=-O3
- tcl
- - tcl ^zlib@1.2.8 cflags=-O3
+ - tcl ^zlib-ng@2.0.7 cflags=-O3
- hdf5
- hdf5~mpi
- hdf5+hl+mpi ^mpich
- trilinos
- trilinos +hdf5 ^hdf5+hl+mpi ^mpich
- - gcc@12.1.0
+ - gcc@12.3.0
- mpileaks
- - lmod
- - macsio@1.1+scr^scr@2.0.0~fortran^silo~fortran^hdf5~fortran
- - ['%gcc@11.3.0']
+ - lmod@8.7.18
+ - environment-modules
+ - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran
+ - ['%gcc@11']
- gcc_old_packages:
- - zlib%gcc@10.4.0
+ - zlib-ng%gcc@10
- clang_packages:
- matrix:
- - [zlib, tcl ^zlib@1.2.8]
- - ['%clang@14.0.0']
+ - [zlib-ng, tcl ^zlib-ng@2.0.7]
+ - ['%clang@14']
- gcc_spack_built_packages:
- matrix:
- [netlib-scalapack]
- [^mpich, ^openmpi]
- [^openblas, ^netlib-lapack]
- - ['%gcc@12.1.0']
+ - ['%gcc@12']
- matrix:
- - [py-scipy^openblas, armadillo^openblas, netlib-lapack, openmpi, mpich, elpa^mpich]
- - ['%gcc@12.1.0']
+ - [py-scipy ^openblas, armadillo ^openblas, netlib-lapack, openmpi, mpich, elpa ^mpich]
+ - ['%gcc@12']
specs:
- $gcc_system_packages
- $gcc_old_packages
@@ -53,7 +49,7 @@ spack:
pipeline-gen:
- build-job:
image:
- name: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-05-07
+ name: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-10-30
entrypoint: ['']
cdash:
build-group: Spack Tutorial
diff --git a/share/spack/qa/setup-env-test.fish b/share/spack/qa/setup-env-test.fish
index 6474917b70766b..589f4cbfa8c353 100755
--- a/share/spack/qa/setup-env-test.fish
+++ b/share/spack/qa/setup-env-test.fish
@@ -371,7 +371,6 @@ spt_contains " spack env list " spack env list --help
title 'Testing `spack env activate`'
spt_contains "No such environment:" spack env activate no_such_environment
-spt_contains "env activate requires an environment " spack env activate
spt_contains "usage: spack env activate " spack env activate -h
spt_contains "usage: spack env activate " spack env activate --help
@@ -415,6 +414,11 @@ spt_contains 'spack_test_2_env' 'fish' '-c' 'echo $PATH'
spt_does_not_contain 'spack_test_env' 'fish' '-c' 'echo $PATH'
despacktivate
+echo "Testing default environment"
+spack env activate
+contains "In environment default" spack env status
+despacktivate
+
echo "Correct error exit codes for activate and deactivate"
spt_fails spack env activate nonexisiting_environment
spt_fails spack env deactivate
diff --git a/share/spack/qa/setup-env-test.sh b/share/spack/qa/setup-env-test.sh
index 4172a40155590b..b26619b9cde0a0 100755
--- a/share/spack/qa/setup-env-test.sh
+++ b/share/spack/qa/setup-env-test.sh
@@ -140,7 +140,6 @@ contains " spack env list " spack env list --help
title 'Testing `spack env activate`'
contains "No such environment:" spack env activate no_such_environment
-contains "env activate requires an environment " spack env activate
contains "usage: spack env activate " spack env activate -h
contains "usage: spack env activate " spack env activate --help
@@ -197,6 +196,11 @@ contains "spack_test_2_env" sh -c 'echo $PATH'
does_not_contain "spack_test_env" sh -c 'echo $PATH'
despacktivate
+echo "Testing default environment"
+spack env activate
+contains "In environment default" spack env status
+despacktivate
+
echo "Correct error exit codes for activate and deactivate"
fails spack env activate nonexisiting_environment
fails spack env deactivate
diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh
index b5f434863595f4..a42882266cf9fa 100755
--- a/share/spack/setup-env.sh
+++ b/share/spack/setup-env.sh
@@ -98,7 +98,7 @@ _spack_shell_wrapper() {
if [ "$_sp_arg" = "-h" ] || [ "$_sp_arg" = "--help" ]; then
command spack cd -h
else
- LOC="$(spack location $_sp_arg "$@")"
+ LOC="$(SPACK_COLOR="${SPACK_COLOR:-always}" spack location $_sp_arg "$@")"
if [ -d "$LOC" ] ; then
cd "$LOC"
else
@@ -126,8 +126,7 @@ _spack_shell_wrapper() {
# Space needed here to differentiate between `-h`
# argument and environments with "-h" in the name.
# Also see: https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html#Shell-Parameter-Expansion
- if [ -z ${1+x} ] || \
- [ "${_a#* --sh}" != "$_a" ] || \
+ if [ "${_a#* --sh}" != "$_a" ] || \
[ "${_a#* --csh}" != "$_a" ] || \
[ "${_a#* -h}" != "$_a" ] || \
[ "${_a#* --help}" != "$_a" ];
@@ -136,7 +135,7 @@ _spack_shell_wrapper() {
command spack env activate "$@"
else
# Actual call to activate: source the output.
- stdout="$(command spack $_sp_flags env activate --sh "$@")" || return
+ stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags env activate --sh "$@")" || return
eval "$stdout"
fi
;;
@@ -158,7 +157,7 @@ _spack_shell_wrapper() {
command spack env deactivate -h
else
# No args: source the output of the command.
- stdout="$(command spack $_sp_flags env deactivate --sh)" || return
+ stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags env deactivate --sh)" || return
eval "$stdout"
fi
;;
@@ -186,7 +185,7 @@ _spack_shell_wrapper() {
# Args contain --sh, --csh, or -h/--help: just execute.
command spack $_sp_flags $_sp_subcommand "$@"
else
- stdout="$(command spack $_sp_flags $_sp_subcommand --sh "$@")" || return
+ stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags $_sp_subcommand --sh "$@")" || return
eval "$stdout"
fi
;;
diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash
index 0280524536cfbc..a54f7db414e409 100755
--- a/share/spack/spack-completion.bash
+++ b/share/spack/spack-completion.bash
@@ -370,7 +370,7 @@ _spack_compress_aliases() {
# If there are zero or one completions, don't do anything
# If this isn't the first argument, bail because aliases currently only apply
# to top-level commands.
- if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD" != "1" ]; then
+ if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD_NO_FLAGS" != "1" ]; then
return
fi
@@ -401,7 +401,7 @@ _spack() {
then
SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace --backtrace -V --version --print-shell-vars"
else
- SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize concretise config containerize containerise create debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
+ SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize concretise config containerize containerise create debug deconcretize dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
fi
}
@@ -571,7 +571,7 @@ _spack_buildcache() {
_spack_buildcache_push() {
if $list_options
then
- SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast"
+ SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast --base-image -j --jobs"
else
_mirrors
fi
@@ -580,7 +580,7 @@ _spack_buildcache_push() {
_spack_buildcache_create() {
if $list_options
then
- SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast"
+ SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast --base-image -j --jobs"
else
_mirrors
fi
@@ -681,7 +681,7 @@ _spack_change() {
_spack_checksum() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --keep-stage -b --batch -l --latest -p --preferred -a --add-to-package --verify -j --jobs"
+ SPACK_COMPREPLY="-h --help --keep-stage --batch -b --latest -l --preferred -p --add-to-package -a --verify -j --jobs"
else
_all_packages
fi
@@ -756,7 +756,7 @@ _spack_compiler() {
_spack_compiler_find() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --scope"
+ SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope"
else
SPACK_COMPREPLY=""
fi
@@ -765,7 +765,7 @@ _spack_compiler_find() {
_spack_compiler_add() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --scope"
+ SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope"
else
SPACK_COMPREPLY=""
fi
@@ -937,6 +937,15 @@ _spack_debug_report() {
SPACK_COMPREPLY="-h --help"
}
+_spack_deconcretize() {
+ if $list_options
+ then
+ SPACK_COMPREPLY="-h --help --root -y --yes-to-all -a --all"
+ else
+ _all_packages
+ fi
+}
+
_spack_dependencies() {
if $list_options
then
@@ -1267,7 +1276,7 @@ _spack_help() {
_spack_info() {
if $list_options
then
- SPACK_COMPREPLY="-h --help -a --all --detectable --maintainers --no-dependencies --no-variants --no-versions --phases --tags --tests --virtuals"
+ SPACK_COMPREPLY="-h --help -a --all --detectable --maintainers --no-dependencies --no-variants --no-versions --phases --tags --tests --virtuals --variants-by-name"
else
_all_packages
fi
@@ -1391,7 +1400,7 @@ _spack_mirror_destroy() {
_spack_mirror_add() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --scope --type --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
+ SPACK_COMPREPLY="-h --help --scope --type --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password"
else
_mirrors
fi
@@ -1418,7 +1427,7 @@ _spack_mirror_rm() {
_spack_mirror_set_url() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
+ SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password"
else
_mirrors
fi
@@ -1427,7 +1436,7 @@ _spack_mirror_set_url() {
_spack_mirror_set() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --push --fetch --type --url --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url"
+ SPACK_COMPREPLY="-h --help --push --fetch --type --url --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password"
else
_mirrors
fi
diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish
index e37b3448d5fcfc..1d32fc1d376be5 100755
--- a/share/spack/spack-completion.fish
+++ b/share/spack/spack-completion.fish
@@ -371,6 +371,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a containerize -d '
complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a containerise -d 'creates recipes to build images for different container runtimes'
complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a create -d 'create a new package file'
complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a debug -d 'debugging commands for troubleshooting Spack'
+complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a deconcretize -d 'remove specs from the concretized lockfile of an environment'
complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a dependencies -d 'show dependencies of a package'
complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a dependents -d 'show packages that depend on another'
complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a deprecate -d 'replace one package with another via symlinks'
@@ -696,7 +697,7 @@ complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -f -a
complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -d 'show this help message and exit'
# spack buildcache push
-set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast
+set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast base-image= j/jobs=
complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache push' -f -k -a '(__fish_spack_specs)'
complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -d 'show this help message and exit'
@@ -716,9 +717,13 @@ complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -f
complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. Alternatively, one can decide to build a cache for only the package or only the dependencies'
complete -c spack -n '__fish_spack_using_command buildcache push' -l fail-fast -f -a fail_fast
complete -c spack -n '__fish_spack_using_command buildcache push' -l fail-fast -d 'stop pushing on first failure (default is best effort)'
+complete -c spack -n '__fish_spack_using_command buildcache push' -l base-image -r -f -a base_image
+complete -c spack -n '__fish_spack_using_command buildcache push' -l base-image -r -d 'specify the base image for the buildcache. '
+complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -r -f -a jobs
+complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -r -d 'explicitly set number of parallel jobs'
# spack buildcache create
-set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast
+set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast base-image= j/jobs=
complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache create' -f -k -a '(__fish_spack_specs)'
complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -d 'show this help message and exit'
@@ -738,6 +743,10 @@ complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r -
complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. Alternatively, one can decide to build a cache for only the package or only the dependencies'
complete -c spack -n '__fish_spack_using_command buildcache create' -l fail-fast -f -a fail_fast
complete -c spack -n '__fish_spack_using_command buildcache create' -l fail-fast -d 'stop pushing on first failure (default is best effort)'
+complete -c spack -n '__fish_spack_using_command buildcache create' -l base-image -r -f -a base_image
+complete -c spack -n '__fish_spack_using_command buildcache create' -l base-image -r -d 'specify the base image for the buildcache. '
+complete -c spack -n '__fish_spack_using_command buildcache create' -s j -l jobs -r -f -a jobs
+complete -c spack -n '__fish_spack_using_command buildcache create' -s j -l jobs -r -d 'explicitly set number of parallel jobs'
# spack buildcache install
set -g __fish_spack_optspecs_spack_buildcache_install h/help f/force m/multiple u/unsigned o/otherarch
@@ -906,14 +915,14 @@ complete -c spack -n '__fish_spack_using_command checksum' -s h -l help -f -a he
complete -c spack -n '__fish_spack_using_command checksum' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command checksum' -l keep-stage -f -a keep_stage
complete -c spack -n '__fish_spack_using_command checksum' -l keep-stage -d 'don\'t clean up staging area when command completes'
-complete -c spack -n '__fish_spack_using_command checksum' -s b -l batch -f -a batch
-complete -c spack -n '__fish_spack_using_command checksum' -s b -l batch -d 'don\'t ask which versions to checksum'
-complete -c spack -n '__fish_spack_using_command checksum' -s l -l latest -f -a latest
-complete -c spack -n '__fish_spack_using_command checksum' -s l -l latest -d 'checksum the latest available version'
-complete -c spack -n '__fish_spack_using_command checksum' -s p -l preferred -f -a preferred
-complete -c spack -n '__fish_spack_using_command checksum' -s p -l preferred -d 'checksum the known Spack preferred version'
-complete -c spack -n '__fish_spack_using_command checksum' -s a -l add-to-package -f -a add_to_package
-complete -c spack -n '__fish_spack_using_command checksum' -s a -l add-to-package -d 'add new versions to package'
+complete -c spack -n '__fish_spack_using_command checksum' -l batch -s b -f -a batch
+complete -c spack -n '__fish_spack_using_command checksum' -l batch -s b -d 'don\'t ask which versions to checksum'
+complete -c spack -n '__fish_spack_using_command checksum' -l latest -s l -f -a latest
+complete -c spack -n '__fish_spack_using_command checksum' -l latest -s l -d 'checksum the latest available version'
+complete -c spack -n '__fish_spack_using_command checksum' -l preferred -s p -f -a preferred
+complete -c spack -n '__fish_spack_using_command checksum' -l preferred -s p -d 'checksum the known Spack preferred version'
+complete -c spack -n '__fish_spack_using_command checksum' -l add-to-package -s a -f -a add_to_package
+complete -c spack -n '__fish_spack_using_command checksum' -l add-to-package -s a -d 'add new versions to package'
complete -c spack -n '__fish_spack_using_command checksum' -l verify -f -a verify
complete -c spack -n '__fish_spack_using_command checksum' -l verify -d 'verify known package checksums'
complete -c spack -n '__fish_spack_using_command checksum' -s j -l jobs -r -f -a jobs
@@ -1037,18 +1046,26 @@ complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -f -a he
complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -d 'show this help message and exit'
# spack compiler find
-set -g __fish_spack_optspecs_spack_compiler_find h/help scope=
+set -g __fish_spack_optspecs_spack_compiler_find h/help mixed-toolchain no-mixed-toolchain scope=
complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -d 'show this help message and exit'
+complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -f -a mixed_toolchain
+complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, gfortran)'
+complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -f -a mixed_toolchain
+complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)'
complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -f -a '_builtin defaults system site user command_line'
complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -d 'configuration scope to modify'
# spack compiler add
-set -g __fish_spack_optspecs_spack_compiler_add h/help scope=
+set -g __fish_spack_optspecs_spack_compiler_add h/help mixed-toolchain no-mixed-toolchain scope=
complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -d 'show this help message and exit'
+complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -f -a mixed_toolchain
+complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, gfortran)'
+complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -f -a mixed_toolchain
+complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)'
complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -f -a '_builtin defaults system site user command_line'
complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -d 'configuration scope to modify'
@@ -1151,19 +1168,19 @@ complete -c spack -n '__fish_spack_using_command config' -l scope -r -d 'configu
# spack config get
set -g __fish_spack_optspecs_spack_config_get h/help
-complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config mirrors modules packages repos upstreams'
+complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config definitions mirrors modules packages repos upstreams'
complete -c spack -n '__fish_spack_using_command config get' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config get' -s h -l help -d 'show this help message and exit'
# spack config blame
set -g __fish_spack_optspecs_spack_config_blame h/help
-complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config mirrors modules packages repos upstreams'
+complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config definitions mirrors modules packages repos upstreams'
complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -d 'show this help message and exit'
# spack config edit
set -g __fish_spack_optspecs_spack_config_edit h/help print-file
-complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config mirrors modules packages repos upstreams'
+complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config definitions mirrors modules packages repos upstreams'
complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command config edit' -l print-file -f -a print_file
@@ -1244,7 +1261,7 @@ complete -c spack -n '__fish_spack_using_command create' -l keep-stage -f -a kee
complete -c spack -n '__fish_spack_using_command create' -l keep-stage -d 'don\'t clean up staging area when command completes'
complete -c spack -n '__fish_spack_using_command create' -s n -l name -r -f -a name
complete -c spack -n '__fish_spack_using_command create' -s n -l name -r -d 'name of the package to create'
-complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -f -a 'autoreconf autotools bazel bundle cmake generic intel lua makefile maven meson octave perlbuild perlmake python qmake r racket ruby scons sip waf'
+complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -f -a 'autoreconf autotools bazel bundle cargo cmake generic go intel lua makefile maven meson octave perlbuild perlmake python qmake r racket ruby scons sip waf'
complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -d 'build system template to use'
complete -c spack -n '__fish_spack_using_command create' -s r -l repo -r -f -a repo
complete -c spack -n '__fish_spack_using_command create' -s r -l repo -r -d 'path to a repository where the package should be created'
@@ -1274,6 +1291,18 @@ set -g __fish_spack_optspecs_spack_debug_report h/help
complete -c spack -n '__fish_spack_using_command debug report' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command debug report' -s h -l help -d 'show this help message and exit'
+# spack deconcretize
+set -g __fish_spack_optspecs_spack_deconcretize h/help root y/yes-to-all a/all
+complete -c spack -n '__fish_spack_using_command_pos_remainder 0 deconcretize' -f -k -a '(__fish_spack_specs)'
+complete -c spack -n '__fish_spack_using_command deconcretize' -s h -l help -f -a help
+complete -c spack -n '__fish_spack_using_command deconcretize' -s h -l help -d 'show this help message and exit'
+complete -c spack -n '__fish_spack_using_command deconcretize' -l root -f -a root
+complete -c spack -n '__fish_spack_using_command deconcretize' -l root -d 'deconcretize only specific environment roots'
+complete -c spack -n '__fish_spack_using_command deconcretize' -s y -l yes-to-all -f -a yes_to_all
+complete -c spack -n '__fish_spack_using_command deconcretize' -s y -l yes-to-all -d 'assume "yes" is the answer to every confirmation request'
+complete -c spack -n '__fish_spack_using_command deconcretize' -s a -l all -f -a all
+complete -c spack -n '__fish_spack_using_command deconcretize' -s a -l all -d 'deconcretize ALL specs that match each supplied spec'
+
# spack dependencies
set -g __fish_spack_optspecs_spack_dependencies h/help i/installed t/transitive deptype= V/no-expand-virtuals
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 dependencies' -f -k -a '(__fish_spack_specs)'
@@ -1839,7 +1868,7 @@ complete -c spack -n '__fish_spack_using_command help' -l spec -f -a guide
complete -c spack -n '__fish_spack_using_command help' -l spec -d 'help on the package specification syntax'
# spack info
-set -g __fish_spack_optspecs_spack_info h/help a/all detectable maintainers no-dependencies no-variants no-versions phases tags tests virtuals
+set -g __fish_spack_optspecs_spack_info h/help a/all detectable maintainers no-dependencies no-variants no-versions phases tags tests virtuals variants-by-name
complete -c spack -n '__fish_spack_using_command_pos 0 info' -f -a '(__fish_spack_packages)'
complete -c spack -n '__fish_spack_using_command info' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command info' -s h -l help -d 'show this help message and exit'
@@ -1863,6 +1892,8 @@ complete -c spack -n '__fish_spack_using_command info' -l tests -f -a tests
complete -c spack -n '__fish_spack_using_command info' -l tests -d 'output relevant build-time and stand-alone tests'
complete -c spack -n '__fish_spack_using_command info' -l virtuals -f -a virtuals
complete -c spack -n '__fish_spack_using_command info' -l virtuals -d 'output virtual packages'
+complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -f -a variants_by_name
+complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -d 'list variants in strict name order; don\'t group by condition'
# spack install
set -g __fish_spack_optspecs_spack_install h/help only= u/until= j/jobs= overwrite fail-fast keep-prefix keep-stage dont-restage use-cache no-cache cache-only use-buildcache= include-build-deps no-check-signature show-log-on-error source n/no-checksum deprecated v/verbose fake only-concrete add no-add f/file= clean dirty test= log-format= log-file= help-cdash cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= y/yes-to-all U/fresh reuse reuse-deps
@@ -2139,7 +2170,7 @@ complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url -
complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url -r -d 'find mirror to destroy by url'
# spack mirror add
-set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url=
+set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password=
complete -c spack -n '__fish_spack_using_command_pos 0 mirror add' -f
complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -d 'show this help message and exit'
@@ -2157,6 +2188,10 @@ complete -c spack -n '__fish_spack_using_command mirror add' -l s3-profile -r -f
complete -c spack -n '__fish_spack_using_command mirror add' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror'
complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -f -a s3_endpoint_url
complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror'
+complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -f -a oci_username
+complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -d 'username to use to connect to this OCI mirror'
+complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -f -a oci_password
+complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -d 'password to use to connect to this OCI mirror'
# spack mirror remove
set -g __fish_spack_optspecs_spack_mirror_remove h/help scope=
@@ -2175,7 +2210,7 @@ complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -f -a '_
complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -d 'configuration scope to modify'
# spack mirror set-url
-set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url=
+set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password=
complete -c spack -n '__fish_spack_using_command_pos 0 mirror set-url' -f -a '(__fish_spack_mirrors)'
complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l help -d 'show this help message and exit'
@@ -2195,9 +2230,13 @@ complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile -
complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror'
complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -f -a s3_endpoint_url
complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror'
+complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -f -a oci_username
+complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -d 'username to use to connect to this OCI mirror'
+complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -f -a oci_password
+complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -d 'password to use to connect to this OCI mirror'
# spack mirror set
-set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url=
+set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password=
complete -c spack -n '__fish_spack_using_command_pos 0 mirror set' -f -a '(__fish_spack_mirrors)'
complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -d 'show this help message and exit'
@@ -2221,6 +2260,10 @@ complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -f
complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror'
complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -f -a s3_endpoint_url
complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror'
+complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -f -a oci_username
+complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -d 'username to use to connect to this OCI mirror'
+complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -f -a oci_password
+complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -d 'password to use to connect to this OCI mirror'
# spack mirror list
set -g __fish_spack_optspecs_spack_mirror_list h/help scope=
diff --git a/share/spack/templates/depfile/Makefile b/share/spack/templates/depfile/Makefile
index dde42cf7d5b605..4b764752678c0f 100644
--- a/share/spack/templates/depfile/Makefile
+++ b/share/spack/templates/depfile/Makefile
@@ -8,7 +8,7 @@ SPACK_INSTALL_FLAGS ?=
{{ all_target }}: {{ env_target }}
-{{ env_target }}: {{ root_install_targets }}
+{{ env_target }}: {{ root_install_targets }} | {{ dirs_target }}
@touch $@
{{ dirs_target }}:
diff --git a/share/spack/templates/modules/modulefile.tcl b/share/spack/templates/modules/modulefile.tcl
index 746fea2f31def9..d1593b88280e65 100644
--- a/share/spack/templates/modules/modulefile.tcl
+++ b/share/spack/templates/modules/modulefile.tcl
@@ -54,11 +54,23 @@ conflict {{ name }}
{% block environment %}
{% for command_name, cmd in environment_modifications %}
{% if command_name == 'PrependPath' %}
+{% if cmd.separator == ':' %}
+prepend-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }}
+{% else %}
prepend-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }}
+{% endif %}
{% elif command_name in ('AppendPath', 'AppendFlagsEnv') %}
+{% if cmd.separator == ':' %}
+append-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }}
+{% else %}
append-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }}
+{% endif %}
{% elif command_name in ('RemovePath', 'RemoveFlagsEnv') %}
+{% if cmd.separator == ':' %}
+remove-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }}
+{% else %}
remove-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }}
+{% endif %}
{% elif command_name == 'SetEnv' %}
setenv {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }}
{% elif command_name == 'UnsetEnv' %}
@@ -68,7 +80,7 @@ unsetenv {{ cmd.name }}
{% endfor %}
{# Make sure system man pages are enabled by appending trailing delimiter to MANPATH #}
{% if has_manpath_modifications %}
-append-path --delim {{ '{' }}:{{ '}' }} MANPATH {{ '{' }}{{ '}' }}
+append-path MANPATH {{ '{' }}{{ '}' }}
{% endif %}
{% endblock %}
diff --git a/var/spack/repos/builtin.mock/packages/adios2/package.py b/var/spack/repos/builtin.mock/packages/adios2/package.py
new file mode 100644
index 00000000000000..fb2f43ea0e154a
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/adios2/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class Adios2(Package):
+ """This packagae has the variants shared and
+ bzip2, both defaulted to True"""
+
+ homepage = "https://example.com"
+ url = "https://example.com/adios2.tar.gz"
+
+ version("2.9.1", sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9")
+
+ variant("shared", default=True, description="Build shared libraries")
+ variant("bzip2", default=True, description="Enable BZip2 compression")
+
+ depends_on("bzip2")
diff --git a/var/spack/repos/builtin.mock/packages/ascent/package.py b/var/spack/repos/builtin.mock/packages/ascent/package.py
new file mode 100644
index 00000000000000..9a8db472dc07ab
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/ascent/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Ascent(Package):
+ """This packagae has the variants shared, defaulted
+ to True and adios2 defaulted to False"""
+
+ homepage = "https://github.com/Alpine-DAV/ascent"
+ url = "http://www.example.com/ascent-1.0.tar.gz"
+
+ version("0.9.2", sha256="44cd954aa5db478ab40042cd54fd6fcedf25000c3bb510ca23fcff8090531b91")
+
+ variant("adios2", default=False, description="Build Adios2 filter support")
+ variant("shared", default=True, description="Build Ascent as shared libs")
+
+ depends_on("adios2", when="+adios2")
diff --git a/var/spack/repos/builtin.mock/packages/b/package.py b/var/spack/repos/builtin.mock/packages/b/package.py
index 06d82860850808..1685711825f9ea 100644
--- a/var/spack/repos/builtin.mock/packages/b/package.py
+++ b/var/spack/repos/builtin.mock/packages/b/package.py
@@ -15,4 +15,8 @@ class B(Package):
version("1.0", md5="0123456789abcdef0123456789abcdef")
version("0.9", md5="abcd456789abcdef0123456789abcdef")
+ variant(
+ "foo", description="", values=any_combination_of("bar", "baz", "fee").with_default("bar")
+ )
+
depends_on("test-dependency", type="test")
diff --git a/var/spack/repos/builtin.mock/packages/build-env-compiler-var-a/package.py b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-a/package.py
new file mode 100644
index 00000000000000..ea6f0f34e8ee6d
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-a/package.py
@@ -0,0 +1,14 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class BuildEnvCompilerVarA(Package):
+ """Package with runtime variable that should be dropped in the parent's build environment."""
+
+ url = "https://www.example.com"
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+ depends_on("build-env-compiler-var-b", type="build")
diff --git a/var/spack/repos/builtin.mock/packages/build-env-compiler-var-b/package.py b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-b/package.py
new file mode 100644
index 00000000000000..7905869b344de6
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-b/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class BuildEnvCompilerVarB(Package):
+ """Package with runtime variable that should be dropped in the parent's build environment."""
+
+ url = "https://www.example.com"
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+ def setup_run_environment(self, env):
+ env.set("CC", "this-should-be-dropped")
+ env.set("CXX", "this-should-be-dropped")
+ env.set("FC", "this-should-be-dropped")
+ env.set("F77", "this-should-be-dropped")
+ env.set("ANOTHER_VAR", "this-should-be-present")
diff --git a/var/spack/repos/builtin.mock/packages/bzip2/package.py b/var/spack/repos/builtin.mock/packages/bzip2/package.py
new file mode 100644
index 00000000000000..326533ac5ea117
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/bzip2/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class Bzip2(Package):
+ """This packagae has the variants shared
+ defaulted to True"""
+
+ homepage = "https://example.com"
+ url = "https://example.com/bzip2-1.0.8tar.gz"
+
+ version("1.0.8", sha256="ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269")
+
+ variant("shared", default=True, description="Enables the build of shared libraries.")
diff --git a/var/spack/repos/builtin.mock/packages/client-not-foo/package.py b/var/spack/repos/builtin.mock/packages/client-not-foo/package.py
new file mode 100644
index 00000000000000..03c9374b3acce1
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/client-not-foo/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class ClientNotFoo(Package):
+ """This package has a variant "foo", which is False by default."""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/c-1.0.tar.gz"
+
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+ variant("foo", default=False, description="")
diff --git a/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py
new file mode 100644
index 00000000000000..21e67f8a61bc12
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class DependencyFooBar(Package):
+ """This package has a variant "bar", which is False by default, and
+ variant "foo" which is True by default.
+ """
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dependency-foo-bar-1.0.tar.gz"
+
+ version("1.0", md5="1234567890abcdefg1234567890098765")
+
+ variant("foo", default=True, description="")
+ variant("bar", default=False, description="")
diff --git a/var/spack/repos/builtin.mock/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin.mock/packages/intel-parallel-studio/package.py
new file mode 100644
index 00000000000000..1ec5cf6932619e
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/intel-parallel-studio/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class IntelParallelStudio(Package):
+ """Intel Parallel Studio."""
+
+ homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"
+ url = "http://tec/16225/parallel_studio_xe_2020_cluster_edition.tgz"
+
+ version("cluster.2020.0", sha256="b1d3e3e425b2e44a06760ff173104bdf")
+
+ provides("mpi@:3")
+ provides("scalapack")
+ provides("blas", "lapack")
diff --git a/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py
new file mode 100644
index 00000000000000..527a1815e62863
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class InvalidGitlabPatchUrl(Package):
+ """Package that has GitLab patch URLs that fail auditing."""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/patch-1.0.tar.gz"
+
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+ patch(
+ "https://gitlab.com/QEF/q-e/-/commit/4ca3afd4c6f27afcf3f42415a85a353a7be1bd37.patch",
+ sha256="d7dec588efb5c04f99d949d8b9bb4a0fbc98b917ae79e12e4b87ad7c3dc9e268",
+ )
diff --git a/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py
new file mode 100644
index 00000000000000..818876405c26f6
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class InvalidSelfhostedGitlabPatchUrl(Package):
+ """Package that has GitLab patch URLs that fail auditing."""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/patch-1.0.tar.gz"
+
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+ patch(
+ "https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.patch",
+ sha256="2e811ec62cb09044c95a4d0213993f09af70cdcc1c709257b33bc9248ae950ed",
+ )
diff --git a/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py b/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py
index 5b7bfc03c1aad2..940dea3dafc13c 100644
--- a/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py
+++ b/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py
@@ -14,5 +14,5 @@ class LowPriorityProvider(Package):
version("1.0", md5="0123456789abcdef0123456789abcdef")
- provides("lapack")
- provides("mpi")
+ # A low priority provider that provides both these specs together
+ provides("mpi", "lapack")
diff --git a/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py b/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py
index 070adf60bc80e1..087cfb77cccac1 100644
--- a/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py
+++ b/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py
@@ -19,4 +19,4 @@ class ManyVirtualConsumer(Package):
# This directive is an example of imposing a constraint on a
# dependency is that dependency is in the DAG. This pattern
# is mainly used with virtual providers.
- depends_on("low-priority-provider@1.0", when="^low-priority-provider")
+ depends_on("low-priority-provider@1.0", when="^[virtuals=mpi,lapack] low-priority-provider")
diff --git a/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py b/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py
index 136646324191dd..b0f7ac9501cd18 100644
--- a/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py
+++ b/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py
@@ -19,7 +19,7 @@ class MultivalueVariant(Package):
variant(
"foo",
description="Multi-valued variant",
- values=any_combination_of("bar", "baz", "barbaz"),
+ values=any_combination_of("bar", "baz", "barbaz", "fee"),
)
variant(
diff --git a/var/spack/repos/builtin.mock/packages/netlib-scalapack/package.py b/var/spack/repos/builtin.mock/packages/netlib-scalapack/package.py
new file mode 100644
index 00000000000000..fe5d7f90a1f27d
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/netlib-scalapack/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class NetlibScalapack(Package):
+ homepage = "http://www.netlib.org/scalapack/"
+ url = "http://www.netlib.org/scalapack/scalapack-2.1.0.tgz"
+
+ version("2.1.0", "b1d3e3e425b2e44a06760ff173104bdf")
+
+ provides("scalapack")
+
+ depends_on("mpi")
+ depends_on("lapack")
+ depends_on("blas")
diff --git a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py
index 015608587756c6..1273b70def2127 100644
--- a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py
+++ b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py
@@ -14,5 +14,4 @@ class OpenblasWithLapack(Package):
version("0.2.15", md5="b1190f3d3471685f17cfd1ec1d252ac9")
- provides("lapack")
- provides("blas")
+ provides("lapack", "blas")
diff --git a/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py
new file mode 100644
index 00000000000000..14516566a9f7d4
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class ParentFooBar(Package):
+ """This package has a variant "bar", which is True by default, and depends on another
+ package which has the same variant defaulting to False.
+ """
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/parent-foo-bar-1.0.tar.gz"
+
+ version("1.0", md5="abcdefg0123456789abcdefghfedcba0")
+
+ variant("foo", default=True, description="")
+ variant("bar", default=True, description="")
+
+ depends_on("dependency-foo-bar")
diff --git a/var/spack/repos/builtin.mock/packages/parent-foo/package.py b/var/spack/repos/builtin.mock/packages/parent-foo/package.py
new file mode 100644
index 00000000000000..61d15231f70822
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/parent-foo/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class ParentFoo(Package):
+ """This package has a variant "foo", which is True by default, and depends on another
+ package which has the same variant defaulting to False.
+ """
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/c-1.0.tar.gz"
+
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+ variant("foo", default=True, description="")
+
+ depends_on("client-not-foo")
diff --git a/var/spack/repos/builtin/packages/3proxy/package.py b/var/spack/repos/builtin/packages/3proxy/package.py
index e9a408698b87ac..78e52895145b65 100644
--- a/var/spack/repos/builtin/packages/3proxy/package.py
+++ b/var/spack/repos/builtin/packages/3proxy/package.py
@@ -24,7 +24,9 @@ class _3proxy(MakefilePackage):
depends_on("m4", type="build")
def build(self, spec, prefix):
- make("-f", f"Makefile.{platform.system()}")
+ make("-f", f"Makefile.{platform.system()}", f"CC={spack_cc}")
def install(self, spec, prefix):
- make("-f", f"Makefile.{platform.system()}", f"prefix={prefix}", "install")
+ make(
+ "-f", f"Makefile.{platform.system()}", f"prefix={prefix}", f"CC={spack_cc}", "install"
+ )
diff --git a/var/spack/repos/builtin/packages/abi-compliance-checker/package.py b/var/spack/repos/builtin/packages/abi-compliance-checker/package.py
index 05d57471ba24f2..6f575badd208dc 100644
--- a/var/spack/repos/builtin/packages/abi-compliance-checker/package.py
+++ b/var/spack/repos/builtin/packages/abi-compliance-checker/package.py
@@ -22,4 +22,4 @@ class AbiComplianceChecker(MakefilePackage):
depends_on("universal-ctags")
def install(self, spec, prefix):
- make("prefix={0}".format(prefix), "install")
+ make(f"prefix={prefix}", "install")
diff --git a/var/spack/repos/builtin/packages/abi-dumper/package.py b/var/spack/repos/builtin/packages/abi-dumper/package.py
index f649bf9db20874..584eed3664cdd0 100644
--- a/var/spack/repos/builtin/packages/abi-dumper/package.py
+++ b/var/spack/repos/builtin/packages/abi-dumper/package.py
@@ -25,4 +25,4 @@ class AbiDumper(Package):
depends_on("vtable-dumper@1.1:")
def install(self, spec, prefix):
- make("prefix={0}".format(prefix), "install")
+ make(f"prefix={prefix}", "install")
diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py
index 445fc60bbe0985..07a706590456ac 100644
--- a/var/spack/repos/builtin/packages/abinit/package.py
+++ b/var/spack/repos/builtin/packages/abinit/package.py
@@ -27,6 +27,8 @@ class Abinit(AutotoolsPackage):
homepage = "https://www.abinit.org/"
url = "https://www.abinit.org/sites/default/files/packages/abinit-8.6.3.tar.gz"
+ maintainers("downloadico")
+ version("9.10.3", sha256="3f2a9aebbf1fee9855a09dd687f88d2317b8b8e04f97b2628ab96fb898dce49b")
version("9.8.4", sha256="a086d5045f0093b432e6a044d5f71f7edf5a41a62d67b3677cb0751d330c564a")
version("9.8.3", sha256="de823878aea2c20098f177524fbb4b60de9b1b5971b2e835ec244dfa3724589b")
version("9.6.1", sha256="b6a12760fd728eb4aacca431ae12150609565bedbaa89763f219fcd869f79ac6")
@@ -85,6 +87,11 @@ class Abinit(AutotoolsPackage):
# libxml2
depends_on("libxml2", when="@9:+libxml2")
+ # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa
+ for _intel_pkg in INTEL_MATH_LIBRARIES:
+ requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}")
+ requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}")
+
# Cannot ask for +scalapack if it does not depend on MPI
conflicts("+scalapack", when="~mpi")
@@ -101,11 +108,7 @@ class Abinit(AutotoolsPackage):
# TODO: The logic here can be reversed with the new concretizer. Instead of
# using `conflicts`, `depends_on` could be used instead.
for fftw in ["amdfftw", "cray-fftw", "fujitsu-fftw", "fftw"]:
- conflicts(
- "+openmp",
- when="^{0}~openmp".format(fftw),
- msg="Need to request {0} +openmp".format(fftw),
- )
+ conflicts("+openmp", when=f"^{fftw}~openmp", msg=f"Need to request {fftw} +openmp")
mkl_message = "Need to set dependent variant to threads=openmp"
conflicts("+openmp", when="^intel-mkl threads=none", msg=mkl_message)
@@ -137,34 +140,36 @@ def configure_args(self):
oapp = options.append
if "@:8" in spec:
- oapp("--enable-optim={0}".format(self.spec.variants["optimization-flavor"].value))
+ oapp(f"--enable-optim={self.spec.variants['optimization-flavor'].value}")
else:
- oapp("--with-optim-flavor={0}".format(self.spec.variants["optimization-flavor"].value))
+ oapp(f"--with-optim-flavor={self.spec.variants['optimization-flavor'].value}")
if "+wannier90" in spec:
- if "@:8" in spec:
- oapp(
- "--with-wannier90-libs=-L{0}".format(
- spec["wannier90"].prefix.lib + " -lwannier -lm"
- )
- )
- oapp("--with-wannier90-incs=-I{0}".format(spec["wannier90"].prefix.modules))
- oapp("--with-wannier90-bins={0}".format(spec["wannier90"].prefix.bin))
+ if spec.satisfies("@:8"):
+ oapp(f"--with-wannier90-libs=-L{spec['wannier90'].prefix.lib} -lwannier -lm")
+ oapp(f"--with-wannier90-incs=-I{spec['wannier90'].prefix.modules}")
+ oapp(f"--with-wannier90-bins={spec['wannier90'].prefix.bin}")
oapp("--enable-connectors")
oapp("--with-dft-flavor=atompaw+libxc+wannier90")
+ elif spec.satisfies("@:9.8"):
+ options.extend(
+ [
+ f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}",
+ f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib} -lwannier",
+ ]
+ )
else:
options.extend(
[
- "WANNIER90_CPPFLAGS=-I{0}".format(spec["wannier90"].prefix.modules),
- "WANNIER90_LIBS=-L{0} {1}".format(
- spec["wannier90"].prefix.lib, "-lwannier"
- ),
+ f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}",
+ f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib}"
+ "WANNIER90_LDFLAGS=-lwannier",
]
)
else:
if "@:9.8" in spec:
- oapp("--with-fftw={0}".format(spec["fftw-api"].prefix))
- oapp("--with-hdf5={0}".format(spec["hdf5"].prefix))
+ oapp(f"--with-fftw={spec['fftw-api'].prefix}")
+ oapp(f"--with-hdf5={spec['hdf5'].prefix}")
if "@:8" in spec:
oapp("--with-dft-flavor=atompaw+libxc")
@@ -172,9 +177,12 @@ def configure_args(self):
"--without-wannier90",
if "+mpi" in spec:
- oapp("CC={0}".format(spec["mpi"].mpicc))
- oapp("CXX={0}".format(spec["mpi"].mpicxx))
- oapp("FC={0}".format(spec["mpi"].mpifc))
+ oapp(f"CC={spec['mpi'].mpicc}")
+ oapp(f"CXX={spec['mpi'].mpicxx}")
+ if spec.satisfies("@9.8:"):
+ oapp(f"F90={spec['mpi'].mpifc}")
+ else:
+ oapp(f"FC={spec['mpi'].mpifc}")
# MPI version:
# let the configure script auto-detect MPI support from mpi_prefix
@@ -196,7 +204,8 @@ def configure_args(self):
# BLAS/LAPACK/SCALAPACK-ELPA
linalg = spec["lapack"].libs + spec["blas"].libs
- if "^mkl" in spec:
+ is_using_intel_libraries = spec["lapack"].name in INTEL_MATH_LIBRARIES
+ if is_using_intel_libraries:
linalg_flavor = "mkl"
elif "@9:" in spec and "^openblas" in spec:
linalg_flavor = "openblas"
@@ -208,16 +217,16 @@ def configure_args(self):
if "+scalapack" in spec:
linalg = spec["scalapack"].libs + linalg
if "@:8" in spec:
- linalg_flavor = "scalapack+{0}".format(linalg_flavor)
+ linalg_flavor = f"scalapack+{linalg_flavor}"
if "@:8" in spec:
- oapp("--with-linalg-libs={0}".format(linalg.ld_flags))
+ oapp(f"--with-linalg-libs={linalg.ld_flags}")
else:
- oapp("LINALG_LIBS={0}".format(linalg.ld_flags))
+ oapp(f"LINALG_LIBS={linalg.ld_flags}")
- oapp("--with-linalg-flavor={0}".format(linalg_flavor))
+ oapp(f"--with-linalg-flavor={linalg_flavor}")
- if "^mkl" in spec:
+ if is_using_intel_libraries:
fftflavor = "dfti"
else:
if "+openmp" in spec:
@@ -225,32 +234,32 @@ def configure_args(self):
else:
fftflavor, fftlibs = "fftw3", "-lfftw3 -lfftw3f"
- oapp("--with-fft-flavor={0}".format(fftflavor))
+ oapp(f"--with-fft-flavor={fftflavor}")
if "@:8" in spec:
- if "^mkl" in spec:
- oapp("--with-fft-incs={0}".format(spec["fftw-api"].headers.cpp_flags))
- oapp("--with-fft-libs={0}".format(spec["fftw-api"].libs.ld_flags))
+ if is_using_intel_libraries:
+ oapp(f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}")
+ oapp(f"--with-fft-libs={spec['fftw-api'].libs.ld_flags}")
else:
options.extend(
[
- "--with-fft-incs={0}".format(spec["fftw-api"].headers.cpp_flags),
- "--with-fft-libs=-L{0} {1}".format(spec["fftw-api"].prefix.lib, fftlibs),
+ f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}",
+ f"--with-fft-libs=-L{spec['fftw-api'].prefix.lib} {fftlibs}",
]
)
else:
- if "^mkl" in spec:
+ if is_using_intel_libraries:
options.extend(
[
- "FFT_CPPFLAGS={0}".format(spec["fftw-api"].headers.cpp_flags),
- "FFT_LIBs={0}".format(spec["fftw-api"].libs.ld_flags),
+ f"FFT_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}",
+ f"FFT_LIBs={spec['fftw-api'].libs.ld_flags}",
]
)
else:
options.extend(
[
- "FFTW3_CPPFLAGS={0}".format(spec["fftw-api"].headers.cpp_flags),
- "FFTW3_LIBS=-L{0} {1}".format(spec["fftw-api"].prefix.lib, fftlibs),
+ f"FFTW3_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}",
+ f"FFTW3_LIBS=-L{spec['fftw-api'].prefix.lib} {fftlibs}",
]
)
@@ -259,12 +268,12 @@ def configure_args(self):
if "@:8" in spec:
options.extend(
[
- "--with-libxc-incs={0}".format(libxc.headers.cpp_flags),
- "--with-libxc-libs={0}".format(libxc.libs.ld_flags + " -lm"),
+ f"--with-libxc-incs={libxc.headers.cpp_flags}",
+ f"--with-libxc-libs={libxc.libs.ld_flags + ' -lm'}",
]
)
else:
- oapp("--with-libxc={0}".format(libxc.prefix))
+ oapp(f"--with-libxc={libxc.prefix}")
# Netcdf4/HDF5
hdf5 = spec["hdf5:hl"]
@@ -276,24 +285,21 @@ def configure_args(self):
# to link with the high level HDF5 library
options.extend(
[
- "--with-netcdf-incs={0}".format(
+ "--with-netcdf-incs={}".format(
netcdfc.headers.cpp_flags + " " + netcdff.headers.cpp_flags
),
- "--with-netcdf-libs={0}".format(
+ "--with-netcdf-libs={}".format(
netcdff.libs.ld_flags + " " + hdf5.libs.ld_flags
),
]
)
else:
options.extend(
- [
- "--with-netcdf={0}".format(netcdfc.prefix),
- "--with-netcdf-fortran={0}".format(netcdff.prefix),
- ]
+ [f"--with-netcdf={netcdfc.prefix}", f"--with-netcdf-fortran={netcdff.prefix}"]
)
if self.spec.satisfies("%fj"):
- oapp("FCFLAGS_MODDIR=-M{0}".format(join_path(self.stage.source_path, "src/mods")))
+ oapp(f"FCFLAGS_MODDIR=-M{join_path(self.stage.source_path, 'src/mods')}")
return options
diff --git a/var/spack/repos/builtin/packages/abyss/package.py b/var/spack/repos/builtin/packages/abyss/package.py
index c345626761d1c3..1cb46a8957eed1 100644
--- a/var/spack/repos/builtin/packages/abyss/package.py
+++ b/var/spack/repos/builtin/packages/abyss/package.py
@@ -60,12 +60,12 @@ class Abyss(AutotoolsPackage):
def configure_args(self):
maxk = int(self.spec.variants["maxk"].value)
args = [
- "--with-boost=%s" % self.spec["boost"].prefix,
- "--with-sqlite=%s" % self.spec["sqlite"].prefix,
- "--with-mpi=%s" % self.spec["mpi"].prefix,
+ f"--with-boost={self.spec['boost'].prefix}",
+ f"--with-sqlite={self.spec['sqlite'].prefix}",
+ f"--with-mpi={self.spec['mpi'].prefix}",
]
if maxk:
- args.append("--enable-maxk=%s" % maxk)
+ args.append(f"--enable-maxk={maxk}")
if self.spec["mpi"].name == "mpich":
args.append("--enable-mpich")
return args
diff --git a/var/spack/repos/builtin/packages/accfft/package.py b/var/spack/repos/builtin/packages/accfft/package.py
index aa32f1b0a53e81..eb99aec48492d9 100644
--- a/var/spack/repos/builtin/packages/accfft/package.py
+++ b/var/spack/repos/builtin/packages/accfft/package.py
@@ -32,15 +32,15 @@ class Accfft(CMakePackage, CudaPackage):
def cmake_args(self):
spec = self.spec
args = [
- "-DFFTW_ROOT={0}".format(spec["fftw"].prefix),
- "-DFFTW_USE_STATIC_LIBS=false",
- "-DBUILD_GPU={0}".format("true" if "+cuda" in spec else "false"),
- "-DBUILD_SHARED={0}".format("true" if "+shared" in spec else "false"),
+ self.define("FFTW_ROOT", spec["fftw"].prefix),
+ self.define("FFTW_USE_STATIC_LIBS", "false"),
+ self.define("BUILD_GPU", str(spec.satisfies("+cuda")).lower()),
+ self.define("BUILD_SHARED", str(spec.satisfies("+shared")).lower()),
]
if "+cuda" in spec:
cuda_arch = [x for x in spec.variants["cuda_arch"].value if x]
if cuda_arch:
- args.append("-DCUDA_NVCC_FLAGS={0}".format(" ".join(self.cuda_flags(cuda_arch))))
+ args.append(f"-DCUDA_NVCC_FLAGS={' '.join(self.cuda_flags(cuda_arch))}")
return args
diff --git a/var/spack/repos/builtin/packages/ace/package.py b/var/spack/repos/builtin/packages/ace/package.py
index c152bbdeb60fc8..afd164fc31527c 100644
--- a/var/spack/repos/builtin/packages/ace/package.py
+++ b/var/spack/repos/builtin/packages/ace/package.py
@@ -43,4 +43,4 @@ def edit(self, spec, prefix):
"include $(ACE_ROOT)/include/makeinclude/"
"platform_linux" + supported[self.compiler.name] + ".GNU\n"
)
- f.write("INSTALL_PREFIX=%s" % prefix)
+ f.write(f"INSTALL_PREFIX={prefix}")
diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py
index 684106ff5db260..320f9e818cc60a 100644
--- a/var/spack/repos/builtin/packages/ack/package.py
+++ b/var/spack/repos/builtin/packages/ack/package.py
@@ -41,7 +41,7 @@ class Ack(Package):
def install(self, spec, prefix):
mkdirp(prefix.bin)
- ack_source = "ack-{0}-single-file".format(self.version)
+ ack_source = f"ack-{self.version}-single-file"
ack_installed = join_path(prefix.bin, "ack")
# install source
diff --git a/var/spack/repos/builtin/packages/acpica-tools/package.py b/var/spack/repos/builtin/packages/acpica-tools/package.py
index 55fee583c7841b..c9d063a5a975f1 100644
--- a/var/spack/repos/builtin/packages/acpica-tools/package.py
+++ b/var/spack/repos/builtin/packages/acpica-tools/package.py
@@ -19,4 +19,4 @@ class AcpicaTools(MakefilePackage):
depends_on("bison", type="build")
def install(self, spec, prefix):
- make("PREFIX={0}".format(prefix), "install")
+ make(f"PREFIX={prefix}", "install")
diff --git a/var/spack/repos/builtin/packages/activeharmony/package.py b/var/spack/repos/builtin/packages/activeharmony/package.py
index e3f2d92955a6c4..62af3515b9051c 100644
--- a/var/spack/repos/builtin/packages/activeharmony/package.py
+++ b/var/spack/repos/builtin/packages/activeharmony/package.py
@@ -29,7 +29,7 @@ def setup_build_environment(self, spack_env):
@when("@:4.5")
def install(self, spec, prefix):
- make("install", "PREFIX=%s" % prefix)
+ make("install", f"PREFIX={prefix}")
@when("@4.6.0:")
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py
index 9b06fd3d444360..f474b92cc98c99 100644
--- a/var/spack/repos/builtin/packages/acts/package.py
+++ b/var/spack/repos/builtin/packages/acts/package.py
@@ -40,6 +40,18 @@ class Acts(CMakePackage, CudaPackage):
# Supported Acts versions
version("main", branch="main")
version("master", branch="main", deprecated=True) # For compatibility
+ version("30.3.2", commit="76826f208f5929d8326798c87263f2563d0ae7e9", submodules=True)
+ version("30.3.1", commit="bbee459dd93855417d5717d53cbbb2bace7de2bb", submodules=True)
+ version("30.3.0", commit="311acb9ab41c2d79a4b90b193e5b25297182d670", submodules=True)
+ version("30.2.0", commit="264b0a3214cbf8ca013623fc196e2d90d647c58f", submodules=True)
+ version("30.1.1", commit="3d43492b2775e62051e9ad31f06b91d6e2357ab9", submodules=True)
+ version("30.1.0", commit="60d9eec916f6c81373858c8d99d821861d7efeb8", submodules=True)
+ version("30.0.0", commit="00fa3fabac86a1e65198d4b94dd263b1c731a84c", submodules=True)
+ version("29.2.0", commit="b2d65308399d8f653fa8bdd73a2a203c58608358", submodules=True)
+ version("29.1.0", commit="4681c3b142db469b00ca03e92e6b237f7c89d141", submodules=True)
+ version("29.0.0", commit="9c6e4597af39f826e17d46850fdb407a48817ba6", submodules=True)
+ version("28.2.0", commit="c612e7c625f961330e383fb7856cc7398dd82881", submodules=True)
+ version("28.1.0", commit="08e51b5f93c0d09f2d1e7e4f062e715072ec3e9b", submodules=True)
version("28.0.0", commit="0d8aa418c00e8f79bab2cf88234f3433670b447c", submodules=True)
version("27.1.0", commit="219480220738318fbedb943cac85415687d75b66", submodules=True)
version("27.0.0", commit="4d7029bd4e9285fcda2770aef6d78a7f833cb14f", submodules=True)
@@ -214,6 +226,7 @@ class Acts(CMakePackage, CudaPackage):
variant("mlpack", default=False, description="Build MLpack plugin", when="@25:")
variant("onnx", default=False, description="Build ONNX plugin")
variant("odd", default=False, description="Build the Open Data Detector", when="@19.1:")
+ variant("podio", default=False, description="Build Podio plugin", when="@30.3:")
variant(
"profilecpu",
default=False,
@@ -230,6 +243,9 @@ class Acts(CMakePackage, CudaPackage):
variant("tgeo", default=False, description="Build the TGeo plugin", when="+identification")
# Variants that only affect Acts examples for now
+ variant(
+ "binaries", default=False, description="Build the examples binaries", when="@23: +examples"
+ )
variant(
"edm4hep",
default=False,
@@ -300,6 +316,8 @@ class Acts(CMakePackage, CudaPackage):
depends_on("mlpack@3.1.1:", when="+mlpack")
depends_on("nlohmann-json @3.9.1:", when="@0.14: +json")
depends_on("podio @0.6:", when="@25: +edm4hep")
+ depends_on("podio @0.16:", when="@30.3: +edm4hep")
+ depends_on("podio @0.16:", when="+podio")
depends_on("pythia8", when="+pythia8")
depends_on("python", when="+python")
depends_on("python@3.8:", when="+python @19.11:19")
@@ -369,6 +387,7 @@ def plugin_cmake_variant(plugin_name, spack_variant):
cmake_variant("ANALYSIS_APPS", "analysis"),
plugin_cmake_variant("AUTODIFF", "autodiff"),
cmake_variant("BENCHMARKS", "benchmarks"),
+ example_cmake_variant("BINARIES", "binaries"),
plugin_cmake_variant("CUDA", "cuda"),
plugin_cmake_variant("DD4HEP", "dd4hep"),
example_cmake_variant("DD4HEP", "dd4hep"),
@@ -390,6 +409,7 @@ def plugin_cmake_variant(plugin_name, spack_variant):
plugin_cmake_variant("ONNX", "onnx"),
enable_cmake_variant("CPU_PROFILING", "profilecpu"),
enable_cmake_variant("MEMORY_PROFILING", "profilemem"),
+ plugin_cmake_variant("PODIO", "podio"),
example_cmake_variant("PYTHIA8", "pythia8"),
example_cmake_variant("PYTHON_BINDINGS", "python"),
plugin_cmake_variant("ACTSVG", "svg"),
diff --git a/var/spack/repos/builtin/packages/adf/package.py b/var/spack/repos/builtin/packages/adf/package.py
index 908cd5351faf16..244087f1116027 100644
--- a/var/spack/repos/builtin/packages/adf/package.py
+++ b/var/spack/repos/builtin/packages/adf/package.py
@@ -19,7 +19,7 @@ class Adf(Package):
version("2017.113", sha256="666ef15d253b74c707dd14da35e7cf283ca20e21e24ed43cb953fb9d1f2f1e15")
def url_for_version(self, version):
- return "file://{0}/adf/adf{1}.pc64_linux.openmpi.bin.tgz".format(os.getcwd(), version)
+ return f"file://{os.getcwd()}/adf/adf{version}.pc64_linux.openmpi.bin.tgz"
# Licensing
license_required = True
diff --git a/var/spack/repos/builtin/packages/adiak/package.py b/var/spack/repos/builtin/packages/adiak/package.py
index 05f936e3f92c5a..e1d757e0827833 100644
--- a/var/spack/repos/builtin/packages/adiak/package.py
+++ b/var/spack/repos/builtin/packages/adiak/package.py
@@ -20,8 +20,9 @@ class Adiak(CMakePackage):
variant("shared", default=True, description="Build dynamic libraries")
version(
- "0.2.2", commit="3aedd494c81c01df1183af28bc09bade2fabfcd3", submodules=True, preferred=True
+ "0.4.0", commit="7e8b7233f8a148b402128ed46b2f0c643e3b397e", submodules=True, preferred=True
)
+ version("0.2.2", commit="3aedd494c81c01df1183af28bc09bade2fabfcd3", submodules=True)
version(
"0.3.0-alpha",
commit="054d2693a977ed0e1f16c665b4966bb90924779e",
diff --git a/var/spack/repos/builtin/packages/adios2/2.9.2-cmake-find-threads-package-first.patch b/var/spack/repos/builtin/packages/adios2/2.9.2-cmake-find-threads-package-first.patch
new file mode 100644
index 00000000000000..afc6808d2862a6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/adios2/2.9.2-cmake-find-threads-package-first.patch
@@ -0,0 +1,36 @@
+From 80e4739fb53b0b7e02dae48b928d8b8247992763 Mon Sep 17 00:00:00 2001
+From: Vicente Adolfo Bolea Sanchez
+Date: Thu, 2 Nov 2023 12:18:49 -0400
+Subject: [PATCH] cmake: find threads package first
+
+---
+ cmake/DetectOptions.cmake | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/cmake/DetectOptions.cmake b/cmake/DetectOptions.cmake
+index 3f511e02a..615995b71 100644
+--- a/cmake/DetectOptions.cmake
++++ b/cmake/DetectOptions.cmake
+@@ -67,6 +67,9 @@ function(lists_get_prefix listVars outVar)
+ set(${outVar} "${prefix}" PARENT_SCOPE)
+ endfunction()
+
++# Multithreading
++find_package(Threads REQUIRED)
++
+ # Blosc2
+ if(ADIOS2_USE_Blosc2 STREQUAL AUTO)
+ # Prefect CONFIG mode
+@@ -554,9 +557,6 @@ if(AWSSDK_FOUND)
+ set(ADIOS2_HAVE_AWSSDK TRUE)
+ endif()
+
+-# Multithreading
+-find_package(Threads REQUIRED)
+-
+ # Floating point detection
+ include(CheckTypeRepresentation)
+
+--
+2.35.3
+
diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py
index bfb08227e6ff9d..4a038ddcacafc5 100644
--- a/var/spack/repos/builtin/packages/adios2/package.py
+++ b/var/spack/repos/builtin/packages/adios2/package.py
@@ -9,7 +9,7 @@
from spack.package import *
-class Adios2(CMakePackage, CudaPackage):
+class Adios2(CMakePackage, CudaPackage, ROCmPackage):
"""The Adaptable Input Output System version 2,
developed in the Exascale Computing Program"""
@@ -24,10 +24,11 @@ class Adios2(CMakePackage, CudaPackage):
version("master", branch="master")
version(
- "2.9.1",
- sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9",
+ "2.9.2",
+ sha256="78309297c82a95ee38ed3224c98b93d330128c753a43893f63bbe969320e4979",
preferred=True,
)
+ version("2.9.1", sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9")
version("2.9.0", sha256="69f98ef58c818bb5410133e1891ac192653b0ec96eb9468590140f2552b6e5d1")
version("2.8.3", sha256="4906ab1899721c41dd918dddb039ba2848a1fb0cf84f3a563a1179b9d6ee0d9f")
version("2.8.2", sha256="9909f6409dc44b2c28c1fda0042dab4b711f25ec3277ef0cb6ffc40f5483910d")
@@ -62,7 +63,8 @@ class Adios2(CMakePackage, CudaPackage):
variant(
"libpressio", default=False, when="@2.8:", description="Enable LibPressio for compression"
)
- variant("blosc", default=True, when="@2.4:", description="Enable Blosc compression")
+ variant("blosc", default=True, when="@2.4:2.8", description="Enable Blosc compression")
+ variant("blosc2", default=True, when="@2.9:", description="Enable Blosc2 compression")
variant("bzip2", default=True, when="@2.4:", description="Enable BZip2 compression")
variant("zfp", default=True, description="Enable ZFP compression")
variant("png", default=True, when="@2.4:", description="Enable PNG compression")
@@ -78,7 +80,7 @@ class Adios2(CMakePackage, CudaPackage):
description="Enable the DataMan engine for WAN transports",
)
variant("dataspaces", default=False, when="@2.5:", description="Enable support for DATASPACES")
- variant("ssc", default=True, description="Enable the SSC staging engine")
+ variant("ssc", default=True, when="@:2.7", description="Enable the SSC staging engine")
variant("hdf5", default=False, description="Enable the HDF5 engine")
variant(
"aws",
@@ -94,7 +96,8 @@ class Adios2(CMakePackage, CudaPackage):
)
# Optional language bindings, C++11 and C always provided
- variant("cuda", default=False, when="@2.8:", description="Enable CUDA support")
+ variant("kokkos", default=False, when="@2.9:", description="Enable Kokkos support")
+ variant("sycl", default=False, when="@2.10:", description="Enable SYCL support")
variant("python", default=False, description="Enable the Python bindings")
variant("fortran", default=True, description="Enable the Fortran bindings")
@@ -108,6 +111,37 @@ class Adios2(CMakePackage, CudaPackage):
depends_on("cmake@3.12.0:", type="build")
+ # Standalone CUDA support
+ depends_on("cuda", when="+cuda ~kokkos")
+
+ # Kokkos support
+ depends_on("kokkos@3.7: +cuda +wrapper", when="+kokkos +cuda")
+ depends_on("kokkos@3.7: +rocm", when="+kokkos +rocm")
+ depends_on("kokkos@3.7: +sycl", when="+kokkos +sycl")
+
+ # Propagate CUDA target to kokkos for +cuda
+ for cuda_arch in CudaPackage.cuda_arch_values:
+ depends_on(
+ "kokkos cuda_arch=%s" % cuda_arch, when="+kokkos +cuda cuda_arch=%s" % cuda_arch
+ )
+
+ # Propagate AMD GPU target to kokkos for +rocm
+ for amdgpu_value in ROCmPackage.amdgpu_targets:
+ depends_on(
+ "kokkos amdgpu_target=%s" % amdgpu_value,
+ when="+kokkos +rocm amdgpu_target=%s" % amdgpu_value,
+ )
+
+ conflicts("+cuda", when="@:2.7")
+ conflicts("+rocm", when="@:2.8")
+
+ conflicts("+cuda", when="+sycl")
+ conflicts("+rocm", when="+cuda")
+ conflicts("+rocm", when="+sycl")
+
+ conflicts("+rocm", when="~kokkos", msg="ADIOS2 does not support HIP without Kokkos")
+ conflicts("+sycl", when="~kokkos", msg="ADIOS2 does not support SYCL without Kokkos")
+
for _platform in ["linux", "darwin", "cray"]:
depends_on("pkgconfig", type="build", when=f"platform={_platform}")
variant(
@@ -135,8 +169,8 @@ class Adios2(CMakePackage, CudaPackage):
depends_on("hdf5+mpi", when="+hdf5+mpi")
depends_on("libpressio", when="+libpressio")
- depends_on("c-blosc", when="@:2.8 +blosc")
- depends_on("c-blosc2", when="@2.9: +blosc")
+ depends_on("c-blosc", when="+blosc")
+ depends_on("c-blosc2", when="+blosc2")
depends_on("bzip2", when="+bzip2")
depends_on("libpng@1.6:", when="+png")
depends_on("zfp@0.5.1:0.5", when="+zfp")
@@ -178,6 +212,10 @@ class Adios2(CMakePackage, CudaPackage):
sha256="8221073d1b2f8944395a88a5d60a15c7370646b62f5fc6309867bbb6a8c2096c",
)
+ # cmake: find threads package first
+ # https://github.com/ornladios/ADIOS2/pull/3893
+ patch("2.9.2-cmake-find-threads-package-first.patch", when="@2.9.2:")
+
@when("%fj")
def patch(self):
"""add fujitsu mpi commands #16864"""
@@ -202,6 +240,7 @@ def cmake_args(self):
from_variant("BUILD_SHARED_LIBS", "shared"),
from_variant("ADIOS2_USE_AWSSDK", "aws"),
from_variant("ADIOS2_USE_Blosc", "blosc"),
+ from_variant("ADIOS2_USE_Blosc2", "blosc2"),
from_variant("ADIOS2_USE_BZip2", "bzip2"),
from_variant("ADIOS2_USE_DataMan", "dataman"),
from_variant("ADIOS2_USE_DataSpaces", "dataspaces"),
@@ -214,9 +253,13 @@ def cmake_args(self):
from_variant("ADIOS2_USE_SST", "sst"),
from_variant("ADIOS2_USE_SZ", "sz"),
from_variant("ADIOS2_USE_ZFP", "zfp"),
- from_variant("ADIOS2_USE_CUDA", "cuda"),
from_variant("ADIOS2_USE_Catalyst", "libcatalyst"),
from_variant("ADIOS2_USE_LIBPRESSIO", "libpressio"),
+ self.define("ADIOS2_USE_CUDA", self.spec.satisfies("+cuda ~kokkos")),
+ self.define("ADIOS2_USE_Kokkos", self.spec.satisfies("+kokkos")),
+ self.define("Kokkos_ENABLE_CUDA", self.spec.satisfies("+cuda +kokkos")),
+ self.define("Kokkos_ENABLE_HIP", self.spec.satisfies("+rocm")),
+ self.define("Kokkos_ENABLE_SYCL", self.spec.satisfies("+sycl")),
self.define("BUILD_TESTING", self.run_tests),
self.define("ADIOS2_BUILD_EXAMPLES", False),
self.define("ADIOS2_USE_Endian_Reverse", True),
@@ -244,6 +287,14 @@ def cmake_args(self):
args.append(f"-DPYTHON_EXECUTABLE:FILEPATH={spec['python'].command.path}")
args.append(f"-DPython_EXECUTABLE:FILEPATH={spec['python'].command.path}")
+ # cuda support
+ if "+cuda" in spec:
+ args.append(self.builder.define_cuda_architectures(self))
+
+ # hip support
+ if "+rocm" in spec:
+ args.append(self.builder.define_hip_architectures(self))
+
return args
@property
diff --git a/var/spack/repos/builtin/packages/akantu/package.py b/var/spack/repos/builtin/packages/akantu/package.py
index 2952f9bb4a1688..bba92edcc8e396 100644
--- a/var/spack/repos/builtin/packages/akantu/package.py
+++ b/var/spack/repos/builtin/packages/akantu/package.py
@@ -65,10 +65,8 @@ def cmake_args(self):
"-DAKANTU_HEAT_TRANSFER:BOOL=ON",
"-DAKANTU_SOLID_MECHANICS:BOOL=ON",
"-DAKANTU_STRUCTURAL_MECHANICS:BOOL=OFF",
- "-DAKANTU_PARALLEL:BOOL={0}".format("ON" if spec.satisfies("+mpi") else "OFF"),
- "-DAKANTU_PYTHON_INTERFACE:BOOL={0}".format(
- "ON" if spec.satisfies("+python") else "OFF"
- ),
+ f"-DAKANTU_PARALLEL:BOOL={'ON' if spec.satisfies('+mpi') else 'OFF'}",
+ f"-DAKANTU_PYTHON_INTERFACE:BOOL={'ON' if spec.satisfies('+python') else 'OFF'}",
]
if spec.satisfies("@:3.0"):
@@ -84,14 +82,14 @@ def cmake_args(self):
solvers = []
if spec.satisfies("external_solvers=mumps"):
solvers.append("Mumps")
- args.append("-DMUMPS_DIR:PATH=${0}".format(spec["mumps"].prefix))
+ args.append(f"-DMUMPS_DIR:PATH=${spec['mumps'].prefix}")
if spec.satisfies("external_solvers=petsc"):
solvers.append("PETSc")
if len(solvers) > 0:
args.extend(
[
- "-DAKANTU_IMPLICIT_SOLVER:STRING={0}".format("+".join(solvers)),
+ f"-DAKANTU_IMPLICIT_SOLVER:STRING={'+'.join(solvers)}",
"-DAKANTU_IMPLICIT:BOOL=ON",
]
)
diff --git a/var/spack/repos/builtin/packages/alglib/package.py b/var/spack/repos/builtin/packages/alglib/package.py
index f962d0fd44fc10..98ade340fd2e11 100644
--- a/var/spack/repos/builtin/packages/alglib/package.py
+++ b/var/spack/repos/builtin/packages/alglib/package.py
@@ -30,7 +30,7 @@ def edit(self, spec, prefix):
filter_file(r"so", dso_suffix, make_file)
def install(self, spec, prefix):
- name = "libalglib.{0}".format(dso_suffix)
+ name = f"libalglib.{dso_suffix}"
with working_dir("src"):
mkdirp(prefix.lib)
install(name, prefix.lib)
diff --git a/var/spack/repos/builtin/packages/alpgen/package.py b/var/spack/repos/builtin/packages/alpgen/package.py
index 52b47adebf6ff0..9d0a96922edb6e 100644
--- a/var/spack/repos/builtin/packages/alpgen/package.py
+++ b/var/spack/repos/builtin/packages/alpgen/package.py
@@ -16,7 +16,6 @@ class Alpgen(CMakePackage, MakefilePackage):
homepage = "http://mlm.home.cern.ch/mlm/alpgen/"
url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz"
- maintainers("iarspider")
tags = ["hep"]
version("2.1.4", sha256="2f43f7f526793fe5f81a3a3e1adeffe21b653a7f5851efc599ed69ea13985c5e")
diff --git a/var/spack/repos/builtin/packages/alquimia/package.py b/var/spack/repos/builtin/packages/alquimia/package.py
index 0a03abc2e0e6f1..523e0936759f77 100644
--- a/var/spack/repos/builtin/packages/alquimia/package.py
+++ b/var/spack/repos/builtin/packages/alquimia/package.py
@@ -15,7 +15,8 @@ class Alquimia(CMakePackage):
maintainers("smolins", "balay")
- version("develop")
+ version("master")
+ version("1.1.0", commit="211931c3e76b1ae7cdb48c46885b248412d6fe3d") # tag v1.1.0
version("1.0.10", commit="b2c11b6cde321f4a495ef9fcf267cb4c7a9858a0") # tag v.1.0.10
version("1.0.9", commit="2ee3bcfacc63f685864bcac2b6868b48ad235225") # tag v.1.0.9
version("xsdk-0.6.0", commit="9a0aedd3a927d4d5e837f8fd18b74ad5a78c3821")
@@ -25,6 +26,7 @@ class Alquimia(CMakePackage):
depends_on("mpi")
depends_on("hdf5")
+ depends_on("pflotran@5.0.0", when="@1.1.0")
depends_on("pflotran@4.0.1", when="@1.0.10")
depends_on("pflotran@3.0.2", when="@1.0.9")
depends_on("pflotran@xsdk-0.6.0", when="@xsdk-0.6.0")
@@ -32,7 +34,7 @@ class Alquimia(CMakePackage):
depends_on("pflotran@develop", when="@develop")
depends_on("petsc@3.10:", when="@develop")
- @when("@1.0.10")
+ @when("@1.0.10:1.1.0")
def patch(self):
filter_file(
"use iso_[cC]_binding",
diff --git a/var/spack/repos/builtin/packages/alsa-lib/package.py b/var/spack/repos/builtin/packages/alsa-lib/package.py
index 631e9bf6851717..46e3d2f1bfa672 100644
--- a/var/spack/repos/builtin/packages/alsa-lib/package.py
+++ b/var/spack/repos/builtin/packages/alsa-lib/package.py
@@ -30,8 +30,8 @@ def configure_args(self):
spec = self.spec
args = []
if spec.satisfies("+python"):
- args.append("--with-pythonlibs={0}".format(spec["python"].libs.ld_flags))
- args.append("--with-pythonincludes={0}".format(spec["python"].headers.include_flags))
+ args.append(f"--with-pythonlibs={spec['python'].libs.ld_flags}")
+ args.append(f"--with-pythonincludes={spec['python'].headers.include_flags}")
else:
args.append("--disable-python")
return args
diff --git a/var/spack/repos/builtin/packages/aluminum/package.py b/var/spack/repos/builtin/packages/aluminum/package.py
index 7c48339f69413a..7b15b23856fd77 100644
--- a/var/spack/repos/builtin/packages/aluminum/package.py
+++ b/var/spack/repos/builtin/packages/aluminum/package.py
@@ -9,7 +9,7 @@
from spack.package import *
-class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
+class Aluminum(CachedCMakePackage, CudaPackage, ROCmPackage):
"""Aluminum provides a generic interface to high-performance
communication libraries, with a focus on allreduce
algorithms. Blocking and non-blocking algorithms and GPU-aware
@@ -22,206 +22,207 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/LLNL/Aluminum.git"
tags = ["ecp", "radiuss"]
- maintainers("bvanessen")
+ maintainers("benson31", "bvanessen")
version("master", branch="master")
+ version("1.4.1", sha256="d130a67fef1cb7a9cb3bbec1d0de426f020fe68c9df6e172c83ba42281cd90e3")
+ version("1.4.0", sha256="ac54de058f38cead895ec8163f7b1fa7674e4dc5aacba683a660a61babbfe0c6")
version("1.3.1", sha256="28ce0af6c6f29f97b7f19c5e45184bd2f8a0b1428f1e898b027d96d47cb74b0b")
version("1.3.0", sha256="d0442efbebfdfb89eec793ae65eceb8f1ba65afa9f2e48df009f81985a4c27e3")
version("1.2.3", sha256="9b214bdf30f9b7e8e017f83e6615db6be2631f5be3dd186205dbe3aa62f4018a")
- version(
- "1.2.2",
- sha256="c01d9dd98be4cab9b944bae99b403abe76d65e9e1750e7f23bf0105636ad5485",
- deprecated=True,
- )
- version(
- "1.2.1",
- sha256="869402708c8a102a67667b83527b4057644a32b8cdf4990bcd1a5c4e5f0e30af",
- deprecated=True,
- )
- version(
- "1.2.0",
- sha256="2f3725147f4dbc045b945af68d3d747f5dffbe2b8e928deed64136785210bc9a",
- deprecated=True,
- )
- version(
- "1.1.0",
- sha256="78b03e36e5422e8651f400feb4d8a527f87302db025d77aa37e223be6b9bdfc9",
- deprecated=True,
- )
- version("1.0.0-lbann", tag="v1.0.0-lbann", commit="40a062b1f63e84e074489c0f926f36b806c6b8f3")
- version("1.0.0", sha256="028d12e271817214db5c07c77b0528f88862139c3e442e1b12f58717290f414a")
- version(
- "0.7.0",
- sha256="bbb73d2847c56efbe6f99e46b41d837763938483f2e2d1982ccf8350d1148caa",
- deprecated=True,
- )
- version(
- "0.6.0",
- sha256="6ca329951f4c7ea52670e46e5020e7e7879d9b56fed5ff8c5df6e624b313e925",
- deprecated=True,
- )
- version(
- "0.5.0",
- sha256="dc365a5849eaba925355a8efb27005c5f22bcd1dca94aaed8d0d29c265c064c1",
- deprecated=True,
- )
- version(
- "0.4.0",
- sha256="4d6fab5481cc7c994b32fb23a37e9ee44041a9f91acf78f981a97cb8ef57bb7d",
- deprecated=True,
- )
- version(
- "0.3.3",
- sha256="26e7f263f53c6c6ee0fe216e981a558dfdd7ec997d0dd2a24285a609a6c68f3b",
- deprecated=True,
- )
- version(
- "0.3.2",
- sha256="09b6d1bcc02ac54ba269b1123eee7be20f0104b93596956c014b794ba96b037f",
- deprecated=True,
- )
- version(
- "0.2.1-1",
- sha256="066b750e9d1134871709a3e2414b96b166e0e24773efc7d512df2f1d96ee8eef",
- deprecated=True,
- )
- version(
- "0.2.1",
- sha256="3d5d15853cccc718f60df68205e56a2831de65be4d96e7f7e8497097e7905f89",
- deprecated=True,
- )
- version(
- "0.2",
- sha256="fc8f06c6d8faab17a2aedd408d3fe924043bf857da1094d5553f35c4d2af893b",
- deprecated=True,
- )
- version(
- "0.1",
- sha256="3880b736866e439dd94e6a61eeeb5bb2abccebbac82b82d52033bc6c94950bdb",
- deprecated=True,
- )
- variant("nccl", default=False, description="Builds with support for NCCL communication lib")
+ # Library capabilities
+ variant(
+ "cuda_rma",
+ default=False,
+ when="+cuda",
+ description="Builds with support for CUDA intra-node "
+ " Put/Get and IPC RMA functionality",
+ )
variant(
"ht",
default=False,
description="Builds with support for host-enabled MPI"
" communication of accelerator data",
)
+ variant("nccl", default=False, description="Builds with support for NCCL communication lib")
+ variant("shared", default=True, description="Build Aluminum as a shared library")
+
+ # Debugging features
+ variant("hang_check", default=False, description="Enable hang checking")
+ variant("trace", default=False, description="Enable runtime tracing")
+
+ # Profiler support
+ variant("nvtx", default=False, when="+cuda", description="Enable profiling via nvprof/NVTX")
variant(
- "cuda_rma",
+ "roctracer", default=False, when="+rocm", description="Enable profiling via rocprof/roctx"
+ )
+
+ # Advanced options
+ variant("mpi_serialize", default=False, description="Serialize MPI operations")
+ variant("stream_mem_ops", default=False, description="Enable stream memory operations")
+ variant(
+ "thread_multiple",
default=False,
- description="Builds with support for CUDA intra-node "
- " Put/Get and IPC RMA functionality",
+ description="Allow multiple threads to call Aluminum concurrently",
)
- variant("rccl", default=False, description="Builds with support for RCCL communication lib")
+
+ # Benchmark/testing support
variant(
- "ofi_libfabric_plugin",
- default=spack.platforms.cray.slingshot_network(),
- when="+rccl",
- description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib",
+ "benchmarks",
+ default=False,
+ description="Build the Aluminum benchmarking drivers "
+ "(warning: may significantly increase build time!)",
)
variant(
- "ofi_libfabric_plugin",
- default=spack.platforms.cray.slingshot_network(),
- when="+nccl",
- description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib",
+ "tests",
+ default=False,
+ description="Build the Aluminum test drivers "
+ "(warning: may moderately increase build time!)",
)
- depends_on("cmake@3.21.0:", type="build", when="@1.0.1:")
- depends_on("cmake@3.17.0:", type="build", when="@:1.0.0")
- depends_on("mpi")
- depends_on("nccl@2.7.0-0:", when="+nccl")
- depends_on("hwloc@1.11:")
- depends_on("hwloc +cuda +nvml", when="+cuda")
- depends_on("hwloc@2.3.0:", when="+rocm")
- depends_on("cub", when="@:0.1,0.6.0: +cuda ^cuda@:10")
- depends_on("hipcub", when="@:0.1,0.6.0: +rocm")
-
- depends_on("rccl", when="+rccl")
- depends_on("aws-ofi-rccl", when="+rccl +ofi_libfabric_plugin")
- depends_on("aws-ofi-nccl", when="+nccl +ofi_libfabric_plugin")
+ # FIXME: Do we want to expose tuning parameters to the Spack
+ # recipe? Some are numeric values, some are on/off switches.
conflicts("~cuda", when="+cuda_rma", msg="CUDA RMA support requires CUDA")
conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive")
- conflicts("+nccl", when="+rccl", msg="NCCL and RCCL support are mutually exclusive")
- generator("ninja")
+ depends_on("mpi")
- def cmake_args(self):
- spec = self.spec
- args = [
- "-DCMAKE_CXX_STANDARD:STRING=17",
- "-DALUMINUM_ENABLE_CUDA:BOOL=%s" % ("+cuda" in spec),
- "-DALUMINUM_ENABLE_NCCL:BOOL=%s" % ("+nccl" in spec or "+rccl" in spec),
- "-DALUMINUM_ENABLE_ROCM:BOOL=%s" % ("+rocm" in spec),
- ]
-
- if not spec.satisfies("^cmake@3.23.0"):
- # There is a bug with using Ninja generator in this version
- # of CMake
- args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON")
-
- if "+cuda" in spec:
- if self.spec.satisfies("%clang"):
- for flag in self.spec.compiler_flags["cxxflags"]:
- if "gcc-toolchain" in flag:
- args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag))
- if spec.satisfies("^cuda@11.0:"):
- args.append("-DCMAKE_CUDA_STANDARD=17")
- else:
- args.append("-DCMAKE_CUDA_STANDARD=14")
- archs = spec.variants["cuda_arch"].value
- if archs != "none":
- arch_str = ";".join(archs)
- args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str)
-
- if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"):
- args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler")
-
- if spec.satisfies("@0.5:"):
- args.extend(
- [
- "-DALUMINUM_ENABLE_HOST_TRANSFER:BOOL=%s" % ("+ht" in spec),
- "-DALUMINUM_ENABLE_MPI_CUDA:BOOL=%s" % ("+cuda_rma" in spec),
- "-DALUMINUM_ENABLE_MPI_CUDA_RMA:BOOL=%s" % ("+cuda_rma" in spec),
- ]
+ depends_on("cmake@3.21.0:", type="build", when="@1.0.1:")
+ depends_on("hwloc@1.11:")
+
+ with when("+cuda"):
+ depends_on("cub", when="^cuda@:10")
+ depends_on("hwloc +cuda +nvml")
+ with when("+nccl"):
+ depends_on("nccl@2.7.0-0:")
+ for arch in CudaPackage.cuda_arch_values:
+ depends_on(
+ "nccl +cuda cuda_arch={0}".format(arch),
+ when="+cuda cuda_arch={0}".format(arch),
+ )
+ if spack.platforms.cray.slingshot_network():
+ depends_on("aws-ofi-nccl") # Note: NOT a CudaPackage
+
+ with when("+rocm"):
+ for val in ROCmPackage.amdgpu_targets:
+ depends_on(
+ "hipcub +rocm amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val)
)
- else:
- args.append("-DALUMINUM_ENABLE_MPI_CUDA:BOOL=%s" % ("+ht" in spec))
-
- if spec.satisfies("@:0.1,0.6.0: +cuda ^cuda@:10"):
- args.append("-DCUB_DIR:FILEPATH=%s" % spec["cub"].prefix)
-
- # Add support for OS X to find OpenMP (LLVM installed via brew)
- if self.spec.satisfies("%clang platform=darwin"):
- clang = self.compiler.cc
- clang_bin = os.path.dirname(clang)
- clang_root = os.path.dirname(clang_bin)
- args.extend(["-DOpenMP_DIR={0}".format(clang_root)])
-
- if "+rocm" in spec:
- args.extend(
- [
- "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix),
- "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc),
- "-DCMAKE_CXX_FLAGS=-std=c++17",
- ]
+ depends_on(
+ "hwloc@2.3.0: +rocm amdgpu_target={0}".format(val),
+ when="amdgpu_target={0}".format(val),
)
- archs = self.spec.variants["amdgpu_target"].value
- if archs != "none":
- arch_str = ",".join(archs)
- if spec.satisfies("%rocmcc@:5"):
- args.append(
- "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}"
- " -g -fsized-deallocation -fPIC -std=c++17".format(arch_str)
- )
- args.extend(
- [
- "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str,
- "-DAMDGPU_TARGETS=%s" % arch_str,
- "-DGPU_TARGETS=%s" % arch_str,
- ]
- )
+ # RCCL is *NOT* implemented as a ROCmPackage
+ depends_on(
+ "rccl amdgpu_target={0}".format(val), when="+nccl amdgpu_target={0}".format(val)
+ )
+ depends_on(
+ "roctracer-dev +rocm amdgpu_target={0}".format(val),
+ when="+roctracer amdgpu_target={0}".format(val),
+ )
+ if spack.platforms.cray.slingshot_network():
+ depends_on("aws-ofi-rccl", when="+nccl")
+ def cmake_args(self):
+ args = []
return args
+
+ def get_cuda_flags(self):
+ spec = self.spec
+ args = []
+ if spec.satisfies("^cuda+allow-unsupported-compilers"):
+ args.append("-allow-unsupported-compiler")
+
+ if spec.satisfies("%clang"):
+ for flag in spec.compiler_flags["cxxflags"]:
+ if "gcc-toolchain" in flag:
+ args.append("-Xcompiler={0}".format(flag))
+ return args
+
+ def std_initconfig_entries(self):
+ entries = super(Aluminum, self).std_initconfig_entries()
+
+ # CMAKE_PREFIX_PATH, in CMake types, is a "STRING", not a "PATH". :/
+ entries = [x for x in entries if "CMAKE_PREFIX_PATH" not in x]
+ cmake_prefix_path = os.environ["CMAKE_PREFIX_PATH"].replace(":", ";")
+ entries.append(cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path))
+ return entries
+
+ def initconfig_compiler_entries(self):
+ spec = self.spec
+ entries = super(Aluminum, self).initconfig_compiler_entries()
+
+ # FIXME: Enforce this better in the actual CMake.
+ entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17"))
+ entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec))
+ entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True))
+ entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True))
+
+ return entries
+
+ def initconfig_hardware_entries(self):
+ spec = self.spec
+ entries = super(Aluminum, self).initconfig_hardware_entries()
+
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_CUDA", "+cuda" in spec))
+ if spec.satisfies("+cuda"):
+ entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17"))
+ if not spec.satisfies("cuda_arch=none"):
+ archs = spec.variants["cuda_arch"].value
+ arch_str = ";".join(archs)
+ entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str))
+
+ # FIXME: Should this use the "cuda_flags" function of the
+ # CudaPackage class or something? There might be other
+ # flags in play, and we need to be sure to get them all.
+ cuda_flags = self.get_cuda_flags()
+ if len(cuda_flags) > 0:
+ entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))
+
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_ROCM", "+rocm" in spec))
+ if spec.satisfies("+rocm"):
+ entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17"))
+ if not spec.satisfies("amdgpu_target=none"):
+ archs = self.spec.variants["amdgpu_target"].value
+ arch_str = ";".join(archs)
+ entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+ entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+ entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
+ entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix))
+
+ return entries
+
+ def initconfig_package_entries(self):
+ spec = self.spec
+ entries = super(Aluminum, self).initconfig_package_entries()
+
+ # Library capabilities
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_MPI_CUDA", "+cuda_rma" in spec))
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_MPI_CUDA_RMA", "+cuda_rma" in spec))
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_HOST_TRANSFER", "+ht" in spec))
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_NCCL", "+nccl" in spec))
+
+ # Debugging features
+ entries.append(cmake_cache_option("ALUMINUM_DEBUG_HANG_CHECK", "+hang_check" in spec))
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_TRACE", "+trace" in spec))
+
+ # Profiler support
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_NVPROF", "+nvtx" in spec))
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_ROCTRACER", "+roctracer" in spec))
+
+ # Advanced options
+ entries.append(cmake_cache_option("ALUMINUM_MPI_SERIALIZE", "+mpi_serialize" in spec))
+ entries.append(
+ cmake_cache_option("ALUMINUM_ENABLE_STREAM_MEM_OPS", "+stream_mem_ops" in spec)
+ )
+ entries.append(
+ cmake_cache_option("ALUMINUM_ENABLE_THREAD_MULTIPLE", "+thread_multiple" in spec)
+ )
+
+ # Benchmark/testing support
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_BENCHMARKS", "+benchmarks" in spec))
+ entries.append(cmake_cache_option("ALUMINUM_ENABLE_TESTS", "+tests" in spec))
+
+ return entries
diff --git a/var/spack/repos/builtin/packages/amdlibflame/package.py b/var/spack/repos/builtin/packages/amdlibflame/package.py
index d8e06c6b43727c..e1b96e042e02a9 100644
--- a/var/spack/repos/builtin/packages/amdlibflame/package.py
+++ b/var/spack/repos/builtin/packages/amdlibflame/package.py
@@ -59,10 +59,11 @@ class Amdlibflame(LibflameBase):
conflicts("+ilp64", when="@:3.0.0", msg="ILP64 is supported from 3.0.1 onwards")
conflicts("threads=pthreads", msg="pthread is not supported")
- conflicts("threads=openmp", msg="openmp is not supported")
+ conflicts("threads=openmp", when="@:3", msg="openmp is not supported by amdlibflame < 4.0")
patch("aocc-2.2.0.patch", when="@:2", level=1)
patch("cray-compiler-wrapper.patch", when="@:3.0.0", level=1)
+ patch("supermat.patch", when="@4.0:4.1", level=1)
provides("flame@5.2", when="@2:")
@@ -109,13 +110,13 @@ def configure_args(self):
)
# From 3.2 version, amd optimized flags are encapsulated under:
- # enable-amd-flags for gcc compiler
- # enable-amd-aocc-flags for aocc compiler
+ # enable-amd-aocc-flags for AOCC compiler
+ # enable-amd-flags for all other compilers
if "@3.2:" in self.spec:
- if "%gcc" in self.spec:
- args.append("--enable-amd-flags")
if "%aocc" in self.spec:
args.append("--enable-amd-aocc-flags")
+ else:
+ args.append("--enable-amd-flags")
if "@:3.1" in self.spec:
args.append("--enable-external-lapack-interfaces")
diff --git a/var/spack/repos/builtin/packages/amdlibflame/supermat.patch b/var/spack/repos/builtin/packages/amdlibflame/supermat.patch
new file mode 100644
index 00000000000000..374ffa3dc34e44
--- /dev/null
+++ b/var/spack/repos/builtin/packages/amdlibflame/supermat.patch
@@ -0,0 +1,11 @@
+diff --git a/src/map/lapack2flamec/FLA_getrf.c b/src/map/lapack2flamec/FLA_getrf.c
+index af70857e..1ffc63a1 100644
+--- a/src/map/lapack2flamec/FLA_getrf.c
++++ b/src/map/lapack2flamec/FLA_getrf.c
+@@ -232,6 +232,7 @@ extern fla_context global_context;
+
+ #else /* FLA_ENABLE_SUPERMATRIX */
+
++#define LAPACK_getrf_body_s LAPACK_getrf_body
+ #define LAPACK_getrf_body_d LAPACK_getrf_body
+ #define LAPACK_getrf_body_z LAPACK_getrf_body
diff --git a/var/spack/repos/builtin/packages/amdsmi/package.py b/var/spack/repos/builtin/packages/amdsmi/package.py
new file mode 100644
index 00000000000000..5c293799b80a2e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/amdsmi/package.py
@@ -0,0 +1,49 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class Amdsmi(CMakePackage):
+ """The AMD System Management Interface Library, or AMD SMI library,
+ is a C library for Linux that provides a user space interface for
+ applications to monitor and control AMD device."""
+
+ homepage = "https://github.com/RadeonOpenCompute/amdsmi"
+ url = "https://github.com/RadeonOpenCompute/amdsmi/archive/refs/tags/rocm-5.6.0.tar.gz"
+
+ tags = ["rocm"]
+ maintainers("srekolam", "renjithravindrankannath")
+ libraries = ["libamd_smi"]
+
+ version("5.6.0", sha256="595c9d6d79d9071290b2f19ab4ef9222c8d2983b4322b3143fcd9d0b1ce0f6d8")
+ version("5.5.1", sha256="b794c7fd562fd92f2c9f2bbdc2d5dded7486101fcd4598f2e8c3484c9a939281")
+ version("5.5.0", sha256="dcfbd96e93afcf86b1261464e008e9ef7e521670871a1885e6eaffc7cdc8f555")
+
+ depends_on("cmake@3.11:", type="build")
+ depends_on("python@3.6:", type="run")
+ depends_on("py-virtualenv", type="build")
+ depends_on("llvm@14:", type="build")
+ depends_on("pkgconfig", type="build")
+ depends_on("libdrm", type="build")
+ depends_on("py-pyyaml", type="build")
+
+ @classmethod
+ def determine_version(cls, lib):
+ match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", lib)
+ if match:
+ ver = "{0}.{1}.{2}".format(
+ int(match.group(1)), int(match.group(2)), int(match.group(3))
+ )
+ else:
+ ver = None
+ return ver
+
+ def cmake_args(self):
+ args = []
+ args.append(self.define("BUILD_TESTS", "ON"))
+ args.append("-DCMAKE_INSTALL_LIBDIR=lib")
+ return args
diff --git a/var/spack/repos/builtin/packages/amg2013/package.py b/var/spack/repos/builtin/packages/amg2013/package.py
index 638e874a46cf10..def0b495a83aac 100644
--- a/var/spack/repos/builtin/packages/amg2013/package.py
+++ b/var/spack/repos/builtin/packages/amg2013/package.py
@@ -46,9 +46,9 @@ def build_targets(self):
if "+int64" in self.spec:
include_cflags.append("-DHYPRE_BIGINT")
- targets.append("INCLUDE_CFLAGS={0}".format(" ".join(include_cflags)))
- targets.append("INCLUDE_LFLAGS={0}".format(" ".join(include_lflags)))
- targets.append("CC={0}".format(self.spec["mpi"].mpicc))
+ targets.append(f"INCLUDE_CFLAGS={' '.join(include_cflags)}")
+ targets.append(f"INCLUDE_LFLAGS={' '.join(include_lflags)}")
+ targets.append(f"CC={self.spec['mpi'].mpicc}")
return targets
diff --git a/var/spack/repos/builtin/packages/amg2023/package.py b/var/spack/repos/builtin/packages/amg2023/package.py
index 96b2dc335d74a3..a2e8b676e9a9c9 100644
--- a/var/spack/repos/builtin/packages/amg2023/package.py
+++ b/var/spack/repos/builtin/packages/amg2023/package.py
@@ -40,7 +40,7 @@ def cmake_args(self):
cmake_options = []
cmake_options.append(self.define_from_variant("AMG_WITH_CALIPER", "caliper"))
cmake_options.append(self.define_from_variant("AMG_WITH_OMP", "openmp"))
- cmake_options.append("-DHYPRE_PREFIX={0}".format(self.spec["hypre"].prefix))
+ cmake_options.append(self.define("HYPRE_PREFIX", self.spec["hypre"].prefix))
if self.spec["hypre"].satisfies("+cuda"):
cmake_options.append("-DAMG_WITH_CUDA=ON")
if self.spec["hypre"].satisfies("+rocm"):
diff --git a/var/spack/repos/builtin/packages/amp/package.py b/var/spack/repos/builtin/packages/amp/package.py
index 1716a13e57a36a..f0ec4071ce14a4 100644
--- a/var/spack/repos/builtin/packages/amp/package.py
+++ b/var/spack/repos/builtin/packages/amp/package.py
@@ -117,9 +117,7 @@ def cmake_args(self):
):
if "+" + vname in spec:
tpl_list.append(vname.upper())
- options.append(
- self.define("TPL_{0}_INSTALL_DIR".format(vname.upper()), spec[vname].prefix)
- )
+ options.append(self.define(f"TPL_{vname.upper()}_INSTALL_DIR", spec[vname].prefix))
if "+netcdf" in spec:
tpl_list.append("NETCDF")
diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py
index 37f953ba75d73c..da5c2ee353f584 100644
--- a/var/spack/repos/builtin/packages/amrex/package.py
+++ b/var/spack/repos/builtin/packages/amrex/package.py
@@ -21,9 +21,10 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
tags = ["ecp", "e4s"]
- maintainers("WeiqunZhang", "asalmgren", "etpalmer63")
+ maintainers("WeiqunZhang", "asalmgren", "atmyers")
version("develop", branch="development")
+ version("23.11", sha256="49b9fea10cd2a2b6cb0fedf7eac8f7889eacc68a05ae5ac7c5702bc0eb1b3848")
version("23.10", sha256="3c85aa0ad5f96303e797960a6e0aa37c427f6483f39cdd61dbc2f7ca16357714")
version("23.09", sha256="1a539c2628041b17ad910afd9270332060251c8e346b1482764fdb87a4f25053")
version("23.08", sha256="a83b7249d65ad8b6ac1881377e5f814b6db8ed8410ea5562b8ae9d4ed1f37c29")
diff --git a/var/spack/repos/builtin/packages/ams/package.py b/var/spack/repos/builtin/packages/ams/package.py
index ba75a25e63f6c3..b95e69abc3d81d 100644
--- a/var/spack/repos/builtin/packages/ams/package.py
+++ b/var/spack/repos/builtin/packages/ams/package.py
@@ -15,6 +15,12 @@ class Ams(CMakePackage, CudaPackage):
maintainers("koparasy", "lpottier")
version("develop", branch="develop", submodules=False)
+ version(
+ "11.08.23.alpha",
+ tag="11.08.23.alpha",
+ commit="1a42b29268bb916dae301654ca0b92fdfe288732",
+ submodules=False,
+ )
version(
"07.25.23-alpha",
tag="07.25.23-alpha",
diff --git a/var/spack/repos/builtin/packages/anaconda3/package.py b/var/spack/repos/builtin/packages/anaconda3/package.py
index 4c0196fe9092b1..c405e9d9034197 100644
--- a/var/spack/repos/builtin/packages/anaconda3/package.py
+++ b/var/spack/repos/builtin/packages/anaconda3/package.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import platform
from os.path import split
from spack.package import *
@@ -22,172 +23,199 @@ class Anaconda3(Package):
maintainers("ajkotobi")
- version(
- "2022.10",
- sha256="e7ecbccbc197ebd7e1f211c59df2e37bc6959d081f2235d387e08c9026666acd",
- expand=False,
- )
- version(
- "2022.05",
- sha256="a7c0afe862f6ea19a596801fc138bde0463abcbce1b753e8d5c474b506a2db2d",
- expand=False,
- )
- version(
- "2021.11",
- sha256="fedf9e340039557f7b5e8a8a86affa9d299f5e9820144bd7b92ae9f7ee08ac60",
- expand=False,
- )
+ if platform.machine() == "ppc64le":
+ version(
+ "2023.09-0",
+ sha256="5ea1ed9808af95eb2655fe6a4ffdb66bea66ecd1d053fc2ee69eacc7685ef665",
+ expand=False,
+ )
+ elif platform.machine() == "aarch64":
+ version(
+ "2023.09-0",
+ sha256="69ee26361c1ec974199bce5c0369e3e9a71541de7979d2b9cfa4af556d1ae0ea",
+ expand=False,
+ )
+ elif platform.machine() == "x86_64":
+ version(
+ "2023.09-0",
+ sha256="6c8a4abb36fbb711dc055b7049a23bbfd61d356de9468b41c5140f8a11abd851",
+ expand=False,
+ )
+ version(
+ "2023.07-2",
+ sha256="589fb34fe73bc303379abbceba50f3131254e85ce4e7cd819ba4276ba29cad16",
+ expand=False,
+ )
+ version(
+ "2022.10",
+ sha256="e7ecbccbc197ebd7e1f211c59df2e37bc6959d081f2235d387e08c9026666acd",
+ expand=False,
+ )
+ version(
+ "2022.05",
+ sha256="a7c0afe862f6ea19a596801fc138bde0463abcbce1b753e8d5c474b506a2db2d",
+ expand=False,
+ )
+ version(
+ "2021.11",
+ sha256="fedf9e340039557f7b5e8a8a86affa9d299f5e9820144bd7b92ae9f7ee08ac60",
+ expand=False,
+ )
- version(
- "2021.05",
- sha256="2751ab3d678ff0277ae80f9e8a74f218cfc70fe9a9cdc7bb1c137d7e47e33d53",
- expand=False,
- )
- version(
- "2020.11",
- sha256="cf2ff493f11eaad5d09ce2b4feaa5ea90db5174303d5b3fe030e16d29aeef7de",
- expand=False,
- )
- version(
- "2020.07",
- sha256="38ce717758b95b3bd0b1797cc6ccfb76f29a90c25bdfa50ee45f11e583edfdbf",
- expand=False,
- )
- version(
- "2020.02",
- sha256="2b9f088b2022edb474915d9f69a803d6449d5fdb4c303041f60ac4aefcc208bb",
- expand=False,
- )
- version(
- "2019.10",
- sha256="46d762284d252e51cd58a8ca6c8adc9da2eadc82c342927b2f66ed011d1d8b53",
- expand=False,
- )
- version(
- "2019.07",
- sha256="69581cf739365ec7fb95608eef694ba959d7d33b36eb961953f2b82cb25bdf5a",
- expand=False,
- )
- version(
- "2019.03",
- sha256="45c851b7497cc14d5ca060064394569f724b67d9b5f98a926ed49b834a6bb73a",
- expand=False,
- )
- version(
- "2018.12",
- sha256="1019d0857e5865f8a6861eaf15bfe535b87e92b72ce4f531000dc672be7fce00",
- expand=False,
- )
- version(
- "5.3.1",
- sha256="d4c4256a8f46173b675dd6a62d12f566ed3487f932bab6bb7058f06c124bcc27",
- expand=False,
- )
- version(
- "5.3.0",
- sha256="cfbf5fe70dd1b797ec677e63c61f8efc92dad930fd1c94d60390bb07fdc09959",
- expand=False,
- )
- version(
- "5.2.0",
- sha256="09f53738b0cd3bb96f5b1bac488e5528df9906be2480fe61df40e0e0d19e3d48",
- expand=False,
- )
- version(
- "5.1.0",
- sha256="7e6785caad25e33930bc03fac4994a434a21bc8401817b7efa28f53619fa9c29",
- expand=False,
- )
- version(
- "5.0.1",
- sha256="55e4db1919f49c92d5abbf27a4be5986ae157f074bf9f8238963cd4582a4068a",
- expand=False,
- )
- version(
- "5.0.0.1",
- sha256="092c92427f44687d789a41922ce8426fbdc3c529cc9d6d4ee6de5b62954b93b2",
- expand=False,
- )
- version(
- "5.0.0",
- sha256="67f5c20232a3e493ea3f19a8e273e0618ab678fa14b03b59b1783613062143e9",
- expand=False,
- )
- version(
- "4.4.0",
- sha256="3301b37e402f3ff3df216fe0458f1e6a4ccbb7e67b4d626eae9651de5ea3ab63",
- expand=False,
- )
- version(
- "4.3.1",
- sha256="4447b93d2c779201e5fb50cfc45de0ec96c3804e7ad0fe201ab6b99f73e90302",
- expand=False,
- )
- version(
- "4.3.0",
- sha256="e9169c3a5029aa820393ac92704eb9ee0701778a085ca7bdc3c57b388ac1beb6",
- expand=False,
- )
- version(
- "4.2.0",
- sha256="73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78",
- expand=False,
- )
- version(
- "4.1.1",
- sha256="4f5c95feb0e7efeadd3d348dcef117d7787c799f24b0429e45017008f3534e55",
- expand=False,
- )
- version(
- "4.1.0",
- sha256="11d32cf4026603d3b327dc4299863be6b815905ff51a80329085e1bb9f96c8bd",
- expand=False,
- )
- version(
- "4.0.0",
- sha256="36a558a1109868661a5735f5f32607643f6dc05cf581fefb1c10fb8abbe22f39",
- expand=False,
- )
- version(
- "2.5.0",
- sha256="addadcb927f15cb0b5b6e36890563d3352a8ff6a901ea753d389047d274a29a9",
- expand=False,
- )
- version(
- "2.4.1",
- sha256="0735e69199fc37135930ea2fd4fb6ad0adef215a2a7ba9fd6b0a0a4daaadb1cf",
- expand=False,
- )
- version(
- "2.4.0",
- sha256="fb4e480059e991f2fa632b5a9bcdd284c7f0677814cd719c11d524453f96a40d",
- expand=False,
- )
- version(
- "2.3.0",
- sha256="3be5410b2d9db45882c7de07c554cf4f1034becc274ec9074b23fd37a5c87a6f",
- expand=False,
- )
- version(
- "2.2.0",
- sha256="4aac68743e7706adb93f042f970373a6e7e087dbf4b02ac467c94ca4ce33d2d1",
- expand=False,
- )
- version(
- "2.1.0",
- sha256="af3225ccbe8df0ffb918939e009aa57740e35058ebf9dfcf5fec794a77556c3c",
- expand=False,
- )
- version(
- "2.0.1",
- sha256="3c3b834793e461f3316ad1d9a9178c67859a9d74aaf7bcade076f04134dd1e26",
- expand=False,
- )
- version(
- "2.0.0",
- sha256="57ce4f97e300cf94c5724f72d992e9eecef708fdaa13bc672ae9779773056540",
- expand=False,
- )
+ version(
+ "2021.05",
+ sha256="2751ab3d678ff0277ae80f9e8a74f218cfc70fe9a9cdc7bb1c137d7e47e33d53",
+ expand=False,
+ )
+ version(
+ "2020.11",
+ sha256="cf2ff493f11eaad5d09ce2b4feaa5ea90db5174303d5b3fe030e16d29aeef7de",
+ expand=False,
+ )
+ version(
+ "2020.07",
+ sha256="38ce717758b95b3bd0b1797cc6ccfb76f29a90c25bdfa50ee45f11e583edfdbf",
+ expand=False,
+ )
+ version(
+ "2020.02",
+ sha256="2b9f088b2022edb474915d9f69a803d6449d5fdb4c303041f60ac4aefcc208bb",
+ expand=False,
+ )
+ version(
+ "2019.10",
+ sha256="46d762284d252e51cd58a8ca6c8adc9da2eadc82c342927b2f66ed011d1d8b53",
+ expand=False,
+ )
+ version(
+ "2019.07",
+ sha256="69581cf739365ec7fb95608eef694ba959d7d33b36eb961953f2b82cb25bdf5a",
+ expand=False,
+ )
+ version(
+ "2019.03",
+ sha256="45c851b7497cc14d5ca060064394569f724b67d9b5f98a926ed49b834a6bb73a",
+ expand=False,
+ )
+ version(
+ "2018.12",
+ sha256="1019d0857e5865f8a6861eaf15bfe535b87e92b72ce4f531000dc672be7fce00",
+ expand=False,
+ )
+ version(
+ "5.3.1",
+ sha256="d4c4256a8f46173b675dd6a62d12f566ed3487f932bab6bb7058f06c124bcc27",
+ expand=False,
+ )
+ version(
+ "5.3.0",
+ sha256="cfbf5fe70dd1b797ec677e63c61f8efc92dad930fd1c94d60390bb07fdc09959",
+ expand=False,
+ )
+ version(
+ "5.2.0",
+ sha256="09f53738b0cd3bb96f5b1bac488e5528df9906be2480fe61df40e0e0d19e3d48",
+ expand=False,
+ )
+ version(
+ "5.1.0",
+ sha256="7e6785caad25e33930bc03fac4994a434a21bc8401817b7efa28f53619fa9c29",
+ expand=False,
+ )
+ version(
+ "5.0.1",
+ sha256="55e4db1919f49c92d5abbf27a4be5986ae157f074bf9f8238963cd4582a4068a",
+ expand=False,
+ )
+ version(
+ "5.0.0.1",
+ sha256="092c92427f44687d789a41922ce8426fbdc3c529cc9d6d4ee6de5b62954b93b2",
+ expand=False,
+ )
+ version(
+ "5.0.0",
+ sha256="67f5c20232a3e493ea3f19a8e273e0618ab678fa14b03b59b1783613062143e9",
+ expand=False,
+ )
+ version(
+ "4.4.0",
+ sha256="3301b37e402f3ff3df216fe0458f1e6a4ccbb7e67b4d626eae9651de5ea3ab63",
+ expand=False,
+ )
+ version(
+ "4.3.1",
+ sha256="4447b93d2c779201e5fb50cfc45de0ec96c3804e7ad0fe201ab6b99f73e90302",
+ expand=False,
+ )
+ version(
+ "4.3.0",
+ sha256="e9169c3a5029aa820393ac92704eb9ee0701778a085ca7bdc3c57b388ac1beb6",
+ expand=False,
+ )
+ version(
+ "4.2.0",
+ sha256="73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78",
+ expand=False,
+ )
+ version(
+ "4.1.1",
+ sha256="4f5c95feb0e7efeadd3d348dcef117d7787c799f24b0429e45017008f3534e55",
+ expand=False,
+ )
+ version(
+ "4.1.0",
+ sha256="11d32cf4026603d3b327dc4299863be6b815905ff51a80329085e1bb9f96c8bd",
+ expand=False,
+ )
+ version(
+ "4.0.0",
+ sha256="36a558a1109868661a5735f5f32607643f6dc05cf581fefb1c10fb8abbe22f39",
+ expand=False,
+ )
+ version(
+ "2.5.0",
+ sha256="addadcb927f15cb0b5b6e36890563d3352a8ff6a901ea753d389047d274a29a9",
+ expand=False,
+ )
+ version(
+ "2.4.1",
+ sha256="0735e69199fc37135930ea2fd4fb6ad0adef215a2a7ba9fd6b0a0a4daaadb1cf",
+ expand=False,
+ )
+ version(
+ "2.4.0",
+ sha256="fb4e480059e991f2fa632b5a9bcdd284c7f0677814cd719c11d524453f96a40d",
+ expand=False,
+ )
+ version(
+ "2.3.0",
+ sha256="3be5410b2d9db45882c7de07c554cf4f1034becc274ec9074b23fd37a5c87a6f",
+ expand=False,
+ )
+ version(
+ "2.2.0",
+ sha256="4aac68743e7706adb93f042f970373a6e7e087dbf4b02ac467c94ca4ce33d2d1",
+ expand=False,
+ )
+ version(
+ "2.1.0",
+ sha256="af3225ccbe8df0ffb918939e009aa57740e35058ebf9dfcf5fec794a77556c3c",
+ expand=False,
+ )
+ version(
+ "2.0.1",
+ sha256="3c3b834793e461f3316ad1d9a9178c67859a9d74aaf7bcade076f04134dd1e26",
+ expand=False,
+ )
+ version(
+ "2.0.0",
+ sha256="57ce4f97e300cf94c5724f72d992e9eecef708fdaa13bc672ae9779773056540",
+ expand=False,
+ )
+
+ def url_for_version(self, version):
+ url = "https://repo.anaconda.com/archive/Anaconda3-{0}-Linux-{1}.sh"
+ return url.format(version, platform.machine())
def install(self, spec, prefix):
dir, anaconda_script = split(self.stage.archive_file)
diff --git a/var/spack/repos/builtin/packages/anicalculator/package.py b/var/spack/repos/builtin/packages/anicalculator/package.py
index b14e05a67020fe..9d002975d98485 100644
--- a/var/spack/repos/builtin/packages/anicalculator/package.py
+++ b/var/spack/repos/builtin/packages/anicalculator/package.py
@@ -19,7 +19,7 @@ class Anicalculator(Package):
https://spack.readthedocs.io/en/latest/mirrors.html"""
homepage = "https://ani.jgi.doe.gov/html/download.php?"
- url = "file://{0}/ANIcalculator_v1.tgz".format(os.getcwd())
+ url = f"file://{os.getcwd()}/ANIcalculator_v1.tgz"
manual_download = True
version("1", sha256="236596a9a204cbcad162fc66be3506b2530b1f48f4f84d9647ccec3ca7483a43")
diff --git a/var/spack/repos/builtin/packages/aocc/package.py b/var/spack/repos/builtin/packages/aocc/package.py
index 229a37bb5375c5..f246af46357988 100644
--- a/var/spack/repos/builtin/packages/aocc/package.py
+++ b/var/spack/repos/builtin/packages/aocc/package.py
@@ -91,3 +91,13 @@ def license_reminder(self):
def install(self, spec, prefix):
print("Installing AOCC Compiler ... ")
install_tree(".", prefix)
+
+ @run_after("install")
+ def cfg_files(self):
+ # Add path to gcc/g++ such that clang/clang++ can always find a full gcc installation
+ # including libstdc++.so and header files.
+ if self.spec.satisfies("%gcc") and self.compiler.cxx is not None:
+ compiler_options = "--gcc-toolchain={}".format(self.compiler.prefix)
+ for compiler in ["clang", "clang++"]:
+ with open(join_path(self.prefix.bin, "{}.cfg".format(compiler)), "w") as f:
+ f.write(compiler_options)
diff --git a/var/spack/repos/builtin/packages/ape/package.py b/var/spack/repos/builtin/packages/ape/package.py
index 9468a7f9a69567..39b833ab0e10d6 100644
--- a/var/spack/repos/builtin/packages/ape/package.py
+++ b/var/spack/repos/builtin/packages/ape/package.py
@@ -23,9 +23,9 @@ def install(self, spec, prefix):
args = []
args.extend(
[
- "--prefix=%s" % prefix,
- "--with-gsl-prefix=%s" % spec["gsl"].prefix,
- "--with-libxc-prefix=%s" % spec["libxc"].prefix,
+ f"--prefix={prefix}",
+ f"--with-gsl-prefix={spec['gsl'].prefix}",
+ f"--with-libxc-prefix={spec['libxc'].prefix}",
]
)
diff --git a/var/spack/repos/builtin/packages/apple-libuuid/package.py b/var/spack/repos/builtin/packages/apple-libuuid/package.py
index 4c7526d6118537..9028efc5ac4780 100644
--- a/var/spack/repos/builtin/packages/apple-libuuid/package.py
+++ b/var/spack/repos/builtin/packages/apple-libuuid/package.py
@@ -18,28 +18,14 @@ class AppleLibuuid(BundlePackage):
# Only supported on 'platform=darwin'
conflicts("platform=linux")
conflicts("platform=cray")
+ conflicts("platform=windows")
@property
- def libs(self):
- """Export the Apple libuuid library.
-
- According to https://bugs.freedesktop.org/show_bug.cgi?id=105366,
- libuuid is provided as part of libsystem_c. The Apple libsystem_c
- library cannot be linked to directly using an absolute path; doing so
- will cause the linker to throw an error 'cannot link directly with
- /usr/lib/system/libsystem_c.dylib' and the linker will suggest linking
- with System.framework instead. Linking to this framework is equivalent
- to linking with libSystem.dylib, which can be confirmed on a macOS
- system by executing at a terminal the command `ls -l
- /System/Library/Frameworks/System.Framework` -- the file "System" is a
- symlink to `/usr/lib/libSystem.B.dylib`, and `/usr/lib/libSystem.dylib`
- also symlinks to this file. Running `otool -L /usr/lib/libSystem.dylib`
- confirms that it will link dynamically to
- `/usr/lib/system/libsystem_c.dylib`."""
-
- return LibraryList("/usr/lib/libSystem.dylib")
+ def headers(self):
+ return HeaderList(
+ join_path(self.prefix, "System/Library/Frameworks/Kernel.framework/Headers")
+ )
@property
- def headers(self):
- """Export the Apple libuuid header."""
- return HeaderList(self.prefix.include.uuid.join("uuid.h"))
+ def libs(self):
+ return LibraryList(join_path(self.prefix, "System/Library/Frameworks/Kernel.framework"))
diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py
index 2351c48619f49c..dc0fad53d225d9 100644
--- a/var/spack/repos/builtin/packages/apr-util/package.py
+++ b/var/spack/repos/builtin/packages/apr-util/package.py
@@ -37,16 +37,16 @@ class AprUtil(AutotoolsPackage):
@property
def libs(self):
return find_libraries(
- ["libaprutil-{0}".format(self.version.up_to(1))], root=self.prefix, recursive=True
+ [f"libaprutil-{self.version.up_to(1)}"], root=self.prefix, recursive=True
)
def configure_args(self):
spec = self.spec
args = [
- "--with-apr={0}".format(spec["apr"].prefix),
- "--with-expat={0}".format(spec["expat"].prefix),
- "--with-iconv={0}".format(spec["iconv"].prefix),
+ f"--with-apr={spec['apr'].prefix}",
+ f"--with-expat={spec['expat'].prefix}",
+ f"--with-iconv={spec['iconv'].prefix}",
# TODO: Add support for the following database managers
"--without-ndbm",
"--without-berkeley-db",
@@ -55,34 +55,30 @@ def configure_args(self):
]
if "+crypto" in spec:
- args.extend(["--with-crypto", "--with-openssl={0}".format(spec["openssl"].prefix)])
+ args.extend(["--with-crypto", f"--with-openssl={spec['openssl'].prefix}"])
else:
args.append("--without-crypto")
if "+gdbm" in spec:
- args.append("--with-gdbm={0}".format(spec["gdbm"].prefix))
+ args.append(f"--with-gdbm={spec['gdbm'].prefix}")
else:
args.append("--without-gdbm")
if "+pgsql" in spec:
- args.append("--with-pgsql={0}".format(spec["postgresql"].prefix))
+ args.append(f"--with-pgsql={spec['postgresql'].prefix}")
else:
args.append("--without-pgsql")
if "+sqlite" in spec:
if spec.satisfies("^sqlite@3.0:3"):
- args.extend(
- ["--with-sqlite3={0}".format(spec["sqlite"].prefix), "--without-sqlite2"]
- )
+ args.extend([f"--with-sqlite3={spec['sqlite'].prefix}", "--without-sqlite2"])
elif spec.satisfies("^sqlite@2.0:2"):
- args.extend(
- ["--with-sqlite2={0}".format(spec["sqlite"].prefix), "--without-sqlite3"]
- )
+ args.extend([f"--with-sqlite2={spec['sqlite'].prefix}", "--without-sqlite3"])
else:
args.extend(["--without-sqlite2", "--without-sqlite3"])
if "+odbc" in spec:
- args.append("--with-odbc={0}".format(spec["unixodbc"].prefix))
+ args.append(f"--with-odbc={spec['unixodbc'].prefix}")
else:
args.append("--without-odbc")
diff --git a/var/spack/repos/builtin/packages/apr/package.py b/var/spack/repos/builtin/packages/apr/package.py
index de82ee5817c18a..45de21e3ee02af 100644
--- a/var/spack/repos/builtin/packages/apr/package.py
+++ b/var/spack/repos/builtin/packages/apr/package.py
@@ -26,5 +26,5 @@ class Apr(AutotoolsPackage):
@property
def libs(self):
return find_libraries(
- ["libapr-{0}".format(self.version.up_to(1))], root=self.prefix, recursive=True
+ [f"libapr-{self.version.up_to(1)}"], root=self.prefix, recursive=True
)
diff --git a/var/spack/repos/builtin/packages/aragorn/package.py b/var/spack/repos/builtin/packages/aragorn/package.py
index dc55dc52bb84be..8ac7894192f457 100644
--- a/var/spack/repos/builtin/packages/aragorn/package.py
+++ b/var/spack/repos/builtin/packages/aragorn/package.py
@@ -31,7 +31,7 @@ class Aragorn(Package):
# fix checksum error
def url_for_version(self, version):
- return "http://www.ansikte.se/ARAGORN/Downloads/aragorn{0}.c".format(version)
+ return f"http://www.ansikte.se/ARAGORN/Downloads/aragorn{version}.c"
def install(self, spec, prefix):
cc = Executable(spack_cc)
diff --git a/var/spack/repos/builtin/packages/archer/package.py b/var/spack/repos/builtin/packages/archer/package.py
index 8492eebc96b26c..52011bebd46f26 100644
--- a/var/spack/repos/builtin/packages/archer/package.py
+++ b/var/spack/repos/builtin/packages/archer/package.py
@@ -38,9 +38,9 @@ def patch(self):
def cmake_args(self):
return [
- "-DCMAKE_C_COMPILER=clang",
- "-DCMAKE_CXX_COMPILER=clang++",
- "-DOMP_PREFIX:PATH=%s" % self.spec["llvm-openmp-ompt"].prefix,
+ self.define("CMAKE_C_COMPILER", "clang"),
+ self.define("CMAKE_CXX_COMPILER", "clang++"),
+ self.define("OMP_PREFIX:PATH", self.spec["llvm-openmp-ompt"].prefix),
]
@run_after("install")
@@ -56,7 +56,7 @@ def test_run_parallel_example(self):
raise SkipTest("Parallel test directory does not exist")
test_exe = "parallel-simple"
- test_src = "{0}.c".format(test_exe)
+ test_src = f"{test_exe}.c"
with working_dir(test_dir):
clang = which("clang-archer")
clang("-o", test_exe, test_src)
diff --git a/var/spack/repos/builtin/packages/argon2/package.py b/var/spack/repos/builtin/packages/argon2/package.py
index c9762b6bc978cb..d41ba9761c1366 100644
--- a/var/spack/repos/builtin/packages/argon2/package.py
+++ b/var/spack/repos/builtin/packages/argon2/package.py
@@ -20,4 +20,4 @@ class Argon2(MakefilePackage):
version("20161029", sha256="fe0049728b946b58b94cc6db89b34e2d050c62325d16316a534d2bedd78cd5e7")
def install(self, spec, prefix):
- make("PREFIX={0}".format(prefix), "install", "LIBRARY_REL=lib")
+ make(f"PREFIX={prefix}", "install", "LIBRARY_REL=lib")
diff --git a/var/spack/repos/builtin/packages/armadillo/package.py b/var/spack/repos/builtin/packages/armadillo/package.py
index 9d83de741a8efb..78794086f91b7c 100644
--- a/var/spack/repos/builtin/packages/armadillo/package.py
+++ b/var/spack/repos/builtin/packages/armadillo/package.py
@@ -66,14 +66,14 @@ def cmake_args(self):
return [
# ARPACK support
- "-DARPACK_LIBRARY={0}".format(spec["arpack-ng"].libs.joined(";")),
+ self.define("ARPACK_LIBRARY", spec["arpack-ng"].libs.joined(";")),
# BLAS support
- "-DBLAS_LIBRARY={0}".format(spec["blas"].libs.joined(";")),
+ self.define("BLAS_LIBRARY", spec["blas"].libs.joined(";")),
# LAPACK support
- "-DLAPACK_LIBRARY={0}".format(spec["lapack"].libs.joined(";")),
+ self.define("LAPACK_LIBRARY", spec["lapack"].libs.joined(";")),
# SuperLU support
- "-DSuperLU_INCLUDE_DIR={0}".format(spec["superlu"].prefix.include),
- "-DSuperLU_LIBRARY={0}".format(spec["superlu"].libs.joined(";")),
+ self.define("SuperLU_INCLUDE_DIR", spec["superlu"].prefix.include),
+ self.define("SuperLU_LIBRARY", spec["superlu"].libs.joined(";")),
# HDF5 support
- "-DDETECT_HDF5={0}".format("ON" if "+hdf5" in spec else "OFF"),
+ self.define("DETECT_HDF5", "ON" if spec.satisfies("+hdf5") else "OFF"),
]
diff --git a/var/spack/repos/builtin/packages/armpl-gcc/package.py b/var/spack/repos/builtin/packages/armpl-gcc/package.py
index 22f8521d925169..f0157ae551ffe1 100644
--- a/var/spack/repos/builtin/packages/armpl-gcc/package.py
+++ b/var/spack/repos/builtin/packages/armpl-gcc/package.py
@@ -31,11 +31,62 @@
"rhel8": "RHEL-8",
"rhel9": "RHEL-9",
"rocky8": "RHEL-8",
+ "rocky9": "RHEL-9",
"amzn2": "AmazonLinux-2",
"amzn2023": "AmazonLinux-2023",
}
_versions = {
+ "23.10_gcc-12.2": {
+ "RHEL-7": ("e5e2c69ad281a676f2a06c835fbf31d4f9fdf46aa3f3f7c8aafff46985f64902"),
+ "RHEL-8": ("cc0f3572ead93d1e31797b7a39a40cff3414878df9bd24a452bf4877dc35ca4c"),
+ "RHEL-9": ("18c75f57333031e454921cc3f4f22fd567e5a701424ff9ac219bbfe9955a8a96"),
+ "SLES-15": ("e1e891eceaffedecf7351e2c499ef2b49a36c9af29174b366ff470d0a568c18f"),
+ "Ubuntu-20.04": ("976424875c52c2062fc76cbc5d527ee82413cdc0432d7c59f423295a3b0cc612"),
+ "Ubuntu-22.04": ("6dd778edf55e13e8b766d75c340f0259f6cb507a93966d76d188b8b3943c769b"),
+ "AmazonLinux-2": ("423ac3df262b5fcca6cea480503b693306c970dd8e8e05c753ece92446ac7fee"),
+ "AmazonLinux-2023": ("acadf3b6cde866cb41f7363b290a646a492769aaa5819d4c0d60df89913342a9"),
+ },
+ "23.10_gcc-11.3": {
+ "RHEL-7": ("b2afbdc056ae01fb5c71935448b19300ef368962a94ae76b8811f1d328c723c2"),
+ "RHEL-8": ("79b83a8a2c46b949896b3964c761cbd0b66c37826996afb62c466af5fb420bc2"),
+ "RHEL-9": ("7a84f561bcf941bb25123b3ef730b4c02616bc51215933870677163e78af38e3"),
+ "SLES-15": ("9243c405d092d3eabff112ccabc300e96f13c3d2c5c319df04d7093bb6f535a2"),
+ "Ubuntu-20.04": ("a16df088ef9303040d92b017b233c6e4c6f0300d09c2ad0a66c0318831bf009c"),
+ "Ubuntu-22.04": ("fabda66dc6388fa8c094443fa53deece5590db66caaa6a1e39e99e64d5bb0709"),
+ "AmazonLinux-2": ("db5d039fa1d07695a71b8733584d878bb778d41bc0ecc3e19059b75cffdcf8cd"),
+ "AmazonLinux-2023": ("977fd465702f086a69e3f7fc28f2bcb6c79a7af381dc7d865345115b26f4631f"),
+ },
+ "23.10_gcc-10.4": {
+ "RHEL-7": ("3c8bad3af82a76ca1a45705afd47028cc26c7093377a554e692e1cd6f61cb304"),
+ "RHEL-8": ("381afae0e3e94aa91029f571de0e51c2342e50b4f855db7a9b9ca66e16e26276"),
+ "SLES-15": ("226e9519407331b4ad5ded8699cd15f1d9b845843304bbf21f47009a399fe2a0"),
+ "Ubuntu-20.04": ("45de59f795ad9026a838ab611b03b1644169a034ce59d6cca2c7940850fa17ad"),
+ "AmazonLinux-2": ("637b51da12548dc66da9132328fe2ea39ba0736af66fb30332ca8eeb540e3373"),
+ },
+ "23.10_gcc-9.3": {
+ "RHEL-7": ("6fc2e3319b83ea2b1bf8d98ec43f614b937bb5f23d15aefe9e9171c882d24a60"),
+ "RHEL-8": ("1a05548a7051d1df42280fdcfcffeaf89d519aa7978bffd29171da60fdbccecf"),
+ "SLES-15": ("389ddd34e1299e4d942864f63f236158a81ce4190f59af512a1bea3221153bfe"),
+ "Ubuntu-20.04": ("a1a221859b5f0962df3a0c6ce31669827bff0bfffb185b80429620f14b40f4f4"),
+ "AmazonLinux-2": ("2eef9b28e95e75f0040eb61c9e1b406ec4d0b81cce3e95a652029aa0898733a0"),
+ },
+ "23.10_gcc-8.2": {
+ "RHEL-7": ("d6596721e74e7bdc8d9ce7b8b2a4c5ab2bd430f3ca69b9ec84f587f1aa181083"),
+ "RHEL-8": ("004aed52003e19a6c14df303456318e486ad783eb543b79285c7953a23722a4a"),
+ "SLES-15": ("12c638c0cc5bdc220699499ec6bb160a7b889f105901f4354bd2748a77d25c8e"),
+ "AmazonLinux-2": ("d039134236cda298cd0920c3c5b017eeef83fcab82949221dc7deb081026252f"),
+ },
+ "23.10_gcc-7.5": {
+ "RHEL-7": ("1a0ca860c168987d174923dfc7800e10521303914793162a8bae2b2cd3f68203"),
+ "AmazonLinux-2": ("58b201a6bbe7ee10563d8d42b32a77c4b15c57b4e81abb35d24b8c3fc9cff4d9"),
+ },
+ "23.10_flang-new_clang_17": {
+ "macOS": ("baf09cd6d1d1b7c780b8b31cfe1dd709596b182dc714127fbc9f23007ff9e23a")
+ },
+ "23.06_flang-new_clang_16": {
+ "macOS": ("232f5e89e0f1f4777480c64a790e477dfd2f423d3cf5704a116a2736f36250ea")
+ },
"23.04.1_gcc-12.2": {
"RHEL-7": ("789cc093cb7e0d9294aff0fdf94b74987435a09cdff4c1b7118a03350548d03c"),
"RHEL-8": ("1b668baec6d3df2d48c5aedc70baa6a9b638983b94bf2cd58d378859a1da49f0"),
@@ -177,20 +228,28 @@
def get_os(ver):
- spack_os = spack.platforms.host().default_os
+ platform = spack.platforms.host()
+ if platform.name == "darwin":
+ return "macOS"
if ver.startswith("22."):
- return _os_map_before_23.get(spack_os, "")
+ return _os_map_before_23.get(platform.default_os, "")
else:
- return _os_map.get(spack_os, "RHEL-7")
+ return _os_map.get(platform.default_os, "RHEL-7")
def get_package_url(version):
base_url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries/"
armpl_version = version.split("_")[0]
armpl_version_dashed = armpl_version.replace(".", "-")
- gcc_version = version.split("_")[1]
+ compiler_version = version.split("_", 1)[1]
os = get_os(armpl_version)
- filename = "arm-performance-libraries_" + armpl_version + "_" + os + "_" + gcc_version + ".tar"
+ if os == "macOS":
+ if armpl_version.startswith("23.06"):
+ return f"{base_url}{armpl_version_dashed}/armpl_{armpl_version}_{compiler_version}.dmg"
+ else:
+ filename = f"arm-performance-libraries_{armpl_version}_macOS.dmg"
+ return f"{base_url}{armpl_version_dashed}/macos/{filename}"
+ filename = f"arm-performance-libraries_{armpl_version}_{os}_{compiler_version}.tar"
os_short = ""
if armpl_version.startswith("22.0."):
os_short = os.replace("-", "")
@@ -198,7 +257,7 @@ def get_package_url(version):
os_short = os.split(".")[0].lower()
if "amazonlinux" in os_short:
os_short = os_short.replace("amazonlinux", "al")
- return base_url + armpl_version_dashed + "/" + os_short + "/" + filename
+ return f"{base_url}{armpl_version_dashed}/{os_short}/{filename}"
def get_armpl_prefix(spec):
@@ -215,16 +274,26 @@ class ArmplGcc(Package):
maintainers("annop-w")
for ver, packages in _versions.items():
- key = "{0}".format(get_os(ver))
+ key = get_os(ver)
sha256sum = packages.get(key)
url = get_package_url(ver)
if sha256sum:
- version(ver, sha256=sha256sum, url=url)
+ extension = os.path.splitext(url)[1]
+ # Don't attempt to expand .dmg files
+ expand = extension != ".dmg"
+ version(ver, sha256=sha256sum, url=url, extension=extension, expand=expand)
conflicts("target=x86:", msg="Only available on Aarch64")
conflicts("target=ppc64:", msg="Only available on Aarch64")
conflicts("target=ppc64le:", msg="Only available on Aarch64")
+ conflicts("%gcc@:11", when="@23.10_gcc-12.2")
+ conflicts("%gcc@:10", when="@23.10_gcc-11.3")
+ conflicts("%gcc@:9", when="@23.10_gcc-10.4")
+ conflicts("%gcc@:8", when="@23.10_gcc-9.3")
+ conflicts("%gcc@:7", when="@23.10_gcc-8.2")
+ conflicts("%gcc@:6", when="@23.10_gcc-7.5")
+
conflicts("%gcc@:11", when="@23.04.1_gcc-12.2")
conflicts("%gcc@:10", when="@23.04.1_gcc-11.3")
conflicts("%gcc@:9", when="@23.04.1_gcc-10.2")
@@ -266,17 +335,29 @@ class ArmplGcc(Package):
# Run the installer with the desired install directory
def install(self, spec, prefix):
+ if spec.platform == "darwin":
+ hdiutil = which("hdiutil")
+ # Mount image
+ mountpoint = os.path.join(self.stage.path, "mount")
+ hdiutil("attach", "-mountpoint", mountpoint, self.stage.archive_file)
+ try:
+ # Run installer
+ exe_name = f"armpl_{spec.version.string}_install.sh"
+ installer = Executable(os.path.join(mountpoint, exe_name))
+ installer("-y", f"--install_dir={prefix}")
+ finally:
+ # Unmount image
+ hdiutil("detach", mountpoint)
+ return
if self.compiler.name != "gcc":
raise spack.error.SpackError(("Only compatible with GCC.\n"))
with when("@:22"):
- armpl_version = "{}".format(spec.version.up_to(3)).split("_")[0]
+ armpl_version = spec.version.up_to(3).string.split("_")[0]
with when("@23:"):
- armpl_version = "{}".format(spec.version).split("_")[0]
+ armpl_version = spec.version.string.split("_")[0]
- exe = Executable(
- "./arm-performance-libraries_{0}_{1}.sh".format(armpl_version, get_os(armpl_version))
- )
+ exe = Executable(f"./arm-performance-libraries_{armpl_version}_{get_os(armpl_version)}.sh")
exe("--accept", "--force", "--install-to", prefix)
@property
@@ -330,14 +411,22 @@ def headers(self):
def setup_run_environment(self, env):
armpl_dir = get_armpl_prefix(self.spec)
- env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib"))
+ if self.spec.platform == "darwin":
+ env.prepend_path("DYLD_LIBRARY_PATH", join_path(armpl_dir, "lib"))
+ else:
+ env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib"))
@run_after("install")
def check_install(self):
armpl_dir = get_armpl_prefix(self.spec)
armpl_example_dir = join_path(armpl_dir, "examples")
# run example makefile
- make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir)
+ if self.spec.platform == "darwin":
+            # Fortran examples on macOS require flang-new, which is
+            # not commonly installed, so only run the C examples.
+ make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "c_examples")
+ else:
+ make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir)
# clean up
make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "clean")
diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py
index 92176069c1f19a..c50b90d6d10f45 100644
--- a/var/spack/repos/builtin/packages/arpack-ng/package.py
+++ b/var/spack/repos/builtin/packages/arpack-ng/package.py
@@ -150,14 +150,14 @@ def configure_args(self):
options = (
self.enable_or_disable("mpi")
+ [
- "--with-blas={0}".format(spec["blas"].libs.ld_flags),
- "--with-lapack={0}".format(spec["lapack"].libs.ld_flags),
+ f"--with-blas={spec['blas'].libs.ld_flags}",
+ f"--with-lapack={spec['lapack'].libs.ld_flags}",
]
+ self.enable_or_disable("shared")
)
if "+mpi" in spec:
- options.append("F77={0}".format(spec["mpi"].mpif77))
+ options.append(f"F77={spec['mpi'].mpif77}")
return options
diff --git a/var/spack/repos/builtin/packages/arrayfire/package.py b/var/spack/repos/builtin/packages/arrayfire/package.py
index 70cdfc67b200ab..9befc618417835 100644
--- a/var/spack/repos/builtin/packages/arrayfire/package.py
+++ b/var/spack/repos/builtin/packages/arrayfire/package.py
@@ -79,7 +79,7 @@ def cmake_args(self):
]
args.append(self.define("CUDA_architecture_build_targets", arch_list))
- if "^mkl" in self.spec:
+ if self.spec["blas"].name in INTEL_MATH_LIBRARIES:
if self.version >= Version("3.8.0"):
args.append(self.define("AF_COMPUTE_LIBRARY", "Intel-MKL"))
else:
diff --git a/var/spack/repos/builtin/packages/asdcplib/package.py b/var/spack/repos/builtin/packages/asdcplib/package.py
index 7ca3b2f9f1b786..5aec849ee59480 100644
--- a/var/spack/repos/builtin/packages/asdcplib/package.py
+++ b/var/spack/repos/builtin/packages/asdcplib/package.py
@@ -27,6 +27,6 @@ class Asdcplib(AutotoolsPackage):
def configure_args(self):
spec = self.spec
- args = ["--with-openssl={0}".format(spec["openssl"].prefix)]
+ args = [f"--with-openssl={spec['openssl'].prefix}"]
return args
diff --git a/var/spack/repos/builtin/packages/asio/package.py b/var/spack/repos/builtin/packages/asio/package.py
index 3c66d7df9e3ba1..05f391caa31ecd 100644
--- a/var/spack/repos/builtin/packages/asio/package.py
+++ b/var/spack/repos/builtin/packages/asio/package.py
@@ -16,6 +16,8 @@ class Asio(AutotoolsPackage):
git = "https://github.com/chriskohlhoff/asio.git"
maintainers("msimberg", "pauleonix")
+ license("BSL-1.0")
+
# As uneven minor versions of asio are not considered stable, they wont be added anymore
version("1.28.0", sha256="226438b0798099ad2a202563a83571ce06dd13b570d8fded4840dbc1f97fa328")
version("1.26.0", sha256="935583f86825b7b212479277d03543e0f419a55677fa8cb73a79a927b858a72d")
diff --git a/var/spack/repos/builtin/packages/aspa/package.py b/var/spack/repos/builtin/packages/aspa/package.py
index 6bfbad1d3926e2..8219a46b004bd3 100644
--- a/var/spack/repos/builtin/packages/aspa/package.py
+++ b/var/spack/repos/builtin/packages/aspa/package.py
@@ -35,12 +35,12 @@ def build_targets(self):
targets = [
"--directory=exec",
"--file=Makefile",
- "LIBS={0} {1} {2}".format(
+ "LIBS={} {} {}".format(
self.spec["lapack"].libs.ld_flags,
self.spec["blas"].libs.ld_flags,
self.spec["hdf5"].libs.ld_flags,
),
- "CXX={0}".format(self.spec["mpi"].mpicxx),
+ f"CXX={self.spec['mpi'].mpicxx}",
]
return targets
diff --git a/var/spack/repos/builtin/packages/aspcud/package.py b/var/spack/repos/builtin/packages/aspcud/package.py
index fbaef453e990e4..8233dcaba0e058 100644
--- a/var/spack/repos/builtin/packages/aspcud/package.py
+++ b/var/spack/repos/builtin/packages/aspcud/package.py
@@ -28,11 +28,9 @@ class Aspcud(CMakePackage):
depends_on("clingo")
def cmake_args(self):
- spec = self.spec
- gringo_path = join_path(spec["clingo"].prefix.bin, "gringo")
- clasp_path = join_path(spec["clingo"].prefix.bin, "clasp")
- args = [
- "-DASPCUD_GRINGO_PATH={0}".format(gringo_path),
- "-DASPCUD_CLASP_PATH={0}".format(clasp_path),
+ gringo_path = join_path(self.spec["clingo"].prefix.bin, "gringo")
+ clasp_path = join_path(self.spec["clingo"].prefix.bin, "clasp")
+ return [
+ self.define("ASPCUD_GRINGO_PATH", gringo_path),
+ self.define("ASPCUD_CLASP_PATH", clasp_path),
]
- return args
diff --git a/var/spack/repos/builtin/packages/aspera-cli/package.py b/var/spack/repos/builtin/packages/aspera-cli/package.py
index afa63b4c33dc8f..91aa1e19e47818 100644
--- a/var/spack/repos/builtin/packages/aspera-cli/package.py
+++ b/var/spack/repos/builtin/packages/aspera-cli/package.py
@@ -29,7 +29,7 @@ def install(self, spec, prefix):
# Update destination path
filter_file(
"INSTALL_DIR=~/.aspera",
- "INSTALL_DIR=%s" % prefix,
+ f"INSTALL_DIR={prefix}",
runfile,
string=True,
stop_at="__ARCHIVE_FOLLOWS__",
diff --git a/var/spack/repos/builtin/packages/astral/package.py b/var/spack/repos/builtin/packages/astral/package.py
index 3afa3691ba0b0f..af196ab9c391c4 100644
--- a/var/spack/repos/builtin/packages/astral/package.py
+++ b/var/spack/repos/builtin/packages/astral/package.py
@@ -31,7 +31,7 @@ def install(self, spec, prefix):
make()
mkdirp(prefix.bin)
install_tree("lib", prefix.tools.lib)
- jar_file = "astral.{v}.jar".format(v=self.version)
+ jar_file = f"astral.{self.version}.jar"
install(jar_file, prefix.tools)
script_sh = join_path(os.path.dirname(__file__), "astral.sh")
diff --git a/var/spack/repos/builtin/packages/astyle/package.py b/var/spack/repos/builtin/packages/astyle/package.py
index 951661004b724a..ef4fe29378ffa0 100644
--- a/var/spack/repos/builtin/packages/astyle/package.py
+++ b/var/spack/repos/builtin/packages/astyle/package.py
@@ -30,11 +30,11 @@ def build_directory(self):
def edit(self, spec, prefix):
makefile = join_path(self.build_directory, "Makefile")
- filter_file(r"^CXX\s*=.*", "CXX=%s" % spack_cxx, makefile)
+ filter_file(r"^CXX\s*=.*", f"CXX={spack_cxx}", makefile)
# If the group is not a user account, the installation will fail,
# so remove the -o $ (USER) -g $ (USER) parameter.
filter_file(r"^INSTALL=.*", "INSTALL=install", makefile)
@property
def install_targets(self):
- return ["install", "prefix={0}".format(self.prefix)]
+ return ["install", f"prefix={self.prefix}"]
diff --git a/var/spack/repos/builtin/packages/at-spi2-atk/package.py b/var/spack/repos/builtin/packages/at-spi2-atk/package.py
index 27e875f9f65c30..6e2f492112ce25 100644
--- a/var/spack/repos/builtin/packages/at-spi2-atk/package.py
+++ b/var/spack/repos/builtin/packages/at-spi2-atk/package.py
@@ -27,4 +27,4 @@ class AtSpi2Atk(MesonPackage):
def url_for_version(self, version):
"""Handle gnome's version-based custom URLs."""
url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-atk"
- return url + "/%s/at-spi2-atk-%s.tar.xz" % (version.up_to(2), version)
+ return url + f"/{version.up_to(2)}/at-spi2-atk-{version}.tar.xz"
diff --git a/var/spack/repos/builtin/packages/at-spi2-core/package.py b/var/spack/repos/builtin/packages/at-spi2-core/package.py
index f1bf5a61c86fca..ec8cbd5e23c68b 100644
--- a/var/spack/repos/builtin/packages/at-spi2-core/package.py
+++ b/var/spack/repos/builtin/packages/at-spi2-core/package.py
@@ -45,7 +45,7 @@ def patch(self):
def url_for_version(self, version):
"""Handle gnome's version-based custom URLs."""
url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-core"
- return url + "/%s/at-spi2-core-%s.tar.xz" % (version.up_to(2), version)
+ return url + f"/{version.up_to(2)}/at-spi2-core-{version}.tar.xz"
def setup_run_environment(self, env):
env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0"))
diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py
index 41dec1c587080d..52849669ecca01 100644
--- a/var/spack/repos/builtin/packages/atk/package.py
+++ b/var/spack/repos/builtin/packages/atk/package.py
@@ -43,7 +43,7 @@ class Atk(Package):
def url_for_version(self, version):
"""Handle gnome's version-based custom URLs."""
url = "http://ftp.gnome.org/pub/gnome/sources/atk"
- return url + "/%s/atk-%s.tar.xz" % (version.up_to(2), version)
+ return url + f"/{version.up_to(2)}/atk-{version}.tar.xz"
def setup_run_environment(self, env):
env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0"))
@@ -64,7 +64,7 @@ def install(self, spec, prefix):
@when("@:2.27")
def install(self, spec, prefix):
- configure("--prefix={0}".format(prefix))
+ configure(f"--prefix={prefix}")
make()
if self.run_tests:
make("check")
diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py
index d23a16cb1ba16b..ecc6379b570c62 100644
--- a/var/spack/repos/builtin/packages/atlas/package.py
+++ b/var/spack/repos/builtin/packages/atlas/package.py
@@ -109,11 +109,11 @@ def install(self, spec, prefix):
# Lapack resource to provide full lapack build. Note that
# ATLAS only provides a few LAPACK routines natively.
- options.append("--with-netlib-lapack-tarfile=%s" % self.stage[1].archive_file)
+ options.append(f"--with-netlib-lapack-tarfile={self.stage[1].archive_file}")
with working_dir("spack-build", create=True):
configure = Executable("../configure")
- configure("--prefix=%s" % prefix, *options)
+ configure(f"--prefix={prefix}", *options)
make()
make("check")
make("ptcheck")
@@ -147,7 +147,7 @@ def install_test(self):
source_file = join_path(os.path.dirname(self.module.__file__), "test_cblas_dgemm.c")
blessed_file = join_path(os.path.dirname(self.module.__file__), "test_cblas_dgemm.output")
- include_flags = ["-I%s" % self.spec.prefix.include]
+ include_flags = [f"-I{self.spec.prefix.include}"]
link_flags = self.spec["atlas"].libs.ld_flags.split()
output = compile_c_and_execute(source_file, include_flags, link_flags)
diff --git a/var/spack/repos/builtin/packages/atom-dft/package.py b/var/spack/repos/builtin/packages/atom-dft/package.py
index 9015067428fb15..3f8c5e7756303a 100644
--- a/var/spack/repos/builtin/packages/atom-dft/package.py
+++ b/var/spack/repos/builtin/packages/atom-dft/package.py
@@ -24,8 +24,8 @@ def edit(self, spec, prefix):
@property
def build_targets(self):
return [
- "XMLF90_ROOT=%s" % self.spec["xmlf90"].prefix,
- "GRIDXC_ROOT=%s" % self.spec["libgridxc"].prefix,
+ f"XMLF90_ROOT={self.spec['xmlf90'].prefix}",
+ f"GRIDXC_ROOT={self.spec['libgridxc'].prefix}",
"FC=fc",
]
diff --git a/var/spack/repos/builtin/packages/atompaw/package.py b/var/spack/repos/builtin/packages/atompaw/package.py
index 7cc4b4d417fc0f..f0ea750583910a 100644
--- a/var/spack/repos/builtin/packages/atompaw/package.py
+++ b/var/spack/repos/builtin/packages/atompaw/package.py
@@ -49,8 +49,8 @@ def configure_args(self):
spec = self.spec
linalg = spec["lapack"].libs + spec["blas"].libs
return [
- "--with-linalg-libs=%s" % linalg.ld_flags,
+ f"--with-linalg-libs={linalg.ld_flags}",
"--enable-libxc",
- "--with-libxc-incs=-I%s" % spec["libxc"].prefix.include,
- "--with-libxc-libs=-L%s -lxcf90 -lxc" % spec["libxc"].prefix.lib,
+ f"--with-libxc-incs=-I{spec['libxc'].prefix.include}",
+ f"--with-libxc-libs=-L{spec['libxc'].prefix.lib} -lxcf90 -lxc",
]
diff --git a/var/spack/repos/builtin/packages/audacious/package.py b/var/spack/repos/builtin/packages/audacious/package.py
index 4cce09e0fcf23f..1d6634780b5870 100644
--- a/var/spack/repos/builtin/packages/audacious/package.py
+++ b/var/spack/repos/builtin/packages/audacious/package.py
@@ -28,7 +28,7 @@ class Audacious(AutotoolsPackage):
def patch(self):
search_path_args = " ".join(self.autoreconf_search_path_args)
- search_path_str = "-I m4 {0}".format(search_path_args)
+ search_path_str = f"-I m4 {search_path_args}"
filter_file("-I m4", search_path_str, "autogen.sh")
def autoreconf(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/augustus/package.py b/var/spack/repos/builtin/packages/augustus/package.py
index e0e002d6fcef81..2c5cfa5c0d531e 100644
--- a/var/spack/repos/builtin/packages/augustus/package.py
+++ b/var/spack/repos/builtin/packages/augustus/package.py
@@ -69,7 +69,7 @@ def edit(self, spec, prefix):
filter_file("g++", spack_cxx, "makefile", string=True)
filter_file(
"g++ -I/usr/include/boost",
- "{0} -I{1}".format(spack_cxx, self.spec["boost"].prefix.include),
+ f"{spack_cxx} -I{self.spec['boost'].prefix.include}",
"src/subdir.mk",
string=True,
)
@@ -101,7 +101,7 @@ def edit(self, spec, prefix):
with working_dir(join_path("auxprogs", "filterBam", "src")):
makefile = FileFilter("Makefile")
- makefile.filter("BAMTOOLS = .*", "BAMTOOLS = {0}".format(bamtools))
+ makefile.filter("BAMTOOLS = .*", f"BAMTOOLS = {bamtools}")
makefile.filter("INCLUDES = *", "INCLUDES = -I$(BAMTOOLS)/include/bamtools ")
if "bamtools@2.5:" in spec:
makefile.filter(
@@ -113,32 +113,30 @@ def edit(self, spec, prefix):
)
with working_dir(join_path("auxprogs", "bam2hints")):
makefile = FileFilter("Makefile")
- makefile.filter("/usr/include/bamtools", "{0}/include/bamtools".format(bamtools))
+ makefile.filter("/usr/include/bamtools", f"{bamtools}/include/bamtools")
if "bamtools@2.5:" in spec:
makefile.filter(
- "LIBS = -lbamtools -lz",
- "LIBS = {0}/lib64" "/libbamtools.a -lz".format(bamtools),
+ "LIBS = -lbamtools -lz", f"LIBS = {bamtools}/lib64/libbamtools.a -lz"
)
if "bamtools@:2.4" in spec:
makefile.filter(
- "LIBS = -lbamtools -lz",
- "LIBS = {0}/lib/bamtools" "/libbamtools.a -lz".format(bamtools),
+ "LIBS = -lbamtools -lz", f"LIBS = {bamtools}/lib/bamtools/libbamtools.a -lz"
)
if self.version < Version("3.4.0"):
with working_dir(join_path("auxprogs", "bam2wig")):
makefile = FileFilter("Makefile")
# point tools to spack installations
- makefile.filter("BCFTOOLS=.*$", "BCFTOOLS={0}/include".format(bcftools))
- makefile.filter("SAMTOOLS=.*$", "SAMTOOLS={0}/include".format(samtools))
- makefile.filter("HTSLIB=.*$", "HTSLIB={0}/include".format(htslib))
+ makefile.filter("BCFTOOLS=.*$", f"BCFTOOLS={bcftools}/include")
+ makefile.filter("SAMTOOLS=.*$", f"SAMTOOLS={samtools}/include")
+ makefile.filter("HTSLIB=.*$", f"HTSLIB={htslib}/include")
# fix bad linking dirs
makefile.filter("$(SAMTOOLS)/libbam.a", "$(SAMTOOLS)/../lib/libbam.a", string=True)
makefile.filter("$(HTSLIB)/libhts.a", "$(HTSLIB)/../lib/libhts.a", string=True)
with working_dir(join_path("auxprogs", "checkTargetSortedness")):
makefile = FileFilter("Makefile")
- makefile.filter("SAMTOOLS.*=.*$", "SAMTOOLS={0}/include".format(samtools))
+ makefile.filter("SAMTOOLS.*=.*$", f"SAMTOOLS={samtools}/include")
makefile.filter("LIBS=-lbam", "LIBS=$(SAMTOOLS)/../lib/libbam.a", string=True)
else:
mysql = self.spec["mysql-client"].prefix
@@ -147,12 +145,12 @@ def edit(self, spec, prefix):
with working_dir("src"):
makefile = FileFilter("Makefile")
- makefile.filter(r"/usr/include/mysql\+\+", "{0}/include/mysql++".format(mysqlpp))
+ makefile.filter(r"/usr/include/mysql\+\+", f"{mysqlpp}/include/mysql++")
if "^mariadb-c-client" in spec:
- makefile.filter("/usr/include/mysql", "{0}/include/mariadb".format(mysql))
+ makefile.filter("/usr/include/mysql", f"{mysql}/include/mariadb")
else:
- makefile.filter("/usr/include/mysql", "{0}/include/mysql".format(mysql))
- makefile.filter("/usr/include/lpsolve", "{0}/include/lpsolve".format(lpsolve))
+ makefile.filter("/usr/include/mysql", f"{mysql}/include/mysql")
+ makefile.filter("/usr/include/lpsolve", f"{lpsolve}/include/lpsolve")
def install(self, spec, prefix):
install_tree("bin", join_path(self.spec.prefix, "bin"))
@@ -163,12 +161,12 @@ def install(self, spec, prefix):
def filter_sbang(self):
with working_dir(self.prefix.scripts):
pattern = "^#!.*"
- repl = "#!{0}".format(self.spec["perl"].command.path)
+ repl = f"#!{self.spec['perl'].command.path}"
files = glob.glob("*.pl")
for file in files:
filter_file(pattern, repl, *files, backup=False)
- repl = "#!{0}".format(self.spec["python"].command.path)
+ repl = f"#!{self.spec['python'].command.path}"
files = glob.glob("*.py")
for file in files:
filter_file(pattern, repl, *files, backup=False)
diff --git a/var/spack/repos/builtin/packages/authd/package.py b/var/spack/repos/builtin/packages/authd/package.py
index b2cee813c1eae0..dbb290839bc916 100644
--- a/var/spack/repos/builtin/packages/authd/package.py
+++ b/var/spack/repos/builtin/packages/authd/package.py
@@ -20,4 +20,4 @@ def setup_run_environment(self, env):
env.prepend_path("PATH", self.prefix.sbin)
def install(self, spec, prefix):
- make("prefix={0}".format(prefix), "install")
+ make(f"prefix={prefix}", "install")
diff --git a/var/spack/repos/builtin/packages/autodock-vina/package.py b/var/spack/repos/builtin/packages/autodock-vina/package.py
index dadf50ea054ab3..8ca01804d8c0dc 100644
--- a/var/spack/repos/builtin/packages/autodock-vina/package.py
+++ b/var/spack/repos/builtin/packages/autodock-vina/package.py
@@ -44,10 +44,10 @@ def edit(self, spec, prefix):
with working_dir(self.build_directory):
makefile = FileFilter("Makefile")
makefile.filter(
- "BOOST_INCLUDE = .*", "BOOST_INCLUDE = %s" % self.spec["boost"].prefix.include
+ "BOOST_INCLUDE = .*", f"BOOST_INCLUDE = {self.spec['boost'].prefix.include}"
)
makefile.filter("C_PLATFORM=.*", "C_PLATFORM=-pthread")
- makefile.filter("GPP=.*", "GPP=%s" % spack_cxx)
+ makefile.filter("GPP=.*", f"GPP={spack_cxx}")
def build(self, spec, prefix):
with working_dir(self.build_directory):
diff --git a/var/spack/repos/builtin/packages/autogen/package.py b/var/spack/repos/builtin/packages/autogen/package.py
index 2ecc434106eec3..54b088beb599c6 100644
--- a/var/spack/repos/builtin/packages/autogen/package.py
+++ b/var/spack/repos/builtin/packages/autogen/package.py
@@ -36,7 +36,7 @@ def configure_args(self):
]
if "+xml" in spec:
- args.append("--with-libxml2={0}".format(spec["libxml2"].prefix))
+ args.append(f"--with-libxml2={spec['libxml2'].prefix}")
else:
args.append("--without-libxml2")
diff --git a/var/spack/repos/builtin/packages/avizo/package.py b/var/spack/repos/builtin/packages/avizo/package.py
index 42c201e780ed0a..43364919cd757f 100644
--- a/var/spack/repos/builtin/packages/avizo/package.py
+++ b/var/spack/repos/builtin/packages/avizo/package.py
@@ -24,25 +24,25 @@ class Avizo(Package):
version(
"2020.1",
sha256="9321aaa276567eebf116e268353c33a4c930d768d22793f921338e1d8cefe991",
- url="file://{0}/Avizo-20201-Linux64-gcc48.bin".format(os.getcwd()),
+ url=f"file://{os.getcwd()}/Avizo-20201-Linux64-gcc48.bin",
expand=False,
)
version(
"2019.4",
sha256="a637720535bcbe254ab56368004a9544c64ec36186373fa24f26cee279685248",
- url="file://{0}/Avizo-20194-Linux64-gcc48.bin".format(os.getcwd()),
+ url=f"file://{os.getcwd()}/Avizo-20194-Linux64-gcc48.bin",
expand=False,
)
version(
"2019.3",
sha256="be109df81e2f7238f234862367841dae05e76cc62218c1f36b1d9bc9514ce5f7",
- url="file://{0}/Avizo-20193-Linux64-gcc48.bin".format(os.getcwd()),
+ url=f"file://{os.getcwd()}/Avizo-20193-Linux64-gcc48.bin",
expand=False,
)
version(
"9.7.0",
sha256="9c9b9e81957387f4218df0c5adbb80717e9ae80ab3ca6ff8da523f7f499dcc5b",
- url="file://{0}/Avizo-970-Linux64-gcc44.bin".format(os.getcwd()),
+ url=f"file://{os.getcwd()}/Avizo-970-Linux64-gcc44.bin",
expand=False,
)
@@ -67,15 +67,11 @@ def setup_run_environment(self, env):
def install(self, spec, prefix):
ver = self.version.joined
sh = which("sh")
- sh(
- "Avizo-{0}-Linux64-gcc{1}.bin".format(ver, self.gcc_ver[self.version.string]),
- "--noexec",
- "--keep",
- )
+ sh(f"Avizo-{ver}-Linux64-gcc{self.gcc_ver[self.version.string]}.bin", "--noexec", "--keep")
with working_dir("Avizo"):
avizo_tar = tarfile.open(
- name="Avizo-{0}-Linux64-gcc{1}.tar.bz2".format(
+ name="Avizo-{}-Linux64-gcc{}.tar.bz2".format(
self.version, self.gcc_ver[self.version.string]
)
)
diff --git a/var/spack/repos/builtin/packages/bart/package.py b/var/spack/repos/builtin/packages/bart/package.py
index cc371f4f5c31da..9fa0baa01833d4 100644
--- a/var/spack/repos/builtin/packages/bart/package.py
+++ b/var/spack/repos/builtin/packages/bart/package.py
@@ -48,7 +48,7 @@ def edit(self, spec, prefix):
if spec["blas"].name == "openblas":
env["OPENBLAS"] = "1"
- if "^mkl" in spec:
+ elif spec["blas"].name in INTEL_MATH_LIBRARIES:
env["MKL"] = "1"
env["MKL_BASE"] = spec["mkl"].prefix.mkl
else:
diff --git a/var/spack/repos/builtin/packages/batchedblas/package.py b/var/spack/repos/builtin/packages/batchedblas/package.py
index c44b50bc81e349..712f270e8cf8fc 100644
--- a/var/spack/repos/builtin/packages/batchedblas/package.py
+++ b/var/spack/repos/builtin/packages/batchedblas/package.py
@@ -23,7 +23,7 @@ class Batchedblas(MakefilePackage):
def edit(self, spec, prefix):
CCFLAGS = [self.compiler.openmp_flag, "-I./", "-O3"]
BLAS = ["-lm", spec["blas"].libs.ld_flags]
- if not spec.satisfies("^mkl"):
+ if spec["blas"].name not in INTEL_MATH_LIBRARIES:
CCFLAGS.append("-D_CBLAS_")
if spec.satisfies("%intel"):
CCFLAGS.extend(["-Os"])
diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py
index 18240c7f330954..becb364a2c2d99 100644
--- a/var/spack/repos/builtin/packages/bear/package.py
+++ b/var/spack/repos/builtin/packages/bear/package.py
@@ -23,10 +23,10 @@ class Bear(CMakePackage):
version("2.0.4", sha256="33ea117b09068aa2cd59c0f0f7535ad82c5ee473133779f1cc20f6f99793a63e")
depends_on("pkgconfig", when="@3:")
- depends_on("fmt", when="@3.0.0:")
- depends_on("grpc", when="@3.0.0:")
+ depends_on("fmt@8", when="@3.0.0:")
+ depends_on("grpc +shared", when="@3.0.0:")
depends_on("nlohmann-json", when="@3.0.0:")
- depends_on("spdlog", when="@3.0.0:")
+ depends_on("spdlog +fmt_external", when="@3.0.0:")
depends_on("cmake@2.8:", type="build")
depends_on("python", type="build")
depends_on("googletest", type="test", when="@3:")
diff --git a/var/spack/repos/builtin/packages/beatnik/package.py b/var/spack/repos/builtin/packages/beatnik/package.py
index aa39194494bbee..9afa9afa3825ce 100644
--- a/var/spack/repos/builtin/packages/beatnik/package.py
+++ b/var/spack/repos/builtin/packages/beatnik/package.py
@@ -14,8 +14,7 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage):
maintainers("patrickb314", "JStewart28")
- # Add proper versions and checksums here. Will add 1.0 when a proper SHA is available
- # version("1.0", sha256="XXX")
+ version("1.0", commit="ae31ef9cb44678d5ace77994b45b0778defa3d2f")
version("develop", branch="develop")
version("main", branch="main")
@@ -55,6 +54,7 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage):
conflicts("mpich ~rocm", when="+rocm")
conflicts("openmpi ~cuda", when="+cuda")
conflicts("^intel-mpi") # Heffte won't build with intel MPI because of needed C++ MPI support
+    conflicts("^spectrum-mpi", when="^cuda@11.3:")  # CUDA-aware Spectrum MPI is broken with CUDA 11.3 and later
# Propagate CUDA and AMD GPU targets to cabana
for cuda_arch in CudaPackage.cuda_arch_values:
diff --git a/var/spack/repos/builtin/packages/benchmark/package.py b/var/spack/repos/builtin/packages/benchmark/package.py
index 9026d3d1c14928..fe0b286352fcd1 100644
--- a/var/spack/repos/builtin/packages/benchmark/package.py
+++ b/var/spack/repos/builtin/packages/benchmark/package.py
@@ -16,7 +16,16 @@ class Benchmark(CMakePackage):
# first properly installed CMake config packages in
# 1.2.0 release: https://github.com/google/benchmark/issues/363
version("main", branch="main")
+ version("1.8.3", sha256="6bc180a57d23d4d9515519f92b0c83d61b05b5bab188961f36ac7b06b0d9e9ce")
+ version("1.8.2", sha256="2aab2980d0376137f969d92848fbb68216abb07633034534fc8c65cc4e7a0e93")
+ version("1.8.1", sha256="e9ff65cecfed4f60c893a1e8a1ba94221fad3b27075f2f80f47eb424b0f8c9bd")
+ version("1.8.0", sha256="ea2e94c24ddf6594d15c711c06ccd4486434d9cf3eca954e2af8a20c88f9f172")
+ version("1.7.1", sha256="6430e4092653380d9dc4ccb45a1e2dc9259d581f4866dc0759713126056bc1d7")
+ version("1.7.0", sha256="3aff99169fa8bdee356eaa1f691e835a6e57b1efeadb8a0f9f228531158246ac")
+ version("1.6.2", sha256="a9f77e6188c1cd4ebedfa7538bf5176d6acc72ead6f456919e5f464ef2f06158")
+ version("1.6.1", sha256="6132883bc8c9b0df5375b16ab520fac1a85dc9e4cf5be59480448ece74b278d4")
version("1.6.0", sha256="1f71c72ce08d2c1310011ea6436b31e39ccab8c2db94186d26657d41747c85d6")
+ version("1.5.6", sha256="789f85b4810d13ff803834ea75999e41b326405d83d6a538baf01499eda96102")
version("1.5.5", sha256="3bff5f237c317ddfd8d5a9b96b3eede7c0802e799db520d38ce756a2a46a18a0")
version("1.5.4", sha256="e3adf8c98bb38a198822725c0fc6c0ae4711f16fbbf6aeb311d5ad11e5a081b5")
version("1.5.0", sha256="3c6a165b6ecc948967a1ead710d4a181d7b0fbcaa183ef7ea84604994966221a")
diff --git a/var/spack/repos/builtin/packages/bfs/package.py b/var/spack/repos/builtin/packages/bfs/package.py
index f90c882648c1a7..3e932f142ac060 100644
--- a/var/spack/repos/builtin/packages/bfs/package.py
+++ b/var/spack/repos/builtin/packages/bfs/package.py
@@ -14,6 +14,7 @@ class Bfs(MakefilePackage):
maintainers("alecbcs")
+ version("3.0.4", sha256="7196f5a624871c91ad051752ea21043c198a875189e08c70ab3167567a72889d")
version("3.0.2", sha256="d3456a9aeecc031064db0dbe012e55a11eb97be88d0ab33a90e570fe66457f92")
version("3.0.1", sha256="a38bb704201ed29f4e0b989fb2ab3791ca51c3eff90acfc31fff424579bbf962")
diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py
index 1ad363f747a667..e42e823bbb64bd 100644
--- a/var/spack/repos/builtin/packages/bison/package.py
+++ b/var/spack/repos/builtin/packages/bison/package.py
@@ -65,6 +65,13 @@ class Bison(AutotoolsPackage, GNUMirrorPackage):
patch("nvhpc-3.7.patch", when="@3.7.0:3.7 %nvhpc")
conflicts("%intel@:14", when="@3.4.2:", msg="Intel 14 has immature C11 support")
+ conflicts(
+ "%oneapi",
+ msg=(
+ "bison is likely miscompiled by oneapi compilers, "
+ "see https://github.com/spack/spack/issues/37172"
+ ),
+ )
if sys.platform == "darwin" and macos_version() >= Version("10.13"):
patch("secure_snprintf.patch", level=0, when="@3.0.4")
diff --git a/var/spack/repos/builtin/packages/brahma/package.py b/var/spack/repos/builtin/packages/brahma/package.py
index 3932de204f7c92..00f20c1e1c2c5f 100644
--- a/var/spack/repos/builtin/packages/brahma/package.py
+++ b/var/spack/repos/builtin/packages/brahma/package.py
@@ -15,11 +15,14 @@ class Brahma(CMakePackage):
version("develop", branch="dev")
version("master", branch="master")
+ version("0.0.2", tag="v0.0.2", commit="bac58d5aa8962a5c902d401fbf8021aff9104d3c")
version("0.0.1", tag="v0.0.1", commit="15156036f14e36511dfc3f3751dc953540526a2b")
variant("mpi", default=False, description="Enable MPI support")
- depends_on("cpp-logger@0.0.1")
- depends_on("gotcha@develop")
+ depends_on("cpp-logger@0.0.1", when="@:0.0.1")
+ depends_on("cpp-logger@0.0.2", when="@0.0.2:")
+ depends_on("gotcha@1.0.4", when="@:0.0.1")
+ depends_on("gotcha@1.0.5", when="@0.0.2:")
depends_on("catch2@3.0.1")
depends_on("mpi", when="+mpi")
diff --git a/var/spack/repos/builtin/packages/butterflypack/package.py b/var/spack/repos/builtin/packages/butterflypack/package.py
index 848dbcdfebabef..746fa32082d894 100644
--- a/var/spack/repos/builtin/packages/butterflypack/package.py
+++ b/var/spack/repos/builtin/packages/butterflypack/package.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from platform import machine
+
from spack.package import *
@@ -26,6 +28,7 @@ class Butterflypack(CMakePackage):
maintainers("liuyangzhuan")
version("master", branch="master")
+ version("2.4.0", sha256="12d04e7101b2c8292b5c62d9f42b5cd1e8a3c5af639d2665596e3e4255fd0804")
version("2.2.2", sha256="73f67073e4291877f1eee19483a8a7b3c761eaf79a75805d52105ceedead85ea")
version("2.2.1", sha256="4cedc2896a6b368773ce4f9003aa2c0230baf56a4464a6b899a155e01406a232")
version("2.2.0", sha256="1ce5b8461b3c4f488cee6396419e8a6f0a1bcf95254f24d7c27bfa53b391c30b")
@@ -74,7 +77,7 @@ def cmake_args(self):
args.append("-Denable_openmp=%s" % ("ON" if "+openmp" in spec else "OFF"))
if "%cce" in spec:
# Assume the proper Cray CCE module (cce) is loaded:
- craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()]
+ craylibs_path = env["CRAYLIBS_" + machine().upper()]
env.setdefault("LDFLAGS", "")
env["LDFLAGS"] += " -Wl,-rpath," + craylibs_path
diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py
index 58f598ac15286d..e3c618bb303835 100644
--- a/var/spack/repos/builtin/packages/bzip2/package.py
+++ b/var/spack/repos/builtin/packages/bzip2/package.py
@@ -44,6 +44,10 @@ class Bzip2(Package, SourcewarePackage):
if sys.platform != "win32":
depends_on("diffutils", type="build")
+ depends_on("gmake", type="build", when="platform=linux")
+ depends_on("gmake", type="build", when="platform=cray")
+ depends_on("gmake", type="build", when="platform=darwin")
+
@classmethod
def determine_version(cls, exe):
output = Executable(exe)("--help", output=str, error=str)
diff --git a/var/spack/repos/builtin/packages/c-blosc/package.py b/var/spack/repos/builtin/packages/c-blosc/package.py
index e578004f24a10f..31de7ef7ae35b1 100644
--- a/var/spack/repos/builtin/packages/c-blosc/package.py
+++ b/var/spack/repos/builtin/packages/c-blosc/package.py
@@ -15,6 +15,7 @@ class CBlosc(CMakePackage):
homepage = "https://www.blosc.org"
url = "https://github.com/Blosc/c-blosc/archive/v1.11.1.tar.gz"
+ version("1.21.5", sha256="32e61961bbf81ffea6ff30e9d70fca36c86178afd3e3cfa13376adec8c687509")
version("1.21.4", sha256="e72bd03827b8564bbb3dc3ea0d0e689b4863871ce3861d946f2efd7a186ecf3e")
version("1.21.2", sha256="e5b4ddb4403cbbad7aab6e9ff55762ef298729c8a793c6147160c771959ea2aa")
version("1.21.1", sha256="f387149eab24efa01c308e4cba0f59f64ccae57292ec9c794002232f7903b55b")
diff --git a/var/spack/repos/builtin/packages/c-blosc2/package.py b/var/spack/repos/builtin/packages/c-blosc2/package.py
index 8eceeca8952917..4b745f426f1093 100644
--- a/var/spack/repos/builtin/packages/c-blosc2/package.py
+++ b/var/spack/repos/builtin/packages/c-blosc2/package.py
@@ -17,6 +17,7 @@ class CBlosc2(CMakePackage):
maintainers("ax3l", "robert-mijakovic")
version("develop", branch="master")
+ version("2.11.1", sha256="1e9923e0f026eb6e6caee608b4b9a523837806076fc79409055a6386cf5de1ea")
version("2.10.5", sha256="a88f94bf839c1371aab8207a6a43698ceb92c72f65d0d7fe5b6e59f24c138b4d")
# 2.10.2+ fixes regressions with external dependencies
version("2.10.2", sha256="069785bc14c006c7dab40ea0c620bdf3eb8752663fd55c706d145bceabc2a31d")
diff --git a/var/spack/repos/builtin/packages/ccache/fix-gcc-12.patch b/var/spack/repos/builtin/packages/ccache/fix-gcc-12.patch
new file mode 100644
index 00000000000000..638d76a2c3440a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ccache/fix-gcc-12.patch
@@ -0,0 +1,23 @@
+https://bugs.gentoo.org/906310
+https://bugs.gentoo.org/906942
+https://github.com/ccache/ccache/issues/1289
+https://github.com/ccache/ccache/commit/689168c292f1ed26c5f4a3070aeb649dad7facb5
+
+From 689168c292f1ed26c5f4a3070aeb649dad7facb5 Mon Sep 17 00:00:00 2001
+From: Joel Rosdahl
+Date: Tue, 1 Aug 2023 12:30:12 +0200
+Subject: [PATCH] fix: Work around GCC 12.3 bug 109241
+
+See also #1289.
+--- a/src/storage/local/LocalStorage.cpp
++++ b/src/storage/local/LocalStorage.cpp
+@@ -854,7 +854,9 @@ LocalStorage::recompress(const std::optional level,
+ auto l2_content_lock = get_level_2_content_lock(l1_index, l2_index);
+ l2_content_lock.make_long_lived(lock_manager);
+ if (!l2_content_lock.acquire()) {
+- LOG("Failed to acquire content lock for {}/{}", l1_index, l2_index);
++ // LOG_RAW+fmt::format instead of LOG due to GCC 12.3 bug #109241
++ LOG_RAW(fmt::format(
++ "Failed to acquire content lock for {}/{}", l1_index, l2_index));
+ return;
+ }
diff --git a/var/spack/repos/builtin/packages/ccache/package.py b/var/spack/repos/builtin/packages/ccache/package.py
index 06b1eb09685026..0b3045a43ac789 100644
--- a/var/spack/repos/builtin/packages/ccache/package.py
+++ b/var/spack/repos/builtin/packages/ccache/package.py
@@ -72,6 +72,8 @@ class Ccache(CMakePackage):
conflicts("%clang@:7", when="@4.7:")
conflicts("%clang@:4", when="@4.4:")
+ patch("fix-gcc-12.patch", when="%gcc@12")
+
def cmake_args(self):
return [
self.define("ENABLE_TESTING", False),
diff --git a/var/spack/repos/builtin/packages/celeritas/package.py b/var/spack/repos/builtin/packages/celeritas/package.py
index 83690862bfdf9f..b384a4e396c22e 100644
--- a/var/spack/repos/builtin/packages/celeritas/package.py
+++ b/var/spack/repos/builtin/packages/celeritas/package.py
@@ -17,8 +17,17 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
maintainers("sethrj")
- version("0.3.2", sha256="65a33de2518716638375df259d9dfc4d68b821ba1110f56b24c823ef5c5df249")
- version("0.3.1", sha256="0f1effab306856d66f5079e8cadcb63e8c1f8a79245b94bf44b89251b3fb0cf0")
+ version("0.4.0", sha256="8b8eaef84641eeca0fc40321d358205fc9d51e3c6dc7bd1bf03218c1919c774e")
+ version(
+ "0.3.2",
+ sha256="65a33de2518716638375df259d9dfc4d68b821ba1110f56b24c823ef5c5df249",
+ deprecated=True,
+ )
+ version(
+ "0.3.1",
+ sha256="0f1effab306856d66f5079e8cadcb63e8c1f8a79245b94bf44b89251b3fb0cf0",
+ deprecated=True,
+ )
version("0.3.0", sha256="f9620b6bcd8c9b5324ef215f8e44461f915c3fff47bf85ae442c9dafacaa79ac")
version("0.2.2", sha256="ba5e341d636e00e3d7dbac13a2016b97014917489f46b8b387a2adf9d9563872")
version(
diff --git a/var/spack/repos/builtin/packages/ceres-solver/package.py b/var/spack/repos/builtin/packages/ceres-solver/package.py
index 30ee0f19709137..c9cdecc07e1f4f 100644
--- a/var/spack/repos/builtin/packages/ceres-solver/package.py
+++ b/var/spack/repos/builtin/packages/ceres-solver/package.py
@@ -17,6 +17,7 @@ class CeresSolver(CMakePackage):
homepage = "http://ceres-solver.org"
url = "http://ceres-solver.org/ceres-solver-1.12.0.tar.gz"
+ version("2.2.0", sha256="48b2302a7986ece172898477c3bcd6deb8fb5cf19b3327bc49969aad4cede82d")
version("2.0.0", sha256="10298a1d75ca884aa0507d1abb0e0f04800a92871cd400d4c361b56a777a7603")
version("1.14.0", sha256="4744005fc3b902fed886ea418df70690caa8e2ff6b5a90f3dd88a3d291ef8e8e")
version("1.12.0", sha256="745bfed55111e086954126b748eb9efe20e30be5b825c6dec3c525cf20afc895")
@@ -25,12 +26,26 @@ class CeresSolver(CMakePackage):
variant("shared", default=True, description="Build shared libraries")
variant("examples", default=False, description="Build examples")
+ depends_on("cmake@2.8.0:", type="build", when="@1.12.0:1.14.0")
+ depends_on("cmake@3.5:", type="build", when="@2.0.0")
+ depends_on("cmake@3.16:3.27", type="build", when="@2.2.0")
depends_on("eigen@3:")
+ depends_on("eigen@3.3:", when="@2.0.0:")
depends_on("lapack")
- depends_on("glog")
+ depends_on("glog@0.3.5:")
+ depends_on("suite-sparse", when="+suitesparse")
def cmake_args(self):
- args = ["-DCXSPARSE=OFF", "-DEIGENSPARSE=ON", "-DLAPACK=ON", "-DSCHUR_SPECIALIZATIONS=OFF"]
+ args = []
+ if self.spec.satisfies("@:2.0.0"):
+ args.extend(
+ [
+ "-DCXSPARSE=OFF",
+ "-DEIGENSPARSE=ON",
+ "-DLAPACK=ON",
+ "-DSCHUR_SPECIALIZATIONS=OFF",
+ ]
+ )
if "+suitesparse" in self.spec:
args.append("-DSUITESPARSE=ON")
diff --git a/var/spack/repos/builtin/packages/charliecloud/package.py b/var/spack/repos/builtin/packages/charliecloud/package.py
index 1b751b511dd139..88cca7d6e2a88a 100644
--- a/var/spack/repos/builtin/packages/charliecloud/package.py
+++ b/var/spack/repos/builtin/packages/charliecloud/package.py
@@ -17,7 +17,12 @@ class Charliecloud(AutotoolsPackage):
tags = ["e4s"]
version("master", branch="master")
- version("0.34", sha256="034080c162949f4344ae1011cda026d4bb3ecd5cdb53135ac06d236f87e3b27d")
+ version("0.35", sha256="042f5be5ed8eda95f45230b4647510780142a50adb4e748be57e8dd8926b310e")
+ version(
+ "0.34",
+ deprecated=True,
+ sha256="034080c162949f4344ae1011cda026d4bb3ecd5cdb53135ac06d236f87e3b27d",
+ )
version(
"0.33",
deprecated=True,
diff --git a/var/spack/repos/builtin/packages/clhep/package.py b/var/spack/repos/builtin/packages/clhep/package.py
index 47774019b84a19..2bbb499182cfd5 100644
--- a/var/spack/repos/builtin/packages/clhep/package.py
+++ b/var/spack/repos/builtin/packages/clhep/package.py
@@ -19,6 +19,8 @@ class Clhep(CMakePackage):
maintainers("drbenmorgan")
+ version("2.4.7.1", sha256="1c8304a7772ac6b99195f1300378c6e3ddf4ad07c85d64a04505652abb8a55f9")
+ version("2.4.7.0", sha256="7fa460030bc1a804ea7da8cce7611b93261493bbb66c3cfd3ceec935d7e1b8d3")
version("2.4.6.4", sha256="49c89330f1903ef707d3c5d79c16a7c5a6f2c90fc290e2034ee3834809489e57")
version("2.4.6.3", sha256="fcd007f11b10ba4af28d027222b63148d0eb44ff7a082eee353bdf921f9c684a")
version("2.4.6.2", sha256="aded73e49bac85a5b4e86f64a0ee3d6f3cfe5551b0f7731c78b6d8f9dac6e8dc")
diff --git a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py
index 7fb34446a11ada..65535f330abfc2 100644
--- a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py
+++ b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py
@@ -32,6 +32,13 @@ class ClingoBootstrap(Clingo):
description="Enable a series of Spack-specific optimizations (PGO, LTO, mimalloc)",
)
+ variant(
+ "force_setuptools",
+ default=False,
+ description="Force a dependency on setuptools to help the old concretizer",
+ )
+ depends_on("py-setuptools", type="build", when="+force_setuptools")
+
# Enable LTO
conflicts("~ipo", when="+optimized")
diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py
index ab5fe9a0430da1..f64dc6e8121dcd 100644
--- a/var/spack/repos/builtin/packages/clingo/package.py
+++ b/var/spack/repos/builtin/packages/clingo/package.py
@@ -72,7 +72,7 @@ class Clingo(CMakePackage):
# TODO: Simplify this after Spack 0.21 release. The old concretizer has problems with
# py-setuptools ^python@3.6, so we only apply the distutils -> setuptools patch for Python 3.12
with when("@:5.6.1 ^python@3.12:"):
- patch("setuptools.patch")
+ patch("setuptools-2.patch")
depends_on("py-setuptools", type="build")
def patch(self):
diff --git a/var/spack/repos/builtin/packages/clingo/setuptools-2.patch b/var/spack/repos/builtin/packages/clingo/setuptools-2.patch
new file mode 100644
index 00000000000000..6c6377936fb420
--- /dev/null
+++ b/var/spack/repos/builtin/packages/clingo/setuptools-2.patch
@@ -0,0 +1,8 @@
+diff --git a/cmake/python-site.py b/cmake/python-site.py
+--- a/cmake/python-site.py
++++ b/cmake/python-site.py
+@@ -1,3 +1,4 @@
++import setuptools # makes import distutils work
+ from distutils.sysconfig import get_python_lib, get_config_vars
+ import sys
+ if sys.argv[1] == "prefix":
\ No newline at end of file
diff --git a/var/spack/repos/builtin/packages/clingo/setuptools.patch b/var/spack/repos/builtin/packages/clingo/setuptools.patch
deleted file mode 100644
index 4a38a7e6d9ad9e..00000000000000
--- a/var/spack/repos/builtin/packages/clingo/setuptools.patch
+++ /dev/null
@@ -1,14 +0,0 @@
-diff --git a/cmake/python-site.py b/cmake/python-site.py
-index 1e7fc8ce..95ef827f 100644
---- a/cmake/python-site.py
-+++ b/cmake/python-site.py
-@@ -1,4 +1,7 @@
--from distutils.sysconfig import get_python_lib, get_config_vars
-+try:
-+ from setuptools.sysconfig import get_python_lib, get_config_vars
-+except ImportError:
-+ from distutils.sysconfig import get_python_lib, get_config_vars
- import sys
- if sys.argv[1] == "prefix":
- print(get_python_lib(True, False, sys.argv[2] if len(sys.argv) > 2 else None))
-
diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py
index 44f6b596aa1dd5..ec9aac6fa06613 100644
--- a/var/spack/repos/builtin/packages/cmake/package.py
+++ b/var/spack/repos/builtin/packages/cmake/package.py
@@ -20,13 +20,14 @@ class Cmake(Package):
url = "https://github.com/Kitware/CMake/releases/download/v3.19.0/cmake-3.19.0.tar.gz"
git = "https://gitlab.kitware.com/cmake/cmake.git"
- maintainers("alalazo")
+ maintainers("alalazo", "johnwparent")
tags = ["build-tools", "windows"]
executables = ["^cmake[0-9]*$"]
version("master", branch="master")
+ version("3.27.8", sha256="fece24563f697870fbb982ea8bf17482c9d5f855d8c9bf0b82463d76c9e8d0cc")
version("3.27.7", sha256="08f71a106036bf051f692760ef9558c0577c42ac39e96ba097e7662bd4158d8e")
version("3.27.6", sha256="ef3056df528569e0e8956f6cf38806879347ac6de6a4ff7e4105dc4578732cfb")
version("3.27.4", sha256="0a905ca8635ca81aa152e123bdde7e54cbe764fdd9a70d62af44cad8b92967af")
@@ -234,13 +235,15 @@ class Cmake(Package):
with when("~ownlibs"):
depends_on("expat")
# expat/zlib are used in CMake/CTest, so why not require them in libarchive.
- depends_on("libarchive@3.1.0: xar=expat compression=zlib")
- depends_on("libarchive@3.3.3:", when="@3.15.0:")
- depends_on("libuv@1.0.0:1.10", when="@3.7.0:3.10.3")
- depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11")
- depends_on("libuv@1.10.0:", when="@3.12.0:")
- depends_on("rhash", when="@3.8.0:")
- depends_on("jsoncpp build_system=meson", when="@3.2:")
+ for plat in ["darwin", "cray", "linux"]:
+ with when("platform=%s" % plat):
+ depends_on("libarchive@3.1.0: xar=expat compression=zlib")
+ depends_on("libarchive@3.3.3:", when="@3.15.0:")
+ depends_on("libuv@1.0.0:1.10", when="@3.7.0:3.10.3")
+ depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11")
+ depends_on("libuv@1.10.0:", when="@3.12.0:")
+ depends_on("rhash", when="@3.8.0:")
+ depends_on("jsoncpp build_system=meson", when="@3.2:")
depends_on("ncurses", when="+ncurses")
@@ -248,9 +251,6 @@ class Cmake(Package):
depends_on("python@2.7.11:", type="build")
depends_on("py-sphinx", type="build")
- # TODO: update curl package to build with Windows SSL implementation
- # at which point we can build with +ownlibs on Windows
- conflicts("~ownlibs", when="platform=windows")
# Cannot build with Intel, should be fixed in 3.6.2
# https://gitlab.kitware.com/cmake/cmake/issues/16226
patch("intel-c-gnu11.patch", when="@3.6.0:3.6.1")
diff --git a/var/spack/repos/builtin/packages/collier/package.py b/var/spack/repos/builtin/packages/collier/package.py
index c29704e354b0d2..98407d7c22076a 100644
--- a/var/spack/repos/builtin/packages/collier/package.py
+++ b/var/spack/repos/builtin/packages/collier/package.py
@@ -18,6 +18,7 @@ class Collier(CMakePackage):
maintainers("vvolkl")
+ version("1.2.8", sha256="5cb24ce24ba1f62b7a96c655b31e9fddccc603eff31e60f9033b16354a6afd89")
version("1.2.7", sha256="fde4b144a17c1bf5aa2ceaa86c71c79da10c9de8fec7bd33c8bffb4198acd5ca")
version("1.2.6", sha256="b0d517868c71d2d1b8b6d3e0c370a43c9eb18ea8393a6e80070a5a2206f7de36")
version("1.2.5", sha256="3ec58a975ff0c3b1ca870bc38973476c923ff78fd3dd5850e296037852b94a8b")
diff --git a/var/spack/repos/builtin/packages/composable-kernel/package.py b/var/spack/repos/builtin/packages/composable-kernel/package.py
index 85b383896a8af9..fc986f5fc6af0b 100644
--- a/var/spack/repos/builtin/packages/composable-kernel/package.py
+++ b/var/spack/repos/builtin/packages/composable-kernel/package.py
@@ -18,7 +18,7 @@ class ComposableKernel(CMakePackage):
version("master", branch="develop")
version("5.6.1", commit="f5ec04f091fa5c48c67d7bacec36a414d0be06a5")
- version("5.6.0", commit="f0fd02634c2f8f8c70f5a0ab2a8c84db5e36eeca")
+ version("5.6.0", commit="f5ec04f091fa5c48c67d7bacec36a414d0be06a5")
version("5.5.1", commit="ac9e01e2cc3721be24619807adc444e1f59a9d25")
version("5.5.0", commit="8b76b832420a3d69708401de6607a033163edcce")
version("5.4.3", commit="bb3d9546f186e39cefedc3e7f01d88924ba20168")
@@ -64,14 +64,14 @@ def cmake_args(self):
]
if "auto" not in self.spec.variants["amdgpu_target"]:
args.append(self.define_from_variant("AMDGPU_TARGETS", "amdgpu_target"))
- if self.spec.satisfies("@5.6.1:"):
+ if self.spec.satisfies("@5.6.0:"):
args.append(self.define("INSTANCES_ONLY", "ON"))
return args
def build(self, spec, prefix):
with working_dir(self.build_directory):
# only instances is necessary to build and install
- if self.spec.satisfies("@5.6.1:"):
+ if self.spec.satisfies("@5.6.0:"):
make()
else:
make("instances")
diff --git a/var/spack/repos/builtin/packages/conquest/package.py b/var/spack/repos/builtin/packages/conquest/package.py
index 29e9fa5777bc3f..4478881b91e98c 100644
--- a/var/spack/repos/builtin/packages/conquest/package.py
+++ b/var/spack/repos/builtin/packages/conquest/package.py
@@ -49,6 +49,15 @@ class Conquest(MakefilePackage):
build_directory = "src"
+ # The SYSTEM variable is required above version 1.2.
+ # Versions 1.2 and older should ignore it.
+ @property
+ def build_targets(self):
+ if self.version > Version("1.2"):
+ return ["SYSTEM = example", "Conquest"]
+ else:
+ return ["Conquest"]
+
def edit(self, spec, prefix):
fflags = "-O3 -fallow-argument-mismatch"
ldflags = ""
@@ -63,12 +72,23 @@ def edit(self, spec, prefix):
lapack_ld = self.spec["lapack"].libs.ld_flags
blas_ld = self.spec["blas"].libs.ld_flags
-
- defs_file = FileFilter("./src/system.make")
-
- defs_file.filter("COMPFLAGS=.*", f"COMPFLAGS= {fflags}")
- defs_file.filter("LINKFLAGS=.*", f"LINKFLAGS= {ldflags}")
- defs_file.filter("# BLAS=.*", f"BLAS= {lapack_ld} -llapack {blas_ld} -lblas")
+ fftw_ld = self.spec["fftw"].libs.ld_flags
+ libxc_ld = self.spec["libxc"].libs.ld_flags
+
+ # Starting from 1.3 there's automated logic in the Makefile that picks
+ # from a list of possible files for system/compiler-specific definitions.
+ # This is useful for manual builds, but since Spack will do its own
+ # automation of compiler-specific flags, we will override it.
+ if self.version > Version("1.2"):
+ defs_file = FileFilter("./src/system/system.example.make")
+ else:
+ defs_file = FileFilter("./src/system.make")
+
+ defs_file.filter(".*COMPFLAGS=.*", f"COMPFLAGS= {fflags}")
+ defs_file.filter(".*LINKFLAGS=.*", f"LINKFLAGS= {ldflags}")
+ defs_file.filter(".*BLAS=.*", f"BLAS= {lapack_ld} {blas_ld}")
+ defs_file.filter(".*FFT_LIB=.*", f"FFT_LIB={fftw_ld}")
+ defs_file.filter(".*XC_LIB=.*", f"XC_LIB={libxc_ld} -lxcf90 -lxc")
if "+openmp" in self.spec:
defs_file.filter("OMP_DUMMY = DUMMY", "OMP_DUMMY = ")
@@ -81,3 +101,5 @@ def edit(self, spec, prefix):
def install(self, spec, prefix):
mkdirp(prefix.bin)
install("./bin/Conquest", prefix.bin)
+ if self.version > Version("1.2"):
+ install_tree("./benchmarks/", join_path(prefix, "benchmarks"))
diff --git a/var/spack/repos/builtin/packages/cool/package.py b/var/spack/repos/builtin/packages/cool/package.py
index 9418a16cbeb5d4..f12f474c167950 100644
--- a/var/spack/repos/builtin/packages/cool/package.py
+++ b/var/spack/repos/builtin/packages/cool/package.py
@@ -14,7 +14,6 @@ class Cool(CMakePackage):
git = "https://gitlab.cern.ch/lcgcool/cool.git"
tags = ["hep"]
- maintainers("iarspider")
version("3.3.10", tag="COOL_3_3_10", commit="110b51c2b50af07cbe1f64a1c67ce9f737c4421d")
version("3.3.7", tag="COOL_3_3_7", commit="6f9a29d903e51ecbb26bdc8a694a67db9f28e234")
diff --git a/var/spack/repos/builtin/packages/cosma/package.py b/var/spack/repos/builtin/packages/cosma/package.py
index 2fccafe1872b0e..19db9a0531859c 100644
--- a/var/spack/repos/builtin/packages/cosma/package.py
+++ b/var/spack/repos/builtin/packages/cosma/package.py
@@ -48,6 +48,9 @@ class Cosma(CMakePackage):
with when("+cuda"):
variant("nccl", default=False, description="Use cuda nccl")
+ with when("+rocm"):
+ variant("rccl", default=False, description="Use rocm rccl")
+
depends_on("cmake@3.22:", type="build")
depends_on("mpi@3:")
depends_on("blas", when="~cuda ~rocm")
@@ -114,6 +117,7 @@ def cmake_args(self):
self.define_from_variant("COSMA_WITH_TESTS", "tests"),
self.define_from_variant("COSMA_WITH_APPS", "apps"),
self.define_from_variant("COSMA_WITH_NCCL", "nccl"),
+ self.define_from_variant("COSMA_WITH_RCCL", "rccl"),
self.define_from_variant("COSMA_WITH_GPU_AWARE_MPI", "gpu_direct"),
self.define_from_variant("COSMA_WITH_PROFILING", "profiling"),
self.define("COSMA_WITH_BENCHMARKS", False),
diff --git a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch
index 2961a4ceee8d45..985edad3aa5a1c 100644
--- a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch
+++ b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch
@@ -1,10 +1,22 @@
-From 1897cbf3e467dc765f733b09af041fe8f25fa906 Mon Sep 17 00:00:00 2001
+From b75eb217115820059aba26d1ff1a8657e3841e7d Mon Sep 17 00:00:00 2001
From: Mathieu Taillefumier
-Date: Thu, 19 Oct 2023 12:21:50 +0200
-Subject: [PATCH] [cmake] fix for building gromacs and cp2k with cmake and spack
+Date: Mon, 23 Oct 2023 15:50:44 +0200
+Subject: [PATCH] cmake-fixes-2023.2
+
+---
+ CMakeLists.txt | 63 +++++++-----
+ cmake/FindBlas.cmake | 174 +++++++++++++++++-----------------
+ cmake/FindLapack.cmake | 47 ++++-----
+ cmake/cp2k.pc.in | 19 ----
+ cmake/cp2kConfig.cmake.in | 195 ++++++++++++++++++++------------------
+ cmake/libcp2k.pc.in | 11 +++
+ src/CMakeLists.txt | 18 ++--
+ 7 files changed, 276 insertions(+), 251 deletions(-)
+ delete mode 100644 cmake/cp2k.pc.in
+ create mode 100644 cmake/libcp2k.pc.in
diff --git a/CMakeLists.txt b/CMakeLists.txt
-index 3f81c7b524..1b6c6a0636 100644
+index 3f81c7b52..f2d85d033 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -49,7 +49,8 @@ if(NOT DEFINED CMAKE_CUDA_STANDARD)
@@ -17,7 +29,18 @@ index 3f81c7b524..1b6c6a0636 100644
find_package(PkgConfig)
-@@ -115,8 +116,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT
+@@ -108,6 +109,10 @@ option(CP2K_USE_LIBXSMM "Use libxsmm for small gemms (supports x86 platforms)"
+ OFF)
+ option(CP2K_BUILD_DBCSR "Duild dbcsr at the same time than cp2k." OFF)
+ option(BUILD_SHARED_LIBS "Build cp2k shared library" ON)
++option(
++ CP2K_USE_FFTW3_WITH_MKL
++ "If set to ON use the original implementation of fftw3 instead of the MKL implementation."
++ OFF)
+
+ cmake_dependent_option(CP2K_ENABLE_ELPA_OPENMP_SUPPORT
+ "Enable elpa openmp support" ON "CP2K_USE_ELPA" OFF)
+@@ -115,8 +120,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT
"Enable FFTW openmp support" ON "CP2K_USE_FFTW3" OFF)
cmake_dependent_option(CP2K_ENABLE_FFTW3_THREADS_SUPPORT
"Enable FFTW THREADS support" OFF "CP2K_USE_FFTW3" OFF)
@@ -28,7 +51,71 @@ index 3f81c7b524..1b6c6a0636 100644
cmake_dependent_option(
DBCSR_USE_ACCEL
-@@ -748,7 +749,7 @@ add_subdirectory(src)
+@@ -527,7 +532,7 @@ if(CP2K_USE_ACCEL MATCHES "CUDA")
+ endif()
+
+ set(CP2K_USE_CUDA ON)
+- message(STATUS ``"-- CUDA compiler and libraries found")
++ message(STATUS "-- CUDA compiler and libraries found")
+ elseif(CP2K_USE_ACCEL MATCHES "HIP")
+ enable_language(HIP)
+ # Find hip
+@@ -620,27 +625,36 @@ endif()
+
+ # FFTW3
+
++set(CP2K_USE_FFTW3_ OFF)
+ if(CP2K_USE_FFTW3)
+- find_package(Fftw REQUIRED)
+- if(CP2K_ENABLE_FFTW3_THREADS_SUPPORT AND CP2K_ENABLE_FFTW3_OPENMP_SUPPORT)
+- message(
+- FATAL_ERROR
+- "Fftw3 threads and openmp supports can not be used at the same time")
+- endif()
++ if(CP2K_USE_FFTW3_WITH_MKL OR NOT CP2K_BLAS_VENDOR MATCHES "MKL")
++ find_package(Fftw REQUIRED)
++ if(CP2K_ENABLE_FFTW3_THREADS_SUPPORT AND CP2K_ENABLE_FFTW3_OPENMP_SUPPORT)
++ message(
++ FATAL_ERROR
++ "Fftw3 threads and openmp supports can not be used at the same time")
++ endif()
+
+- if((CP2K_ENABLE_FFTW3_THREADS_SUPPORT) AND (NOT TARGET
+- CP2K::FFTW3::fftw3_threads))
+- message(
+- FATAL_ERROR
+- "fftw3 was compiled without multithreading support (--enable-threads option in fftw build system)."
+- )
+- endif()
++ if((CP2K_ENABLE_FFTW3_THREADS_SUPPORT) AND (NOT TARGET
++ CP2K::FFTW3::fftw3_threads))
++ message(
++ FATAL_ERROR
++ "fftw3 was compiled without multithreading support (--enable-threads option in fftw build system)."
++ )
++ endif()
+
+- if((CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) AND (NOT TARGET CP2K::FFTW3::fftw3_omp))
+- message(
+- FATAL_ERROR
+- "fftw3 was compiled without openmp support (--enable-openmp option in fftw build system)."
+- )
++ if((CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) AND (NOT TARGET CP2K::FFTW3::fftw3_omp
++ ))
++ message(
++ FATAL_ERROR
++ "fftw3 was compiled without openmp support (--enable-openmp option in fftw build system)."
++ )
++ endif()
++ # we use this variable later on to include the fftw target whenever mkl is
++ # found or not
++ set(CP2K_USE_FFTW3_ ON)
++ else()
++ message("-- Using the MKL implementation of FFTW3.")
+ endif()
+ endif()
+
+@@ -748,7 +762,7 @@ add_subdirectory(src)
include(GNUInstallDirs)
get_target_property(CP2K_LIBS cp2k_link_libs INTERFACE_LINK_LIBRARIES)
@@ -37,19 +124,18 @@ index 3f81c7b524..1b6c6a0636 100644
message(
STATUS "--------------------------------------------------------------------")
-@@ -1039,6 +1040,10 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake"
+@@ -1039,6 +1053,9 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake"
"${PROJECT_BINARY_DIR}/cp2kConfigVersion.cmake"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k")
+install(FILES "${PROJECT_BINARY_DIR}/libcp2k.pc"
+ DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
-+
+
install(
DIRECTORY "${PROJECT_SOURCE_DIR}/cmake"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k"
diff --git a/cmake/FindBlas.cmake b/cmake/FindBlas.cmake
-index 6e5fb78240..335cbd964a 100644
+index 6e5fb7824..335cbd964 100644
--- a/cmake/FindBlas.cmake
+++ b/cmake/FindBlas.cmake
@@ -15,104 +15,108 @@ if(NOT
@@ -247,7 +333,7 @@ index 6e5fb78240..335cbd964a 100644
# having the fortran interface is usually enough. C, C++ and others languages
# might require this information though
diff --git a/cmake/FindLapack.cmake b/cmake/FindLapack.cmake
-index 966e0d78d3..77a1e04258 100644
+index 966e0d78d..77a1e0425 100644
--- a/cmake/FindLapack.cmake
+++ b/cmake/FindLapack.cmake
@@ -20,33 +20,34 @@ include(FindPackageHandleStandardArgs)
@@ -310,7 +396,7 @@ index 966e0d78d3..77a1e04258 100644
REQUIRED_VARS CP2K_LAPACK_LINK_LIBRARIES)
diff --git a/cmake/cp2k.pc.in b/cmake/cp2k.pc.in
deleted file mode 100644
-index 5b4a095660..0000000000
+index 5b4a09566..000000000
--- a/cmake/cp2k.pc.in
+++ /dev/null
@@ -1,19 +0,0 @@
@@ -335,10 +421,10 @@ index 5b4a095660..0000000000
-#Libs.private: -L"${libdir}" @CP2K_LIBS@
\ No newline at end of file
diff --git a/cmake/cp2kConfig.cmake.in b/cmake/cp2kConfig.cmake.in
-index a3acd47442..a9e0eb5a58 100644
+index a3acd4744..1c310e19b 100644
--- a/cmake/cp2kConfig.cmake.in
+++ b/cmake/cp2kConfig.cmake.in
-@@ -5,112 +5,120 @@
+@@ -5,112 +5,121 @@
#! SPDX-License-Identifier: GPL-2.0-or-later !
#!-------------------------------------------------------------------------------------------------!
@@ -405,9 +491,10 @@ index a3acd47442..a9e0eb5a58 100644
+ find_dependency(MPI REQUIRED)
+ endif()
+
-+ if(@CP2K_USE_FFTW3@)
++ if(@CP2K_USE_FFTW3@ OR @CP2K_USE_FFTW3_WITH_MKL@)
+ find_dependency(Fftw REQUIRED)
+ endif()
++
+ # QUIP
+ if(@CP2K_USE_QUIP@)
+ find_dependency(Quip REQUIRED)
@@ -554,7 +641,7 @@ index a3acd47442..a9e0eb5a58 100644
-include("${CMAKE_CURRENT_LIST_DIR}/cp2kTargets.cmake")
diff --git a/cmake/libcp2k.pc.in b/cmake/libcp2k.pc.in
new file mode 100644
-index 0000000000..618af55e28
+index 000000000..618af55e2
--- /dev/null
+++ b/cmake/libcp2k.pc.in
@@ -0,0 +1,11 @@
@@ -570,19 +657,41 @@ index 0000000000..618af55e28
+Cflags: -I"${includedir}/cp2k" -I"${includedir}/cp2k/@CMAKE_Fortran_COMPILER_ID@-@CMAKE_Fortran_COMPILER_VERSION@"
+Libs: -L"${libdir}" -lcp2k
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
-index dbc955885e..e003d4f88d 100644
+index dbc955885..1178101ad 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
+@@ -1536,9 +1536,9 @@ target_link_libraries(
+ $<$:CP2K::LIBSPG::libspg>
+ $<$:CP2K::Libxc::xc>
+ $<$:CP2K::ELPA::elpa>
+- $<$:CP2K::FFTW3::fftw3>
+- $<$:CP2K::FFTW3::fftw3_threads>
+- $<$:CP2K::FFTW3::fftw3_omp>
++ $<$:CP2K::FFTW3::fftw3>
++ $<$,$>:CP2K::FFTW3::fftw3_threads>
++ $<$,$>:CP2K::FFTW3::fftw3_omp>
+ $<$:SPLA::spla>
+ $<$:CP2K::Libint2::int2>
+ $<$:${TORCH_LIBRARIES}>
@@ -1555,7 +1555,7 @@ target_compile_definitions(
cp2k
PUBLIC $<$:__parallel>
$<$:__SCALAPACK>
- $<$:__MPI_08>
-+ $<$:__MPI_08>
++ $<$:__MPI_F08>
__COMPILE_DATE=\"${CP2K_TIMESTAMP}\"
__COMPILE_HOST=\"${CP2K_HOST_NAME}\"
__COMPILE_REVISION=\"${CP2K_GIT_HASH}\"
-@@ -1774,12 +1774,12 @@ install(
+@@ -1577,7 +1577,7 @@ target_compile_definitions(
+ $<$:__OFFLOAD_GEMM>
+ $<$:__ELPA>
+ $<$:__LIBXC>
+- $<$:__FFTW3>
++ $<$:__FFTW3>
+ $<$:__LIBINT>
+ $<$:__LIBPEXSI>
+ $<$:__LIBTORCH>
+@@ -1774,12 +1774,14 @@ install(
EXPORT cp2k_targets
FILE cp2kTargets.cmake
NAMESPACE cp2k::
@@ -590,11 +699,16 @@ index dbc955885e..e003d4f88d 100644
+ DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")
-install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k")
-+install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}")
++install(FILES start/libcp2k.h
++ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}")
install(
DIRECTORY "${PROJECT_BINARY_DIR}/src/mod_files"
- DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k"
-+ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}"
++ DESTINATION
++ "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}"
FILES_MATCHING
PATTERN "*.mod")
+--
+2.41.0
+
diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py
index 27deecf78472aa..5234f46f7a67c4 100644
--- a/var/spack/repos/builtin/packages/cp2k/package.py
+++ b/var/spack/repos/builtin/packages/cp2k/package.py
@@ -83,6 +83,13 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage):
description="Enable optimised diagonalisation routines from ELPA",
when="@6.1:",
)
+ variant(
+ "dlaf",
+ default=False,
+ description="Enable DLA-Future eigensolver and Cholesky decomposition",
+ # TODO: Pin version when integrated in a release
+ when="@master build_system=cmake",
+ )
variant(
"sirius",
default=False,
@@ -222,10 +229,24 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage):
conflicts("~mpi", msg="elpa requires MPI")
depends_on("elpa+openmp", when="+openmp")
depends_on("elpa~openmp", when="~openmp")
+ depends_on("elpa+cuda", when="+cuda")
+ depends_on("elpa~cuda", when="~cuda")
+ depends_on("elpa+rocm", when="+rocm")
+ depends_on("elpa~rocm", when="~rocm")
depends_on("elpa@2021.05:", when="@8.3:")
depends_on("elpa@2021.11.001:", when="@9.1:")
depends_on("elpa@2023.05.001:", when="@2023.2:")
+ with when("+dlaf"):
+ conflicts(
+ "~mpi", msg="DLA-Future requires MPI. Only the distributed eigensolver is available."
+ )
+ depends_on("dla-future@0.2.1: +scalapack")
+ depends_on("dla-future ~cuda", when="~cuda")
+ depends_on("dla-future ~rocm", when="~rocm")
+ depends_on("dla-future +cuda", when="+cuda")
+ depends_on("dla-future +rocm", when="+rocm")
+
with when("+plumed"):
depends_on("plumed+shared")
depends_on("plumed+mpi", when="+mpi")
@@ -281,6 +302,10 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage):
depends_on("dbcsr+cuda", when="+cuda")
depends_on("dbcsr+rocm", when="+rocm")
+ with when("@2022: +rocm"):
+ depends_on("hipblas")
+ depends_on("hipfft")
+
# CP2K needs compiler specific compilation flags, e.g. optflags
conflicts("%apple-clang")
conflicts("%clang")
@@ -945,6 +970,7 @@ def cmake_args(self):
args += [
self.define_from_variant("CP2K_ENABLE_REGTESTS", "enable_regtests"),
self.define_from_variant("CP2K_USE_ELPA", "elpa"),
+ self.define_from_variant("CP2K_USE_DLAF", "dlaf"),
self.define_from_variant("CP2K_USE_LIBINT2", "libint"),
self.define_from_variant("CP2K_USE_SIRIUS", "sirius"),
self.define_from_variant("CP2K_USE_SPLA", "spla"),
diff --git a/var/spack/repos/builtin/packages/cpp-logger/package.py b/var/spack/repos/builtin/packages/cpp-logger/package.py
index 93c27cf9842ccf..14f3fa2d60ab02 100644
--- a/var/spack/repos/builtin/packages/cpp-logger/package.py
+++ b/var/spack/repos/builtin/packages/cpp-logger/package.py
@@ -15,4 +15,5 @@ class CppLogger(CMakePackage):
version("develop", branch="develop")
version("master", branch="master")
- version("0.0.1", tag="v0.0.1", commit="47994ccd8958129a422950a432742b902bb283ca")
+ version("0.0.2", tag="v0.0.2", commit="329a48401033d2d2a1f1196141763cab029220ae")
+ version("0.0.1", tag="v0.0.1", commit="d48b38ab14477bb7c53f8189b8b4be2ea214c28a")
diff --git a/var/spack/repos/builtin/packages/cpr/package.py b/var/spack/repos/builtin/packages/cpr/package.py
index 71e32d9960d536..0d18a6a9199645 100644
--- a/var/spack/repos/builtin/packages/cpr/package.py
+++ b/var/spack/repos/builtin/packages/cpr/package.py
@@ -18,7 +18,7 @@ class Cpr(CMakePackage):
version("1.9.2", sha256="3bfbffb22c51f322780d10d3ca8f79424190d7ac4b5ad6ad896de08dbd06bf31")
depends_on("curl")
- depends_on("git", when="build")
+ depends_on("git", type="build")
def cmake_args(self):
_force = "_FORCE" if self.spec.satisfies("@:1.9") else ""
diff --git a/var/spack/repos/builtin/packages/cprnc/package.py b/var/spack/repos/builtin/packages/cprnc/package.py
new file mode 100644
index 00000000000000..190c375d69b5c6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cprnc/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Cprnc(CMakePackage):
+ """CPRNC is a netcdf file comparison tool used by CESM
+ and other scientific programs."""
+
+ url = "https://github.com/ESMCI/cprnc/archive/refs/tags/v1.0.1.tar.gz"
+ homepage = "https://github.com/ESMCI/cprnc"
+
+ maintainers("jedwards4b", "billsacks")
+
+ version("1.0.1", sha256="19517b52688f5ce40c385d7a718e06bf88a8731335943bc32e2b8410c489d6eb")
+
+ depends_on("netcdf-fortran")
+ depends_on("cmake@3:", type="build")
diff --git a/var/spack/repos/builtin/packages/crtm/package.py b/var/spack/repos/builtin/packages/crtm/package.py
index 208e6e235982f7..60ff3bfd4897bc 100644
--- a/var/spack/repos/builtin/packages/crtm/package.py
+++ b/var/spack/repos/builtin/packages/crtm/package.py
@@ -28,13 +28,6 @@ class Crtm(CMakePackage):
variant(
"fix", default=False, description='Download CRTM coeffecient or "fix" files (several GBs).'
)
- variant(
- "build_type",
- default="RelWithDebInfo",
- description="CMake build type",
- values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
- )
-
depends_on("cmake@3.15:")
depends_on("git-lfs")
depends_on("netcdf-fortran", when="@2.4.0:")
diff --git a/var/spack/repos/builtin/packages/ctffind/package.py b/var/spack/repos/builtin/packages/ctffind/package.py
index d1be5c6ea6adbe..ac7bc960c358fb 100644
--- a/var/spack/repos/builtin/packages/ctffind/package.py
+++ b/var/spack/repos/builtin/packages/ctffind/package.py
@@ -40,7 +40,7 @@ def url_for_version(self, version):
def configure_args(self):
config_args = []
- if "^mkl" in self.spec:
+ if self.spec["fftw-api"].name in INTEL_MATH_LIBRARIES:
config_args.extend(
[
"--enable-mkl",
diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py
index f3d91693230842..9990ea8a0f65c6 100644
--- a/var/spack/repos/builtin/packages/cube/package.py
+++ b/var/spack/repos/builtin/packages/cube/package.py
@@ -16,7 +16,10 @@ class Cube(AutotoolsPackage):
homepage = "https://www.scalasca.org/software/cube-4.x/download.html"
url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubegui-4.4.2.tar.gz"
+ maintainers("swat-jsc")
+ version("4.8.2", sha256="bf2e02002bb2e5c4f61832ce37b62a440675c6453463014b33b2474aac78f86d")
+ version("4.8.1", sha256="a8a2a62b4e587c012d3d32385bed7c500db14232419795e0f4272d1dcefc55bc")
version("4.8", sha256="1df8fcaea95323e7eaf0cc010784a41243532c2123a27ce93cb7e3241557ff76")
version("4.7.1", sha256="7c96bf9ffb8cc132945f706657756fe6f88b7f7a5243ecd3741f599c2006d428")
version("4.7", sha256="103fe00fa9846685746ce56231f64d850764a87737dc0407c9d0a24037590f68")
diff --git a/var/spack/repos/builtin/packages/cubelib/package.py b/var/spack/repos/builtin/packages/cubelib/package.py
index 713c301f2ff246..aa142328607a6e 100644
--- a/var/spack/repos/builtin/packages/cubelib/package.py
+++ b/var/spack/repos/builtin/packages/cubelib/package.py
@@ -11,7 +11,10 @@ class Cubelib(AutotoolsPackage):
homepage = "https://www.scalasca.org/software/cube-4.x/download.html"
url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubelib-4.4.tar.gz"
+ maintainers = ("swat-jsc", "wrwilliams")
+ version("4.8.2", sha256="d6fdef57b1bc9594f1450ba46cf08f431dd0d4ae595c47e2f3454e17e4ae74f4")
+ version("4.8.1", sha256="e4d974248963edab48c5d0fc5831146d391b0ae4632cccafe840bf5f12cd80a9")
version("4.8", sha256="171c93ac5afd6bc74c50a9a58efdaf8589ff5cc1e5bd773ebdfb2347b77e2f68")
version("4.7.1", sha256="62cf33a51acd9a723fff9a4a5411cd74203e24e0c4ffc5b9e82e011778ed4f2f")
version("4.7", sha256="e44352c80a25a49b0fa0748792ccc9f1be31300a96c32de982b92477a8740938")
diff --git a/var/spack/repos/builtin/packages/cubew/package.py b/var/spack/repos/builtin/packages/cubew/package.py
index 6674a7cf662697..22a56ddda7b22e 100644
--- a/var/spack/repos/builtin/packages/cubew/package.py
+++ b/var/spack/repos/builtin/packages/cubew/package.py
@@ -11,7 +11,10 @@ class Cubew(AutotoolsPackage):
homepage = "https://www.scalasca.org/software/cube-4.x/download.html"
url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubew-4.4.tar.gz"
+ maintainers = ("swat-jsc", "wrwilliams")
+ version("4.8.2", sha256="4f3bcf0622c2429b8972b5eb3f14d79ec89b8161e3c1cc5862ceda417d7975d2")
+ version("4.8.1", sha256="42cbd743d87c16e805c8e28e79292ab33de259f2cfba46f2682cb35c1bc032d6")
version("4.8", sha256="73c7f9e9681ee45d71943b66c01cfe675b426e4816e751ed2e0b670563ca4cf3")
version("4.7.1", sha256="0d364a4930ca876aa887ec40d12399d61a225dbab69e57379b293516d7b6db8d")
version("4.7", sha256="a7c7fca13e6cb252f08d4380223d7c56a8e86a67de147bcc0279ebb849c884a5")
diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py
index 77bf03da37a368..44134e8e441098 100644
--- a/var/spack/repos/builtin/packages/cuda/package.py
+++ b/var/spack/repos/builtin/packages/cuda/package.py
@@ -25,6 +25,20 @@
preferred_ver = "11.8.0"
_versions = {
+ "12.3.0": {
+ "Linux-aarch64": (
+ "9a8fb8acf46b88faf0d711bda3149e1706efbbae02fcb40ab72addfd0e9ce5df",
+ "https://developer.download.nvidia.com/compute/cuda/12.3.0/local_installers/cuda_12.3.0_545.23.06_linux_sbsa.run",
+ ),
+ "Linux-x86_64": (
+ "7c13face3af64d6e1648d6e3101d31c8111e747143acb0077d973c1690820422",
+ "https://developer.download.nvidia.com/compute/cuda/12.3.0/local_installers/cuda_12.3.0_545.23.06_linux.run",
+ ),
+ "Linux-ppc64le": (
+ "de15c04380ec35b194c07503bf434837bac5b427cf77b19a63962b1653d195d5",
+ "https://developer.download.nvidia.com/compute/cuda/12.3.0/local_installers/cuda_12.3.0_545.23.06_linux_ppc64le.run",
+ ),
+ },
"12.2.1": {
"Linux-aarch64": (
"a9ae6bd02684c7acfb229484368bf2691d592767ce1aed10ae9aed92c81b9f09",
@@ -582,6 +596,8 @@ def setup_build_environment(self, env):
def setup_dependent_build_environment(self, env, dependent_spec):
env.set("CUDAHOSTCXX", dependent_spec.package.compiler.cxx)
+ env.set("CUDA_HOME", self.prefix)
+ env.set("NVHPC_CUDA_HOME", self.prefix)
@property
def cmake_prefix_paths(self):
@@ -593,6 +609,7 @@ def cmake_prefix_paths(self):
def setup_run_environment(self, env):
env.set("CUDA_HOME", self.prefix)
+ env.set("NVHPC_CUDA_HOME", self.prefix)
def install(self, spec, prefix):
if os.path.exists("/tmp/cuda-installer.log"):
diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py
index 46894046df60a4..362b559ab63aa0 100644
--- a/var/spack/repos/builtin/packages/curl/package.py
+++ b/var/spack/repos/builtin/packages/curl/package.py
@@ -305,6 +305,7 @@ class Curl(NMakePackage, AutotoolsPackage):
depends_on("libssh2", when="+libssh2")
depends_on("libssh", when="+libssh")
depends_on("krb5", when="+gssapi")
+ depends_on("rtmpdump", when="+librtmp")
# https://github.com/curl/curl/pull/9054
patch("easy-lock-sched-header.patch", when="@7.84.0")
diff --git a/var/spack/repos/builtin/packages/dakota/package.py b/var/spack/repos/builtin/packages/dakota/package.py
index a91764e9e43edd..e0374ad927880e 100644
--- a/var/spack/repos/builtin/packages/dakota/package.py
+++ b/var/spack/repos/builtin/packages/dakota/package.py
@@ -40,7 +40,12 @@ class Dakota(CMakePackage):
git = "https://github.com/snl-dakota/dakota.git"
url = "https://dakota.sandia.gov/sites/default/files/distributions/public/dakota-6.12-release-public.src.tar.gz"
- version("6.18", tag="v6.18.0", submodules=submodules)
+ version(
+ "6.18",
+ tag="v6.18.0",
+ commit="f6cb33b517bb304795e1e14d3673fe289df2ec9b",
+ submodules=submodules,
+ )
version("6.12", sha256="4d69f9cbb0c7319384ab9df27643ff6767eb410823930b8fbd56cc9de0885bc9")
version("6.9", sha256="989b689278964b96496e3058b8ef5c2724d74bcd232f898fe450c51eba7fe0c2")
version("6.3", sha256="0fbc310105860d77bb5c96de0e8813d75441fca1a5e6dfaf732aa095c4488d52")
diff --git a/var/spack/repos/builtin/packages/damaris/package.py b/var/spack/repos/builtin/packages/damaris/package.py
index a93bbece1318c3..3f8b6f156bead8 100644
--- a/var/spack/repos/builtin/packages/damaris/package.py
+++ b/var/spack/repos/builtin/packages/damaris/package.py
@@ -16,19 +16,29 @@ class Damaris(CMakePackage):
maintainers("jcbowden")
version("master", branch="master")
- version("1.9.2", tag="v1.9.2")
- version("1.9.1", tag="v1.9.1")
- version("1.9.0", tag="v1.9.0")
- version("1.8.2", tag="v1.8.2")
- version("1.8.1", tag="v1.8.1")
- version("1.8.0", tag="v1.8.0")
- version("1.7.1", tag="v1.7.1")
- version("1.7.0", tag="v1.7.0")
- version("1.6.0", tag="v1.6.0", deprecated=True)
- version("1.5.0", tag="v1.5.0", deprecated=True)
- version("1.3.3", tag="v1.3.3", deprecated=True)
- version("1.3.2", tag="v1.3.2", deprecated=True)
- version("1.3.1", tag="v1.3.1", deprecated=True)
+ version("1.9.2", tag="v1.9.2", commit="22c146b4b4ca047d4d36fd904d248e0280b3c0ea")
+ version("1.9.1", tag="v1.9.1", commit="2fe83f587837b7ad0b5c187b8ff453f7d3ad2c18")
+ version("1.9.0", tag="v1.9.0", commit="23cac3a8ade9f9c20499081a8ed10b3e51801428")
+ version("1.8.2", tag="v1.8.2", commit="bd447e677cdf81389f93bea3139af0fa54554a01")
+ version("1.8.1", tag="v1.8.1", commit="18513edb1e11974a4296263ff8499d2802e17891")
+ version("1.8.0", tag="v1.8.0", commit="56701eee59d464cc73d248fbd5e7a8a70e7a3933")
+ version("1.7.1", tag="v1.7.1", commit="09dfbe7828ee295b4433c9e01c6523fa6b4adab5")
+ version("1.7.0", tag="v1.7.0", commit="9ab3ea4c568de16f5d43b8b5ad71feb4864a5584")
+ version(
+ "1.6.0", tag="v1.6.0", commit="1fe4c61cce03babd24315b8e6156f226baac97a2", deprecated=True
+ )
+ version(
+ "1.5.0", tag="v1.5.0", commit="68206a696ad430aa8426ca370501aa71914fbc87", deprecated=True
+ )
+ version(
+ "1.3.3", tag="v1.3.3", commit="f1c473507c080738f7092f6a7d72deb938ade786", deprecated=True
+ )
+ version(
+ "1.3.2", tag="v1.3.2", commit="38b50664523e56900809a19f0cf52fc0ab5dca53", deprecated=True
+ )
+ version(
+ "1.3.1", tag="v1.3.1", commit="6cee3690fa7d387acc8f5f650a7b019e13b90284", deprecated=True
+ )
variant("fortran", default=True, description="Enables Fortran support")
variant("hdf5", default=False, description="Enables the HDF5 storage plugin")
diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py
index 1b4e06b5919a04..64d95b2ec87de2 100644
--- a/var/spack/repos/builtin/packages/darshan-runtime/package.py
+++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py
@@ -115,9 +115,9 @@ def configure_args(self):
if "+apmpi" in spec:
extra_args.append("--enable-apmpi-mod")
if "+apmpi_sync" in spec:
- extra_args.append(["--enable-apmpi-mod", "--enable-apmpi-coll-sync"])
+ extra_args.extend(["--enable-apmpi-mod", "--enable-apmpi-coll-sync"])
if "+apxc" in spec:
- extra_args.append(["--enable-apxc-mod"])
+ extra_args.append("--enable-apxc-mod")
extra_args.append("--with-mem-align=8")
extra_args.append("--with-log-path-by-env=DARSHAN_LOG_DIR_PATH")
diff --git a/var/spack/repos/builtin/packages/datatransferkit/package.py b/var/spack/repos/builtin/packages/datatransferkit/package.py
index 1829e2414df090..7e0d050069d0cd 100644
--- a/var/spack/repos/builtin/packages/datatransferkit/package.py
+++ b/var/spack/repos/builtin/packages/datatransferkit/package.py
@@ -11,7 +11,7 @@ class Datatransferkit(CMakePackage):
parallel solution transfer services for multiphysics simulations"""
homepage = "https://datatransferkit.readthedoc.io"
- url = "https://github.com/ORNL-CEES/DataTransferKit/archive/3.1-rc3.tar.gz"
+ url = "https://github.com/ORNL-CEES/DataTransferKit/archive/3.1.1.tar.gz"
git = "https://github.com/ORNL-CEES/DataTransferKit.git"
tags = ["e4s"]
@@ -19,6 +19,8 @@ class Datatransferkit(CMakePackage):
maintainers("Rombur")
version("master", branch="master", submodules=True)
+ version("3.1.1", commit="bfb7673cc233c26a6a541cbf096f37f26df1e5fb", submodules=True)
+ version("3.1.0", commit="60a4cbd0a55505e0450f1ac979e1eef8966dc03f", submodules=True)
version("3.1-rc3", commit="691d5a1540f7cd42141a3b3d2a7c8370cbc3560a", submodules=True)
version("3.1-rc2", commit="1abc1a43b33dffc7a16d7497b4185d09d865e36a", submodules=True)
@@ -37,7 +39,8 @@ class Datatransferkit(CMakePackage):
depends_on("trilinos+intrepid2+shards~dtk")
depends_on("trilinos+openmp", when="+openmp")
depends_on("trilinos+stratimikos+belos", when="@master")
- depends_on("trilinos@13:", when="@3.1-rc2:")
+ depends_on("trilinos@13:13.4.1", when="@3.1-rc2:3.1-rc3")
+ depends_on("trilinos@14:", when="@3.1.0:")
def cmake_args(self):
spec = self.spec
diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py
index 69cda7b4771ddd..37a1b8a694b907 100644
--- a/var/spack/repos/builtin/packages/dbus/package.py
+++ b/var/spack/repos/builtin/packages/dbus/package.py
@@ -29,6 +29,7 @@ class Dbus(AutotoolsPackage):
version("1.8.2", sha256="5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08")
variant("xml_docs", default=False, description="Build XML documentation")
+ variant("system-socket", default="default", description="Location for the DBus system socket")
depends_on("pkgconfig", type="build")
depends_on("docbook-xml", type="build")
@@ -41,6 +42,9 @@ class Dbus(AutotoolsPackage):
def configure_args(self):
args = ["--disable-systemd", "--disable-launchd"]
args += self.enable_or_disable("xml-docs", variant="xml_docs")
+ socket = self.spec.variants["system-socket"].value
+ if socket != "default":
+ args += ["--with-system-socket={0}".format(socket)]
return args
@run_after("install")
diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py
index 459582022bfe1d..3309dcc5378060 100644
--- a/var/spack/repos/builtin/packages/dd4hep/package.py
+++ b/var/spack/repos/builtin/packages/dd4hep/package.py
@@ -24,6 +24,8 @@ class Dd4hep(CMakePackage):
tags = ["hep"]
version("master", branch="master")
+ version("1.27.1", sha256="e66ae726c0a9a55e5603024a7f8a48ffbc5613ea36e5f892e9a90d87833f92e0")
+ version("1.27", sha256="51fbd0f91f2511261d9b01e4b3528c658bea1ea1b5d67b25b6812615e782a902")
version("1.26", sha256="de2cc8d8e99217e23fdf0a55b879d3fd3a864690d6660e7808f1ff99eb47f384")
version("1.25.1", sha256="6267e76c74fbb346aa881bc44de84434ebe788573f2997a189996252fc5b271b")
version("1.25", sha256="102a049166a95c2f24fc1c03395a819fc4501c175bf7915d69ccc660468d094d")
@@ -188,6 +190,12 @@ class Dd4hep(CMakePackage):
)
conflicts("~ddrec+dddetectors", msg="Need to enable +ddrec to build +dddetectors.")
+ # Geant4 needs to be (at least) the same version as DD4hep, but we don't
+ # have a very good handle on that at this stage, because we make that
+ # dependent on ROOT's cxxstd. However, cxxstd=11 will never work
+ # See https://github.com/AIDASoft/DD4hep/pull/1191
+ conflicts("^geant4 cxxstd=11", when="+ddg4")
+
@property
def libs(self):
# We need to override libs here, because we don't build a libdd4hep so
@@ -253,7 +261,8 @@ def setup_run_environment(self, env):
env.set("DD4HEP", self.prefix.examples)
env.set("DD4hep_DIR", self.prefix)
env.set("DD4hep_ROOT", self.prefix)
- env.prepend_path("LD_LIBRARY_PATH", self.libs.directories[0])
+ if len(self.libs.directories) > 0:
+ env.prepend_path("LD_LIBRARY_PATH", self.libs.directories[0])
def url_for_version(self, version):
# dd4hep releases are dashes and padded with a leading zero
diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py
index df6f514134a799..f6df5eb5672881 100644
--- a/var/spack/repos/builtin/packages/dealii/package.py
+++ b/var/spack/repos/builtin/packages/dealii/package.py
@@ -23,9 +23,13 @@ class Dealii(CMakePackage, CudaPackage):
# only add for immediate deps.
transitive_rpaths = False
- generator("ninja")
+ # FIXME nvcc_wrapper (used for +clang) doesn't handle response files
+ # correctly when ninja is used. Those are used automatically if paths get too long.
+ generator("make")
version("master", branch="master")
+ version("9.5.1", sha256="a818b535e6488d3aef7853311657c7b4fadc29a9abe91b7b202b131aad630f5e")
+ version("9.5.0", sha256="a81f41565f0d3a22d491ee687957dd48053225da72e8d6d628d210358f4a0464")
version("9.4.2", sha256="45a76cb400bfcff25cc2d9093d9a5c91545c8367985e6798811c5e9d2a6a6fd4")
version("9.4.1", sha256="bfe5e4bf069159f93feb0f78529498bfee3da35baf5a9c6852aa59d7ea7c7a48")
version("9.4.0", sha256="238677006cd9173658e5b69cdd1861f800556982db6005a3cc5eb8329cc1e36c")
@@ -70,10 +74,11 @@ class Dealii(CMakePackage, CudaPackage):
values=("default", "11", "14", "17"),
)
variant("doc", default=False, description="Compile with documentation")
- variant("examples", default=True, description="Compile tutorial programs")
+ variant("examples", default=True, description="Compile and install tutorial programs")
variant("int64", default=False, description="Compile with 64 bit indices support")
variant("mpi", default=True, description="Compile with MPI")
variant("optflags", default=False, description="Compile using additional optimization flags")
+ variant("platform-introspection", default=True, description="Enable platform introspection")
variant("python", default=False, description="Compile with Python bindings")
# Package variants
@@ -81,11 +86,12 @@ class Dealii(CMakePackage, CudaPackage):
variant("arborx", default=True, description="Compile with Arborx support")
variant("arpack", default=True, description="Compile with Arpack and PArpack (only with MPI)")
variant("adol-c", default=True, description="Compile with ADOL-C")
- variant("cgal", default=True, when="@9.4:", description="Compile with CGAL")
+ variant("cgal", default=True, when="@9.4:~cuda", description="Compile with CGAL")
variant("ginkgo", default=True, description="Compile with Ginkgo")
variant("gmsh", default=True, description="Compile with GMSH")
variant("gsl", default=True, description="Compile with GSL")
variant("hdf5", default=True, description="Compile with HDF5 (only with MPI)")
+ variant("kokkos", default=True, when="@9.5:", description="Compile with Kokkos")
variant("metis", default=True, description="Compile with Metis")
variant("muparser", default=True, description="Compile with muParser")
variant("nanoflann", default=False, description="Compile with Nanoflann")
@@ -98,14 +104,15 @@ class Dealii(CMakePackage, CudaPackage):
variant("slepc", default=True, description="Compile with Slepc (only with Petsc and MPI)")
variant("symengine", default=True, description="Compile with SymEngine")
variant("simplex", default=True, description="Compile with Simplex support")
- # TODO @9.3: enable by default, when we know what to do
- # variant('taskflow', default=False,
- # description='Compile with multi-threading via Taskflow')
- # TODO @9.3: disable by default
- # (NB: only if tbb is removed in 9.3, as planned!!!)
+ variant(
+ "taskflow",
+ default=True,
+ when="@9.6:",
+ description="Compile with multi-threading via Taskflow",
+ )
variant("threads", default=True, description="Compile with multi-threading via TBB")
variant("trilinos", default=True, description="Compile with Trilinos (only with MPI)")
- variant("platform-introspection", default=True, description="Enable platform introspection")
+ variant("vtk", default=True, when="@9.6:", description="Compile with VTK")
# Required dependencies: Light version
depends_on("blas")
@@ -179,6 +186,8 @@ class Dealii(CMakePackage, CudaPackage):
# TODO: next line fixes concretization with petsc
depends_on("hdf5+mpi+hl+fortran", when="+hdf5+mpi+petsc")
depends_on("hdf5+mpi+hl", when="+hdf5+mpi~petsc")
+ depends_on("kokkos@3.7:", when="@9.5:+kokkos~trilinos")
+ depends_on("kokkos@3.7:+cuda+cuda_lambda+wrapper", when="@9.5:+kokkos~trilinos+cuda")
# TODO: concretizer bug. The two lines mimic what comes from PETSc
# but we should not need it
depends_on("metis@5:+int64", when="+metis+int64")
@@ -198,7 +207,7 @@ class Dealii(CMakePackage, CudaPackage):
depends_on("sundials@:3~pthread", when="@9.0:9.2+sundials")
depends_on("sundials@5:5.8", when="@9.3:9.3.3+sundials")
depends_on("sundials@5:", when="@9.3.4:+sundials")
- # depends_on('taskflow', when='@9.3:+taskflow')
+ depends_on("taskflow@3.4:", when="@9.6:+taskflow")
depends_on("trilinos gotype=int", when="+trilinos@12.18.1:")
# TODO: next line fixes concretization with trilinos and adol-c
depends_on("trilinos~exodus", when="@9.0:+adol-c+trilinos")
@@ -222,12 +231,11 @@ class Dealii(CMakePackage, CudaPackage):
# do not require +rol to make concretization of xsdk possible
depends_on("trilinos+amesos+aztec+epetra+ifpack+ml+muelu+sacado", when="+trilinos")
depends_on("trilinos~hypre", when="+trilinos+int64")
- # TODO: temporary disable Tpetra when using CUDA due to
- # namespace "Kokkos::Impl" has no member "cuda_abort"
- depends_on(
- "trilinos@master+rol~amesos2~ifpack2~intrepid2~kokkos~tpetra~zoltan2",
- when="+trilinos+cuda",
- )
+ for _arch in CudaPackage.cuda_arch_values:
+ arch_str = f"+cuda cuda_arch={_arch}"
+ trilinos_spec = f"trilinos +wrapper {arch_str}"
+ depends_on(trilinos_spec, when=f"@9.5:+trilinos {arch_str}")
+ depends_on("vtk", when="@9.6:+vtk")
# Explicitly provide a destructor in BlockVector,
# otherwise deal.II may fail to build with Intel compilers.
@@ -296,44 +304,60 @@ class Dealii(CMakePackage, CudaPackage):
msg="CGAL requires the C++ standard to be set explicitly to 17 or later.",
)
+ conflicts(
+ "cxxstd=14",
+ when="@9.6:",
+ msg="Deal.II 9.6 onwards requires the C++ standard to be set to 17 or later.",
+ )
+
# Interfaces added in 8.5.0:
- for p in ["gsl", "python"]:
+ for _package in ["gsl", "python"]:
conflicts(
- "+{0}".format(p),
+ "+{0}".format(_package),
when="@:8.4.2",
msg="The interface to {0} is supported from version 8.5.0 "
"onwards. Please explicitly disable this variant "
- "via ~{0}".format(p),
+ "via ~{0}".format(_package),
)
# Interfaces added in 9.0.0:
- for p in ["assimp", "gmsh", "nanoflann", "scalapack", "sundials", "adol-c"]:
+ for _package in ["assimp", "gmsh", "nanoflann", "scalapack", "sundials", "adol-c"]:
conflicts(
- "+{0}".format(p),
+ "+{0}".format(_package),
when="@:8.5.1",
msg="The interface to {0} is supported from version 9.0.0 "
"onwards. Please explicitly disable this variant "
- "via ~{0}".format(p),
+ "via ~{0}".format(_package),
)
# interfaces added in 9.1.0:
- for p in ["ginkgo", "symengine"]:
+ for _package in ["ginkgo", "symengine"]:
conflicts(
- "+{0}".format(p),
+ "+{0}".format(_package),
when="@:9.0",
msg="The interface to {0} is supported from version 9.1.0 "
"onwards. Please explicitly disable this variant "
- "via ~{0}".format(p),
+ "via ~{0}".format(_package),
)
# interfaces added in 9.3.0:
- for p in ["simplex", "arborx"]: # , 'taskflow']:
+ for _package in ["simplex", "arborx"]:
conflicts(
- "+{0}".format(p),
+ "+{0}".format(_package),
when="@:9.2",
msg="The interface to {0} is supported from version 9.3.0 "
"onwards. Please explicitly disable this variant "
- "via ~{0}".format(p),
+ "via ~{0}".format(_package),
+ )
+
+ # interfaces added after 9.5.0:
+ for _package in ["vtk", "taskflow"]:
+ conflicts(
+ "+{0}".format(_package),
+ when="@:9.5",
+ msg="The interface to {0} is supported from version 9.6.0 "
+ "onwards. Please explicitly disable this variant "
+ "via ~{0}".format(_package),
)
# Interfaces removed in 9.3.0:
@@ -346,18 +370,29 @@ class Dealii(CMakePackage, CudaPackage):
# Check that the combination of variants makes sense
# 64-bit BLAS:
- for p in ["openblas", "intel-mkl", "intel-parallel-studio+mkl"]:
+ for _package in ["openblas", "intel-mkl", "intel-parallel-studio+mkl"]:
conflicts(
- "^{0}+ilp64".format(p), when="@:8.5.1", msg="64bit BLAS is only supported from 9.0.0"
+ "^{0}+ilp64".format(_package),
+ when="@:8.5.1",
+ msg="64bit BLAS is only supported from 9.0.0",
)
# MPI requirements:
- for p in ["arpack", "hdf5", "netcdf", "p4est", "petsc", "scalapack", "slepc", "trilinos"]:
+ for _package in [
+ "arpack",
+ "hdf5",
+ "netcdf",
+ "p4est",
+ "petsc",
+ "scalapack",
+ "slepc",
+ "trilinos",
+ ]:
conflicts(
- "+{0}".format(p),
+ "+{0}".format(_package),
when="~mpi",
msg="To enable {0} it is necessary to build deal.II with "
- "MPI support enabled.".format(p),
+ "MPI support enabled.".format(_package),
)
# Optional dependencies:
@@ -432,6 +467,7 @@ def cmake_args(self):
# Examples / tutorial programs
options.append(self.define_from_variant("DEAL_II_COMPONENT_EXAMPLES", "examples"))
+ options.append(self.define_from_variant("DEAL_II_COMPILE_EXAMPLES", "examples"))
# Enforce the specified C++ standard
if spec.variants["cxxstd"].value != "default":
@@ -478,9 +514,6 @@ def cmake_args(self):
if "+mpi" in spec:
options.extend(
[
- self.define("CMAKE_C_COMPILER", spec["mpi"].mpicc),
- self.define("CMAKE_CXX_COMPILER", spec["mpi"].mpicxx),
- self.define("CMAKE_Fortran_COMPILER", spec["mpi"].mpifc),
self.define("MPI_C_COMPILER", spec["mpi"].mpicc),
self.define("MPI_CXX_COMPILER", spec["mpi"].mpicxx),
self.define("MPI_Fortran_COMPILER", spec["mpi"].mpifc),
@@ -499,6 +532,9 @@ def cmake_args(self):
self.define("CUDA_HOST_COMPILER", spec["mpi"].mpicxx),
]
)
+ # Make sure we use the same compiler that Trilinos uses
+ if "+trilinos" in spec:
+ options.extend([self.define("CMAKE_CXX_COMPILER", spec["trilinos"].kokkos_cxx)])
# Python bindings
if spec.satisfies("@8.5.0:"):
@@ -542,23 +578,25 @@ def cmake_args(self):
# Optional dependencies for which library names are the same as CMake
# variables:
for library in (
+ "arborx",
+ "assimp",
+ "cgal",
+ "ginkgo",
+ "gmsh",
"gsl",
"hdf5",
+ "metis",
+ "muparser",
+ "nanoflann",
"p4est",
"petsc",
"slepc",
- "trilinos",
- "metis",
"sundials",
- "nanoflann",
- "assimp",
- "gmsh",
- "muparser",
"symengine",
- "ginkgo",
- "arborx",
- "cgal",
- ): # 'taskflow'):
+ "taskflow",
+ "trilinos",
+ "vtk",
+ ):
options.append(
self.define_from_variant("DEAL_II_WITH_{0}".format(library.upper()), library)
)
diff --git a/var/spack/repos/builtin/packages/dihydrogen/package.py b/var/spack/repos/builtin/packages/dihydrogen/package.py
index ca53a897654b33..a6e030d0e8efb1 100644
--- a/var/spack/repos/builtin/packages/dihydrogen/package.py
+++ b/var/spack/repos/builtin/packages/dihydrogen/package.py
@@ -8,7 +8,39 @@
from spack.package import *
-class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage):
+# This is a hack to get around some deficiencies in Hydrogen.
+def get_blas_entries(inspec):
+ entries = []
+ spec = inspec["hydrogen"]
+ if "blas=openblas" in spec:
+ entries.append(cmake_cache_option("DiHydrogen_USE_OpenBLAS", True))
+ elif "blas=mkl" in spec or spec.satisfies("^intel-mkl"):
+ entries.append(cmake_cache_option("DiHydrogen_USE_MKL", True))
+ elif "blas=essl" in spec or spec.satisfies("^essl"):
+ entries.append(cmake_cache_string("BLA_VENDOR", "IBMESSL"))
+ # If IBM ESSL is used it needs help finding the proper LAPACK libraries
+ entries.append(
+ cmake_cache_string(
+ "LAPACK_LIBRARIES",
+ "%s;-llapack;-lblas"
+ % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names),
+ )
+ )
+ entries.append(
+ cmake_cache_string(
+ "BLAS_LIBRARIES",
+ "%s;-lblas"
+ % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names),
+ )
+ )
+ elif "blas=accelerate" in spec:
+ entries.append(cmake_cache_option("DiHydrogen_USE_ACCELERATE", True))
+ elif spec.satisfies("^netlib-lapack"):
+ entries.append(cmake_cache_string("BLA_VENDOR", "Generic"))
+ return entries
+
+
+class Dihydrogen(CachedCMakePackage, CudaPackage, ROCmPackage):
"""DiHydrogen is the second version of the Hydrogen fork of the
well-known distributed linear algebra library,
Elemental. DiHydrogen aims to be a basic distributed
@@ -20,117 +52,179 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/LLNL/DiHydrogen.git"
tags = ["ecp", "radiuss"]
- maintainers("bvanessen")
+ maintainers("benson31", "bvanessen")
version("develop", branch="develop")
version("master", branch="master")
- version("0.2.1", sha256="11e2c0f8a94ffa22e816deff0357dde6f82cc8eac21b587c800a346afb5c49ac")
- version("0.2.0", sha256="e1f597e80f93cf49a0cb2dbc079a1f348641178c49558b28438963bd4a0bdaa4")
- version("0.1", sha256="171d4b8adda1e501c38177ec966e6f11f8980bf71345e5f6d87d0a988fef4c4e")
+ version("0.3.0", sha256="8dd143441a28e0c7662cd92694e9a4894b61fd48508ac1d77435f342bc226dcf")
+
+ # Primary features
+
+ variant("dace", default=False, sticky=True, description="Enable DaCe backend.")
+
+ variant(
+ "distconv",
+ default=False,
+ sticky=True,
+ description="Enable (legacy) Distributed Convolution support.",
+ )
+
+ variant(
+ "nvshmem",
+ default=False,
+ sticky=True,
+ description="Enable support for NVSHMEM-based halo exchanges.",
+ when="+distconv",
+ )
+
+ variant(
+ "shared", default=True, sticky=True, description="Enables the build of shared libraries"
+ )
+
+ # Some features of developer interest
- variant("al", default=True, description="Builds with Aluminum communication library")
variant(
"developer",
default=False,
description="Enable extra warnings and force tests to be enabled.",
)
- variant("half", default=False, description="Enable FP16 support on the CPU.")
+
+ variant("ci", default=False, description="Use default options for CI builds")
+
variant(
- "distconv",
+ "coverage",
default=False,
- description="Support distributed convolutions: spatial, channel, " "filter.",
+ description="Decorate build with code coverage instrumentation options",
+ when="%gcc",
)
- variant("nvshmem", default=False, description="Builds with support for NVSHMEM")
- variant("openmp", default=False, description="Enable CPU acceleration with OpenMP threads.")
- variant("rocm", default=False, description="Enable ROCm/HIP language features.")
- variant("shared", default=True, description="Enables the build of shared libraries")
-
- # Variants related to BLAS
variant(
- "openmp_blas", default=False, description="Use OpenMP for threading in the BLAS library"
+ "coverage",
+ default=False,
+ description="Decorate build with code coverage instrumentation options",
+ when="%clang",
)
- variant("int64_blas", default=False, description="Use 64bit integers for BLAS.")
variant(
- "blas",
- default="openblas",
- values=("openblas", "mkl", "accelerate", "essl", "libsci"),
- description="Enable the use of OpenBlas/MKL/Accelerate/ESSL/LibSci",
+ "coverage",
+ default=False,
+ description="Decorate build with code coverage instrumentation options",
+ when="%rocmcc",
)
- conflicts("~cuda", when="+nvshmem")
+ # Package conflicts and requirements
- depends_on("mpi")
- depends_on("catch2", type="test")
+ conflicts("+nvshmem", when="~cuda", msg="NVSHMEM requires CUDA support.")
- # Specify the correct version of Aluminum
- depends_on("aluminum@0.4.0:0.4", when="@0.1 +al")
- depends_on("aluminum@0.5.0:0.5", when="@0.2.0 +al")
- depends_on("aluminum@0.7.0:0.7", when="@0.2.1 +al")
- depends_on("aluminum@0.7.0:", when="@:0.0,0.2.1: +al")
+ conflicts("+cuda", when="+rocm", msg="CUDA and ROCm are mutually exclusive.")
- # Add Aluminum variants
- depends_on("aluminum +cuda +nccl +cuda_rma", when="+al +cuda")
- depends_on("aluminum +rocm +rccl", when="+al +rocm")
- depends_on("aluminum +ht", when="+al +distconv")
+ requires(
+ "+cuda",
+ "+rocm",
+ when="+distconv",
+ policy="any_of",
+ msg="DistConv support requires CUDA or ROCm.",
+ )
- for arch in CudaPackage.cuda_arch_values:
- depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch)
- depends_on("nvshmem cuda_arch=%s" % arch, when="+nvshmem +cuda cuda_arch=%s" % arch)
+ # Dependencies
- # variants +rocm and amdgpu_targets are not automatically passed to
- # dependencies, so do it manually.
- for val in ROCmPackage.amdgpu_targets:
- depends_on("aluminum amdgpu_target=%s" % val, when="amdgpu_target=%s" % val)
+ depends_on("catch2@3.0.1:", type=("build", "test"), when="+developer")
+ depends_on("cmake@3.21.0:", type="build")
+ depends_on("cuda@11.0:", when="+cuda")
+ depends_on("spdlog@1.11.0", when="@:0.1,0.2:")
- depends_on("roctracer-dev", when="+rocm +distconv")
+ with when("@0.3.0:"):
+ depends_on("hydrogen +al")
+ for arch in CudaPackage.cuda_arch_values:
+ depends_on(
+ "hydrogen +cuda cuda_arch={0}".format(arch),
+ when="+cuda cuda_arch={0}".format(arch),
+ )
- depends_on("cudnn", when="+cuda")
- depends_on("cub", when="^cuda@:10")
+ for val in ROCmPackage.amdgpu_targets:
+ depends_on(
+ "hydrogen amdgpu_target={0}".format(val),
+ when="+rocm amdgpu_target={0}".format(val),
+ )
- # Note that #1712 forces us to enumerate the different blas variants
- depends_on("openblas", when="blas=openblas")
- depends_on("openblas +ilp64", when="blas=openblas +int64_blas")
- depends_on("openblas threads=openmp", when="blas=openblas +openmp_blas")
+ with when("+distconv"):
+ depends_on("mpi")
- depends_on("intel-mkl", when="blas=mkl")
- depends_on("intel-mkl +ilp64", when="blas=mkl +int64_blas")
- depends_on("intel-mkl threads=openmp", when="blas=mkl +openmp_blas")
+ # All this nonsense for one silly little package.
+ depends_on("aluminum@1.4.1:")
- depends_on("veclibfort", when="blas=accelerate")
- conflicts("blas=accelerate +openmp_blas")
+ # Add Aluminum variants
+ depends_on("aluminum +cuda +nccl", when="+distconv +cuda")
+ depends_on("aluminum +rocm +nccl", when="+distconv +rocm")
- depends_on("essl", when="blas=essl")
- depends_on("essl +ilp64", when="blas=essl +int64_blas")
- depends_on("essl threads=openmp", when="blas=essl +openmp_blas")
- depends_on("netlib-lapack +external-blas", when="blas=essl")
+ # TODO: Debug linker errors when NVSHMEM is built with UCX
+ depends_on("nvshmem +nccl~ucx", when="+nvshmem")
- depends_on("cray-libsci", when="blas=libsci")
- depends_on("cray-libsci +openmp", when="blas=libsci +openmp_blas")
+ # OMP support is only used in DistConv, and only Apple needs
+ # hand-holding with it.
+ depends_on("llvm-openmp", when="%apple-clang")
+ # FIXME: when="platform=darwin"??
- # Distconv builds require cuda or rocm
- conflicts("+distconv", when="~cuda ~rocm")
+ # CUDA/ROCm arch forwarding
- conflicts("+distconv", when="+half")
- conflicts("+rocm", when="+half")
+ for arch in CudaPackage.cuda_arch_values:
+ depends_on(
+ "aluminum +cuda cuda_arch={0}".format(arch),
+ when="+cuda cuda_arch={0}".format(arch),
+ )
+
+ # This is a workaround for a bug in the Aluminum package,
+ # as it should be responsible for its own NCCL dependency.
+ # Rather than failing to concretize, we help it along.
+ depends_on(
+ "nccl cuda_arch={0}".format(arch),
+ when="+distconv +cuda cuda_arch={0}".format(arch),
+ )
- depends_on("half", when="+half")
+ # NVSHMEM also needs arch forwarding
+ depends_on(
+ "nvshmem +cuda cuda_arch={0}".format(arch),
+ when="+nvshmem +cuda cuda_arch={0}".format(arch),
+ )
- generator("ninja")
- depends_on("cmake@3.17.0:", type="build")
+ # Identify versions of cuda_arch that are too old from
+ # lib/spack/spack/build_systems/cuda.py. We require >=60.
+ illegal_cuda_arch_values = [
+ "10",
+ "11",
+ "12",
+ "13",
+ "20",
+ "21",
+ "30",
+ "32",
+ "35",
+ "37",
+ "50",
+ "52",
+ "53",
+ ]
+ for value in illegal_cuda_arch_values:
+ conflicts("cuda_arch=" + value)
- depends_on("spdlog", when="@:0.1,0.2:")
+ for val in ROCmPackage.amdgpu_targets:
+ depends_on(
+ "aluminum amdgpu_target={0}".format(val),
+ when="+rocm amdgpu_target={0}".format(val),
+ )
- depends_on("llvm-openmp", when="%apple-clang +openmp")
+ # CUDA-specific distconv dependencies
+ depends_on("cudnn", when="+cuda")
- # TODO: Debug linker errors when NVSHMEM is built with UCX
- depends_on("nvshmem +nccl~ucx", when="+nvshmem")
+ # ROCm-specific distconv dependencies
+ depends_on("hipcub", when="+rocm")
+ depends_on("miopen-hip", when="+rocm")
+ depends_on("roctracer-dev", when="+rocm")
- # Idenfity versions of cuda_arch that are too old
- # from lib/spack/spack/build_systems/cuda.py
- illegal_cuda_arch_values = ["10", "11", "12", "13", "20", "21"]
- for value in illegal_cuda_arch_values:
- conflicts("cuda_arch=" + value)
+ with when("+ci+coverage"):
+ depends_on("lcov", type=("build", "run"))
+ depends_on("py-gcovr", type=("build", "run"))
+ # Technically it's not used in the build, but CMake sets up a
+ # target, so it needs to be found.
@property
def libs(self):
@@ -138,104 +232,127 @@ def libs(self):
return find_libraries("libH2Core", root=self.prefix, shared=shared, recursive=True)
def cmake_args(self):
+ args = []
+ return args
+
+ def get_cuda_flags(self):
spec = self.spec
+ args = []
+ if spec.satisfies("^cuda+allow-unsupported-compilers"):
+ args.append("-allow-unsupported-compiler")
+
+ if spec.satisfies("%clang"):
+ for flag in spec.compiler_flags["cxxflags"]:
+ if "gcc-toolchain" in flag:
+ args.append("-Xcompiler={0}".format(flag))
+ return args
- args = [
- "-DCMAKE_CXX_STANDARD=17",
- "-DCMAKE_INSTALL_MESSAGE:STRING=LAZY",
- "-DBUILD_SHARED_LIBS:BOOL=%s" % ("+shared" in spec),
- "-DH2_ENABLE_ALUMINUM=%s" % ("+al" in spec),
- "-DH2_ENABLE_CUDA=%s" % ("+cuda" in spec),
- "-DH2_ENABLE_DISTCONV_LEGACY=%s" % ("+distconv" in spec),
- "-DH2_ENABLE_OPENMP=%s" % ("+openmp" in spec),
- "-DH2_ENABLE_FP16=%s" % ("+half" in spec),
- "-DH2_DEVELOPER_BUILD=%s" % ("+developer" in spec),
- ]
+ def initconfig_compiler_entries(self):
+ spec = self.spec
+ entries = super(Dihydrogen, self).initconfig_compiler_entries()
+
+ # FIXME: Enforce this better in the actual CMake.
+ entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17"))
+ entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec))
+ entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True))
+
+ # It's possible this should have a `if "platform=cray" in
+ # spec:` in front of it, but it's not clear to me when this is
+ # set. In particular, I don't actually see this blurb showing
+ # up on Tioga builds. Which is causing the obvious problem
+ # (namely, the one this was added to supposedly solve in the
+ # first place.)
+ entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True))
+
+ if spec.satisfies("%clang +distconv platform=darwin"):
+ clang = self.compiler.cc
+ clang_bin = os.path.dirname(clang)
+ clang_root = os.path.dirname(clang_bin)
+ entries.append(cmake_cache_string("OpenMP_CXX_FLAGS", "-fopenmp=libomp"))
+ entries.append(cmake_cache_string("OpenMP_CXX_LIB_NAMES", "libomp"))
+ entries.append(
+ cmake_cache_string(
+ "OpenMP_libomp_LIBRARY", "{0}/lib/libomp.dylib".format(clang_root)
+ )
+ )
+
+ return entries
+
+ def initconfig_hardware_entries(self):
+ spec = self.spec
+ entries = super(Dihydrogen, self).initconfig_hardware_entries()
- if spec.version < Version("0.3"):
- args.append("-DH2_ENABLE_HIP_ROCM=%s" % ("+rocm" in spec))
- else:
- args.append("-DH2_ENABLE_ROCM=%s" % ("+rocm" in spec))
-
- if not spec.satisfies("^cmake@3.23.0"):
- # There is a bug with using Ninja generator in this version
- # of CMake
- args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON")
-
- if "+cuda" in spec:
- if self.spec.satisfies("%clang"):
- for flag in self.spec.compiler_flags["cxxflags"]:
- if "gcc-toolchain" in flag:
- args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag))
- if spec.satisfies("^cuda@11.0:"):
- args.append("-DCMAKE_CUDA_STANDARD=17")
- else:
- args.append("-DCMAKE_CUDA_STANDARD=14")
- archs = spec.variants["cuda_arch"].value
- if archs != "none":
+ entries.append(cmake_cache_option("H2_ENABLE_CUDA", "+cuda" in spec))
+ if spec.satisfies("+cuda"):
+ entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17"))
+ if not spec.satisfies("cuda_arch=none"):
+ archs = spec.variants["cuda_arch"].value
arch_str = ";".join(archs)
- args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str)
+ entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str))
+
+ # FIXME: Should this use the "cuda_flags" function of the
+ # CudaPackage class or something? There might be other
+ # flags in play, and we need to be sure to get them all.
+ cuda_flags = self.get_cuda_flags()
+ if len(cuda_flags) > 0:
+ entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))
+
+ enable_rocm_var = (
+ "H2_ENABLE_HIP_ROCM" if spec.version < Version("0.3") else "H2_ENABLE_ROCM"
+ )
+ entries.append(cmake_cache_option(enable_rocm_var, "+rocm" in spec))
+ if spec.satisfies("+rocm"):
+ entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17"))
+ if not spec.satisfies("amdgpu_target=none"):
+ archs = self.spec.variants["amdgpu_target"].value
+ arch_str = ";".join(archs)
+ entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+ entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+ entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
+ entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix))
- if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"):
- args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler")
+ return entries
- if "+cuda" in spec:
- args.append("-DcuDNN_DIR={0}".format(spec["cudnn"].prefix))
+ def initconfig_package_entries(self):
+ spec = self.spec
+ entries = super(Dihydrogen, self).initconfig_package_entries()
- if spec.satisfies("^cuda@:10"):
- if "+cuda" in spec or "+distconv" in spec:
- args.append("-DCUB_DIR={0}".format(spec["cub"].prefix))
+ # Basic H2 options
+ entries.append(cmake_cache_option("H2_DEVELOPER_BUILD", "+developer" in spec))
+ entries.append(cmake_cache_option("H2_ENABLE_TESTS", "+developer" in spec))
- # Add support for OpenMP with external (Brew) clang
- if spec.satisfies("%clang +openmp platform=darwin"):
- clang = self.compiler.cc
- clang_bin = os.path.dirname(clang)
- clang_root = os.path.dirname(clang_bin)
- args.extend(
- [
- "-DOpenMP_CXX_FLAGS=-fopenmp=libomp",
- "-DOpenMP_CXX_LIB_NAMES=libomp",
- "-DOpenMP_libomp_LIBRARY={0}/lib/libomp.dylib".format(clang_root),
- ]
- )
+ entries.append(cmake_cache_option("H2_ENABLE_CODE_COVERAGE", "+coverage" in spec))
+ entries.append(cmake_cache_option("H2_CI_BUILD", "+ci" in spec))
- if "+rocm" in spec:
- args.extend(
- [
- "-DCMAKE_CXX_FLAGS=-std=c++17",
- "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix),
- "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc),
- ]
- )
- if "platform=cray" in spec:
- args.extend(["-DMPI_ASSUME_NO_BUILTIN_MPI=ON"])
- archs = self.spec.variants["amdgpu_target"].value
- if archs != "none":
- arch_str = ",".join(archs)
- args.append(
- "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}"
- " -g -fsized-deallocation -fPIC -std=c++17".format(arch_str)
- )
- args.extend(
- [
- "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str,
- "-DAMDGPU_TARGETS=%s" % arch_str,
- "-DGPU_TARGETS=%s" % arch_str,
- ]
- )
+ entries.append(cmake_cache_option("H2_ENABLE_DACE", "+dace" in spec))
- if self.spec.satisfies("^essl"):
- # IF IBM ESSL is used it needs help finding the proper LAPACK libraries
- args.extend(
- [
- "-DLAPACK_LIBRARIES=%s;-llapack;-lblas"
- % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names),
- "-DBLAS_LIBRARIES=%s;-lblas"
- % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names),
- ]
- )
+ # DistConv options
+ entries.append(cmake_cache_option("H2_ENABLE_ALUMINUM", "+distconv" in spec))
+ entries.append(cmake_cache_option("H2_ENABLE_DISTCONV_LEGACY", "+distconv" in spec))
+ entries.append(cmake_cache_option("H2_ENABLE_OPENMP", "+distconv" in spec))
- return args
+ # Paths to stuff, just in case. CMAKE_PREFIX_PATH should catch
+ # all this, but this shouldn't hurt to have.
+ entries.append(cmake_cache_path("spdlog_ROOT", spec["spdlog"].prefix))
+
+ if "+developer" in spec:
+ entries.append(cmake_cache_path("Catch2_ROOT", spec["catch2"].prefix))
+
+ if "+coverage" in spec:
+ entries.append(cmake_cache_path("lcov_ROOT", spec["lcov"].prefix))
+ entries.append(cmake_cache_path("genhtml_ROOT", spec["lcov"].prefix))
+ if "+ci" in spec:
+ entries.append(cmake_cache_path("gcovr_ROOT", spec["py-gcovr"].prefix))
+
+ if "+distconv" in spec:
+ entries.append(cmake_cache_path("Aluminum_ROOT", spec["aluminum"].prefix))
+ if "+cuda" in spec:
+ entries.append(cmake_cache_path("cuDNN_ROOT", spec["cudnn"].prefix))
+
+ # Currently this is a hack for all Hydrogen versions. WIP to
+ # fix this at develop.
+ entries.extend(get_blas_entries(spec))
+ return entries
def setup_build_environment(self, env):
if self.spec.satisfies("%apple-clang +openmp"):
diff --git a/var/spack/repos/builtin/packages/discotec/package.py b/var/spack/repos/builtin/packages/discotec/package.py
index 7693f97c83c805..31b03bb7661283 100644
--- a/var/spack/repos/builtin/packages/discotec/package.py
+++ b/var/spack/repos/builtin/packages/discotec/package.py
@@ -18,6 +18,7 @@ class Discotec(CMakePackage):
version("main", branch="main")
+ variant("compression", default=False, description="Write sparse grid files compressed")
variant("ft", default=False, description="DisCoTec with algorithm-based fault tolerance")
variant("gene", default=False, description="Build for GENE (as task library)")
variant("hdf5", default=True, description="Interpolation output with HDF5")
@@ -31,6 +32,7 @@ class Discotec(CMakePackage):
depends_on("cmake@3.24.2:", type="build")
depends_on("glpk")
depends_on("highfive+mpi+boost+ipo", when="+hdf5")
+ depends_on("lz4", when="+compression")
depends_on("mpi")
depends_on("selalib", when="+selalib")
depends_on("vtk", when="+vtk")
@@ -38,6 +40,7 @@ class Discotec(CMakePackage):
def cmake_args(self):
args = [
self.define("DISCOTEC_BUILD_MISSING_DEPS", False),
+ self.define_from_variant("DISCOTEC_WITH_COMPRESSION", "compression"),
self.define_from_variant("DISCOTEC_ENABLEFT", "ft"),
self.define_from_variant("DISCOTEC_GENE", "gene"),
self.define_from_variant("DISCOTEC_OPENMP", "openmp"),
diff --git a/var/spack/repos/builtin/packages/dla-future/package.py b/var/spack/repos/builtin/packages/dla-future/package.py
index 5b3a3215f39c0a..c360aa30784bb1 100644
--- a/var/spack/repos/builtin/packages/dla-future/package.py
+++ b/var/spack/repos/builtin/packages/dla-future/package.py
@@ -14,6 +14,10 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/eth-cscs/DLA-Future.git"
maintainers = ["rasolca", "albestro", "msimberg", "aurianer"]
+ license("BSD-3-Clause")
+
+ version("0.3.1", sha256="350a7fd216790182aa52639a3d574990a9d57843e02b92d87b854912f4812bfe")
+ version("0.3.0", sha256="9887ac0b466ca03d704a8738bc89e68550ed33509578c576390e98e76b64911b")
version("0.2.1", sha256="4c2669d58f041304bd618a9d69d9879a42e6366612c2fc932df3894d0326b7fe")
version("0.2.0", sha256="da73cbd1b88287c86d84b1045a05406b742be924e65c52588bbff200abd81a10")
version("0.1.0", sha256="f7ffcde22edabb3dc24a624e2888f98829ee526da384cd752b2b271c731ca9b1")
@@ -42,9 +46,12 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
depends_on("cmake@3.22:", type="build")
depends_on("doxygen", type="build", when="+doc")
depends_on("mpi")
+
+ depends_on("blas")
+ depends_on("lapack")
+ depends_on("scalapack", when="+scalapack")
depends_on("blaspp@2022.05.00:")
depends_on("lapackpp@2022.05.00:")
- depends_on("scalapack", when="+scalapack")
depends_on("umpire~examples")
depends_on("umpire~cuda", when="~cuda")
@@ -55,8 +62,9 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
depends_on("pika@0.15.1:", when="@0.1")
depends_on("pika@0.16:", when="@0.2.0")
- depends_on("pika@0.17:", when="@0.2.1:")
- depends_on("pika-algorithms@0.1:")
+ depends_on("pika@0.17:", when="@0.2.1")
+ depends_on("pika@0.18:", when="@0.3.0:")
+ depends_on("pika-algorithms@0.1:", when="@:0.2")
depends_on("pika +mpi")
depends_on("pika +cuda", when="+cuda")
depends_on("pika +rocm", when="+rocm")
@@ -107,7 +115,7 @@ def cmake_args(self):
args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared"))
# BLAS/LAPACK
- if "^mkl" in spec:
+ if self.spec["lapack"].name in INTEL_MATH_LIBRARIES:
vmap = {
"none": "seq",
"openmp": "omp",
diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py
index 6fe97263834fe5..a3e5899f8a6d4e 100644
--- a/var/spack/repos/builtin/packages/doxygen/package.py
+++ b/var/spack/repos/builtin/packages/doxygen/package.py
@@ -18,6 +18,8 @@ class Doxygen(CMakePackage):
homepage = "https://www.doxygen.org"
url = "https://github.com/doxygen/doxygen/archive/refs/tags/Release_1_9_5.tar.gz"
+ version("1.9.8", sha256="77371e8a58d22d5e03c52729844d1043e9cbf8d0005ec5112ffa4c8f509ddde8")
+ version("1.9.7", sha256="691777992a7240ed1f822a5c2ff2c4273b57c1cf9fc143553d87f91a0c5970ee")
version("1.9.6", sha256="2a3ee47f7276b759f74bac7614c05a1296a5b028d3f6a79a88e4c213db78e7dc")
version("1.9.5", sha256="1c5c9cd4445f694e43f089c17529caae6fe889b732fb0b145211025a1fcda1bb")
version("1.9.4", sha256="1b083d15b29817463129ae1ae73b930d883030eeec090ea7a99b3a04fdb51c76")
diff --git a/var/spack/repos/builtin/packages/draco/package.py b/var/spack/repos/builtin/packages/draco/package.py
index c81c124257acb6..dbe118e550f871 100644
--- a/var/spack/repos/builtin/packages/draco/package.py
+++ b/var/spack/repos/builtin/packages/draco/package.py
@@ -41,12 +41,6 @@ class Draco(CMakePackage):
version("6.20.1", sha256="b1c51000c9557e0818014713fce70d681869c50ed9c4548dcfb2e9219c354ebe")
version("6.20.0", sha256="a6e3142c1c90b09c4ff8057bfee974369b815122b01d1f7b57888dcb9b1128f6")
- variant(
- "build_type",
- default="Release",
- description="CMake build type",
- values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
- )
variant("caliper", default=False, description="Enable caliper timers support")
variant("cuda", default=False, description="Enable Cuda/GPU support")
variant("eospac", default=True, description="Enable EOSPAC support")
diff --git a/var/spack/repos/builtin/packages/dwz/package.py b/var/spack/repos/builtin/packages/dwz/package.py
index cb1b1f5e2accc3..feb805ee6917ee 100644
--- a/var/spack/repos/builtin/packages/dwz/package.py
+++ b/var/spack/repos/builtin/packages/dwz/package.py
@@ -13,8 +13,6 @@ class Dwz(MakefilePackage, SourcewarePackage):
sourceware_mirror_path = "dwz/releases/dwz-0.14.tar.gz"
git = "git://sourceware.org/git/dwz.git"
- maintainers("iarspider")
-
depends_on("elf")
version("0.14-patches", branch="dwz-0.14-branch")
diff --git a/var/spack/repos/builtin/packages/dxt-explorer/package.py b/var/spack/repos/builtin/packages/dxt-explorer/package.py
index 4f7df14c186af9..90ef64818346c4 100644
--- a/var/spack/repos/builtin/packages/dxt-explorer/package.py
+++ b/var/spack/repos/builtin/packages/dxt-explorer/package.py
@@ -26,5 +26,5 @@ class DxtExplorer(PythonPackage):
depends_on("darshan-util", type=("run"))
- depends_on("python@3.6:", type=("build", "run"))
+ depends_on("py-setuptools", type="build")
depends_on("py-pandas", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/eccodes/package.py b/var/spack/repos/builtin/packages/eccodes/package.py
index 8d18055a4c5197..1c24effccc8430 100644
--- a/var/spack/repos/builtin/packages/eccodes/package.py
+++ b/var/spack/repos/builtin/packages/eccodes/package.py
@@ -81,17 +81,9 @@ class Eccodes(CMakePackage):
variant("shared", default=True, description="Build shared versions of the libraries")
variant(
- "definitions",
- values=disjoint_sets(("auto",), ("default",) + tuple(_definitions.keys())).with_default(
- "auto"
- ),
- description="List of definitions to install",
- )
-
- variant(
- "samples",
- values=disjoint_sets(("auto",), ("default",)).with_default("auto"),
- description="List of samples to install",
+ "extra_definitions",
+ values=any_combination_of(*_definitions.keys()),
+ description="List of extra definitions to install",
)
depends_on("netcdf-c", when="+netcdf")
@@ -132,7 +124,7 @@ class Eccodes(CMakePackage):
for center, definitions in _definitions.items():
kwargs = definitions.get("conflicts", None)
if kwargs:
- conflicts("definitions={0}".format(center), **kwargs)
+ conflicts("extra_definitions={0}".format(center), **kwargs)
for kwargs in definitions.get("resources", []):
resource(
name=center,
@@ -357,25 +349,12 @@ def cmake_args(self):
if "+memfs" in self.spec:
args.append(self.define("PYTHON_EXECUTABLE", python.path))
- definitions = self.spec.variants["definitions"].value
-
- if "auto" not in definitions:
- args.append(
- self.define("ENABLE_INSTALL_ECCODES_DEFINITIONS", "default" in definitions)
- )
-
- samples = self.spec.variants["samples"].value
-
- if "auto" not in samples:
- args.append(self.define("ENABLE_INSTALL_ECCODES_SAMPLES", "default" in samples))
-
return args
@run_after("install")
def install_extra_definitions(self):
- noop = set(["auto", "none", "default"])
- for center in self.spec.variants["definitions"].value:
- if center not in noop:
+ for center in self.spec.variants["extra_definitions"].value:
+ if center != "none":
center_dir = "definitions.{0}".format(center)
install_tree(
join_path(self.stage.source_path, "spack-definitions", center_dir),
diff --git a/var/spack/repos/builtin/packages/ecflow/package.py b/var/spack/repos/builtin/packages/ecflow/package.py
index 05519fc04d4963..7fa77e386a0e2d 100644
--- a/var/spack/repos/builtin/packages/ecflow/package.py
+++ b/var/spack/repos/builtin/packages/ecflow/package.py
@@ -23,7 +23,8 @@ class Ecflow(CMakePackage):
maintainers("climbfuji", "AlexanderRichert-NOAA")
- # https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-5.8.3-Source.tar.gz?api=v2
+ version("5.11.4", sha256="4836a876277c9a65a47a3dc87cae116c3009699f8a25bab4e3afabf160bcf212")
+ version("5.8.4", sha256="bc628556f8458c269a309e4c3b8d5a807fae7dfd415e27416fe9a3f544f88951")
version("5.8.3", sha256="1d890008414017da578dbd5a95cb1b4d599f01d5a3bb3e0297fe94a87fbd81a6")
version("4.13.0", sha256="c743896e0ec1d705edd2abf2ee5a47f4b6f7b1818d8c159b521bdff50a403e39")
version("4.12.0", sha256="566b797e8d78e3eb93946b923ef540ac61f50d4a17c9203d263c4fd5c39ab1d1")
@@ -99,6 +100,9 @@ def cmake_args(self):
ssllibs = ";".join(spec["openssl"].libs + spec["zlib"].libs)
args.append(self.define("OPENSSL_CRYPTO_LIBRARY", ssllibs))
+ if self.spec.satisfies("@5.8.3:"):
+ args.append("-DCMAKE_CXX_FLAGS=-DBOOST_NO_CXX98_FUNCTION_BASE")
+
return args
# A recursive link in the ecflow source code causes the binary cache
diff --git a/var/spack/repos/builtin/packages/ecmwf-atlas/package.py b/var/spack/repos/builtin/packages/ecmwf-atlas/package.py
index 00fce1ec788247..d764f8e1bda484 100644
--- a/var/spack/repos/builtin/packages/ecmwf-atlas/package.py
+++ b/var/spack/repos/builtin/packages/ecmwf-atlas/package.py
@@ -76,8 +76,8 @@ def cmake_args(self):
@when("+fismahigh")
def patch(self):
- filter_file("http://www\.ecmwf\.int", "", "cmake/atlas-import.cmake.in") # noqa: W605
- filter_file("int\.ecmwf", "", "cmake/atlas-import.cmake.in") # noqa: W605
+ filter_file("http://www.ecmwf.int", "", "cmake/atlas-import.cmake.in", string=True)
+ filter_file("int.ecmwf", "", "cmake/atlas-import.cmake.in", string=True)
filter_file('http[^"]+', "", "cmake/atlas_export.cmake")
patterns = [".travis.yml", "tools/install*.sh", "tools/github-sha.sh"]
for pattern in patterns:
diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py
index 189515b05638eb..f23a736569f24a 100644
--- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py
+++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py
@@ -102,7 +102,7 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage):
amdgpu_target_variants = ["amdgpu_target={0}".format(x) for x in ROCmPackage.amdgpu_targets]
dav_sdk_depends_on(
- "adios2+shared+mpi+python+blosc+sst+ssc+dataman",
+ "adios2+shared+mpi+python+sst+dataman",
when="+adios2",
propagate=["cuda", "hdf5", "sz", "zfp", "fortran"] + cuda_arch_variants,
)
diff --git a/var/spack/repos/builtin/packages/edm4hep/package.py b/var/spack/repos/builtin/packages/edm4hep/package.py
index a0204eeefd678a..04176d572a9967 100644
--- a/var/spack/repos/builtin/packages/edm4hep/package.py
+++ b/var/spack/repos/builtin/packages/edm4hep/package.py
@@ -14,11 +14,13 @@ class Edm4hep(CMakePackage):
url = "https://github.com/key4hep/EDM4hep/archive/v00-01.tar.gz"
git = "https://github.com/key4hep/EDM4hep.git"
- maintainers("vvolkl", "jmcarcell")
+ maintainers("vvolkl", "jmcarcell", "tmadlener")
tags = ["hep", "key4hep"]
version("master", branch="master")
+ version("0.10.2", sha256="c22c5c2f0fd1d09da9b734c1fa7ee546675fd2b047406db6ab8266e7657486d2")
+ version("0.10.1", sha256="28a3bd4df899309b14ec0d441f8b6ed0065206a08a0018113bb490e9d008caed")
version("0.10", sha256="a95c917c19793cfad6b0959854a653c5ce698c965598cabd649d544da07712c0")
version(
"0.9",
diff --git a/var/spack/repos/builtin/packages/elbencho/package.py b/var/spack/repos/builtin/packages/elbencho/package.py
index a9417a4dfc135a..12562f97f30dbb 100644
--- a/var/spack/repos/builtin/packages/elbencho/package.py
+++ b/var/spack/repos/builtin/packages/elbencho/package.py
@@ -16,9 +16,13 @@ class Elbencho(MakefilePackage):
homepage = "https://github.com/breuner/elbencho"
url = "https://github.com/breuner/elbencho/archive/refs/tags/v3.0-1.tar.gz"
+ git = "https://github.com/breuner/elbencho.git"
maintainers("ethanjjjjjjj")
+ version("master", branch="master")
+
+ version("3.0-3", sha256="5769abcdaebefe2984ac3053fb6e91a54e1863d5ea8f72daea830e10b27c0eaf")
version("3.0-1", sha256="19dad85e1fc74419dcdf740f11a47d3f6d566770a06e40976755a3404566c11d")
version("2.2-5", sha256="4b598639452665a8b79c4c9d8a22ae63fb9b04057635a45e686aa3939ee255b4")
version("2.2-3", sha256="0ae2d495d2863b84f21f55b7c526674fab1be723d0697087017946647f79d0e6")
diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py
index 15f0b11130e7bf..d20cc09fdb826f 100644
--- a/var/spack/repos/builtin/packages/elfutils/package.py
+++ b/var/spack/repos/builtin/packages/elfutils/package.py
@@ -25,6 +25,7 @@ class Elfutils(AutotoolsPackage, SourcewarePackage):
maintainers("mwkrentel")
+ version("0.190", sha256="8e00a3a9b5f04bc1dc273ae86281d2d26ed412020b391ffcc23198f10231d692")
version("0.189", sha256="39bd8f1a338e2b7cd4abc3ff11a0eddc6e690f69578a57478d8179b4148708c8")
version("0.188", sha256="fb8b0e8d0802005b9a309c60c1d8de32dd2951b56f0c3a3cb56d21ce01595dff")
version("0.187", sha256="e70b0dfbe610f90c4d1fe0d71af142a4e25c3c4ef9ebab8d2d72b65159d454c8")
diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py
index 30061eb9eff857..2f107bb6642bc3 100644
--- a/var/spack/repos/builtin/packages/elpa/package.py
+++ b/var/spack/repos/builtin/packages/elpa/package.py
@@ -132,16 +132,7 @@ def configure_args(self):
options.append("--enable-generic")
if self.compiler.name == "gcc":
- gcc_options = []
- gfortran_options = ["-ffree-line-length-none"]
-
- space_separator = " "
- options.extend(
- [
- "CFLAGS=" + space_separator.join(gcc_options),
- "FCFLAGS=" + space_separator.join(gfortran_options),
- ]
- )
+ options.extend(["CFLAGS=-O3", "FCFLAGS=-O3 -ffree-line-length-none"])
if "%aocc" in spec:
options.extend(["FCFLAGS=-O3", "CFLAGS=-O3"])
diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py
index 50c2d11a015a0d..38bcd9b3bac08b 100644
--- a/var/spack/repos/builtin/packages/environment-modules/package.py
+++ b/var/spack/repos/builtin/packages/environment-modules/package.py
@@ -59,13 +59,15 @@ class EnvironmentModules(Package):
variant("X", default=True, description="Build with X functionality")
- depends_on("autoconf", type="build", when="@main")
- depends_on("automake", type="build", when="@main")
- depends_on("libtool", type="build", when="@main")
- depends_on("m4", type="build", when="@main")
- depends_on("python", type="build", when="@main")
- depends_on("py-sphinx@1.0:", type="build", when="@main")
- depends_on("gzip", type="build", when="@main")
+ depends_on("less", type=("build", "run"), when="@4.1:")
+ with when("@main"):
+ depends_on("autoconf", type="build")
+ depends_on("automake", type="build")
+ depends_on("libtool", type="build")
+ depends_on("m4", type="build")
+ depends_on("python", type="build")
+ depends_on("py-sphinx@1.0:", type="build")
+ depends_on("gzip", type="build")
# Dependencies:
depends_on("tcl", type=("build", "link", "run"))
@@ -135,6 +137,9 @@ def install(self, spec, prefix):
]
)
+ if self.spec.satisfies("@4.1:"):
+ config_args.append(f"--with-pager={str(self.spec['less'].prefix.bin.less)}")
+
configure(*config_args)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/epics-base/package.py b/var/spack/repos/builtin/packages/epics-base/package.py
index 9aae46a46bea54..e3ef52ffe3b3c9 100644
--- a/var/spack/repos/builtin/packages/epics-base/package.py
+++ b/var/spack/repos/builtin/packages/epics-base/package.py
@@ -26,6 +26,7 @@ class EpicsBase(MakefilePackage):
def patch(self):
filter_file(r"^\s*CC\s*=.*", "CC = " + spack_cc, "configure/CONFIG.gnuCommon")
filter_file(r"^\s*CCC\s*=.*", "CCC = " + spack_cxx, "configure/CONFIG.gnuCommon")
+ filter_file(r"\$\(PERL\)\s+\$\(XSUBPP\)", "$(XSUBPP)", "modules/ca/src/perl/Makefile")
@property
def install_targets(self):
diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py
index 2d9cde52f7ffa1..7cf4fb67675d24 100644
--- a/var/spack/repos/builtin/packages/esmf/package.py
+++ b/var/spack/repos/builtin/packages/esmf/package.py
@@ -28,7 +28,8 @@ class Esmf(MakefilePackage):
# Develop is a special name for spack and is always considered the newest version
version("develop", branch="develop")
- # generate chksum with spack checksum esmf@x.y.z
+ # generate chksum with 'spack checksum esmf@x.y.z'
+ version("8.6.0", sha256="ed057eaddb158a3cce2afc0712b49353b7038b45b29aee86180f381457c0ebe7")
version("8.5.0", sha256="acd0b2641587007cc3ca318427f47b9cae5bfd2da8d2a16ea778f637107c29c4")
version("8.4.2", sha256="969304efa518c7859567fa6e65efd960df2b4f6d72dbf2c3f29e39e4ab5ae594")
version("8.4.1", sha256="1b54cee91aacaa9df400bd284614cbb0257e175f6f3ec9977a2d991ed8aa1af6")
diff --git a/var/spack/repos/builtin/packages/exa/package.py b/var/spack/repos/builtin/packages/exa/package.py
index 8488f3b3a11438..21be99414a934d 100644
--- a/var/spack/repos/builtin/packages/exa/package.py
+++ b/var/spack/repos/builtin/packages/exa/package.py
@@ -6,17 +6,22 @@
from spack.package import *
-class Exa(Package):
- """exa is a replacement for ls written in Rust."""
+class Exa(CargoPackage):
+ """DEPRECATED: The exa upstream is no longer maintained, see the eza package for a
+ replacement.
+
+ exa is a replacement for ls written in Rust."""
homepage = "https://the.exa.website"
url = "https://github.com/ogham/exa/archive/v0.9.0.tar.gz"
- version("0.10.1", sha256="ff0fa0bfc4edef8bdbbb3cabe6fdbd5481a71abbbcc2159f402dea515353ae7c")
- version("0.9.0", sha256="96e743ffac0512a278de9ca3277183536ee8b691a46ff200ec27e28108fef783")
-
- depends_on("rust")
-
- def install(self, spec, prefix):
- cargo = which("cargo")
- cargo("install", "--root", prefix, "--path", ".")
+ version(
+ "0.10.1",
+ sha256="ff0fa0bfc4edef8bdbbb3cabe6fdbd5481a71abbbcc2159f402dea515353ae7c",
+ deprecated=True,
+ )
+ version(
+ "0.9.0",
+ sha256="96e743ffac0512a278de9ca3277183536ee8b691a46ff200ec27e28108fef783",
+ deprecated=True,
+ )
diff --git a/var/spack/repos/builtin/packages/exago/exago-1.6.0.patch b/var/spack/repos/builtin/packages/exago/exago-1.6.0.patch
new file mode 100644
index 00000000000000..f6b03cec8eb6a1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/exago/exago-1.6.0.patch
@@ -0,0 +1,18 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 90bddf2e..47ab5998 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -217,11 +217,13 @@ endif(EXAGO_ENABLE_GPU)
+ if(EXAGO_ENABLE_PETSC)
+ include(FindPkgConfig)
+ # Include petsc package path in pkg_config_path
++ set(PKG_CONFIG_PATH_save $ENV{PKG_CONFIG_PATH})
+ set(ENV{PKG_CONFIG_PATH}
+ ${PETSC_DIR}/lib/pkgconfig:${PETSC_DIR}/${PETSC_ARCH}/lib/pkgconfig
+ )
+ pkg_check_modules(PETSC REQUIRED IMPORTED_TARGET PETSc)
+ set(EXAGO_HAVE_PETSC 1)
++ set(ENV{PKG_CONFIG_PATH} ${PKG_CONFIG_PATH_save})
+ endif()
+
+ # Set install rpath to the locations where EXAGO and PETSc libraries reside.
diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py
index d28b4fa1f82018..abe7f2605a1c29 100644
--- a/var/spack/repos/builtin/packages/exago/package.py
+++ b/var/spack/repos/builtin/packages/exago/package.py
@@ -17,22 +17,40 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/pnnl/ExaGO.git"
maintainers("ryandanehy", "cameronrutherford", "pelesh")
- version("1.5.1", commit="7abe482c8da0e247f9de4896f5982c4cacbecd78", submodules=True)
- version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True)
- version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True)
- version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True)
- version("1.3.0", commit="58b039d746a6eac8e84b0afc01354cd58caec485", submodules=True)
- version("1.2.0", commit="255a214ec747b7bdde7a6d8151c083067b4d0907", submodules=True)
- version("1.1.2", commit="db3bb16e19c09e01402071623258dae4d13e5133", submodules=True)
- version("1.1.1", commit="0e0a3f27604876749d47c06ec71daaca4b270df9", submodules=True)
- version("1.1.0", commit="dc8dd85544ff1b55a64a3cbbbdf12b8a0c6fdaf6", submodules=True)
- version("1.0.0", commit="230d7df2f384f68b952a1ea03aad41431eaad283")
- version("0.99.2", commit="56961641f50827b3aa4c14524f2f978dc48b9ce5")
- version("0.99.1", commit="0ae426c76651ba5a9dbcaeb95f18d1b8ba961690")
+ version(
+ "1.6.0", tag="v1.6.0", commit="159cd173572280ac0f6f094a71dcc3ebeeb34076", submodules=True
+ )
+ version(
+ "1.5.1", tag="v1.5.1", commit="84e9faf9d9dad8d851075eba26038338d90e6d3a", submodules=True
+ )
+ version(
+ "1.5.0", tag="v1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True
+ )
+ version(
+ "1.4.1", tag="v1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True
+ )
+ version(
+ "1.4.0", tag="v1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True
+ )
+ version(
+ "1.3.0", tag="v1.3.0", commit="58b039d746a6eac8e84b0afc01354cd58caec485", submodules=True
+ )
+ version(
+ "1.1.2", tag="v1.1.2", commit="db3bb16e19c09e01402071623258dae4d13e5133", submodules=True
+ )
+ version(
+ "1.1.1", tag="v1.1.1", commit="0e0a3f27604876749d47c06ec71daaca4b270df9", submodules=True
+ )
+ version(
+ "1.1.0", tag="v1.1.0", commit="dc8dd85544ff1b55a64a3cbbbdf12b8a0c6fdaf6", submodules=True
+ )
+ version("1.0.0", tag="v1.0.0", commit="230d7df2f384f68b952a1ea03aad41431eaad283")
+ version("0.99.2", tag="v0.99.2", commit="56961641f50827b3aa4c14524f2f978dc48b9ce5")
+ version("0.99.1", tag="v0.99.1", commit="0ae426c76651ba5a9dbcaeb95f18d1b8ba961690")
version("main", branch="main", submodules=True)
version("develop", branch="develop", submodules=True)
version(
- "5-18-2022-snapshot",
+ "snapshot.5-18-2022",
tag="5-18-2022-snapshot",
commit="3eb58335db71bb72341153a7867eb607402067ca",
submodules=True,
@@ -44,10 +62,13 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage):
variant("raja", default=False, description="Enable/Disable RAJA")
variant("python", default=True, when="@1.4:", description="Enable/Disable Python bindings")
variant("logging", default=True, description="Enable/Disable spdlog based logging")
+
conflicts(
"+python", when="+ipopt+rocm", msg="Python bindings require -fPIC with Ipopt for rocm."
)
- variant("logging", default=False, description="Enable/Disable spdlog based logging")
+
+ # Adds ExaGO's python wrapper to PYTHONPATH
+ extends("python", when="+python")
# Solver options
variant("hiop", default=False, description="Enable/Disable HiOp")
@@ -62,9 +83,14 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage):
"~hiop~ipopt+python @:1.5.0",
msg="ExaGO Python wrapper requires at least one solver enabled.",
)
-
+ conflicts(
+ "+hiop~mpi ^hiop@1.0.0:~mpi",
+ when="@1.5.1:1.6.1",
+ msg="#18 - builds with hiop and without MPI cause compile time errors",
+ )
+ conflicts("+python~mpi", msg="#16 - Python wrapper requires MPI enabled")
# Dependencies
- depends_on("python@3.6:", when="@1.3.0:+python")
+ depends_on("python@3.6:3.10", when="@1.3.0:1.5+python")
depends_on("py-pytest", type=("build", "run"), when="@1.5.0:+python")
depends_on("py-mpi4py", when="@1.3.0:+mpi+python")
depends_on("pkgconfig", type="build")
@@ -74,7 +100,6 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage):
depends_on("cuda", when="+cuda")
depends_on("raja", when="+raja")
depends_on("umpire", when="+raja")
-
depends_on("cmake@3.18:", type="build")
# Profiling
@@ -115,7 +140,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage):
depends_on("hiop@0.3.99:", when="@0.99:+hiop")
depends_on("hiop@0.5.1:", when="@1.1.0:+hiop")
depends_on("hiop@0.5.3:", when="@1.3.0:+hiop")
- depends_on("hiop@0.7.0:", when="@1.5.0:+hiop")
+ depends_on("hiop@0.7.0:1.0.0", when="@1.5.0:+hiop")
depends_on("hiop~mpi", when="+hiop~mpi")
depends_on("hiop+mpi", when="+hiop+mpi")
@@ -132,9 +157,10 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage):
# This is no longer a requirement in RAJA > 0.14
depends_on("umpire+cuda~shared", when="+raja+cuda ^raja@:0.14")
- depends_on("petsc@3.13:3.14", when="@:1.2.99")
- depends_on("petsc@3.16.0:3.16", when="@1.3.0:1.4")
- depends_on("petsc@3.18.0:3.19", when="@1.5.0:")
+ depends_on("petsc@3.13:3.14", when="@:1.2")
+ depends_on("petsc@3.16", when="@1.3:1.4")
+ depends_on("petsc@3.18:3.19", when="@1.5")
+ depends_on("petsc@3.20:", when="@1.6:")
depends_on("petsc~mpi", when="~mpi")
@@ -152,6 +178,8 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage):
depends_on("umpire {0}".format(rocm_dep), when="+raja {0}".format(rocm_dep))
depends_on("camp {0}".format(rocm_dep), when="+raja {0}".format(rocm_dep))
+ patch("exago-1.6.0.patch", when="@1.6.0")
+
flag_handler = build_system_flags
def cmake_args(self):
@@ -189,7 +217,6 @@ def cmake_args(self):
self.define_from_variant("EXAGO_ENABLE_HIOP", "hiop"),
self.define_from_variant("EXAGO_ENABLE_IPOPT", "ipopt"),
self.define_from_variant("EXAGO_ENABLE_PYTHON", "python"),
- self.define_from_variant("EXAGO_ENABLE_LOGGING", "logging"),
]
)
diff --git a/var/spack/repos/builtin/packages/eza/package.py b/var/spack/repos/builtin/packages/eza/package.py
new file mode 100644
index 00000000000000..df9dd5aa8ba5bb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/eza/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Eza(CargoPackage):
+ """A modern, maintained replacement for ls."""
+
+ homepage = "https://github.com/eza-community/eza"
+ url = "https://github.com/eza-community/eza/archive/refs/tags/v0.15.3.tar.gz"
+
+ maintainers("trws")
+
+ license("MIT")
+
+ version("0.15.3", sha256="09093e565913104acb7a8eba974f8067c95566b6fbedf31138c9923a8cfde42f")
diff --git a/var/spack/repos/builtin/packages/fairmq/package.py b/var/spack/repos/builtin/packages/fairmq/package.py
index 2af3ce52c1a8b6..4639e25e791ecf 100644
--- a/var/spack/repos/builtin/packages/fairmq/package.py
+++ b/var/spack/repos/builtin/packages/fairmq/package.py
@@ -14,31 +14,15 @@ class Fairmq(CMakePackage):
maintainers("dennisklein", "ChristianTackeGSI")
version("dev", branch="dev", submodules=True, get_full_repo=True)
- version(
- "1.7.0",
- tag="v1.7.0",
- commit="d1c99f7e150c1177dc1cab1b2adc16475cade24e",
- submodules=True,
- no_cache=True,
- )
- version(
- "1.6.0",
- tag="v1.6.0",
- commit="42d27af20fb5cbbbc0b0fdfef1c981d51a8baf87",
- submodules=True,
- no_cache=True,
- )
- version(
- "1.5.0",
- tag="v1.5.0",
- commit="c8fde17b6a10a467035590fd800bb693f50c4826",
- submodules=True,
- no_cache=True,
- )
- # no_cache=True is currently needed, because FairMQ's build system
- # depends on the git metadata, see also
- # https://github.com/spack/spack/issues/19972
- # https://github.com/spack/spack/issues/14344
+ with default_args(submodules=True, no_cache=True):
+ # no_cache=True is currently needed, because FairMQ's build system
+ # depends on the git metadata, see also
+ # https://github.com/spack/spack/issues/19972
+ # https://github.com/spack/spack/issues/14344
+ version("1.8.1", tag="v1.8.1", commit="961eca52761a31a0200c567b44e2b2d6d6e50df3")
+ version("1.7.0", tag="v1.7.0", commit="d1c99f7e150c1177dc1cab1b2adc16475cade24e")
+ version("1.6.0", tag="v1.6.0", commit="42d27af20fb5cbbbc0b0fdfef1c981d51a8baf87")
+ version("1.5.0", tag="v1.5.0", commit="c8fde17b6a10a467035590fd800bb693f50c4826")
variant(
"autobind", default=True, when="@1.7:", description="Override the channel autoBind default"
@@ -61,9 +45,10 @@ class Fairmq(CMakePackage):
generator("make", "ninja", default="ninja")
- depends_on("cmake@3.15:", type="build")
- depends_on("faircmakemodules", type="build")
- depends_on("git", type="build")
+ with default_args(type="build"):
+ depends_on("cmake@3.15:")
+ depends_on("faircmakemodules")
+ depends_on("git")
depends_on("boost@1.66: +container+program_options+filesystem+date_time+regex")
depends_on("fairlogger@1.6: +pretty")
@@ -72,6 +57,7 @@ class Fairmq(CMakePackage):
def cmake_args(self):
args = [
self.define("DISABLE_COLOR", True),
+ self.define("BUILD_TESTING", self.run_tests),
self.define_from_variant("BUILD_EXAMPLES", "examples"),
self.define_from_variant("FAIRMQ_CHANNEL_DEFAULT_AUTOBIND", "autobind"),
]
diff --git a/var/spack/repos/builtin/packages/fdb/package.py b/var/spack/repos/builtin/packages/fdb/package.py
index c6a2d6aa66b298..3beb0efbfb19da 100644
--- a/var/spack/repos/builtin/packages/fdb/package.py
+++ b/var/spack/repos/builtin/packages/fdb/package.py
@@ -16,8 +16,9 @@ class Fdb(CMakePackage):
maintainers("skosukhin")
- # master version of fdb is subject to frequent changes and is to be used experimentally.
version("master", branch="master")
+ version("5.11.23", sha256="09b1d93f2b71d70c7b69472dfbd45a7da0257211f5505b5fcaf55bfc28ca6c65")
+ version("5.11.17", sha256="375c6893c7c60f6fdd666d2abaccb2558667bd450100817c0e1072708ad5591e")
version("5.10.8", sha256="6a0db8f98e13c035098dd6ea2d7559f883664cbf9cba8143749539122ac46099")
version("5.7.8", sha256="6adac23c0d1de54aafb3c663d077b85d0f804724596623b381ff15ea4a835f60")
@@ -39,8 +40,10 @@ class Fdb(CMakePackage):
depends_on("cmake@3.12:", type="build")
depends_on("ecbuild@3.4:", type="build")
+ depends_on("ecbuild@3.7:", type="build", when="@5.11.6:")
depends_on("eckit@1.16:")
+ depends_on("eckit@1.24.4:", when="@5.11.22:")
depends_on("eckit+admin", when="+tools")
depends_on("eccodes@2.10:")
diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py
index bb150b154dc9b8..7e698119a4c07a 100644
--- a/var/spack/repos/builtin/packages/flux-core/package.py
+++ b/var/spack/repos/builtin/packages/flux-core/package.py
@@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage):
maintainers("grondo")
version("master", branch="master")
+ version("0.56.0", sha256="dfce5aa21bcb1f990397343cdff8a60542b2d18cbd929e46bdb444d21a961efb")
version("0.55.0", sha256="2925b8a084e9d1069a96de7689b515ad6f2051ecfb9fbbe4d2643507de7ccd30")
version("0.54.0", sha256="721fc3fff64b3b167ae55d0e29379ff3211729248ef97e3b9855816219063b42")
version("0.53.0", sha256="2f14d032a2d54f34e066c8a15c79917089e9f7f8558baa03dbfe63dbf56918b7")
diff --git a/var/spack/repos/builtin/packages/flux-sched/package.py b/var/spack/repos/builtin/packages/flux-sched/package.py
index 8c8b7b617c800d..c0a2e0e6085135 100644
--- a/var/spack/repos/builtin/packages/flux-sched/package.py
+++ b/var/spack/repos/builtin/packages/flux-sched/package.py
@@ -6,11 +6,13 @@
import os
import spack.util.executable
+from spack.build_systems.autotools import AutotoolsBuilder
+from spack.build_systems.cmake import CMakeBuilder
from spack.package import *
-class FluxSched(AutotoolsPackage):
- """A scheduler for flux-core (pre-alpha)"""
+class FluxSched(CMakePackage, AutotoolsPackage):
+ """A scheduler for flux-core"""
homepage = "https://github.com/flux-framework/flux-sched"
url = "https://github.com/flux-framework/flux-sched/releases/download/v0.5.0/flux-sched-0.5.0.tar.gz"
@@ -20,6 +22,8 @@ class FluxSched(AutotoolsPackage):
maintainers("grondo")
version("master", branch="master")
+ version("0.30.0", sha256="1ccb2e53f4caede0233f19b2707e868f0cee9d2c957a06f97c22936ba9a43552")
+ version("0.29.0", sha256="b93b18788e677535aa8ef945cdbeeced6d1408a4d16cb4a816ead53f31dd78d2")
version("0.28.0", sha256="9431c671bed5d76fd95b4a4a7f36224d4bf76f416a2a1a5c4908f3ca790d434d")
version("0.27.0", sha256="1e131924440c904fa0c925b7aa14c47b97f4e67b43af7efd2ebc0ef7ce90eb7c")
version("0.26.0", sha256="184faec800cf45952ef79bda113f710bf91a05be584034d36a3234627d4a54c7")
@@ -81,6 +85,18 @@ class FluxSched(AutotoolsPackage):
depends_on("automake", type="build", when="@master")
depends_on("libtool", type="build", when="@master")
+ # Set default to cmake so master (and branches) use it
+ build_system(
+ conditional("cmake", when="@0.29.0:"),
+ conditional("autotools", when="@:0.28.0"),
+ default="cmake",
+ )
+
+ # Required dependencies
+ with when("build_system=cmake"):
+ generator("ninja")
+ depends_on("cmake@3.18:", type="build")
+
# Disable t5000-valgrind.t by default due to false positives not yet
# in the suppressions file. (This patch will be in v0.21.0)
patch("no-valgrind.patch", when="@:0.20.0")
@@ -136,19 +152,6 @@ def patch(self):
filter_file("NULL", "nullptr", "resource/schema/sched_data.hpp")
filter_file("size_t", "std::size_t", "resource/planner/planner.h")
- def configure_args(self):
- args = []
- if self.spec.satisfies("@0.9.0:"):
- args.append("CXXFLAGS=-Wno-uninitialized")
- if self.spec.satisfies("%clang@12:"):
- args.append("CXXFLAGS=-Wno-defaulted-function-deleted")
- if self.spec.satisfies("%oneapi"):
- args.append("CXXFLAGS=-Wno-tautological-constant-compare")
- # flux-sched's ax_boost is sometimes weird about non-system locations
- # explicitly setting the path guarantees success
- args.append("--with-boost={0}".format(self.spec["boost"].prefix))
- return args
-
@property
def lua_version(self):
return self.spec["lua"].version.up_to(2)
@@ -173,3 +176,23 @@ def setup_run_environment(self, env):
env.prepend_path("FLUX_MODULE_PATH", self.prefix.lib.flux.modules.sched)
env.prepend_path("FLUX_EXEC_PATH", self.prefix.libexec.flux.cmd)
env.prepend_path("FLUX_RC_EXTRA", self.prefix.etc.flux)
+
+
+class CMakeBuilder(CMakeBuilder):
+ def cmake_args(self):
+ return []
+
+
+class AutotoolsBuilder(AutotoolsBuilder):
+ def configure_args(self):
+ args = []
+ if self.spec.satisfies("@0.9.0:"):
+ args.append("CXXFLAGS=-Wno-uninitialized")
+ if self.spec.satisfies("%clang@12:"):
+ args.append("CXXFLAGS=-Wno-defaulted-function-deleted")
+ if self.spec.satisfies("%oneapi"):
+ args.append("CXXFLAGS=-Wno-tautological-constant-compare")
+ # flux-sched's ax_boost is sometimes weird about non-system locations
+ # explicitly setting the path guarantees success
+ args.append("--with-boost={0}".format(self.spec["boost"].prefix))
+ return args
diff --git a/var/spack/repos/builtin/packages/flux-security/package.py b/var/spack/repos/builtin/packages/flux-security/package.py
index 71f0081f7125be..dd87223e663132 100644
--- a/var/spack/repos/builtin/packages/flux-security/package.py
+++ b/var/spack/repos/builtin/packages/flux-security/package.py
@@ -20,6 +20,7 @@ class FluxSecurity(AutotoolsPackage):
maintainers("grondo")
version("master", branch="master")
+ version("0.11.0", sha256="d1ef78a871155a252f07e4f0a636eb272d6c2048d5e0e943860dd687c6cf808a")
version("0.10.0", sha256="b0f39c5e32322f901454469ffd6154019b6dffafc064b55b3e593f70db6a6f68")
version("0.9.0", sha256="2258120c6f32ca0b5b13b166bae56d9bd82a44c6eeaa6bc6187e4a4419bdbcc0")
version("0.8.0", sha256="9963628063b4abdff6bece03208444c8f23fbfda33c20544c48b21e9f4819ce2")
diff --git a/var/spack/repos/builtin/packages/fmt/package.py b/var/spack/repos/builtin/packages/fmt/package.py
index ea7abc909284ec..d88823705d2efa 100644
--- a/var/spack/repos/builtin/packages/fmt/package.py
+++ b/var/spack/repos/builtin/packages/fmt/package.py
@@ -15,6 +15,8 @@ class Fmt(CMakePackage):
url = "https://github.com/fmtlib/fmt/releases/download/7.1.3/fmt-7.1.3.zip"
maintainers("msimberg")
+ license("MIT")
+
version("10.1.1", sha256="b84e58a310c9b50196cda48d5678d5fa0849bca19e5fdba6b684f0ee93ed9d1b")
version("10.1.0", sha256="d725fa83a8b57a3cedf238828fa6b167f963041e8f9f7327649bddc68ae316f4")
version("10.0.0", sha256="4943cb165f3f587f26da834d3056ee8733c397e024145ca7d2a8a96bb71ac281")
diff --git a/var/spack/repos/builtin/packages/form/package.py b/var/spack/repos/builtin/packages/form/package.py
index 13c303ca508802..2d1bed59c035e8 100644
--- a/var/spack/repos/builtin/packages/form/package.py
+++ b/var/spack/repos/builtin/packages/form/package.py
@@ -11,7 +11,7 @@ class Form(AutotoolsPackage):
homepage = "https://www.nikhef.nl/~form/"
url = "https://github.com/vermaseren/form/releases/download/v4.2.1/form-4.2.1.tar.gz"
- maintainers("iarspider", "tueda")
+ maintainers("tueda")
version("4.3.1", sha256="f1f512dc34fe9bbd6b19f2dfef05fcb9912dfb43c8368a75b796ec472ee8bbce")
version("4.3.0", sha256="b234e0d095f73ecb0904cdc3b0d8d8323a9fa7f46770a52fb22267c624aafbf6")
diff --git a/var/spack/repos/builtin/packages/fp16/package.py b/var/spack/repos/builtin/packages/fp16/package.py
index f3d535efbdccb1..5e56aec0113a74 100644
--- a/var/spack/repos/builtin/packages/fp16/package.py
+++ b/var/spack/repos/builtin/packages/fp16/package.py
@@ -14,7 +14,7 @@ class Fp16(CMakePackage):
git = "https://github.com/Maratyszcza/FP16.git"
version("master", branch="master")
- version("2020-05-14", commit="4dfe081cf6bcd15db339cf2680b9281b8451eeb3") # py-torch@1.5:1.9
+ version("2020-05-14", commit="4dfe081cf6bcd15db339cf2680b9281b8451eeb3") # py-torch@1.5:
version("2018-11-28", commit="febbb1c163726b5db24bed55cc9dc42529068997") # py-torch@1.1:1.4
version("2018-10-10", commit="34d4bf01bbf7376f2baa71b8fa148b18524d45cf") # py-torch@1.0
version("2018-02-25", commit="43d6d17df48ebf622587e7ed9472ea76573799b9") # py-torch@:0.4
@@ -29,31 +29,11 @@ class Fp16(CMakePackage):
destination="deps",
placement="psimd",
)
- resource(
- name="googletest",
- url="https://github.com/google/googletest/archive/release-1.8.0.zip",
- sha256="f3ed3b58511efd272eb074a3a6d6fb79d7c2e6a0e374323d1e6bcbcc1ef141bf",
- destination="deps",
- placement="googletest",
- )
- resource(
- name="googlebenchmark",
- url="https://github.com/google/benchmark/archive/v1.2.0.zip",
- sha256="cc463b28cb3701a35c0855fbcefb75b29068443f1952b64dd5f4f669272e95ea",
- destination="deps",
- placement="googlebenchmark",
- )
def cmake_args(self):
return [
self.define("PSIMD_SOURCE_DIR", join_path(self.stage.source_path, "deps", "psimd")),
- self.define(
- "GOOGLETEST_SOURCE_DIR", join_path(self.stage.source_path, "deps", "googletest")
- ),
- self.define(
- "GOOGLEBENCHMARK_SOURCE_DIR",
- join_path(self.stage.source_path, "deps", "googlebenchmark"),
- ),
- self.define("FP16_BUILD_TESTS", self.run_tests),
- self.define("FP16_BUILD_BENCHMARKS", self.run_tests),
+ self.define("FP16_BUILD_TESTS", False),
+ # https://github.com/Maratyszcza/FP16/issues/21
+ self.define("FP16_BUILD_BENCHMARKS", False),
]
diff --git a/var/spack/repos/builtin/packages/fpart/package.py b/var/spack/repos/builtin/packages/fpart/package.py
index 1a6d36331c935d..26e3fee0b72956 100644
--- a/var/spack/repos/builtin/packages/fpart/package.py
+++ b/var/spack/repos/builtin/packages/fpart/package.py
@@ -17,13 +17,33 @@ class Fpart(AutotoolsPackage):
maintainers("drkrynstrng")
+ license("BSD-2-Clause")
+
version("master", branch="master")
version("1.5.1", sha256="c353a28f48e4c08f597304cb4ebb88b382f66b7fabfc8d0328ccbb0ceae9220c")
+ variant("embfts", default=False, description="Build with embedded fts functions")
+ variant("static", default=False, description="Build static binary")
+ variant("debug", default=False, description="Build with debugging support")
+ # fpsync has the following run dependencies; at least one is required
+ variant(
+ "fpsynctools",
+ default="rsync",
+ values=("rsync", "tar", "cpio"),
+ multi=True,
+ description="Tools used by fpsync to copy files",
+ )
+
depends_on("autoconf", type="build")
depends_on("automake", type="build")
depends_on("libtool", type="build")
- # fpsync has the following run dependencies
- depends_on("rsync", type="run")
- depends_on("tar", type="run")
- depends_on("cpio", type="run")
+ depends_on("rsync", when="fpsynctools=rsync", type="run")
+ depends_on("tar", when="fpsynctools=tar", type="run")
+ depends_on("cpio", when="fpsynctools=cpio", type="run")
+
+ def configure_args(self):
+ config_args = []
+ config_args.extend(self.enable_or_disable("embfts"))
+ config_args.extend(self.enable_or_disable("static"))
+ config_args.extend(self.enable_or_disable("debug"))
+ return config_args
diff --git a/var/spack/repos/builtin/packages/fplo/package.py b/var/spack/repos/builtin/packages/fplo/package.py
index 2c025218ac8ac2..a9e8cbc2c2fb11 100644
--- a/var/spack/repos/builtin/packages/fplo/package.py
+++ b/var/spack/repos/builtin/packages/fplo/package.py
@@ -83,7 +83,7 @@ def edit(self, spec, prefix):
filter_file(r"^\s*F90\s*=.*", "F90=" + spack_fc, *files)
# patch for 64 bit integers
- if "^mkl+ilp64" in spec:
+ if spec["mkl"].satisfies("+ilp64"):
setuphelper = FileFilter(join_path(self.build_directory, "PYTHON", "setuphelper.py"))
setuphelper.filter("mkl 64bit integer 32bit", "mkl 64bit integer 64bit")
diff --git a/var/spack/repos/builtin/packages/freesurfer/package.py b/var/spack/repos/builtin/packages/freesurfer/package.py
index 4bf4a4a2f5ef71..0e7188db06c51a 100644
--- a/var/spack/repos/builtin/packages/freesurfer/package.py
+++ b/var/spack/repos/builtin/packages/freesurfer/package.py
@@ -3,7 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import glob
+import os
+
from spack.package import *
+from spack.util.environment import EnvironmentModifications
class Freesurfer(Package):
@@ -14,11 +18,11 @@ class Freesurfer(Package):
# A license is required, but is free to obtain.
license_required = True
- license_files = ["./license.txt"]
+ license_files = [".license"]
maintainers("robgics")
- version("7.4.1", sha256="eb6545d1ffdee17a90abd2e7dc444aa1091a6138e257f6f956a7ff214635b092")
+ version("7.4.1", sha256="313a96caeb246c5985f483633b5cf43f86ed8f7ccc6d6acfac8eedb638443010")
version("7.4.0", sha256="6b65c2edf3b88973ced0324269a88966c541f221b799337c6570c38c2f884431")
version("7.3.2", sha256="58518d3ee5abd2e05109208aed2eef145c4e3b994164df8c4e0033c1343b9e56")
version("7.2.0", sha256="4cca78602f898bf633428b9d82cbb9b07e3ab97a86c620122050803779c86d62")
@@ -27,6 +31,9 @@ class Freesurfer(Package):
depends_on("mesa-glu")
depends_on("qt")
+ depends_on("tcsh")
+ depends_on("bc")
+ depends_on("perl")
def url_for_version(self, version):
return "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/{0}/freesurfer-linux-centos7_x86_64-{1}.tar.gz".format(
@@ -34,10 +41,24 @@ def url_for_version(self, version):
)
def setup_run_environment(self, env):
+ source_file = join_path(self.prefix, "SetUpFreeSurfer.sh")
env.prepend_path("PATH", self.prefix.bin)
env.set("FREESURFER_HOME", self.prefix)
env.set("SUBJECTS_DIR", join_path(self.prefix, "subjects"))
env.set("FUNCTIONALS_DIR", join_path(self.prefix, "sessions"))
+ env.append_path("PERL5LIB", join_path(self.prefix, "mni/share/perl5"))
+ env.append_path("PATH", join_path(self.prefix, "mni/bin"))
+ env.extend(EnvironmentModifications.from_sourcing_file(source_file))
def install(self, spec, prefix):
+ scripts = ["sources.csh", "SetUpFreeSurfer.csh"]
+ scripts.extend(glob.glob("bin/*"))
+ scripts.extend(glob.glob("subjects/**/*", recursive=True))
+ scripts.extend(glob.glob("fsfast/bin/*", recursive=True))
+ scripts.extend(glob.glob("mni/bin/*", recursive=True))
+ for s in scripts:
+ if os.path.isfile(s):
+ filter_file(r"(\/usr)?(\/local?)\/bin\/tcsh", "/usr/bin/env -S tcsh", s)
+ filter_file(r"(\/usr)?(\/local?)\/bin\/csh", "/usr/bin/env -S csh", s)
+ filter_file(r"(\/usr)?(\/local)?\/bin\/perl", "/usr/bin/env -S perl", s)
install_tree(".", prefix)
diff --git a/var/spack/repos/builtin/packages/fzf/package.py b/var/spack/repos/builtin/packages/fzf/package.py
index 95362138f1ec0c..6e12b3c74c8fd2 100644
--- a/var/spack/repos/builtin/packages/fzf/package.py
+++ b/var/spack/repos/builtin/packages/fzf/package.py
@@ -17,6 +17,7 @@ class Fzf(MakefilePackage):
executables = ["^fzf$"]
+ version("0.44.1", sha256="295f3aec9519f0cf2dce67a14e94d8a743d82c19520e5671f39c71c9ea04f90c")
version("0.42.0", sha256="743c1bfc7851b0796ab73c6da7db09d915c2b54c0dd3e8611308985af8ed3df2")
version("0.41.1", sha256="982682eaac377c8a55ae8d7491fcd0e888d6c13915d01da9ebb6b7c434d7f4b5")
version("0.40.0", sha256="9597f297a6811d300f619fff5aadab8003adbcc1566199a43886d2ea09109a65")
diff --git a/var/spack/repos/builtin/packages/garfieldpp/package.py b/var/spack/repos/builtin/packages/garfieldpp/package.py
index 40671403cc7eb4..0bbdda3e3d9d8a 100644
--- a/var/spack/repos/builtin/packages/garfieldpp/package.py
+++ b/var/spack/repos/builtin/packages/garfieldpp/package.py
@@ -18,8 +18,8 @@ class Garfieldpp(CMakePackage):
maintainers("mirguest")
patch(
- "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.patch",
- sha256="440bc8129c55168e6c45d39e4344911d48ddb13fd3f9ee05974b2ede46a23b93",
+ "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.diff",
+ sha256="ea3b91d67011abe41e72c7b55578d14b77bd2ef5e7f344077091934b24f38f0d",
when="@4.0",
)
diff --git a/var/spack/repos/builtin/packages/gaudi/package.py b/var/spack/repos/builtin/packages/gaudi/package.py
index e065b649ceb003..12a095c301989a 100644
--- a/var/spack/repos/builtin/packages/gaudi/package.py
+++ b/var/spack/repos/builtin/packages/gaudi/package.py
@@ -17,6 +17,8 @@ class Gaudi(CMakePackage):
tags = ["hep"]
version("master", branch="master")
+ version("37.1", sha256="1d7038fd5dfb5f2517ce57623cf8090549ffe2ea8f0171d534e5c1ca20bd009a")
+ version("37.0", sha256="823f3821a4f498ddd2dd123fbb8a3787b361ddfd818f4ab13572076fc9afdfe4")
version("36.14", sha256="b11e0afcb797d61a305856dfe8079d48d74c6b6867ceccc0a83aab5978c9ba5f")
version("36.13", sha256="41e711c83428663996c825044b268ce515bef85dad74b4a9453f2207b4b1be7b")
version("36.12", sha256="dfce9156cedfa0a7234f880a3c395e592a5f3dc79070d5d196fdb94b83ae203e")
@@ -72,7 +74,8 @@ class Gaudi(CMakePackage):
depends_on("cppgsl")
depends_on("fmt", when="@33.2:")
depends_on("fmt@:8", when="@:36.9")
- depends_on("intel-tbb@:2020.3")
+ depends_on("intel-tbb@:2020.3", when="@:37.0")
+ depends_on("tbb", when="@37.1:")
depends_on("uuid")
depends_on("nlohmann-json", when="@35.0:")
depends_on("python", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/gbl/package.py b/var/spack/repos/builtin/packages/gbl/package.py
index 98aeb67fab3bc3..62f4bed7be196e 100644
--- a/var/spack/repos/builtin/packages/gbl/package.py
+++ b/var/spack/repos/builtin/packages/gbl/package.py
@@ -12,7 +12,6 @@ class Gbl(CMakePackage):
homepage = "https://www.desy.de/~kleinwrt/GBL/doc/cpp/html/"
git = "https://gitlab.desy.de/claus.kleinwort/general-broken-lines.git"
- maintainers("iarspider")
tags = ["hep"]
version("V02-04-01", commit="1061b643c6656fbf7ceba579997eb43f0a9e9d3c")
diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py
index 0130b6662a0939..39b9afcb08596d 100644
--- a/var/spack/repos/builtin/packages/gdal/package.py
+++ b/var/spack/repos/builtin/packages/gdal/package.py
@@ -30,6 +30,8 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
maintainers("adamjstewart")
+ version("3.8.0", sha256="ec0f78d9dc32352aeac6edc9c3b27a991b91f9dc6f92c452207d84431c58757d")
+ version("3.7.3", sha256="e0a6f0c453ea7eb7c09967f50ac49426808fcd8f259dbc9888140eb69d7ffee6")
version("3.7.2", sha256="40c0068591d2c711c699bbb734319398485ab169116ac28005d8302f80b923ad")
version("3.7.1", sha256="9297948f0a8ba9e6369cd50e87c7e2442eda95336b94d2b92ef1829d260b9a06")
version("3.7.0", sha256="af4b26a6b6b3509ae9ccf1fcc5104f7fe015ef2110f5ba13220816398365adce")
@@ -89,6 +91,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
version("2.0.0", sha256="91704fafeea2349c5e268dc1e2d03921b3aae64b05ee01d59fdfc1a6b0ffc061")
# Optional dependencies
+ variant("archive", default=False, when="@3.7:", description="Optional for vsi7z VFS driver")
variant(
"armadillo",
default=False,
@@ -136,9 +139,11 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
variant("kdu", default=False, description="Required for JP2KAK and JPIPKAK drivers")
variant("kea", default=False, description="Required for KEA driver")
variant("lerc", default=False, when="@2.4:", description="Required for LERC compression")
+ variant("libaec", default=False, when="@3.8:", description="Optional for GRIB driver")
variant("libcsf", default=False, description="Required for PCRaster driver")
variant("libkml", default=False, description="Required for LIBKML driver")
variant("liblzma", default=False, description="Required for Zarr driver")
+ variant("libqb3", default=False, when="@3.6:", description="Required for MRF driver")
variant(
"libxml2", default=False, description="Required for XML validation in many OGR drivers"
)
@@ -189,7 +194,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
)
variant("pcidsk", default=False, description="Required for PCIDSK driver")
variant(
- "pcre", default=False, description="Required for REGEXP operator in drivers using SQLite3"
+ "pcre2", default=False, description="Required for REGEXP operator in drivers using SQLite3"
)
variant("pdfium", default=False, when="@2.1:", description="Possible backend for PDF driver")
variant("png", default=True, description="Required for PNG driver")
@@ -200,7 +205,6 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
default=False,
description="Required for PostgreSQL and PostGISRaster drivers",
)
- variant("qb3", default=False, when="@3.6:", description="Required for MRF driver")
variant(
"qhull",
default=False,
@@ -261,6 +265,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
depends_on("json-c@0.12.1", when="@:2.2")
# Optional dependencies
+ depends_on("libarchive", when="+archive")
depends_on("armadillo", when="+armadillo")
depends_on("blas", when="+armadillo")
depends_on("lapack", when="+armadillo")
@@ -302,6 +307,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
# depends_on('kakadu', when='+kdu')
depends_on("kealib", when="+kea")
depends_on("lerc", when="+lerc")
+ depends_on("libaec", when="+libaec")
# depends_on('libcsf', when='+libcsf')
depends_on("libkml@1.3:", when="+libkml")
depends_on("xz", when="+liblzma")
@@ -329,8 +335,8 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
depends_on("oracle-instant-client", when="+oracle")
depends_on("parquet-cpp", when="+parquet")
# depends_on('pcidsk', when='+pcidsk')
- depends_on("pcre2", when="@3.5:+pcre")
- depends_on("pcre", when="@:3.4+pcre")
+ depends_on("pcre2", when="@3.5:+pcre2")
+ depends_on("pcre", when="@:3.4+pcre2")
# depends_on('pdfium', when='+pdfium')
depends_on("libpng", when="+png")
# depends_on('podofo', when='+podofo')
@@ -340,7 +346,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
depends_on("poppler@:0.71", when="@:2.4 +poppler")
depends_on("poppler@:21", when="@:3.4.1 +poppler")
depends_on("postgresql", when="+postgresql")
- depends_on("qb3", when="+qb3")
+ depends_on("qb3", when="+libqb3")
depends_on("qhull", when="+qhull")
depends_on("qhull@2015:", when="@3.5:+qhull")
depends_on("qhull@:2020.1", when="@:3.3+qhull")
@@ -489,6 +495,7 @@ def cmake_args(self):
# be necessary.
self.define("ENABLE_DEFLATE64", "zlib-ng" not in self.spec),
# Optional dependencies
+ self.define_from_variant("GDAL_USE_ARCHIVE", "archive"),
self.define_from_variant("GDAL_USE_ARMADILLO", "armadillo"),
self.define_from_variant("GDAL_USE_ARROW", "arrow"),
self.define_from_variant("GDAL_USE_BASISU", "basisu"),
@@ -518,9 +525,11 @@ def cmake_args(self):
self.define_from_variant("GDAL_USE_KDU", "kdu"),
self.define_from_variant("GDAL_USE_KEA", "kea"),
self.define_from_variant("GDAL_USE_LERC", "lerc"),
+ self.define_from_variant("GDAL_USE_LIBAEC", "libaec"),
self.define_from_variant("GDAL_USE_LIBCSF", "libcsf"),
self.define_from_variant("GDAL_USE_LIBKML", "libkml"),
self.define_from_variant("GDAL_USE_LIBLZMA", "liblzma"),
+ self.define_from_variant("GDAL_USE_LIBQB3", "libqb3"),
self.define_from_variant("GDAL_USE_LIBXML2", "libxml2"),
self.define_from_variant("GDAL_USE_LURATECH", "luratech"),
self.define_from_variant("GDAL_USE_LZ4", "lz4"),
@@ -540,13 +549,12 @@ def cmake_args(self):
self.define_from_variant("GDAL_USE_OPENSSL", "openssl"),
self.define_from_variant("GDAL_USE_ORACLE", "oracle"),
self.define_from_variant("GDAL_USE_PARQUET", "parquet"),
- self.define_from_variant("GDAL_USE_PCRE2", "pcre"),
+ self.define_from_variant("GDAL_USE_PCRE2", "pcre2"),
self.define_from_variant("GDAL_USE_PDFIUM", "pdfium"),
self.define_from_variant("GDAL_USE_PNG", "png"),
self.define_from_variant("GDAL_USE_PODOFO", "podofo"),
self.define_from_variant("GDAL_USE_POPPLER", "poppler"),
self.define_from_variant("GDAL_USE_POSTGRESQL", "postgresql"),
- self.define_from_variant("GDAL_USE_LIBQB3", "qb3"),
self.define_from_variant("GDAL_USE_QHULL", "qhull"),
self.define_from_variant("GDAL_USE_RASDAMAN", "rasdaman"),
self.define_from_variant("GDAL_USE_RASTERLITE2", "rasterlite2"),
@@ -668,7 +676,7 @@ def configure_args(self):
self.with_or_without("crypto", variant="openssl", package="openssl"),
self.with_or_without("oci", variant="oracle", package="oracle-instant-client"),
self.with_or_without("pcidsk", package="pcidsk"),
- self.with_or_without("pcre"),
+ self.with_or_without("pcre", variant="pcre2"),
self.with_or_without("pdfium", package="pdfium"),
self.with_or_without("png", package="libpng"),
self.with_or_without("podofo", package="podofo"),
diff --git a/var/spack/repos/builtin/packages/geant4/package-cache.patch b/var/spack/repos/builtin/packages/geant4/package-cache.patch
new file mode 100644
index 00000000000000..835a4c34098d0e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/geant4/package-cache.patch
@@ -0,0 +1,48 @@
+diff --git a/cmake/Modules/G4CMakeUtilities.cmake b/cmake/Modules/G4CMakeUtilities.cmake
+index 16f7b3c8c0..84acfcd5e7 100644
+--- a/cmake/Modules/G4CMakeUtilities.cmake
++++ b/cmake/Modules/G4CMakeUtilities.cmake
+@@ -221,6 +221,21 @@ function(geant4_export_package_variables _file)
+ get_property(__var_value CACHE ${__var} PROPERTY VALUE)
+ get_property(__var_type CACHE ${__var} PROPERTY TYPE)
+ get_property(__var_help CACHE ${__var} PROPERTY HELPSTRING)
++ # Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27)
++ # We still need to account for these because they may be required to be in the CACHE at least set in
++ # earlier versions.
++ # 1. Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27)
++ # We still need to account for these because they may be required to be in the CACHE at least set in
++ # earlier versions.
++ # 2. Depending on CMake version, variable may be in cache but unitialized, here we want the local value
++ if(((NOT __var_value) AND (NOT __var_type) AND (NOT __var_help)) OR (__var_type STREQUAL "UNINITIALIZED"))
++ set(__var_value ${${__var}})
++ # TODO: set type based on whether it looks like a bool or path, but PATH almost invariably what we save
++ # Only important in cmake GUI and if value needs to be changed, which we don't if package cache is used
++ set(__var_type PATH)
++ set(__var_help "no documentation, not a cache value")
++ endif()
++
+ list(APPEND __local_build_setting "geant4_set_and_check_package_variable(${__var} \"${__var_value}\" ${__var_type} \"${__var_help}\")")
+ endforeach()
+
+diff --git a/cmake/Modules/G4OptionalComponents.cmake b/cmake/Modules/G4OptionalComponents.cmake
+index 7b3a1f9836..f503a2994a 100644
+--- a/cmake/Modules/G4OptionalComponents.cmake
++++ b/cmake/Modules/G4OptionalComponents.cmake
+@@ -78,6 +78,8 @@ else()
+ unset(EXPAT_FOUND)
+ unset(EXPAT_INCLUDE_DIR CACHE)
+ unset(EXPAT_LIBRARY CACHE)
++ unset(EXPAT_LIBRARY_RELEASE CACHE)
++ unset(EXPAT_LIBRARY_DEBUG CACHE)
+ message(FATAL_ERROR
+ "Detected system expat header and library:
+ EXPAT_INCLUDE_DIR = ${__badexpat_include_dir}
+@@ -88,7 +90,7 @@ Set the above CMake variables to point to an expat install of the required versi
+
+ # Backward compatibility for sources.cmake using the variable
+ set(EXPAT_LIBRARIES EXPAT::EXPAT)
+- geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY)
++ geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY EXPAT_LIBRARY_RELEASE EXPAT_LIBRARY_DEBUG)
+ else()
+ set(EXPAT_FOUND TRUE)
+ set(GEANT4_USE_BUILTIN_EXPAT TRUE)
\ No newline at end of file
diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py
index 0cdc559559ec17..25757ac2a8a901 100644
--- a/var/spack/repos/builtin/packages/geant4/package.py
+++ b/var/spack/repos/builtin/packages/geant4/package.py
@@ -22,6 +22,7 @@ class Geant4(CMakePackage):
maintainers("drbenmorgan")
+ version("11.1.3", sha256="5d9a05d4ccf8b975649eab1d615fc1b8dce5937e01ab9e795bffd04149240db6")
version("11.1.2", sha256="e9df8ad18c445d9213f028fd9537e174d6badb59d94bab4eeae32f665beb89af")
version("11.1.1", sha256="c5878634da9ba6765ce35a469b2893044f4a6598aa948733da8436cdbfeef7d2")
version("11.1.0", sha256="c4a23f2f502efeab56de43a4412b21f65c7ca1b0877b9bc1d7e845ee12edf70a")
@@ -134,6 +135,9 @@ def std_when(values):
patch("cxx17_geant4_10_0.patch", level=1, when="@10.4.0 cxxstd=17")
patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17")
+ # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556
+ patch("package-cache.patch", level=1, when="@10.7.0:11.1.2^cmake@3.17:")
+
# NVHPC: "thread-local declaration follows non-thread-local declaration"
conflicts("%nvhpc", when="+threads")
diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py
index dc23024cdc2116..8675db20f0b25f 100644
--- a/var/spack/repos/builtin/packages/geos/package.py
+++ b/var/spack/repos/builtin/packages/geos/package.py
@@ -21,20 +21,30 @@ class Geos(CMakePackage):
maintainers("adamjstewart")
+ version("3.12.1", sha256="d6ea7e492224b51193e8244fe3ec17c4d44d0777f3c32ca4fb171140549a0d03")
version("3.12.0", sha256="d96db96011259178a35555a0f6d6e75a739e52a495a6b2aa5efb3d75390fbc39")
+ version("3.11.3", sha256="80d60a2bbc0cde7745a3366b9eb8c0d65a142b03e063ea0a52c364758cd5ee89")
version("3.11.2", sha256="b1f077669481c5a3e62affc49e96eb06f281987a5d36fdab225217e5b825e4cc")
version("3.11.1", sha256="6d0eb3cfa9f92d947731cc75f1750356b3bdfc07ea020553daf6af1c768e0be2")
version("3.11.0", sha256="79ab8cabf4aa8604d161557b52e3e4d84575acdc0d08cb09ab3f7aaefa4d858a")
+ version("3.10.6", sha256="078403158da66cad8be39ad1ede5e2fe4b70dcf7bb292fb06a65bdfe8afa6daf")
+ version("3.10.5", sha256="cc47d95e846e2745c493d8f9f3a9913b1c61f26717a1165898da64352aec4dde")
version("3.10.4", sha256="d6fc11bcfd265cbf2714199174e4c3392d657551e5fd84c74c07c863b29357e3")
version("3.10.3", sha256="3c141b07d61958a758345d5f54e3c735834b2f4303edb9f67fb26914f0d44770")
version("3.10.2", sha256="50bbc599ac386b4c2b3962dcc411f0040a61f204aaef4eba7225ecdd0cf45715")
version("3.10.1", sha256="a8148eec9636814c8ab0f8f5266ce6f9b914ed65b0d083fc43bb0bbb01f83648")
version("3.10.0", sha256="097d70e3c8f688e59633ceb8d38ad5c9b0d7ead5729adeb925dbc489437abe13")
+ version("3.9.5", sha256="c6c9aedfa8864fb44ba78911408442382bfd0690cf2d4091ae3805c863789036")
version("3.9.4", sha256="70dff2530d8cd2dfaeeb91a5014bd17afb1baee8f0e3eb18e44d5b4dbea47b14")
version("3.9.3", sha256="f8b2314e311456f7a449144efb5e3188c2a28774752bc50fc882a3cd5c89ee35")
version("3.9.2", sha256="44a5a9be21d7d473436bf621c2ddcc3cf5a8bbe3c786e13229618a3b9d861297")
version("3.9.1", sha256="7e630507dcac9dc07565d249a26f06a15c9f5b0c52dd29129a0e3d381d7e382a")
+ version("3.9.0", sha256="bd8082cf12f45f27630193c78bdb5a3cba847b81e72b20268356c2a4fc065269")
+ version("3.8.4", sha256="6de8c98c1ae7cb0cd2d726a8dc9b7467308c4b4e05f9df94742244e64e441499")
+ version("3.8.3", sha256="f98315d1ba35c8d1a94a2947235f9e9dfb7057fdec343683f64ff9ad1061255c")
+ version("3.8.2", sha256="5a102f4614b0c9291504bbefd847ebac18ea717843506bd251d015c7cf9726b4")
version("3.8.1", sha256="4258af4308deb9dbb5047379026b4cd9838513627cb943a44e16c40e42ae17f7")
+ version("3.8.0", sha256="99114c3dc95df31757f44d2afde73e61b9f742f0b683fd1894cbbee05dda62d5")
version("3.7.2", sha256="2166e65be6d612317115bfec07827c11b403c3f303e0a7420a2106bc999d7707")
version("3.6.2", sha256="045a13df84d605a866602f6020fc6cbf8bf4c42fb50de237a08926e1d7d7652a")
version("3.6.1", sha256="4a2e4e3a7a09a7cfda3211d0f4a235d9fd3176ddf64bd8db14b4ead266189fc5")
diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py
index 7d010555889855..8e564020caac23 100644
--- a/var/spack/repos/builtin/packages/gettext/package.py
+++ b/var/spack/repos/builtin/packages/gettext/package.py
@@ -19,6 +19,7 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage):
executables = [r"^gettext$"]
+ version("0.22.4", sha256="29217f1816ee2e777fa9a01f9956a14139c0c23cc1b20368f06b2888e8a34116")
version("0.22.3", sha256="b838228b3f8823a6c1eddf07297197c4db13f7e1b173b9ef93f3f945a63080b6")
version("0.21.1", sha256="50dbc8f39797950aa2c98e939947c527e5ac9ebd2c1b99dd7b06ba33a6767ae6")
version("0.21", sha256="d20fcbb537e02dcf1383197ba05bd0734ef7bf5db06bdb241eb69b7d16b73192")
diff --git a/var/spack/repos/builtin/packages/ginkgo/package.py b/var/spack/repos/builtin/packages/ginkgo/package.py
index 7bdfdb390370d5..37d974f9b70eb0 100644
--- a/var/spack/repos/builtin/packages/ginkgo/package.py
+++ b/var/spack/repos/builtin/packages/ginkgo/package.py
@@ -24,7 +24,8 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
version("develop", branch="develop")
version("master", branch="master")
- version("1.6.0", commit="1f1ed46e724334626f016f105213c047e16bc1ae", preferred=True) # v1.6.0
+ version("1.7.0", commit="49242ff89af1e695d7794f6d50ed9933024b66fe") # v1.7.0
+ version("1.6.0", commit="1f1ed46e724334626f016f105213c047e16bc1ae") # v1.6.0
version("1.5.0", commit="234594c92b58e2384dfb43c2d08e7f43e2b58e7a") # v1.5.0
version("1.5.0.glu_experimental", branch="glu_experimental")
version("1.4.0", commit="f811917c1def4d0fcd8db3fe5c948ce13409e28e") # v1.4.0
@@ -37,13 +38,18 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
variant("shared", default=True, description="Build shared libraries")
variant("full_optimizations", default=False, description="Compile with all optimizations")
variant("openmp", default=sys.platform != "darwin", description="Build with OpenMP")
- variant("oneapi", default=False, description="Build with oneAPI support")
+ variant("sycl", default=False, description="Enable SYCL backend")
variant("develtools", default=False, description="Compile with develtools enabled")
variant("hwloc", default=False, description="Enable HWLOC support")
variant("mpi", default=False, description="Enable MPI support")
- depends_on("cmake@3.9:", type="build")
- depends_on("cuda@9:", when="+cuda")
+ depends_on("cmake@3.9:", type="build", when="@:1.3.0")
+ depends_on("cmake@3.13:", type="build", when="@1.4.0:1.6.0")
+ depends_on("cmake@3.16:", type="build", when="@1.7.0:")
+ depends_on("cmake@3.18:", type="build", when="+cuda@1.7.0:")
+ depends_on("cuda@9:", when="+cuda @:1.4.0")
+ depends_on("cuda@9.2:", when="+cuda @1.5.0:")
+ depends_on("cuda@10.1:", when="+cuda @1.7.0:")
depends_on("mpi", when="+mpi")
depends_on("rocthrust", when="+rocm")
@@ -60,14 +66,13 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
depends_on("googletest", type="test")
depends_on("numactl", type="test", when="+hwloc")
- depends_on("intel-oneapi-mkl", when="+oneapi")
- depends_on("intel-oneapi-dpl", when="+oneapi")
+ depends_on("intel-oneapi-mkl", when="+sycl")
+ depends_on("intel-oneapi-dpl", when="+sycl")
+ depends_on("intel-oneapi-tbb", when="+sycl")
conflicts("%gcc@:5.2.9")
conflicts("+rocm", when="@:1.1.1")
conflicts("+mpi", when="@:1.4.0")
- conflicts("+cuda", when="+rocm")
- conflicts("+openmp", when="+oneapi")
# ROCm 4.1.0 breaks platform settings which breaks Ginkgo's HIP support.
conflicts("^hip@4.1.0:", when="@:1.3.0")
@@ -76,22 +81,35 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
conflicts("^rocthrust@4.1.0:", when="@:1.3.0")
conflicts("^rocprim@4.1.0:", when="@:1.3.0")
+ # Ginkgo 1.6.0 started relying on ROCm 4.5.0
+ conflicts("^hip@:4.3.1", when="@1.6.0:")
+ conflicts("^hipblas@:4.3.1", when="@1.6.0:")
+ conflicts("^hipsparse@:4.3.1", when="@1.6.0:")
+ conflicts("^rocthrust@:4.3.1", when="@1.6.0:")
+ conflicts("^rocprim@:4.3.1", when="@1.6.0:")
+
+ conflicts(
+ "+sycl", when="@:1.4.0", msg="For SYCL support, please use Ginkgo version 1.4.0 and newer."
+ )
+
# Skip smoke tests if compatible hardware isn't found
patch("1.4.0_skip_invalid_smoke_tests.patch", when="@1.4.0")
- # Newer DPC++ compilers use the updated SYCL 2020 standard which change
- # kernel attribute propagation rules. This doesn't work well with the
- # initial Ginkgo oneAPI support.
- patch("1.4.0_dpcpp_use_old_standard.patch", when="+oneapi @1.4.0")
-
# Add missing include statement
patch("thrust-count-header.patch", when="+rocm @1.5.0")
def setup_build_environment(self, env):
spec = self.spec
- if "+oneapi" in spec:
+ if "+sycl" in spec:
env.set("MKLROOT", join_path(spec["intel-oneapi-mkl"].prefix, "mkl", "latest"))
env.set("DPL_ROOT", join_path(spec["intel-oneapi-dpl"].prefix, "dpl", "latest"))
+ # The `IntelSYCLConfig.cmake` is broken with spack. By default, it
+ # relies on the CMAKE_CXX_COMPILER being the real ipcx/dpcpp
+ # compiler. If not, the variable SYCL_COMPILER of that script is
+ # broken, and all the SYCL detection mechanism is wrong. We fix it
+ # by giving hint environment variables.
+ env.set("SYCL_LIBRARY_DIR_HINT", os.path.dirname(os.path.dirname(self.compiler.cxx)))
+ env.set("SYCL_INCLUDE_DIR_HINT", os.path.dirname(os.path.dirname(self.compiler.cxx)))
def cmake_args(self):
# Check that the have the correct C++ standard is available
@@ -106,18 +124,19 @@ def cmake_args(self):
except UnsupportedCompilerFlag:
raise InstallError("Ginkgo requires a C++14-compliant C++ compiler")
- cxx_is_dpcpp = os.path.basename(self.compiler.cxx) == "dpcpp"
- if self.spec.satisfies("+oneapi") and not cxx_is_dpcpp:
- raise InstallError(
- "Ginkgo's oneAPI backend requires the" + "DPC++ compiler as main CXX compiler."
- )
+ if self.spec.satisfies("@1.4.0:1.6.0 +sycl") and not self.spec.satisfies(
+ "%oneapi@2021.3.0:"
+ ):
+ raise InstallError("ginkgo +sycl requires %oneapi@2021.3.0:")
+ elif self.spec.satisfies("@1.7.0: +sycl") and not self.spec.satisfies("%oneapi@2022.1.0:"):
+ raise InstallError("ginkgo +sycl requires %oneapi@2022.1.0:")
spec = self.spec
from_variant = self.define_from_variant
args = [
from_variant("GINKGO_BUILD_CUDA", "cuda"),
from_variant("GINKGO_BUILD_HIP", "rocm"),
- from_variant("GINKGO_BUILD_DPCPP", "oneapi"),
+ from_variant("GINKGO_BUILD_SYCL", "sycl"),
from_variant("GINKGO_BUILD_OMP", "openmp"),
from_variant("GINKGO_BUILD_MPI", "mpi"),
from_variant("BUILD_SHARED_LIBS", "shared"),
@@ -161,6 +180,11 @@ def cmake_args(self):
args.append(
self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.lib.cmake.hip)
)
+
+ if "+sycl" in self.spec:
+ sycl_compatible_compilers = ["dpcpp", "icpx"]
+ if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers):
+ raise InstallError("ginkgo +sycl requires DPC++ (dpcpp) or icpx compiler.")
return args
@property
diff --git a/var/spack/repos/builtin/packages/glab/package.py b/var/spack/repos/builtin/packages/glab/package.py
index 1f5d5fc20d6bad..a363d6acf5a2b9 100644
--- a/var/spack/repos/builtin/packages/glab/package.py
+++ b/var/spack/repos/builtin/packages/glab/package.py
@@ -14,6 +14,7 @@ class Glab(Package):
maintainers("alecbcs")
+ version("1.35.0", sha256="7ed31c7a9b425fc15922f83c5dd8634a2758262a4f25f92583378655fcad6303")
version("1.33.0", sha256="447a9b76acb5377642a4975908f610a3082026c176329c7c8cfed1461d2e1570")
version("1.31.0", sha256="5648e88e7d6cc993227f5a4e80238af189bed09c7aed1eb12be7408e9a042747")
version("1.30.0", sha256="d3c1a9ba723d94a0be10fc343717cf7b61732644f5c42922f1c8d81047164b99")
diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py
index 3fb58552ca43a2..8e318369402672 100644
--- a/var/spack/repos/builtin/packages/glib/package.py
+++ b/var/spack/repos/builtin/packages/glib/package.py
@@ -178,6 +178,13 @@ class Glib(MesonPackage, AutotoolsPackage):
patch("meson-gettext-2.66.patch", when="@2.66:2.68,2.72")
patch("meson-gettext-2.70.patch", when="@2.70")
+ # Don't use PTRACE_O_EXITKILL if it's not defined
+ patch(
+ "https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.diff",
+ sha256="2c25d7b3bf581b3ec992d7af997fa6c769174d49b9350e0320c33f5e048cba99",
+ when="@2.78.0",
+ )
+
def url_for_version(self, version):
"""Handle glib's version-based custom URLs."""
url = "https://download.gnome.org/sources/glib"
diff --git a/var/spack/repos/builtin/packages/gloo/package.py b/var/spack/repos/builtin/packages/gloo/package.py
index 4ca7d55f43a168..eadcdfd7d6bf2d 100644
--- a/var/spack/repos/builtin/packages/gloo/package.py
+++ b/var/spack/repos/builtin/packages/gloo/package.py
@@ -13,7 +13,10 @@ class Gloo(CMakePackage, CudaPackage):
git = "https://github.com/facebookincubator/gloo.git"
version("master", branch="master")
- version("2021-05-21", commit="c22a5cfba94edf8ea4f53a174d38aa0c629d070f") # py-torch@1.10:
+ version("2023-05-19", commit="597accfd79f5b0f9d57b228dec088ca996686475") # py-torch@2.1:
+ version("2023-01-17", commit="10909297fedab0a680799211a299203e53515032") # py-torch@2.0
+ version("2022-05-18", commit="5b143513263133af2b95547e97c07cebeb72bf72") # py-torch@1.13
+ version("2021-05-21", commit="c22a5cfba94edf8ea4f53a174d38aa0c629d070f") # py-torch@1.10:1.12
version("2021-05-04", commit="6f7095f6e9860ce4fd682a7894042e6eba0996f1") # py-torch@1.9
version("2020-09-18", commit="3dc0328fe6a9d47bd47c0c6ca145a0d8a21845c6") # py-torch@1.7:1.8
version("2020-03-17", commit="113bde13035594cafdca247be953610b53026553") # py-torch@1.5:1.6
@@ -32,6 +35,13 @@ class Gloo(CMakePackage, CudaPackage):
sha256="8e6e9a44e0533ba4303a95a651b1934e5d73632cab08cc7d5a9435e1e64aa424",
when="@:2023-01-16",
)
+ # Fix building with gcc 12, see https://github.com/facebookincubator/gloo/pull/333
+ patch(
+ "https://github.com/facebookincubator/gloo/commit/4a5e339b764261d20fc409071dc7a8b8989aa195.patch?full_index=1",
+ sha256="dc8b3a9bea4693f32d6850ea2ce6ce75e1778538bfba464b50efca92bac425e3",
+ when="@2021-05-21:2022-05-18",
+ )
+
generator("ninja")
depends_on("cmake@2.8.12:", type="build")
diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py
index 0cfbccb80e6739..29469758c5a2a7 100644
--- a/var/spack/repos/builtin/packages/gmake/package.py
+++ b/var/spack/repos/builtin/packages/gmake/package.py
@@ -67,6 +67,8 @@ def configure_args(self):
return [
"--with-guile" if self.spec.satisfies("+guile") else "--without-guile",
"--disable-nls",
+ # configure needs make to enable dependency tracking, disable explicitly
+ "--disable-dependency-tracking",
]
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/go-bootstrap/package.py b/var/spack/repos/builtin/packages/go-bootstrap/package.py
index 047f9f3353b937..4f5c8f00732439 100644
--- a/var/spack/repos/builtin/packages/go-bootstrap/package.py
+++ b/var/spack/repos/builtin/packages/go-bootstrap/package.py
@@ -59,7 +59,7 @@ class GoBootstrap(Package):
# determine system os and architecture/target
os = platform.system().lower()
- target = go_targets[platform.machine().lower()]
+ target = go_targets.get(platform.machine().lower(), platform.machine().lower())
# construct releases for current system configuration
for release in go_releases:
diff --git a/var/spack/repos/builtin/packages/gobject-introspection/package.py b/var/spack/repos/builtin/packages/gobject-introspection/package.py
index 7a64fe9cce08fd..d8ea6d81ddbb6d 100644
--- a/var/spack/repos/builtin/packages/gobject-introspection/package.py
+++ b/var/spack/repos/builtin/packages/gobject-introspection/package.py
@@ -74,8 +74,8 @@ class GobjectIntrospection(MesonPackage, AutotoolsPackage):
# https://gitlab.gnome.org/GNOME/gobject-introspection/-/issues/325
patch(
"https://gitlab.gnome.org/GNOME/gobject-introspection/-/commit/"
- "1f9284228092b2a7200e8a78bc0ea6702231c6db.patch",
- sha256="7700828b638c85255c87fcc317ea7e9572ff443f65c86648796528885e5b4cea",
+ "1f9284228092b2a7200e8a78bc0ea6702231c6db.diff",
+ sha256="dcb9e7c956dff49c3a73535829382e8662fa6bd13bdfb416e8eac47b2604fa0a",
when="@:1.63.1",
)
diff --git a/var/spack/repos/builtin/packages/googletest/package.py b/var/spack/repos/builtin/packages/googletest/package.py
index b97296ddff9d7b..8771020dd3de4e 100644
--- a/var/spack/repos/builtin/packages/googletest/package.py
+++ b/var/spack/repos/builtin/packages/googletest/package.py
@@ -16,14 +16,16 @@ class Googletest(CMakePackage):
maintainers("sethrj")
version("main", branch="main")
+ version("1.14.0", sha256="8ad598c73ad796e0d8280b082cebd82a630d73e73cd3c70057938a6501bba5d7")
+ version("1.13.0", sha256="ad7fdba11ea011c1d925b3289cf4af2c66a352e18d4c7264392fead75e919363")
version("1.12.1", sha256="81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2")
version("1.12.0", sha256="2a4f11dce6188b256f3650061525d0fe352069e5c162452818efbbf8d0b5fe1c")
- version("1.11.0", sha256="07b0896360f8e14414a8419e35515da0be085c5b4547c914ab8f4684ef0a3a8e")
- version("1.10.0", sha256="e4a7cd97c903818abe7ddb129db9c41cc9fd9e2ded654be57ced26d45c72e4c9")
- version("1.8.1", sha256="8e40a005e098b1ba917d64104549e3da274e31261dedc57d6250fe91391b2e84")
- version("1.8.0", sha256="d8c33605d23d303b08a912eaee7f84c4e091d6e3d90e9a8ec8aaf7450dfe2568")
- version("1.7.0", sha256="9639cf8b7f37a4d0c6575f52c01ef167c5f11faee65252296b3ffc2d9acd421b")
- version("1.6.0", sha256="a61e20c65819eb39a2da85c88622bac703b865ca7fe2bfdcd3da734d87d5521a")
+ version("1.11.0", sha256="b4870bf121ff7795ba20d20bcdd8627b8e088f2d1dab299a031c1034eddc93d5")
+ version("1.10.0", sha256="9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb")
+ version("1.8.1", sha256="9bf1fe5182a604b4135edc1a425ae356c9ad15e9b23f9f12a02e80184c3a249c")
+ version("1.8.0", sha256="58a6f4277ca2bc8565222b3bbd58a177609e9c488e8a72649359ba51450db7d8")
+ version("1.7.0", sha256="f73a6546fdf9fce9ff93a5015e0333a8af3062a152a9ad6bcb772c96687016cc")
+ version("1.6.0", sha256="5fbc058e5b662b9c86d93ac76fefb58eec89cbf26144b49669a38ecb62758447")
variant("gmock", default=True, when="@1.8:", description="Build with gmock")
variant("pthreads", default=True, description="Build multithreaded version with pthreads")
@@ -76,3 +78,20 @@ def darwin_fix(self):
# The shared library is not installed correctly on Darwin; fix this
if self.spec.satisfies("platform=darwin"):
fix_darwin_install_name(self.prefix.lib)
+
+ def url_for_version(self, version):
+ """googletest has changed how they publish releases on github. Up to,
+ and including, version 1.12.1 they were tagged as `release-<version>`.
+ Afterwards things switched to the format `v<version>`. Additionally,
+ newer versions are available from `archive/refs/tags/<tagname>.tar.gz`,
+ while versions up to, and including, 1.8.0 are available only from
+ `archive/release-<version>.tar.gz`
+ """
+ if version.satisfies("@:1.8.0"):
+ return f"{self.git}/archive/release-{version}.tar.gz"
+
+ tagname = f"release-{version}"
+ if version.satisfies("@1.13:"):
+ tagname = f"v{version}"
+
+ return f"{self.git}/archive/refs/tags/{tagname}.tar.gz"
diff --git a/var/spack/repos/builtin/packages/gotcha/package.py b/var/spack/repos/builtin/packages/gotcha/package.py
index 0efc4d986914e8..82bc308fa8f353 100644
--- a/var/spack/repos/builtin/packages/gotcha/package.py
+++ b/var/spack/repos/builtin/packages/gotcha/package.py
@@ -17,6 +17,7 @@ class Gotcha(CMakePackage):
version("develop", branch="develop")
version("master", branch="master")
+ version("1.0.5", tag="1.0.5", commit="e28f10c45a0cda0e1ec225eaea6abfe72c8353aa")
version("1.0.4", tag="1.0.4", commit="46f2aaedc885f140a3f31a17b9b9a9d171f3d6f0")
version("1.0.3", tag="1.0.3", commit="1aafd1e30d46ce4e6555c8a4ea5f5edf6a5eade5")
version("1.0.2", tag="1.0.2", commit="bed1b7c716ebb0604b3e063121649b5611640f25")
diff --git a/var/spack/repos/builtin/packages/gperftools/package.py b/var/spack/repos/builtin/packages/gperftools/package.py
index 6dd96c36678682..38ff5a25d4b535 100644
--- a/var/spack/repos/builtin/packages/gperftools/package.py
+++ b/var/spack/repos/builtin/packages/gperftools/package.py
@@ -17,6 +17,8 @@ class Gperftools(AutotoolsPackage):
url = "https://github.com/gperftools/gperftools/releases/download/gperftools-2.7/gperftools-2.7.tar.gz"
maintainers("albestro", "eschnett", "msimberg", "teonnik")
+ license("BSD-3-Clause")
+
version("2.13", sha256="4882c5ece69f8691e51ffd6486df7d79dbf43b0c909d84d3c0883e30d27323e7")
version("2.12", sha256="fb611b56871a3d9c92ab0cc41f9c807e8dfa81a54a4a9de7f30e838756b5c7c6")
version("2.11", sha256="8ffda10e7c500fea23df182d7adddbf378a203c681515ad913c28a64b87e24dc")
diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py
index e280234a0e45fa..4481a551b5fb89 100644
--- a/var/spack/repos/builtin/packages/gromacs/package.py
+++ b/var/spack/repos/builtin/packages/gromacs/package.py
@@ -90,9 +90,26 @@ class Gromacs(CMakePackage, CudaPackage):
default=False,
description="Produces a double precision version of the executables",
)
- variant("cufftmp", default=False, when="+cuda+mpi", description="Enable Multi GPU FFT support")
+ variant(
+ "cufftmp",
+ default=False,
+ when="@2022: +cuda+mpi",
+ description="Enable multi-GPU FFT support with cuFFTMp",
+ )
+ variant(
+ "heffte",
+ default=False,
+ when="@2021: +sycl+mpi",
+ description="Enable multi-GPU FFT support with HeFFTe",
+ )
variant("opencl", default=False, description="Enable OpenCL support")
- variant("sycl", default=False, description="Enable SYCL support")
+ variant("sycl", default=False, when="@2021:", description="Enable SYCL support")
+ variant(
+ "intel-data-center-gpu-max",
+ default=False,
+ when="@2022: +sycl",
+ description="Enable support for Intel Data Center GPU Max",
+ )
variant("nosuffix", default=False, description="Disable default suffixes")
variant(
"build_type",
@@ -108,6 +125,18 @@ class Gromacs(CMakePackage, CudaPackage):
"Profile",
),
)
+ variant(
+ "nblib",
+ default=True,
+ when="@2021:",
+ description="Build and install the NB-LIB C++ API for GROMACS",
+ )
+ variant(
+ "gmxapi",
+ default=True,
+ when="@2019:",
+ description="Build and install the gmxlib python API for GROMACS",
+ )
variant(
"mdrun_only",
default=False,
@@ -142,8 +171,6 @@ class Gromacs(CMakePackage, CudaPackage):
msg="GMX_RELAXED_DOUBLE_PRECISION option removed for GROMACS 2021.",
)
variant("hwloc", default=True, description="Use the hwloc portable hardware locality library")
- variant("lapack", default=False, description="Enables an external LAPACK library")
- variant("blas", default=False, description="Enables an external BLAS library")
variant("cycle_subcounters", default=False, description="Enables cycle subcounters")
variant("cp2k", default=False, description="CP2K QM/MM interface integration")
@@ -151,16 +178,6 @@ class Gromacs(CMakePackage, CudaPackage):
"+cp2k", when="@:2021", msg="CP2K QM/MM support have been introduced in GROMACS 2022"
)
conflicts("+shared", when="+cp2k", msg="Enabling CP2K requires static build")
- conflicts(
- "~lapack",
- when="+cp2k",
- msg="GROMACS and CP2K should use the same lapack, please disable bundled lapack",
- )
- conflicts(
- "~blas",
- when="+cp2k",
- msg="GROMACS and CP2K should use the same blas, please disable bundled blas",
- )
conflicts("%intel", when="@2022:", msg="GROMACS %intel support was removed in version 2022")
conflicts("%gcc@:8", when="@2023:", msg="GROMACS requires GCC 9 or later since version 2023")
conflicts(
@@ -255,8 +272,8 @@ class Gromacs(CMakePackage, CudaPackage):
depends_on("cmake@3.16.0:3", type="build", when="%fj")
depends_on("cuda", when="+cuda")
depends_on("sycl", when="+sycl")
- depends_on("lapack", when="+lapack")
- depends_on("blas", when="+blas")
+ depends_on("lapack")
+ depends_on("blas")
depends_on("gcc", when="%oneapi ~intel_provided_gcc")
depends_on("gcc", when="%intel ~intel_provided_gcc")
@@ -266,6 +283,7 @@ class Gromacs(CMakePackage, CudaPackage):
depends_on("cp2k@8.1:", when="+cp2k")
depends_on("nvhpc", when="+cufftmp")
+ depends_on("heffte", when="+heffte")
requires(
"%intel",
@@ -275,6 +293,11 @@ class Gromacs(CMakePackage, CudaPackage):
msg="Only attempt to find gcc libs for Intel compiler if Intel compiler is used.",
)
+ # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa
+ for _intel_pkg in INTEL_MATH_LIBRARIES:
+ requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}")
+ requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}")
+
patch("gmxDetectCpu-cmake-3.14.patch", when="@2018:2019.3^cmake@3.14.0:")
patch("gmxDetectSimd-cmake-3.14.patch", when="@5.0:2017^cmake@3.14.0:")
# 2021.2 will always try to build tests (see https://gromacs.bioexcel.eu/t/compilation-failure-for-gromacs-2021-1-and-2021-2-with-cmake-3-20-2/2129)
@@ -504,21 +527,13 @@ def cmake_args(self):
if "+cuda" in self.spec:
options.append("-DCUDA_TOOLKIT_ROOT_DIR:STRING=" + self.spec["cuda"].prefix)
- if "+lapack" in self.spec:
- options.append("-DGMX_EXTERNAL_LAPACK:BOOL=ON")
- if self.spec["lapack"].libs:
- options.append(
- "-DGMX_LAPACK_USER={0}".format(self.spec["lapack"].libs.joined(";"))
- )
- else:
- options.append("-DGMX_EXTERNAL_LAPACK:BOOL=OFF")
+ options.append("-DGMX_EXTERNAL_LAPACK:BOOL=ON")
+ if self.spec["lapack"].libs:
+ options.append("-DGMX_LAPACK_USER={0}".format(self.spec["lapack"].libs.joined(";")))
- if "+blas" in self.spec:
- options.append("-DGMX_EXTERNAL_BLAS:BOOL=ON")
- if self.spec["blas"].libs:
- options.append("-DGMX_BLAS_USER={0}".format(self.spec["blas"].libs.joined(";")))
- else:
- options.append("-DGMX_EXTERNAL_BLAS:BOOL=OFF")
+ options.append("-DGMX_EXTERNAL_BLAS:BOOL=ON")
+ if self.spec["blas"].libs:
+ options.append("-DGMX_BLAS_USER={0}".format(self.spec["blas"].libs.joined(";")))
if "+cp2k" in self.spec:
options.append("-DGMX_CP2K:BOOL=ON")
@@ -531,6 +546,19 @@ def cmake_args(self):
+ f'/{self.spec["nvhpc"].version}/math_libs'
)
+ if "+heffte" in self.spec:
+ options.append("-DGMX_USE_HEFFTE=on")
+ options.append(f'-DHeffte_ROOT={self.spec["heffte"].prefix}')
+
+ if "+intel-data-center-gpu-max" in self.spec:
+ options.append("-DGMX_GPU_NB_CLUSTER_SIZE=8")
+ options.append("-DGMX_GPU_NB_NUM_CLUSTER_PER_CELL_X=1")
+
+ if "~nblib" in self.spec:
+ options.append("-DGMX_INSTALL_NBLIB_API=OFF")
+ if "~gmxapi" in self.spec:
+ options.append("-DGMXAPI=OFF")
+
# Activate SIMD based on properties of the target
target = self.spec.target
if target >= "zen4":
@@ -614,11 +642,11 @@ def cmake_args(self):
"-DGMX_OPENMP_MAX_THREADS=%s" % self.spec.variants["openmp_max_threads"].value
)
- if "^mkl" in self.spec:
+ if self.spec["lapack"].name in INTEL_MATH_LIBRARIES:
# fftw-api@3 is provided by intel-mkl or intel-parllel-studio
# we use the mkl interface of gromacs
options.append("-DGMX_FFT_LIBRARY=mkl")
- if not self.spec["mkl"].satisfies("@2023:"):
+ if self.spec.satisfies("@:2022"):
options.append(
"-DMKL_INCLUDE_DIR={0}".format(self.spec["mkl"].headers.directories[0])
)
diff --git a/var/spack/repos/builtin/packages/grpc/package.py b/var/spack/repos/builtin/packages/grpc/package.py
index 67f5484c427576..7e8487af806aef 100644
--- a/var/spack/repos/builtin/packages/grpc/package.py
+++ b/var/spack/repos/builtin/packages/grpc/package.py
@@ -60,7 +60,7 @@ class Grpc(CMakePackage):
depends_on("zlib-api")
depends_on("c-ares")
depends_on("abseil-cpp", when="@1.27:")
- depends_on("re2+pic", when="@1.33.1:")
+ depends_on("re2+pic@2023-09-01", when="@1.33.1:")
def cmake_args(self):
args = [
diff --git a/var/spack/repos/builtin/packages/gzip/package.py b/var/spack/repos/builtin/packages/gzip/package.py
index 76a06818251e37..6645969dd0e4fd 100644
--- a/var/spack/repos/builtin/packages/gzip/package.py
+++ b/var/spack/repos/builtin/packages/gzip/package.py
@@ -12,9 +12,22 @@ class Gzip(AutotoolsPackage):
homepage = "https://www.gnu.org/software/gzip/"
url = "https://ftp.gnu.org/gnu/gzip/gzip-1.10.tar.gz"
- version("1.12", sha256="5b4fb14d38314e09f2fc8a1c510e7cd540a3ea0e3eb9b0420046b82c3bf41085")
- version("1.11", sha256="3e8a0e0c45bad3009341dce17d71536c4c655d9313039021ce7554a26cd50ed9")
- version("1.10", sha256="c91f74430bf7bc20402e1f657d0b252cb80aa66ba333a25704512af346633c68")
+ version("1.13", sha256="20fc818aeebae87cdbf209d35141ad9d3cf312b35a5e6be61bfcfbf9eddd212a")
+ version(
+ "1.12",
+ sha256="5b4fb14d38314e09f2fc8a1c510e7cd540a3ea0e3eb9b0420046b82c3bf41085",
+ deprecated=True,
+ )
+ version(
+ "1.11",
+ sha256="3e8a0e0c45bad3009341dce17d71536c4c655d9313039021ce7554a26cd50ed9",
+ deprecated=True,
+ )
+ version(
+ "1.10",
+ sha256="c91f74430bf7bc20402e1f657d0b252cb80aa66ba333a25704512af346633c68",
+ deprecated=True,
+ )
# Gzip makes a recursive symlink if built in-source
build_directory = "spack-build"
diff --git a/var/spack/repos/builtin/packages/hdf5-vol-async/package.py b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py
index 017093911bc8d4..ab34a7f0b1a148 100644
--- a/var/spack/repos/builtin/packages/hdf5-vol-async/package.py
+++ b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py
@@ -35,9 +35,8 @@ class Hdf5VolAsync(CMakePackage):
depends_on("hdf5@1.14.0: +mpi +threadsafe")
# Require MPI_THREAD_MULTIPLE.
- depends_on("openmpi +thread_multiple", when="^openmpi@:2")
- depends_on("openmpi", when="^openmpi@3:")
- depends_on("mvapich2 threads=multiple", when="^mvapich2")
+ depends_on("openmpi +thread_multiple", when="^[virtuals=mpi] openmpi@:2")
+ depends_on("mvapich2 threads=multiple", when="^[virtuals=mpi] mvapich2")
def setup_run_environment(self, env):
env.prepend_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py
index 59756e87dcd8a8..dd2f8b904bb036 100644
--- a/var/spack/repos/builtin/packages/hdf5/package.py
+++ b/var/spack/repos/builtin/packages/hdf5/package.py
@@ -20,7 +20,7 @@ class Hdf5(CMakePackage):
"""
homepage = "https://portal.hdfgroup.org"
- url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.8/src/hdf5-1.10.8.tar.gz"
+ url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.3/src/hdf5-1.14.3.tar.gz"
list_url = "https://support.hdfgroup.org/ftp/HDF5/releases"
list_depth = 3
git = "https://github.com/HDFGroup/hdf5.git"
@@ -41,6 +41,11 @@ class Hdf5(CMakePackage):
# Odd versions are considered experimental releases
# Even versions are maintenance versions
+ version(
+ "1.14.3",
+ sha256="09cdb287aa7a89148c1638dd20891fdbae08102cf433ef128fd345338aa237c7",
+ preferred=True,
+ )
version(
"1.14.2",
sha256="1c342e634008284a8c2794c8e7608e2eaf26d01d445fb3dfd7f33cb2fb51ac53",
@@ -71,6 +76,11 @@ class Hdf5(CMakePackage):
sha256="a62dcb276658cb78e6795dd29bf926ed7a9bc4edf6e77025cd2c689a8f97c17a",
preferred=True,
)
+ version(
+ "1.10.11",
+ sha256="341684c5c0976b8c7e6951735a400275a90693604464cac73e9f323c696fc79c",
+ preferred=True,
+ )
version(
"1.10.10",
sha256="a6877ab7bd5d769d2d68618fdb54beb50263dcc2a8c157fe7e2186925cdb02db",
@@ -205,6 +215,7 @@ class Hdf5(CMakePackage):
description="C++ standard",
)
variant("map", when="@1.14:", default=False, description="Enable MAP API support")
+ variant("subfiling", when="@1.14:", default=False, description="Enable Subfiling VFD support")
variant("fortran", default=False, description="Enable Fortran support")
variant("java", when="@1.10:", default=False, description="Enable Java support")
variant("threadsafe", default=False, description="Enable thread-safe capabilities")
@@ -327,7 +338,7 @@ class Hdf5(CMakePackage):
patch("fortran-kinds.patch", when="@1.10.7")
- # This patch may only be needed with GCC11.2 on macOS, but it's valid for
+ # This patch may only be needed with GCC 11.2 on macOS, but it's valid for
# any of the head HDF5 versions as of 12/2021. Since it's impossible to
# tell what Fortran version is part of a mixed apple-clang toolchain on
# macOS (which is the norm), and this might be an issue for other compilers
@@ -610,6 +621,7 @@ def cmake_args(self):
"CMAKE_CXX_FLAGS", "-std=c++{0}".format(self.spec.variants["cxxstd"].value)
),
self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
+ self.define_from_variant("HDF5_ENABLE_SUBFILING_VFD", "subfiling"),
self.define_from_variant("HDF5_ENABLE_MAP_API", "map"),
self.define("HDF5_ENABLE_Z_LIB_SUPPORT", True),
self.define_from_variant("HDF5_ENABLE_SZIP_SUPPORT", "szip"),
@@ -670,7 +682,7 @@ def ensure_parallel_compiler_wrappers(self):
# 1.10.6 and 1.12.0. The current develop versions do not produce 'h5pfc'
# at all. Here, we make sure that 'h5pfc' is available when Fortran and
# MPI support are enabled (only for versions that generate 'h5fc').
- if self.spec.satisfies("@1.8.22:1.8," "1.10.6:1.10," "1.12.0:1.12" "+fortran+mpi"):
+ if self.spec.satisfies("@1.8.22:1.8," "1.10.6:1.10.9," "1.12.0:1.12" "+fortran+mpi"):
with working_dir(self.prefix.bin):
# No try/except here, fix the condition above instead:
symlink("h5fc", "h5pfc")
@@ -714,6 +726,17 @@ def fix_package_config(self):
if not os.path.exists(tgt_filename):
symlink(src_filename, tgt_filename)
+ @run_after("install")
+ def link_debug_libs(self):
+ # When build_type is Debug, the hdf5 build appends _debug to all library names.
+ # Dependents of hdf5 (netcdf-c etc.) can't handle those, thus make symlinks.
+ if "build_type=Debug" in self.spec:
+ libs = find(self.prefix.lib, "libhdf5*_debug.*", recursive=False)
+ with working_dir(self.prefix.lib):
+ for lib in libs:
+ libname = os.path.split(lib)[1]
+ os.symlink(libname, libname.replace("_debug", ""))
+
@property
@llnl.util.lang.memoized
def _output_version(self):
diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py
index 32d7e18cc6f848..dd957bb26820c0 100644
--- a/var/spack/repos/builtin/packages/heffte/package.py
+++ b/var/spack/repos/builtin/packages/heffte/package.py
@@ -10,7 +10,7 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage):
"""Highly Efficient FFT for Exascale"""
homepage = "https://github.com/icl-utk-edu/heffte/"
- url = "https://github.com/icl-utk-edu/heffte/archive/refs/tags/v2.3.0.tar.gz"
+ url = "https://github.com/icl-utk-edu/heffte/archive/refs/tags/v2.4.0.tar.gz"
git = "https://github.com/icl-utk-edu/heffte/"
maintainers("mkstoyanov", "G-Ragghianti")
@@ -19,6 +19,7 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage):
test_requires_compiler = True
version("develop", branch="master")
+ version("2.4.0", sha256="02310fb4f9688df02f7181667e61c3adb7e38baf79611d80919d47452ff7881d")
version("2.3.0", sha256="63db8c9a8822211d23e29f7adf5aa88bb462c91d7a18c296c3ef3a06be8d6171")
version("2.2.0", sha256="332346d5c1d1032288d09839134c79e4a9704e213a2d53051e96c3c414c74df0")
version("2.1.0", sha256="63b8ea45a220afc4fa0b14769c0dd291e614a2fe9d5a91c50d28f16ee29b3f1c")
@@ -27,32 +28,22 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage):
sha256="b575fafe19a635265904ca302d48e778341b1567c055ea7f2939c8c6718f7212",
deprecated=True,
)
- version(
- "1.0",
- sha256="00e66cdff664ba90eeb26b4824f2a7341ba791b1d7220ece8180aba7623d36d5",
- deprecated=True,
- )
- version(
- "0.2",
- sha256="6e606aa9de91912925ec49f463de4369459e509e0e21a97ca72dfa07651056e5",
- deprecated=True,
- )
- version(
- "0.1",
- sha256="bcdc940c4cb254b178446d16c969b85ea6b5c69fdf4b6332bb3c8fbce00bccdf",
- deprecated=True,
- )
- patch("threads10.patch", when="@1.0")
- patch("fortran200.patch", when="@2.0.0")
patch("cmake-magma-v230.patch", when="@2.3.0")
+ patch("fortran200.patch", when="@2.0.0")
- depends_on("cmake@3.10:", type=("build", "run"))
- depends_on("cmake@3.19:", when="@develop", type=("build", "run"))
- depends_on("cmake@3.21:", when="@develop+rocm", type=("build", "run"))
+ depends_on("cmake@3.10:", when="@:2.3.0", type=("build", "run"))
+ depends_on("cmake@3.19:", when="@2.4.0:", type=("build", "run"))
+ depends_on("cmake@3.21:", when="@2.4.0:+rocm", type=("build", "run"))
variant("shared", default=True, description="Builds with shared libraries")
variant("fftw", default=False, description="Builds with support for FFTW backend")
+ variant(
+ "sycl",
+ default=False,
+ when="%oneapi",
+ description="Builds with support for oneAPI SYCL+oneMKL backend",
+ )
variant("mkl", default=False, description="Builds with support for MKL backend")
variant("magma", default=False, description="Use helper methods from the UTK MAGMA library")
variant("python", default=False, description="Install the Python bindings")
@@ -64,14 +55,9 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage):
depends_on("py-numba", when="+python+cuda", type=("build", "run"))
extends("python", when="+python", type=("build", "run"))
- conflicts("~fftw", when="@:2.1.0~mkl~cuda") # requires at least one backend
- conflicts("+fftw", when="+mkl@:1.0") # old API supports at most one CPU backend
conflicts("^openmpi~cuda", when="+cuda") # +cuda requires CUDA enabled OpenMPI
conflicts("~cuda~rocm", when="+magma") # magma requires CUDA or HIP
conflicts("+rocm", when="@:2.1.0") # heffte+rocm is in in development in spack
- conflicts("+python", when="@:1.0") # python support was added post v1.0
- conflicts("+fortran", when="@:1.0") # fortran support was added post v1.0
- conflicts("+magma", when="@:1.0") # magma support was added post v1.0
depends_on("mpi", type=("build", "run"))
@@ -80,23 +66,27 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage):
depends_on("cuda@8.0:", when="+cuda", type=("build", "run"))
depends_on("hip@3.8.0:", when="+rocm", type=("build", "run"))
depends_on("rocfft@3.8.0:", when="+rocm", type=("build", "run"))
- depends_on("hip@5.2.3:", when="@develop+rocm", type=("build", "run"))
- depends_on("rocfft@5.2.3:", when="@develop+rocm", type=("build", "run"))
+ depends_on("hip@5.2.3:", when="@2.4.0:+rocm", type=("build", "run"))
+ depends_on("rocfft@5.2.3:", when="@2.4.0:+rocm", type=("build", "run"))
depends_on("magma@2.5.3:", when="+cuda+magma", type=("build", "run"))
depends_on("magma+rocm@2.6.1:", when="+magma+rocm @2.1:", type=("build", "run"))
depends_on("rocblas@3.8:", when="+magma+rocm", type=("build", "run"))
depends_on("rocsparse@3.8:", when="+magma+rocm", type=("build", "run"))
depends_on("hipblas@3.8:", when="+magma+rocm", type=("build", "run"))
depends_on("hipsparse@3.8:", when="+magma+rocm", type=("build", "run"))
+ depends_on("intel-oneapi-mkl@2023.2.0:", when="+sycl", type=("build", "run"))
+ depends_on("intel-oneapi-mpi@2021.10.0:", when="+sycl", type=("build", "run"))
examples_src_dir = "examples"
def cmake_args(self):
args = [
"-DHeffte_SEQUENTIAL_TESTING=ON",
+ "-DHeffte_ENABLE_TESTING=ON",
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
self.define_from_variant("Heffte_ENABLE_CUDA", "cuda"),
self.define_from_variant("Heffte_ENABLE_ROCM", "rocm"),
+ self.define_from_variant("Heffte_ENABLE_ONEAPI", "sycl"),
self.define_from_variant("Heffte_ENABLE_FFTW", "fftw"),
self.define_from_variant("Heffte_ENABLE_MKL", "mkl"),
self.define_from_variant("Heffte_ENABLE_MAGMA", "magma"),
@@ -115,7 +105,7 @@ def cmake_args(self):
archs = ";".join(cuda_arch)
args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % archs)
- if "+rocm" in self.spec and self.spec.satisfies("@:2.3.0"):
+ if "+rocm" in self.spec:
args.append("-DCMAKE_CXX_COMPILER={0}".format(self.spec["hip"].hipcc))
rocm_arch = self.spec.variants["amdgpu_target"].value
@@ -146,22 +136,26 @@ def test_make_test(self):
cmake_dir = self.test_suite.current_test_cache_dir.testing
options = [cmake_dir]
+ options.append(self.define("Heffte_DIR", self.spec.prefix.lib.cmake.Heffte))
if "+rocm" in self.spec:
+ # The path name is 'hsa-runtime64', but Python identifiers cannot contain '-'
+ hsa_runtime = join_path(self.spec["hsa-rocr-dev"].prefix.lib.cmake, "hsa-runtime64")
options.extend(
[
- f"-Dhip_DIR={self.spec['hip'].prefix.lib.cmake.hip}",
- "-DAMDDeviceLibs_DIR="
- + f"{self.spec['llvm-amdgpu'].prefix.lib.cmake.AMDDeviceLibs}",
- f"-Damd_comgr_DIR={self.spec['comgr'].prefix.lib.cmake.amd_comgr}",
- "-Dhsa-runtime64_DIR="
- + f"{self.spec['hsa-rocr-dev'].prefix.lib.cmake.hsa-runtime64}",
- "-DHSA_HEADER={self.spec['hsa-rocr-dev'].prefix.include}",
- "-Drocfft_DIR={self.spec['rocfft'].prefix.lib.cmake.rocfft}",
+ self.define("hip_DIR", self.spec["hip"].prefix.lib.cmake.hip),
+ self.define(
+ "AMDDeviceLibs_DIR",
+ self.spec["llvm-amdgpu"].prefix.lib.cmake.AMDDeviceLibs,
+ ),
+ self.define("amd_comgr_DIR", self.spec["comgr"].prefix.lib.cmake.amd_comgr),
+ self.define("hsa-runtime64_DIR", hsa_runtime),
+ self.define("HSA_HEADER", self.spec["hsa-rocr-dev"].prefix.include),
+ self.define("rocfft_DIR", self.spec["rocfft"].prefix.lib.cmake.rocfft),
]
)
# Provide the root directory of the MPI installation.
- options.append(f"-DMPI_HOME={self.spec['mpi'].prefix}")
+ options.append(self.define("MPI_HOME", self.spec["mpi"].prefix))
cmake = which(self.spec["cmake"].prefix.bin.cmake)
cmake(*options)
diff --git a/var/spack/repos/builtin/packages/heffte/threads10.patch b/var/spack/repos/builtin/packages/heffte/threads10.patch
deleted file mode 100644
index 41d55d9bb80919..00000000000000
--- a/var/spack/repos/builtin/packages/heffte/threads10.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/cmake/HeffteConfig.cmake b/cmake/HeffteConfig.cmake
-index bd67de9..ca06086 100644
---- a/cmake/HeffteConfig.cmake
-+++ b/cmake/HeffteConfig.cmake
-@@ -19,6 +19,8 @@ if (NOT TARGET MPI::MPI_CXX)
- find_package(MPI REQUIRED)
- endif()
-
-+find_package(Threads)
-+
- if ("@BUILD_SHARED_LIBS@")
- set(Heffte_SHARED_FOUND "ON")
- else()
diff --git a/var/spack/repos/builtin/packages/highfive/package.py b/var/spack/repos/builtin/packages/highfive/package.py
index e818a7805f4e90..1d7c82257f87a0 100644
--- a/var/spack/repos/builtin/packages/highfive/package.py
+++ b/var/spack/repos/builtin/packages/highfive/package.py
@@ -17,6 +17,7 @@ class Highfive(CMakePackage):
maintainers("alkino")
version("develop", branch="master")
+ version("2.8.0", sha256="cd2502cae61bfb00e32dd18c9dc75289e09ad1db5c2a46d3b0eefd32e0df983b")
version("2.7.1", sha256="25b4c51a94d1e670dc93b9b73f51e79b65d8ff49bcd6e5d5582d5ecd2789a249")
version("2.7.0", sha256="8e05672ddf81a59ce014b1d065bd9a8c5034dbd91a5c2578e805ef880afa5907")
version("2.6.2", sha256="ab51b9fbb49e877dd1aa7b53b4b26875f41e4e0b8ee0fc2f1d735e0d1e43d708")
diff --git a/var/spack/repos/builtin/packages/highway/package.py b/var/spack/repos/builtin/packages/highway/package.py
index a708d3f3157df9..75f5398ab29717 100644
--- a/var/spack/repos/builtin/packages/highway/package.py
+++ b/var/spack/repos/builtin/packages/highway/package.py
@@ -12,7 +12,13 @@ class Highway(CMakePackage):
homepage = "https://github.com/google/highway"
url = "https://github.com/google/highway/archive/refs/tags/1.0.0.tar.gz"
+ version("1.0.7", sha256="5434488108186c170a5e2fca5e3c9b6ef59a1caa4d520b008a9b8be6b8abe6c5")
+ version("1.0.6", sha256="d89664a045a41d822146e787bceeefbf648cc228ce354f347b18f2b419e57207")
+ version("1.0.5", sha256="99b7dad98b8fa088673b720151458fae698ae5df9154016e39de4afdc23bb927")
version("1.0.4", sha256="faccd343935c9e98afd1016e9d20e0b8b89d908508d1af958496f8c2d3004ac2")
+ version("1.0.3", sha256="566fc77315878473d9a6bd815f7de78c73734acdcb745c3dde8579560ac5440e")
+ version("1.0.2", sha256="e8ef71236ac0d97f12d553ec1ffc5b6375d57b5f0b860c7447dd69b6ed1072db")
+ version("1.0.1", sha256="7ca6af7dc2e3e054de9e17b9dfd88609a7fd202812b1c216f43cc41647c97311")
version("1.0.0", sha256="ab4f5f864932268356f9f6aa86f612fa4430a7db3c8de0391076750197e876b8")
depends_on("cmake@3.10:", type="build")
diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py
index ff62c7da56c0ee..4f68978ab640fa 100644
--- a/var/spack/repos/builtin/packages/hiop/package.py
+++ b/var/spack/repos/builtin/packages/hiop/package.py
@@ -22,33 +22,43 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage):
maintainers("ryandanehy", "cameronrutherford", "pelesh")
# Most recent tagged snapshot is the preferred version when profiling.
- version("1.0.1", commit="c5e156c6f27d046f590dc35114980e3f9c573ca6", submodules=True)
- version("1.0.0", commit="10b7d3ee0a15cb4949ccee8c905d447b9528794f", submodules=True)
- version("0.7.2", commit="d0f57c880d4202a72c62dd1f5c92e3bc8acb9788", submodules=True)
- version("0.7.1", commit="8064ef6b2249ad2feca92a9d1e90060bad3eebc7", submodules=True)
- version("0.7.0", commit="5f42ab34b419b7cf64d0fffb29d443b009dbfd75", submodules=True)
- version("0.6.2", commit="55652fbe923ab9107d002d0d070865bd22375b28")
- version("0.6.1", commit="a9e2697b00aa13ecf0ae4783dd8a41dee11dc50e")
- version("0.6.0", commit="21af7eb0d6427be73546cf303abc84e834a5a55d")
- version("0.5.4", commit="a37a7a677884e95d1c0ad37936aef3778fc91c3e")
- version("0.5.3", commit="698e8d0fdc0ff9975d8714339ff8c782b70d85f9")
- version("0.5.2", commit="662ad76dee1f501f648a8bec9a490cb5881789e9")
- version("0.5.1", commit="6789bbb55824e68e428c2df1009d647af81f9cf1")
- version("0.5.0", commit="a39da8025037c7c8ae2eb31234eb80cc73bec2af")
- version("0.4.6", commit="b72d163d52c9225c3196ceb2baebdc7cf09a69de")
- version("0.4.5", commit="c353580456c4776c50811b97cf8ff802dc27b90c")
- version("0.4.4", commit="e858eefa6b914f5c87c3717bbce811931ea69386")
- version("0.4.3", commit="c0394af4d84ebb84b7d2b95283ad65ffd84e0d45")
- version("0.4.2", commit="3fcb788d223eec24c0241680070c4a9a5ec71ef3")
- version("0.4.1", commit="3f269560f76d5a89bcbd1d3c4f9f0e5acaa6fd64")
- version("0.4", commit="91d21085a1149eacdb27cd738d4a74a7e412fcff")
- version("0.3.99.3", commit="bed1dbef260e53a9d139ccfb77d2e83a98aab216")
- version("0.3.99.2", commit="9eb026768bc5e0a2c1293d0487cc39913001ae19")
- version("0.3.99.1", commit="220e32c0f318665d6d394ca3cd0735b9d26a65eb")
- version("0.3.99.0", commit="589b9c76781447108fa55788d5fa1b83ff71a3d1")
- version("0.3", commit="7e8adae9db757aed48e5c2bc448316307598258f")
- version("0.2", commit="c52a6f6b9baaaa2d7f233a749aa98f901349723f")
- version("0.1", commit="5f60e11b79d532115fb41694378b54c9c707aad9")
+ version(
+ "1.0.1", tag="v1.0.1", commit="c5e156c6f27d046f590dc35114980e3f9c573ca6", submodules=True
+ )
+ version(
+ "1.0.0", tag="v1.0.0", commit="10b7d3ee0a15cb4949ccee8c905d447b9528794f", submodules=True
+ )
+ version(
+ "0.7.2", tag="v0.7.2", commit="d0f57c880d4202a72c62dd1f5c92e3bc8acb9788", submodules=True
+ )
+ version(
+ "0.7.1", tag="v0.7.1", commit="8064ef6b2249ad2feca92a9d1e90060bad3eebc7", submodules=True
+ )
+ version(
+ "0.7.0", tag="v0.7.0", commit="5f42ab34b419b7cf64d0fffb29d443b009dbfd75", submodules=True
+ )
+ version("0.6.2", tag="v0.6.2", commit="55652fbe923ab9107d002d0d070865bd22375b28")
+ version("0.6.1", tag="v0.6.1", commit="a9e2697b00aa13ecf0ae4783dd8a41dee11dc50e")
+ version("0.6.0", tag="v0.6.0", commit="21af7eb0d6427be73546cf303abc84e834a5a55d")
+ version("0.5.4", tag="v0.5.4", commit="a37a7a677884e95d1c0ad37936aef3778fc91c3e")
+ version("0.5.3", tag="v0.5.3", commit="698e8d0fdc0ff9975d8714339ff8c782b70d85f9")
+ version("0.5.2", tag="v0.5.2", commit="662ad76dee1f501f648a8bec9a490cb5881789e9")
+ version("0.5.1", tag="v0.5.1", commit="6789bbb55824e68e428c2df1009d647af81f9cf1")
+ version("0.5.0", tag="v0.5.0", commit="a39da8025037c7c8ae2eb31234eb80cc73bec2af")
+ version("0.4.6", tag="v0.4.6", commit="b72d163d52c9225c3196ceb2baebdc7cf09a69de")
+ version("0.4.5", tag="v0.4.5", commit="c353580456c4776c50811b97cf8ff802dc27b90c")
+ version("0.4.4", tag="v0.4.4", commit="e858eefa6b914f5c87c3717bbce811931ea69386")
+ version("0.4.3", tag="v0.4.3", commit="c0394af4d84ebb84b7d2b95283ad65ffd84e0d45")
+ version("0.4.2", tag="v0.4.2", commit="3fcb788d223eec24c0241680070c4a9a5ec71ef3")
+ version("0.4.1", tag="v0.4.1", commit="3f269560f76d5a89bcbd1d3c4f9f0e5acaa6fd64")
+ version("0.4", tag="v0.4", commit="91d21085a1149eacdb27cd738d4a74a7e412fcff")
+ version("0.3.99.3", tag="v0.3.99.3", commit="bed1dbef260e53a9d139ccfb77d2e83a98aab216")
+ version("0.3.99.2", tag="v0.3.99.2", commit="9eb026768bc5e0a2c1293d0487cc39913001ae19")
+ version("0.3.99.1", tag="v0.3.99.1", commit="220e32c0f318665d6d394ca3cd0735b9d26a65eb")
+ version("0.3.99.0", tag="v0.3.99.0", commit="589b9c76781447108fa55788d5fa1b83ff71a3d1")
+ version("0.3", tag="v0.3", commit="7e8adae9db757aed48e5c2bc448316307598258f")
+ version("0.2", tag="v0.2", commit="c52a6f6b9baaaa2d7f233a749aa98f901349723f")
+ version("0.1", tag="v0.1", commit="5f60e11b79d532115fb41694378b54c9c707aad9")
# Development branches
version("master", branch="master")
@@ -103,7 +113,13 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage):
depends_on("magma@{0}:".format(magma_v), when="@{0}:+cuda".format(hiop_v))
depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v))
+ # 1.0.2 fixes a bug with CUDA 12 compatibility
+ # hiop@0.6.0 requires cusolver API in cuda@11
+ depends_on("cuda@11:11.9", when="@0.6.0:1.0.1+cuda")
depends_on("cuda@11:", when="@develop:+cuda")
+ # Before hiop@0.6.0, the only CUDA requirement was magma
+ depends_on("cuda", when="@:0.5.4+cuda")
+
depends_on("raja", when="+raja")
depends_on("umpire", when="+raja")
depends_on("raja+openmp", when="+raja~cuda~rocm")
diff --git a/var/spack/repos/builtin/packages/hip-examples/package.py b/var/spack/repos/builtin/packages/hip-examples/package.py
index c625d0fe4b76c2..f3d3aed50cd86c 100644
--- a/var/spack/repos/builtin/packages/hip-examples/package.py
+++ b/var/spack/repos/builtin/packages/hip-examples/package.py
@@ -18,14 +18,18 @@ class HipExamples(Package):
maintainers("srekolam", "renjithravindrankannath", "afzpatel")
- version("master", branch="master")
-
+ version("5.6.1", sha256="c1b5d30e387f869fae21170790ea3d604f7f0dba7771a9c096d9a5c2351dd001")
+ version("5.6.0", sha256="b751a0cac938248f7ea0fbeaa9df35688357b54ddd13359e2842a770b7923dfe")
+ version("5.5.1", sha256="c8522ef3f0804c85eef7e9efe2671f375b0d7f2100de85f55dcc2401efed6389")
+ version("5.5.0", sha256="bea8a4155bbfbdb3bc1f83c22e4bd1214b1b4e1840b58dc7d37704620de5b103")
version("5.4.3", sha256="053b8b7892e2929e3f90bd978d8bb1c9801e4803eadd7d97fc6692ce60af1d47")
patch("0001-add-inc-and-lib-paths-to-openmp-helloworld.patch")
patch("0002-add-fpic-compile-to-add4.patch")
- depends_on("hip")
- depends_on("rocm-openmp-extras")
+
+ for ver in ["5.6.1", "5.6.0", "5.5.1", "5.5.0", "5.4.3"]:
+ depends_on("hip@" + ver, when="@" + ver)
+ depends_on("rocm-openmp-extras@" + ver, when="@" + ver)
def install(self, spec, prefix):
stage = os.getcwd()
diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py
index 1200cfdd2cb72d..a34805eb6757ea 100644
--- a/var/spack/repos/builtin/packages/hip/package.py
+++ b/var/spack/repos/builtin/packages/hip/package.py
@@ -709,6 +709,14 @@ def cmake_args(self):
args.append(self.define("CLR_BUILD_OCL", False)),
return args
+ test_src_dir_old = "samples"
+ test_src_dir = "hip-tests/samples"
+
+ @run_after("install")
+ def install_samples(self):
+ if self.spec.satisfies("@5.6.0:"):
+ install_tree(self.test_src_dir, self.spec.prefix.share.samples)
+
@run_after("install")
def cache_test_sources(self):
"""Copy the tests source files after the package is installed to an
@@ -716,16 +724,18 @@ def cache_test_sources(self):
if self.spec.satisfies("@:5.1.0"):
return
elif self.spec.satisfies("@5.1:5.5"):
- self.test_src_dir = "samples"
+ self.cache_extra_test_sources([self.test_src_dir_old])
elif self.spec.satisfies("@5.6:"):
- self.test_src_dir = "hip-tests/samples"
- self.cache_extra_test_sources([self.test_src_dir])
+ self.cache_extra_test_sources([self.test_src_dir])
def test_samples(self):
# configure, build and run all hip samples
if self.spec.satisfies("@:5.1.0"):
raise SkipTest("Test is only available for specs after version 5.1.0")
- test_dir = join_path(self.test_suite.current_test_cache_dir, self.test_src_dir)
+ elif self.spec.satisfies("@5.1:5.5"):
+ test_dir = join_path(self.test_suite.current_test_cache_dir, self.test_src_dir_old)
+ elif self.spec.satisfies("@5.6:"):
+ test_dir = join_path(self.test_suite.current_test_cache_dir, self.test_src_dir)
prefixes = ";".join(
[
self.spec["hip"].prefix,
diff --git a/var/spack/repos/builtin/packages/hipblas/package.py b/var/spack/repos/builtin/packages/hipblas/package.py
index b0261bd5db545c..973a8c34b32881 100644
--- a/var/spack/repos/builtin/packages/hipblas/package.py
+++ b/var/spack/repos/builtin/packages/hipblas/package.py
@@ -132,10 +132,7 @@ class Hipblas(CMakePackage, CudaPackage, ROCmPackage):
patch("link-clients-blas.patch", when="@4.3.0:4.3.2")
patch("link-clients-blas-4.5.0.patch", when="@4.5.0:4.5.2")
patch("hipblas-link-clients-blas-5.0.0.patch", when="@5.0.0:5.0.2")
-
- def check(self):
- exe = join_path(self.build_directory, "clients", "staging", "hipblas-test")
- self.run_test(exe, options=["--gtest_filter=-*known_bug*"])
+ patch("remove-hipblas-clients-file-installation.patch", when="@5.5:")
depends_on("rocm-cmake@5.2.0:", type="build", when="@5.2.0:")
depends_on("rocm-cmake@4.5.0:", type="build", when="@4.5.0:")
@@ -222,3 +219,9 @@ def cmake_args(self):
args.append("-DCMAKE_INSTALL_LIBDIR=lib")
return args
+
+ @run_after("build")
+ @on_package_attributes(run_tests=True)
+ def check_build(self):
+ exe = Executable(join_path(self.build_directory, "clients", "staging", "hipblas-test"))
+ exe("--gtest_filter=-*known_bug*")
diff --git a/var/spack/repos/builtin/packages/hipblas/remove-hipblas-clients-file-installation.patch b/var/spack/repos/builtin/packages/hipblas/remove-hipblas-clients-file-installation.patch
new file mode 100644
index 00000000000000..a1adf8930675be
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hipblas/remove-hipblas-clients-file-installation.patch
@@ -0,0 +1,18 @@
+diff --git a/clients/CMakeLists.txt b/clients/CMakeLists.txt
+index 2ae1535..c956e00 100644
+--- a/clients/CMakeLists.txt
++++ b/clients/CMakeLists.txt
+@@ -134,13 +134,3 @@ add_custom_command( OUTPUT "${HIPBLAS_GENTEST}"
+
+ add_custom_target( hipblas-common DEPENDS "${HIPBLAS_COMMON}" "${HIPBLAS_TEMPLATE}" "${HIPBLAS_SMOKE}" "${HIPBLAS_GENTEST}" )
+
+-rocm_install(
+- FILES ${HIPBLAS_COMMON} ${HIPBLAS_TEMPLATE} ${HIPBLAS_SMOKE}
+- DESTINATION "${CMAKE_INSTALL_BINDIR}"
+- COMPONENT clients-common
+-)
+-rocm_install(
+- PROGRAMS ${HIPBLAS_GENTEST}
+- DESTINATION "${CMAKE_INSTALL_BINDIR}"
+- COMPONENT clients-common
+-)
diff --git a/var/spack/repos/builtin/packages/hipsycl/package.py b/var/spack/repos/builtin/packages/hipsycl/package.py
index e8a5ba9201dd06..b6b30c2e5cfa7a 100644
--- a/var/spack/repos/builtin/packages/hipsycl/package.py
+++ b/var/spack/repos/builtin/packages/hipsycl/package.py
@@ -39,6 +39,9 @@ class Hipsycl(CMakePackage):
depends_on("python@3:")
depends_on("llvm@8: +clang", when="~cuda")
depends_on("llvm@9: +clang", when="+cuda")
+ # hipSYCL 0.8.0 supported only LLVM 8-10:
+ # (https://github.com/AdaptiveCpp/AdaptiveCpp/blob/v0.8.0/CMakeLists.txt#L29-L37)
+ depends_on("llvm@8:10", when="@0.8.0")
# https://github.com/OpenSYCL/OpenSYCL/pull/918 was introduced after 0.9.4
conflicts("^llvm@16:", when="@:0.9.4")
# LLVM PTX backend requires cuda7:10.1 (https://tinyurl.com/v82k5qq)
diff --git a/var/spack/repos/builtin/packages/hpcc/package.py b/var/spack/repos/builtin/packages/hpcc/package.py
index 4b281cf42426eb..05d08014482541 100644
--- a/var/spack/repos/builtin/packages/hpcc/package.py
+++ b/var/spack/repos/builtin/packages/hpcc/package.py
@@ -118,7 +118,10 @@ def edit(self, spec, prefix):
lin_alg_libs.append(join_path(spec["fftw-api"].prefix.lib, "libsfftw_mpi.so"))
lin_alg_libs.append(join_path(spec["fftw-api"].prefix.lib, "libsfftw.so"))
- elif self.spec.variants["fft"].value == "mkl" and "^mkl" in spec:
+ elif (
+ self.spec.variants["fft"].value == "mkl"
+ and spec["fftw-api"].name in INTEL_MATH_LIBRARIES
+ ):
mklroot = env["MKLROOT"]
self.config["@LAINC@"] += " -I{0}".format(join_path(mklroot, "include/fftw"))
libfftw2x_cdft = join_path(
diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py
index 8d58956508a1d9..bb1e28f13cd0ee 100644
--- a/var/spack/repos/builtin/packages/hpctoolkit/package.py
+++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py
@@ -109,6 +109,11 @@ class Hpctoolkit(AutotoolsPackage):
"python", default=False, description="Support unwinding Python source.", when="@2023.03:"
)
+ with when("@develop build_system=autotools"):
+ depends_on("autoconf", type="build")
+ depends_on("automake", type="build")
+ depends_on("libtool", type="build")
+
boost_libs = (
"+atomic +chrono +date_time +filesystem +system +thread +timer"
" +graph +regex +shared +multithreaded visibility=global"
diff --git a/var/spack/repos/builtin/packages/hpx-kokkos/package.py b/var/spack/repos/builtin/packages/hpx-kokkos/package.py
index 27e88238294c6d..e98c0bb17ccd78 100644
--- a/var/spack/repos/builtin/packages/hpx-kokkos/package.py
+++ b/var/spack/repos/builtin/packages/hpx-kokkos/package.py
@@ -16,6 +16,8 @@ class HpxKokkos(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/STEllAR-GROUP/hpx-kokkos.git"
maintainers("G-071", "msimberg")
+ license("BSL-1.0")
+
version("master", branch="master")
version("0.4.0", sha256="dafef55521cf4bf7ab28ebad546ea1d3fb83fac3a9932e292db4ab3666cd833f")
version("0.3.0", sha256="83c1d11dab95552ad0abdae767c71f757811d7b51d82bd231653dc942e89a45d")
diff --git a/var/spack/repos/builtin/packages/hpx/package.py b/var/spack/repos/builtin/packages/hpx/package.py
index b55c9ea6143b6a..628358b38caf8f 100644
--- a/var/spack/repos/builtin/packages/hpx/package.py
+++ b/var/spack/repos/builtin/packages/hpx/package.py
@@ -18,10 +18,12 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/STEllAR-GROUP/hpx.git"
maintainers("msimberg", "albestro", "teonnik", "hkaiser")
+ license("BSL-1.0")
+
tags = ["e4s"]
version("master", branch="master")
- version("stable", tag="stable", commit="38d5bf935e5a49f9466c5e615e04e8d553a73dc6")
+ version("stable", tag="stable", commit="103a7b8e3719a0db948d1abde29de0ff91e070be")
version("1.9.1", sha256="1adae9d408388a723277290ddb33c699aa9ea72defadf3f12d4acc913a0ff22d")
version("1.9.0", sha256="2a8dca78172fbb15eae5a5e9facf26ab021c845f9c09e61b1912e6cf9e72915a")
version("1.8.1", sha256="2fc4c10f55e2e6bcdc6f6ff950e26c6d8e218e138fdbd885ee71ccf5c5549054")
diff --git a/var/spack/repos/builtin/packages/hsakmt-roct/package.py b/var/spack/repos/builtin/packages/hsakmt-roct/package.py
index 571cffd6577ee1..444f7adcc1adb3 100644
--- a/var/spack/repos/builtin/packages/hsakmt-roct/package.py
+++ b/var/spack/repos/builtin/packages/hsakmt-roct/package.py
@@ -132,6 +132,7 @@ def install_targets(self):
else:
return ["install"]
+ def cmake_args(self):
args = []
if self.spec.satisfies("@:5.4.3"):
args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared"))
diff --git a/var/spack/repos/builtin/packages/hub/package.py b/var/spack/repos/builtin/packages/hub/package.py
index 861872b40956a7..0213c142fd942b 100644
--- a/var/spack/repos/builtin/packages/hub/package.py
+++ b/var/spack/repos/builtin/packages/hub/package.py
@@ -16,15 +16,37 @@ class Hub(Package):
git = "https://github.com/github/hub.git"
version("master", branch="master")
- version("2.2.2", sha256="610572ee903aea1fa8622c16ab7ddef2bd1bfec9f4854447ab8e0fbdbe6a0cae")
- version("2.2.1", sha256="9350aba6a8e3da9d26b7258a4020bf84491af69595f7484f922d75fc8b86dc10")
- version("2.2.0", sha256="2da1351197eb5696c207f22c69a5422af052d74277b73d0b8661efb9ec1d0eb1")
- version("1.12.4", sha256="b7fe404d7dc5f60554f088bec12de5e80229331430ea0ced46d5bf89ecae5117")
+ version("2.14.2", sha256="e19e0fdfd1c69c401e1c24dd2d4ecf3fd9044aa4bd3f8d6fd942ed1b2b2ad21a")
+ version(
+ "2.2.2",
+ sha256="610572ee903aea1fa8622c16ab7ddef2bd1bfec9f4854447ab8e0fbdbe6a0cae",
+ deprecated=True,
+ )
+ version(
+ "2.2.1",
+ sha256="9350aba6a8e3da9d26b7258a4020bf84491af69595f7484f922d75fc8b86dc10",
+ deprecated=True,
+ )
+ version(
+ "2.2.0",
+ sha256="2da1351197eb5696c207f22c69a5422af052d74277b73d0b8661efb9ec1d0eb1",
+ deprecated=True,
+ )
+ version(
+ "1.12.4",
+ sha256="b7fe404d7dc5f60554f088bec12de5e80229331430ea0ced46d5bf89ecae5117",
+ deprecated=True,
+ )
extends("go")
def install(self, spec, prefix):
env = os.environ
- env["GOPATH"] = self.stage.source_path + ":" + env["GOPATH"]
- bash = which("bash")
- bash(os.path.join("script", "build"), "-o", os.path.join(prefix, "bin", "hub"))
+ if spec.version < Version("2.14"):
+ env["GOPATH"] = self.stage.source_path + ":" + env["GOPATH"]
+ env["GO111MODULE"] = "off"
+ bash = which("bash")
+ bash(os.path.join("script", "build"), "-o", prefix.bin.hub)
+ return
+ env["GO111MODULE"] = "on"
+ go("build", "-o", prefix.bin.hub)
diff --git a/var/spack/repos/builtin/packages/hydrogen/package.py b/var/spack/repos/builtin/packages/hydrogen/package.py
index df00235725b9f1..6bf6114d4175ee 100644
--- a/var/spack/repos/builtin/packages/hydrogen/package.py
+++ b/var/spack/repos/builtin/packages/hydrogen/package.py
@@ -7,254 +7,269 @@
from spack.package import *
+# This limits the versions of lots of things pretty severely.
+#
+# - Only v1.5.2 and newer are buildable.
+# - CMake must be v3.22 or newer.
+# - CUDA must be v11.0.0 or newer.
+
-class Hydrogen(CMakePackage, CudaPackage, ROCmPackage):
+class Hydrogen(CachedCMakePackage, CudaPackage, ROCmPackage):
"""Hydrogen: Distributed-memory dense and sparse-direct linear algebra
and optimization library. Based on the Elemental library."""
homepage = "https://libelemental.org"
- url = "https://github.com/LLNL/Elemental/archive/v1.0.1.tar.gz"
+ url = "https://github.com/LLNL/Elemental/archive/v1.5.1.tar.gz"
git = "https://github.com/LLNL/Elemental.git"
tags = ["ecp", "radiuss"]
maintainers("bvanessen")
version("develop", branch="hydrogen")
+ version("1.5.3", sha256="faefbe738bd364d0e26ce9ad079a11c93a18c6f075719a365fd4fa5f1f7a989a")
+ version("1.5.2", sha256="a902cad3962471216cfa278ba0561c18751d415cd4d6b2417c02a43b0ab2ea33")
version("1.5.1", sha256="447da564278f98366906d561d9c8bc4d31678c56d761679c2ff3e59ee7a2895c")
- version("1.5.0", sha256="03dd487fb23b9fdbc715554a8ea48c3196a1021502e61b0172ef3fdfbee75180")
- version("1.4.0", sha256="c13374ff4a6c4d1076e47ba8c8d91a7082588b9958d1ed89cffb12f1d2e1452e")
- version("1.3.4", sha256="7979f6656f698f0bbad6798b39d4b569835b3013ff548d98089fce7c283c6741")
- version("1.3.3", sha256="a51a1cfd40ac74d10923dfce35c2c04a3082477683f6b35e7b558ea9f4bb6d51")
- version("1.3.2", sha256="50bc5e87955f8130003d04dfd9dcad63107e92b82704f8107baf95b0ccf98ed6")
- version("1.3.1", sha256="a8b8521458e9e747f2b24af87c4c2749a06e500019c383e0cefb33e5df6aaa1d")
- version("1.3.0", sha256="0f3006aa1d8235ecdd621e7344c99f56651c6836c2e1bc0cf006331b70126b36")
- version("1.2.0", sha256="8545975139582ee7bfe5d00f8d83a8697afc285bf7026b0761e9943355974806")
- version("1.1.0-1", sha256="73ce05e4166853a186469269cb00a454de71e126b2019f95bbae703b65606808")
- version("1.1.0", sha256="b4c12913acd01c72d31f4522266bfeb8df1d4d3b4aef02e07ccbc9a477894e71")
- version("1.0.1", sha256="27cf76e1ef1d58bd8f9b1e34081a14a682b7ff082fb5d1da56713e5e0040e528")
- version("1.0", sha256="d8a97de3133f2c6b6bb4b80d32b4a4cc25eb25e0df4f0cec0f8cb19bf34ece98")
-
- variant("shared", default=True, description="Enables the build of shared libraries")
- variant("openmp", default=True, description="Make use of OpenMP within CPU-kernels")
- variant(
- "openmp_blas", default=False, description="Use OpenMP for threading in the BLAS library"
- )
- variant("quad", default=False, description="Enable quad precision")
- variant("int64", default=False, description="Use 64bit integers")
- variant("int64_blas", default=False, description="Use 64bit integers for BLAS.")
- variant("scalapack", default=False, description="Build with ScaLAPACK library")
+ # Older versions are no longer supported.
+
+ variant("shared", default=True, description="Enables the build of shared libraries.")
variant(
"build_type",
default="Release",
description="The build type to build",
values=("Debug", "Release"),
)
+ variant("int64", default=False, description="Use 64-bit integers")
+ variant("al", default=True, sticky=True, description="Use Aluminum communication library")
variant(
- "blas",
- default="openblas",
- values=("openblas", "mkl", "accelerate", "essl", "libsci"),
- description="Enable the use of OpenBlas/MKL/Accelerate/ESSL/LibSci",
+ "cub", default=True, when="+cuda", description="Use CUB/hipCUB for GPU memory management"
)
variant(
- "mpfr",
- default=False,
- description="Support GNU MPFR's" "arbitrary-precision floating-point arithmetic",
+ "cub", default=True, when="+rocm", description="Use CUB/hipCUB for GPU memory management"
)
- variant("test", default=False, description="Builds test suite")
- variant("al", default=False, description="Builds with Aluminum communication library")
+ variant("half", default=False, description="Support for FP16 precision data types")
+
+ # TODO: Add netlib-lapack. For GPU-enabled builds, typical
+ # workflows don't touch host BLAS/LAPACK all that often, and even
+ # less frequently in performance-critical regions.
+ variant(
+ "blas",
+ default="any",
+ values=("any", "openblas", "mkl", "accelerate", "essl", "libsci"),
+ description="Specify a host BLAS library preference",
+ )
+ variant("int64_blas", default=False, description="Use 64-bit integers for (host) BLAS.")
+
+ variant("openmp", default=True, description="Make use of OpenMP within CPU kernels")
variant(
"omp_taskloops",
+ when="+openmp",
default=False,
- description="Use OpenMP taskloops instead of parallel for loops.",
+ description="Use OpenMP taskloops instead of parallel for loops",
)
- variant("half", default=False, description="Builds with support for FP16 precision data types")
- conflicts("~openmp", when="+omp_taskloops")
+    # Users who want threaded BLAS should request it on their own spec
+    # (e.g. "^openblas threads=openmp"); this variant does not affect Hydrogen itself.
+ # variant(
+ # "openmp_blas",
+ # default=False,
+ # description="Use OpenMP for threading in the BLAS library")
+
+ variant("test", default=False, description="Builds test suite")
+
conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive")
+ conflicts("+half", when="+rocm", msg="FP16 support not implemented for ROCm.")
- depends_on("cmake@3.21.0:", type="build", when="@1.5.2:")
- depends_on("cmake@3.17.0:", type="build", when="@:1.5.1")
- depends_on("cmake@3.22.0:", type="build", when="%cce")
+ depends_on("cmake@3.22.0:", type="build", when="@1.5.2:")
+ depends_on("cmake@3.17.0:", type="build", when="@1.5.1")
depends_on("mpi")
- depends_on("hwloc@1.11:")
- depends_on("hwloc +cuda +nvml", when="+cuda")
- depends_on("hwloc@2.3.0:", when="+rocm")
+ depends_on("blas")
+ depends_on("lapack")
# Note that #1712 forces us to enumerate the different blas variants
+ # Note that this forces us to use OpenBLAS until #1712 is fixed
depends_on("openblas", when="blas=openblas")
depends_on("openblas +ilp64", when="blas=openblas +int64_blas")
- depends_on("openblas threads=openmp", when="blas=openblas +openmp_blas")
+ depends_on("openblas@0.3.21:0.3.23", when="blas=openblas arch=ppc64le:")
depends_on("intel-mkl", when="blas=mkl")
depends_on("intel-mkl +ilp64", when="blas=mkl +int64_blas")
- depends_on("intel-mkl threads=openmp", when="blas=mkl +openmp_blas")
+    # NOTE(review): it is unclear whether veclibfort is actually required
+    # for blas=accelerate — verify before relying on this dependency.
depends_on("veclibfort", when="blas=accelerate")
- conflicts("blas=accelerate +openmp_blas")
depends_on("essl", when="blas=essl")
depends_on("essl +ilp64", when="blas=essl +int64_blas")
- depends_on("essl threads=openmp", when="blas=essl +openmp_blas")
+
depends_on("netlib-lapack +external-blas", when="blas=essl")
depends_on("cray-libsci", when="blas=libsci")
- depends_on("cray-libsci +openmp", when="blas=libsci +openmp_blas")
# Specify the correct version of Aluminum
- depends_on("aluminum@:0.3", when="@:1.3 +al")
- depends_on("aluminum@0.4.0:0.4", when="@1.4.0:1.4 +al")
- depends_on("aluminum@0.6.0:0.6", when="@1.5.0:1.5.1 +al")
- depends_on("aluminum@0.7.0:", when="@:1.0,1.5.2: +al")
+ depends_on("aluminum@0.7.0:", when="@1.5.2: +al")
# Add Aluminum variants
- depends_on("aluminum +cuda +nccl +cuda_rma", when="+al +cuda")
- depends_on("aluminum +rocm +rccl", when="+al +rocm")
+ depends_on("aluminum +cuda +ht", when="+al +cuda")
+ depends_on("aluminum +rocm +ht", when="+al +rocm")
for arch in CudaPackage.cuda_arch_values:
- depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch)
+ depends_on("aluminum +cuda cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch)
# variants +rocm and amdgpu_targets are not automatically passed to
# dependencies, so do it manually.
for val in ROCmPackage.amdgpu_targets:
- depends_on("aluminum amdgpu_target=%s" % val, when="+al +rocm amdgpu_target=%s" % val)
+ depends_on(
+ "aluminum +rocm amdgpu_target=%s" % val, when="+al +rocm amdgpu_target=%s" % val
+ )
- # Note that this forces us to use OpenBLAS until #1712 is fixed
- depends_on("lapack", when="blas=openblas ~openmp_blas")
-
- depends_on("scalapack", when="+scalapack")
- depends_on("gmp", when="+mpfr")
- depends_on("mpc", when="+mpfr")
- depends_on("mpfr", when="+mpfr")
-
- depends_on("cuda", when="+cuda")
- depends_on("cub", when="^cuda@:10")
- depends_on("hipcub", when="+rocm")
+ depends_on("cuda@11.0.0:", when="+cuda")
+ depends_on("hipcub +rocm", when="+rocm +cub")
depends_on("half", when="+half")
depends_on("llvm-openmp", when="%apple-clang +openmp")
- conflicts(
- "@0:0.98",
- msg="Hydrogen did not exist before v0.99. " + "Did you mean to use Elemental instead?",
- )
-
- generator("ninja")
-
@property
def libs(self):
shared = True if "+shared" in self.spec else False
- return find_libraries("libEl", root=self.prefix, shared=shared, recursive=True)
+ return find_libraries("libHydrogen", root=self.prefix, shared=shared, recursive=True)
def cmake_args(self):
+ args = []
+ return args
+
+ def get_cuda_flags(self):
spec = self.spec
+ args = []
+ if spec.satisfies("^cuda+allow-unsupported-compilers"):
+ args.append("-allow-unsupported-compiler")
+
+ if spec.satisfies("%clang"):
+ for flag in spec.compiler_flags["cxxflags"]:
+ if "gcc-toolchain" in flag:
+ args.append("-Xcompiler={0}".format(flag))
+ return args
- enable_gpu_fp16 = "+cuda" in spec and "+half" in spec
-
- args = [
- "-DCMAKE_CXX_STANDARD=17",
- "-DCMAKE_INSTALL_MESSAGE:STRING=LAZY",
- "-DBUILD_SHARED_LIBS:BOOL=%s" % ("+shared" in spec),
- "-DHydrogen_ENABLE_OPENMP:BOOL=%s" % ("+openmp" in spec),
- "-DHydrogen_ENABLE_QUADMATH:BOOL=%s" % ("+quad" in spec),
- "-DHydrogen_USE_64BIT_INTS:BOOL=%s" % ("+int64" in spec),
- "-DHydrogen_USE_64BIT_BLAS_INTS:BOOL=%s" % ("+int64_blas" in spec),
- "-DHydrogen_ENABLE_MPC:BOOL=%s" % ("+mpfr" in spec),
- "-DHydrogen_GENERAL_LAPACK_FALLBACK=ON",
- "-DHydrogen_ENABLE_ALUMINUM=%s" % ("+al" in spec),
- "-DHydrogen_ENABLE_CUB=%s" % ("+cuda" in spec or "+rocm" in spec),
- "-DHydrogen_ENABLE_CUDA=%s" % ("+cuda" in spec),
- "-DHydrogen_ENABLE_ROCM=%s" % ("+rocm" in spec),
- "-DHydrogen_ENABLE_TESTING=%s" % ("+test" in spec),
- "-DHydrogen_ENABLE_HALF=%s" % ("+half" in spec),
- "-DHydrogen_ENABLE_GPU_FP16=%s" % enable_gpu_fp16,
- ]
-
- if not spec.satisfies("^cmake@3.23.0"):
- # There is a bug with using Ninja generator in this version
- # of CMake
- args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON")
-
- if "+cuda" in spec:
- if self.spec.satisfies("%clang"):
- for flag in self.spec.compiler_flags["cxxflags"]:
- if "gcc-toolchain" in flag:
- args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag))
- args.append("-DCMAKE_CUDA_STANDARD=14")
- archs = spec.variants["cuda_arch"].value
- if archs != "none":
- arch_str = ";".join(archs)
- args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str)
-
- if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"):
- args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler")
-
- if "+rocm" in spec:
- args.extend(
- [
- "-DCMAKE_CXX_FLAGS=-std=c++17",
- "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix),
- "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc),
- ]
- )
- archs = self.spec.variants["amdgpu_target"].value
- if archs != "none":
- arch_str = ",".join(archs)
- cxxflags_str = " ".join(self.spec.compiler_flags["cxxflags"])
- args.append(
- "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}"
- " -g -fsized-deallocation -fPIC {1}"
- " -std=c++17".format(arch_str, cxxflags_str)
- )
- args.extend(
- [
- "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str,
- "-DAMDGPU_TARGETS=%s" % arch_str,
- "-DGPU_TARGETS=%s" % arch_str,
- ]
- )
+ def std_initconfig_entries(self):
+ entries = super(Hydrogen, self).std_initconfig_entries()
+
+ # CMAKE_PREFIX_PATH, in CMake types, is a "STRING", not a "PATH". :/
+ entries = [x for x in entries if "CMAKE_PREFIX_PATH" not in x]
+ cmake_prefix_path = os.environ["CMAKE_PREFIX_PATH"].replace(":", ";")
+ entries.append(cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path))
+        # Carried over from the original recipe; the reason it is needed is unknown.
+ entries.append(cmake_cache_string("CMAKE_INSTALL_MESSAGE", "LAZY"))
+ return entries
+
+ def initconfig_compiler_entries(self):
+ spec = self.spec
+ entries = super(Hydrogen, self).initconfig_compiler_entries()
+
+ # FIXME: Enforce this better in the actual CMake.
+ entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17"))
+ entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec))
+ entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True))
+
+ entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True))
- # Add support for OS X to find OpenMP (LLVM installed via brew)
- if self.spec.satisfies("%clang +openmp platform=darwin"):
+ if spec.satisfies("%clang +openmp platform=darwin") or spec.satisfies(
+ "%clang +omp_taskloops platform=darwin"
+ ):
clang = self.compiler.cc
clang_bin = os.path.dirname(clang)
clang_root = os.path.dirname(clang_bin)
- args.extend(["-DOpenMP_DIR={0}".format(clang_root)])
+ entries.append(cmake_cache_string("OpenMP_CXX_FLAGS", "-fopenmp=libomp"))
+ entries.append(cmake_cache_string("OpenMP_CXX_LIB_NAMES", "libomp"))
+ entries.append(
+ cmake_cache_string(
+ "OpenMP_libomp_LIBRARY", "{0}/lib/libomp.dylib".format(clang_root)
+ )
+ )
+
+ return entries
+
+ def initconfig_hardware_entries(self):
+ spec = self.spec
+ entries = super(Hydrogen, self).initconfig_hardware_entries()
+
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_CUDA", "+cuda" in spec))
+ if spec.satisfies("+cuda"):
+ entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17"))
+ if not spec.satisfies("cuda_arch=none"):
+ archs = spec.variants["cuda_arch"].value
+ arch_str = ";".join(archs)
+ entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str))
+
+ # FIXME: Should this use the "cuda_flags" function of the
+ # CudaPackage class or something? There might be other
+ # flags in play, and we need to be sure to get them all.
+ cuda_flags = self.get_cuda_flags()
+ if len(cuda_flags) > 0:
+ entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))
+
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_ROCM", "+rocm" in spec))
+ if spec.satisfies("+rocm"):
+ entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17"))
+ if not spec.satisfies("amdgpu_target=none"):
+ archs = self.spec.variants["amdgpu_target"].value
+ arch_str = ";".join(archs)
+ entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+ entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+ entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
+ entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix))
+
+ return entries
+
+ def initconfig_package_entries(self):
+ spec = self.spec
+ entries = super(Hydrogen, self).initconfig_package_entries()
+
+ # Basic Hydrogen options
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_TESTING", "+test" in spec))
+ entries.append(cmake_cache_option("Hydrogen_GENERAL_LAPACK_FALLBACK", True))
+ entries.append(cmake_cache_option("Hydrogen_USE_64BIT_INTS", "+int64" in spec))
+ entries.append(cmake_cache_option("Hydrogen_USE_64BIT_BLAS_INTS", "+int64_blas" in spec))
+
+ # Advanced dependency options
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_ALUMINUM", "+al" in spec))
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_CUB", "+cub" in spec))
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_GPU_FP16", "+cuda +half" in spec))
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_HALF", "+half" in spec))
+ entries.append(cmake_cache_option("Hydrogen_ENABLE_OPENMP", "+openmp" in spec))
+ entries.append(
+ cmake_cache_option("Hydrogen_ENABLE_OMP_TASKLOOP", "+omp_taskloops" in spec)
+ )
+
+ # Note that CUDA/ROCm are handled above.
if "blas=openblas" in spec:
- args.extend(
- [
- "-DHydrogen_USE_OpenBLAS:BOOL=%s" % ("blas=openblas" in spec),
- "-DOpenBLAS_DIR:STRING={0}".format(spec["openblas"].prefix),
- ]
- )
- elif "blas=mkl" in spec:
- args.extend(["-DHydrogen_USE_MKL:BOOL=%s" % ("blas=mkl" in spec)])
- elif "blas=accelerate" in spec:
- args.extend(["-DHydrogen_USE_ACCELERATE:BOOL=TRUE"])
- elif "blas=essl" in spec:
+ entries.append(cmake_cache_option("Hydrogen_USE_OpenBLAS", "blas=openblas" in spec))
+            # CMAKE_PREFIX_PATH should make this unnecessary, but set it explicitly anyway.
+ entries.append(cmake_cache_string("OpenBLAS_DIR", spec["openblas"].prefix))
+ elif "blas=mkl" in spec or spec.satisfies("^intel-mkl"):
+ entries.append(cmake_cache_option("Hydrogen_USE_MKL", True))
+ elif "blas=essl" in spec or spec.satisfies("^essl"):
+ entries.append(cmake_cache_string("BLA_VENDOR", "IBMESSL"))
# IF IBM ESSL is used it needs help finding the proper LAPACK libraries
- args.extend(
- [
- "-DLAPACK_LIBRARIES=%s;-llapack;-lblas"
- % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names),
- "-DBLAS_LIBRARIES=%s;-lblas"
+ entries.append(
+ cmake_cache_string(
+ "LAPACK_LIBRARIES",
+ "%s;-llapack;-lblas"
% ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names),
- ]
+ )
)
-
- if "+omp_taskloops" in spec:
- args.extend(["-DHydrogen_ENABLE_OMP_TASKLOOP:BOOL=%s" % ("+omp_taskloops" in spec)])
-
- if "+al" in spec:
- args.extend(
- [
- "-DHydrogen_ENABLE_ALUMINUM:BOOL=%s" % ("+al" in spec),
- "-DALUMINUM_DIR={0}".format(spec["aluminum"].prefix),
- ]
+ entries.append(
+ cmake_cache_string(
+ "BLAS_LIBRARIES",
+ "%s;-lblas"
+ % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names),
+ )
)
+ elif "blas=accelerate" in spec:
+ entries.append(cmake_cache_option("Hydrogen_USE_ACCELERATE", True))
+ elif spec.satisfies("^netlib-lapack"):
+ entries.append(cmake_cache_string("BLA_VENDOR", "Generic"))
- return args
+ return entries
def setup_build_environment(self, env):
if self.spec.satisfies("%apple-clang +openmp"):
diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py
index 433d60a2ce7765..6968ec8fd6d08b 100644
--- a/var/spack/repos/builtin/packages/hypre/package.py
+++ b/var/spack/repos/builtin/packages/hypre/package.py
@@ -24,6 +24,7 @@ class Hypre(AutotoolsPackage, CudaPackage, ROCmPackage):
test_requires_compiler = True
version("develop", branch="master")
+ version("2.30.0", sha256="8e2af97d9a25bf44801c6427779f823ebc6f306438066bba7fcbc2a5f9b78421")
version("2.29.0", sha256="98b72115407a0e24dbaac70eccae0da3465f8f999318b2c9241631133f42d511")
version("2.28.0", sha256="2eea68740cdbc0b49a5e428f06ad7af861d1e169ce6a12d2cf0aa2fc28c4a2ae")
version("2.27.0", sha256="507a3d036bb1ac21a55685ae417d769dd02009bde7e09785d0ae7446b4ae1f98")
@@ -107,6 +108,7 @@ def patch(self): # fix sequential compilation in 'src/seq_mv'
depends_on("rocthrust", when="+rocm")
depends_on("rocrand", when="+rocm")
depends_on("rocprim", when="+rocm")
+ depends_on("hipblas", when="+rocm +superlu-dist")
depends_on("umpire", when="+umpire")
depends_on("caliper", when="+caliper")
@@ -258,6 +260,8 @@ def configure_args(self):
if "+rocm" in spec:
rocm_pkgs = ["rocsparse", "rocthrust", "rocprim", "rocrand"]
+ if "+superlu-dist" in spec:
+ rocm_pkgs.append("hipblas")
rocm_inc = ""
for pkg in rocm_pkgs:
if "^" + pkg in spec:
diff --git a/var/spack/repos/builtin/packages/icarus/package.py b/var/spack/repos/builtin/packages/icarus/package.py
index b290cd9fedbdfb..d81f7e6fb4e067 100644
--- a/var/spack/repos/builtin/packages/icarus/package.py
+++ b/var/spack/repos/builtin/packages/icarus/package.py
@@ -13,6 +13,9 @@ class Icarus(AutotoolsPackage):
url = "https://github.com/steveicarus/iverilog/archive/refs/tags/v12_0.tar.gz"
git = "https://github.com/steveicarus/iverilog.git"
+ maintainers("davekeeshan")
+
+ version("master", branch="master")
version("12_0", sha256="a68cb1ef7c017ef090ebedb2bc3e39ef90ecc70a3400afb4aa94303bc3beaa7d")
version("11_0", sha256="6327fb900e66b46803d928b7ca439409a0dc32731d82143b20387be0833f1c95")
version("10_3", commit="453c5465895eaca4a792d18b75e9ec14db6ea50e")
@@ -38,3 +41,14 @@ def create_install_folders(self):
mkdirp(join_path(prefix.lib, "ivl", "include"))
mkdirp(join_path(prefix.share, "man"))
mkdirp(join_path(prefix.share, "man", "man1"))
+
+ # We need to fix the CC and CXX paths, as they point to the spack
+ # wrapper scripts which aren't usable without spack
+ @run_after("install")
+ def patch_compiler(self):
+ filter_file(
+ r"^CC\s*=.*", f"CC={self.compiler.cc}", join_path(self.prefix.bin, "iverilog-vpi")
+ )
+ filter_file(
+ r"^CXX\s*=.*", f"CXX={self.compiler.cxx}", join_path(self.prefix.bin, "iverilog-vpi")
+ )
diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py
index 7dd8ab41227aaa..c66235f382dae8 100644
--- a/var/spack/repos/builtin/packages/intel-mkl/package.py
+++ b/var/spack/repos/builtin/packages/intel-mkl/package.py
@@ -153,8 +153,7 @@ class IntelMkl(IntelPackage):
multi=False,
)
- provides("blas")
- provides("lapack")
+ provides("blas", "lapack")
provides("lapack@3.9.0", when="@2020.4")
provides("lapack@3.7.0", when="@11.3")
provides("scalapack")
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py
index fe2b7f34387c26..7cc61247935885 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py
@@ -7,7 +7,7 @@
@IntelOneApiPackage.update_description
-class IntelOneapiAdvisor(IntelOneApiPackage):
+class IntelOneapiAdvisor(IntelOneApiLibraryPackageWithSdk):
"""Intel Advisor is a design and analysis tool for developing
performant code. The tool supports C, C++, Fortran, SYCL, OpenMP,
OpenCL code, and Python. It helps with the following: Performant
@@ -24,6 +24,12 @@ class IntelOneapiAdvisor(IntelOneApiPackage):
"https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/advisor.html"
)
+ version(
+ "2024.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/88c5bdaa-7a2d-491f-9871-7170fadc3d52/l_oneapi_advisor_p_2024.0.0.49522_offline.sh",
+ sha256="0ef3cf39c2fbb39371ac2470dad7d0d8cc0a2709c4f78dcab58d115b446c81c4",
+ expand=False,
+ )
version(
"2023.2.0",
url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/0b0e8bf2-30e4-4a26-b1ef-e369b0181b35/l_oneapi_advisor_p_2023.2.0.49489_offline.sh",
@@ -73,6 +79,10 @@ class IntelOneapiAdvisor(IntelOneApiPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "advisor"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py
index 2d300a55ff63ef..35f31c095ff480 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py
@@ -27,6 +27,12 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage):
depends_on("intel-oneapi-mpi")
+ version(
+ "2021.11.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/9e63eba5-2b3d-4032-ad22-21f02e35b518/l_oneapi_ccl_p_2021.11.0.49161_offline.sh",
+ sha256="35fde9862d620c211064addfd3c15c4fc33bcaac6fe050163eb59a006fb9d476",
+ expand=False,
+ )
version(
"2021.10.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/3230823d-f799-4d1f-8ef3-a17f086a7719/l_oneapi_ccl_p_2021.10.0.49084_offline.sh",
@@ -100,6 +106,10 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2021.11:"
+
@property
def component_dir(self):
return "ccl"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py
index 4fca5381e6f7ef..656417e38b9a86 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py
@@ -36,8 +36,10 @@ class IntelOneapiCompilersClassic(Package):
"2021.8.0": "2023.0.0",
"2021.9.0": "2023.1.0",
"2021.10.0": "2023.2.0",
+ "2021.11.0": "2024.0.0",
}.items():
- version(ver)
+        # prefer 2021.10.0 because it is the last release that includes the classic icc C compiler
+ version(ver, preferred=(ver == "2021.10.0"))
depends_on("intel-oneapi-compilers@" + oneapi_ver, when="@" + ver, type="run")
# icc@2021.6.0 does not support gcc@12 headers
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py
index 5545053cc8d368..2c3c460a2ae525 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py
@@ -7,6 +7,17 @@
from spack.package import *
versions = [
+ {
+ "version": "2024.0.0",
+ "cpp": {
+ "url": "https://registrationcenter-download.intel.com/akdlm//IRC_NAS/5c8e686a-16a7-4866-b585-9cf09e97ef36/l_dpcpp-cpp-compiler_p_2024.0.0.49524_offline.sh",
+ "sha256": "d10bad2009c98c631fbb834aae62012548daeefc806265ea567316cd9180a684",
+ },
+ "ftn": {
+ "url": "https://registrationcenter-download.intel.com/akdlm//IRC_NAS/89b0fcf9-5c00-448a-93a1-5ee4078e008e/l_fortran-compiler_p_2024.0.0.49493_offline.sh",
+ "sha256": "57faf854b8388547ee4ef2db387a9f6f3b4d0cebd67b765cf5e844a0a970d1f9",
+ },
+ },
{
"version": "2023.2.1",
"cpp": {
@@ -182,13 +193,29 @@ class IntelOneapiCompilers(IntelOneApiPackage):
**v["ftn"],
)
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "compiler"
+ @property
+ def _llvm_bin(self):
+ return self.component_prefix.bin if self.v2_layout else self.component_prefix.linux.bin
+
+ @property
+ def _classic_bin(self):
+ return (
+ self.component_prefix.bin
+ if self.v2_layout
+ else self.component_prefix.linux.bin.intel64
+ )
+
@property
def compiler_search_prefix(self):
- return self.prefix.compiler.join(str(self.version)).linux.bin
+ return self._llvm_bin
def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
@@ -203,14 +230,15 @@ def setup_run_environment(self, env):
"""
super().setup_run_environment(env)
- env.set("CC", self.component_prefix.linux.bin.icx)
- env.set("CXX", self.component_prefix.linux.bin.icpx)
- env.set("F77", self.component_prefix.linux.bin.ifx)
- env.set("FC", self.component_prefix.linux.bin.ifx)
+ env.set("CC", self._llvm_bin.icx)
+ env.set("CXX", self._llvm_bin.icpx)
+ env.set("F77", self._llvm_bin.ifx)
+ env.set("FC", self._llvm_bin.ifx)
def install(self, spec, prefix):
# Copy instead of install to speed up debugging
# install_tree("/opt/intel/oneapi/compiler", self.prefix)
+ # return
# install cpp
super().install(spec, prefix)
@@ -219,15 +247,29 @@ def install(self, spec, prefix):
self.install_component(find("fortran-installer", "*")[0])
# Some installers have a bug and do not return an error code when failing
- if not is_exe(self.component_prefix.linux.bin.intel64.ifort):
- raise RuntimeError("install failed")
+ if not is_exe(self._llvm_bin.ifx):
+ raise RuntimeError("Fortran install failed")
@run_after("install")
def inject_rpaths(self):
- # Sets rpath so the compilers can work without setting LD_LIBRARY_PATH.
+ # The oneapi compilers cannot find their own internal shared
+ # libraries. If you are using an externally installed oneapi,
+ # then you need to source setvars.sh, which will set
+ # LD_LIBRARY_PATH. If you are using spack to install the
+ # compilers, then we patch the binaries that have this
+ # problem. Over time, intel has corrected most of the
+ # issues. I am using the 2024 release as a milestone to stop
+ # patching everything and just patching the binaries that have
+ # a problem.
patchelf = which("patchelf")
- patchelf.add_default_arg("--set-rpath")
- patchelf.add_default_arg(":".join(self._ld_library_path()))
+ if self.spec.satisfies("@2024:"):
+ patchelf.add_default_arg("--set-rpath", self.component_prefix.lib)
+ patchelf(self.component_prefix.bin.join("sycl-post-link"))
+ patchelf(self.component_prefix.bin.compiler.join("llvm-spirv"))
+ return
+
+ # Sets rpath so the compilers can work without setting LD_LIBRARY_PATH.
+ patchelf.add_default_arg("--set-rpath", ":".join(self._ld_library_path()))
for pd in ["bin", "lib", join_path("compiler", "lib", "intel64_lin")]:
for file in find(self.component_prefix.linux.join(pd), "*", recursive=False):
# Try to patch all files, patchelf will do nothing and fail if file
@@ -255,7 +297,10 @@ def extend_config_flags(self):
# TODO: it is unclear whether we should really use all elements of
# _ld_library_path because it looks like the only rpath that needs to be
# injected is self.component_prefix.linux.compiler.lib.intel64_lin.
- common_flags = ["-Wl,-rpath,{}".format(d) for d in self._ld_library_path()]
+ if self.v2_layout:
+ common_flags = ["-Wl,-rpath,{}".format(self.component_prefix.lib)]
+ else:
+ common_flags = ["-Wl,-rpath,{}".format(d) for d in self._ld_library_path()]
# Make sure that underlying clang gets the right GCC toolchain by default
llvm_flags = ["--gcc-toolchain={}".format(self.compiler.prefix)]
@@ -267,20 +312,17 @@ def extend_config_flags(self):
# The cfg flags are treated as command line flags apparently. Newer versions
# do not trigger these warnings. In some build systems these warnings can
# cause feature detection to fail, so we silence them with -Wno-unused-...
- if self.spec.version < Version("2022.1.0"):
+ if self.spec.satisfies("@:2022.0"):
llvm_flags.append("-Wno-unused-command-line-argument")
- self.write_config_file(
- common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx"]
- )
- self.write_config_file(
- common_flags + classic_flags, self.component_prefix.linux.bin, ["ifx"]
- )
- self.write_config_file(
- common_flags + classic_flags,
- self.component_prefix.linux.bin.intel64,
- ["icc", "icpc", "ifort"],
- )
+ self.write_config_file(common_flags + llvm_flags, self._llvm_bin, ["icx", "icpx"])
+ self.write_config_file(common_flags + classic_flags, self._llvm_bin, ["ifx"])
+ self.write_config_file(common_flags + classic_flags, self._classic_bin, ["ifort"])
+ # 2023 is the last release that includes icc
+ if self.spec.satisfies("@:2023"):
+ self.write_config_file(
+ common_flags + classic_flags, self._classic_bin, ["icc", "icpc"]
+ )
def _ld_library_path(self):
# Returns an iterable of directories that might contain shared runtime libraries
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py
index 29984f04381c80..e6ed1d37931ba4 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py
@@ -26,6 +26,12 @@ class IntelOneapiDal(IntelOneApiLibraryPackage):
"https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onedal.html"
)
+ version(
+ "2024.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/37364086-b3cd-4a54-8736-7893732c1a86/l_daal_oneapi_p_2024.0.0.49569_offline.sh",
+ sha256="45e71c7cbf38b04a34c47e36e2d86a48847f2f0485bafbc3445077a9ba3fa73c",
+ expand=False,
+ )
version(
"2023.2.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/fa218373-4b06-451f-8f4c-66b7d14b8e8b/l_daal_oneapi_p_2023.2.0.49574_offline.sh",
@@ -104,6 +110,10 @@ class IntelOneapiDal(IntelOneApiLibraryPackage):
provides("daal")
provides("onedal")
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "dal"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py
index 0c8a0aeb76f513..f5df8d90cfb2d1 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py
@@ -26,6 +26,12 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage):
"https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onednn.html"
)
+ version(
+ "2024.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/dc309221-d210-4f3a-9406-d897df8deab8/l_onednn_p_2024.0.0.49548_offline.sh",
+ sha256="17fbd5cc5d08de33625cf2879c0cceec53c91bbcd0b863e8f29d27885bac88c9",
+ expand=False,
+ )
version(
"2023.2.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/2d218b97-0175-4f8c-8dba-b528cec24d55/l_onednn_p_2023.2.0.49517_offline.sh",
@@ -101,16 +107,25 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage):
depends_on("tbb")
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "dnnl"
+ def __target(self):
+ if self.v2_layout:
+ return self.component_prefix
+ else:
+ return self.component_prefix.cpu_dpcpp_gpu_dpcpp
+
@property
def headers(self):
- include_path = join_path(self.component_prefix, "cpu_dpcpp_gpu_dpcpp", "include")
- return find_headers("dnnl", include_path)
+ return find_headers("dnnl", self.__target().include)
@property
def libs(self):
- lib_path = join_path(self.component_prefix, "cpu_dpcpp_gpu_dpcpp", "lib")
- return find_libraries(["libdnnl", "libmkldnn"], root=lib_path, shared=True)
+ # libmkldnn was removed before 2024, but not sure when
+ return find_libraries(["libdnnl", "libmkldnn"], self.__target().lib)
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py
index 3c4fa410f598bb..d784358ac19570 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py
@@ -19,6 +19,12 @@ class IntelOneapiDpct(IntelOneApiPackage):
homepage = "https://www.intel.com/content/www/us/en/developer/tools/oneapi/dpc-compatibility-tool.html#gs.2p8km6"
+ version(
+ "2024.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/6633bc4b-5356-471a-9aae-d5e63e7acd95/l_dpcpp-ct_p_2024.0.0.49394_offline.sh",
+ sha256="5fdba92edf24084187d98f083f9a6e17ee6b33ad8a736d6c9cdd3dbd4e0eab8a",
+ expand=False,
+ )
version(
"2023.2.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/764119eb-2959-4b51-bb3c-3cf581c16186/l_dpcpp-ct_p_2023.2.0.49333_offline.sh",
@@ -56,6 +62,10 @@ class IntelOneapiDpct(IntelOneApiPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "dpcpp-ct"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py
index 05282f92f4c4b4..d71688eda1d5a9 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py
@@ -22,6 +22,12 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage):
homepage = "https://github.com/oneapi-src/oneDPL"
+ version(
+ "2022.3.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/be027095-148a-4433-aff4-c6e8582da3ca/l_oneDPL_p_2022.3.0.49386_offline.sh",
+ sha256="1e40c6562bc41fa5a46c80c09222bf12d36d8e82f749476d0a7e97503d4659df",
+ expand=False,
+ )
version(
"2022.2.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/44f88a97-7526-48f0-8515-9bf1356eb7bb/l_oneDPL_p_2022.2.0.49287_offline.sh",
@@ -77,16 +83,16 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2022.3:"
+
@property
def component_dir(self):
return "dpl"
@property
def headers(self):
- include_path = join_path(self.component_prefix, "linux", "include")
- headers = find_headers("*", include_path, recursive=True)
- # Force this directory to be added to include path, even
- # though no files are here because all includes are relative
- # to this path
- headers.directories = [include_path]
- return headers
+ return self.header_directories(
+ [self.component_prefix.include, self.component_prefix.linux.include]
+ )
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py
index 0c1e0f79abaf31..4c1870af6b7682 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py
@@ -7,7 +7,7 @@
@IntelOneApiPackage.update_description
-class IntelOneapiInspector(IntelOneApiPackage):
+class IntelOneapiInspector(IntelOneApiLibraryPackageWithSdk):
"""Intel Inspector is a dynamic memory and threading error debugger
for C, C++, and Fortran applications that run on Windows and Linux
operating systems. Save money: locate the root cause of memory,
@@ -24,6 +24,12 @@ class IntelOneapiInspector(IntelOneApiPackage):
homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/inspector.html"
+ version(
+ "2024.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/44ae6846-719c-49bd-b196-b16ce5835a1e/l_inspector_oneapi_p_2024.0.0.49433_offline.sh",
+ sha256="2b281c3a704a242aa3372284960ea8ed5ed1ba293cc2f70c2f873db3300c80a3",
+ expand=False,
+ )
version(
"2023.2.0",
url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/2a99eafd-5109-41a1-9762-aee0c7ecbeb7/l_inspector_oneapi_p_2023.2.0.49304_offline.sh",
@@ -79,6 +85,10 @@ class IntelOneapiInspector(IntelOneApiPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "inspector"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py
index c29fb423b21a2f..2e8e561c60e053 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py
@@ -27,6 +27,12 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage):
"https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html"
)
+ version(
+ "2021.10.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/2d48c7d9-e716-4c73-8fe5-77a9599a405f/l_ipp_oneapi_p_2021.10.0.670_offline.sh",
+ sha256="c4ad98f96760b0a821dbcd59963c5148fd9dc4eb790af0e6e765a5f36525d202",
+ expand=False,
+ )
version(
"2021.9.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/616a3fba-4ab6-4317-a17b-2be4b737fc37/l_ipp_oneapi_p_2021.9.0.49454_offline.sh",
@@ -104,6 +110,10 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage):
provides("ipp")
+ @property
+ def v2_layout_versions(self):
+ return "@2021.10:"
+
@property
def component_dir(self):
return "ipp"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py
index 4d0d6fe3c20733..7a38dd262e793a 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py
@@ -28,6 +28,12 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage):
"https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html"
)
+ version(
+ "2021.9.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/6792a758-2d69-4ff3-ad24-233fb3bf56e4/l_ippcp_oneapi_p_2021.9.0.533_offline.sh",
+ sha256="5eca6fd18d9117f8cb7c599cee418b9cc3d7d5d5404f1350d47289095b6a1254",
+ expand=False,
+ )
version(
"2021.8.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/f488397a-bd8f-449f-9127-04de8426aa35/l_ippcp_oneapi_p_2021.8.0.49493_offline.sh",
@@ -101,6 +107,10 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2021.9:"
+
@property
def component_dir(self):
return "ippcp"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py
index 3b53af927348de..742afb4ffe4c79 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py
@@ -27,6 +27,12 @@ class IntelOneapiItac(IntelOneApiPackage):
maintainers("rscohn2")
+ version(
+ "2022.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/e83526f5-7e0f-4708-9e0d-47f1e65f29aa/l_itac_oneapi_p_2022.0.0.49690_offline.sh",
+ sha256="6ab2888afcfc981273aed3df316463fbaf511faf83ee091ca79016459b03b79e",
+ expand=False,
+ )
version(
"2021.10.0",
url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/226adf12-b7f6-407e-95a9-8e9ab76d7631/l_itac_oneapi_p_2021.10.0.14_offline.sh",
@@ -58,6 +64,10 @@ class IntelOneapiItac(IntelOneApiPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2022:"
+
@property
def component_dir(self):
return "itac"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py
index 2eef32fa14e0a2..0be9195efa2bdf 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py
@@ -25,6 +25,12 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage):
"https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html"
)
+ version(
+ "2024.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/86d6a4c1-c998-4c6b-9fff-ca004e9f7455/l_onemkl_p_2024.0.0.49673_offline.sh",
+ sha256="2a3be7d01d75ba8cc3059f9a32ae72e5bfc93e68e72e94e79d7fa6ea2f7814de",
+ expand=False,
+ )
version(
"2023.2.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/adb8a02c-4ee7-4882-97d6-a524150da358/l_onemkl_p_2023.2.0.49497_offline.sh",
@@ -126,16 +132,15 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage):
provides("fftw-api@3")
provides("scalapack", when="+cluster")
provides("mkl")
- provides("lapack")
- provides("blas")
+ provides("lapack", "blas")
@property
- def component_dir(self):
- return "mkl"
+ def v2_layout_versions(self):
+ return "@2024:"
@property
- def headers(self):
- return find_headers("*", self.component_prefix.include)
+ def component_dir(self):
+ return "mkl"
@property
def libs(self):
@@ -149,21 +154,6 @@ def libs(self):
else:
return IntelOneApiStaticLibraryList(libs, system_libs)
- def setup_run_environment(self, env):
- super().setup_run_environment(env)
-
- # Support RPATH injection to the library directories when the '-mkl' or '-qmkl'
- # flag of the Intel compilers are used outside the Spack build environment. We
- # should not try to take care of other compilers because the users have to
- # provide the linker flags anyway and are expected to take care of the RPATHs
- # flags too. We prefer the __INTEL_POST_CFLAGS/__INTEL_POST_FFLAGS flags over
- # the PRE ones so that any other RPATHs provided by the users on the command
- # line come before and take precedence over the ones we inject here.
- for d in self._find_mkl_libs(self.spec.satisfies("+shared")).directories:
- flag = "-Wl,-rpath,{0}".format(d)
- env.append_path("__INTEL_POST_CFLAGS", flag, separator=" ")
- env.append_path("__INTEL_POST_FFLAGS", flag, separator=" ")
-
def setup_dependent_build_environment(self, env, dependent_spec):
# Only if environment modifications are desired (default is +envmods)
if self.spec.satisfies("+envmods"):
@@ -214,7 +204,9 @@ def _find_mkl_libs(self, shared):
)
)
- lib_path = self.component_prefix.lib.intel64
+ lib_path = (
+ self.component_prefix.lib if self.v2_layout else self.component_prefix.lib.intel64
+ )
lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
resolved_libs = find_libraries(libs, lib_path, shared=shared)
@@ -235,5 +227,11 @@ def _xlp64_lib(self, lib):
@run_after("install")
def fixup_prefix(self):
+ # The motivation was to provide a more standard layout so mkl
+ # would be more likely to work as a virtual dependency. I am
+ # not sure if this mechanism is useful and it became a problem
+ # for mpi so disabling for v2_layout.
+ if self.v2_layout:
+ return
self.symlink_dir(self.component_prefix.include, self.prefix.include)
self.symlink_dir(self.component_prefix.lib, self.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py
index fab6c8ca1f466b..2de3c5f7e894bc 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py
@@ -21,6 +21,12 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage):
homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/mpi-library.html"
+ version(
+ "2021.11.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/2c45ede0-623c-4c8e-9e09-bed27d70fa33/l_mpi_oneapi_p_2021.11.0.49513_offline.sh",
+ sha256="9a96caeb7abcf5aa08426216db38a2c7936462008b9825036266bc79cb0e30d8",
+ expand=False,
+ )
version(
"2021.10.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/4f5871da-0533-4f62-b563-905edfb2e9b7/l_mpi_oneapi_p_2021.10.0.49374_offline.sh",
@@ -107,6 +113,10 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage):
provides("mpi@:3.1")
+ @property
+ def v2_layout_versions(self):
+ return "@2021.11:"
+
@property
def component_dir(self):
return "mpi"
@@ -155,10 +165,9 @@ def setup_dependent_build_environment(self, env, dependent_spec):
@property
def headers(self):
- headers = find_headers("*", self.component_prefix.include)
- if "+ilp64" in self.spec:
- headers += find_headers("*", self.component_prefix.include.ilp64)
- return headers
+ return self.header_directories(
+ [self.component_prefix.include, self.component_prefix.include.ilp64]
+ )
@property
def libs(self):
@@ -192,6 +201,13 @@ def fix_wrappers(self):
@run_after("install")
def fixup_prefix(self):
+ # The motivation was to provide a more standard layout so impi
+ # would be more likely to work as a virtual dependency. It
+ # does not work for v2_layout because of a library conflict. I
+ # am not sure if this mechanism is useful so disabling for
+ # v2_layout rather than trying to make it work.
+ if self.v2_layout:
+ return
self.symlink_dir(self.component_prefix.include, self.prefix.include)
self.symlink_dir(self.component_prefix.lib, self.prefix.lib)
self.symlink_dir(self.component_prefix.lib.release, self.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py
index 6e169cbd84fe56..03ec36c37105ec 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py
@@ -22,6 +22,12 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage):
"https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onetbb.html"
)
+ version(
+ "2021.11.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/af3ad519-4c87-4534-87cb-5c7bda12754e/l_tbb_oneapi_p_2021.11.0.49527_offline.sh",
+ sha256="dd878ee979d7b6da4eb973adfebf814d9d7eed86b875d31e3662d100b2fa0956",
+ expand=False,
+ )
version(
"2021.10.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/c95cd995-586b-4688-b7e8-2d4485a1b5bf/l_tbb_oneapi_p_2021.10.0.49543_offline.sh",
@@ -101,7 +107,17 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage):
def component_dir(self):
return "tbb"
+ @property
+ def v2_layout_versions(self):
+ return "@2021.11:"
+
@run_after("install")
def fixup_prefix(self):
+ # The motivation was to provide a more standard layout so tbb
+ # would be more likely to work as a virtual dependency. I am
+ # not sure if this mechanism is useful and it became a problem
+ # for mpi so disabling for v2_layout.
+ if self.v2_layout:
+ return
self.symlink_dir(self.component_prefix.include, self.prefix.include)
self.symlink_dir(self.component_prefix.lib, self.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py
index 18d93b6afc0344..758a9542989f13 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py
@@ -74,6 +74,12 @@ class IntelOneapiVpl(IntelOneApiLibraryPackage):
expand=False,
)
+ # VPL no longer releases as part of oneapi, so there will never be
+ # a 2024 release
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "vpl"
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py
index c2ed3f164e42af..1eaf35dda163b7 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py
@@ -7,7 +7,7 @@
@IntelOneApiPackage.update_description
-class IntelOneapiVtune(IntelOneApiPackage):
+class IntelOneapiVtune(IntelOneApiLibraryPackageWithSdk):
"""Intel VTune Profiler is a profiler to optimize application
performance, system performance, and system configuration for HPC,
cloud, IoT, media, storage, and more. CPU, GPU, and FPGA: Tune
@@ -25,6 +25,12 @@ class IntelOneapiVtune(IntelOneApiPackage):
homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/vtune-profiler.html"
+ version(
+ "2024.0.0",
+ url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/1722cc83-ceb2-4304-b4dc-2813780222a3/l_oneapi_vtune_p_2024.0.0.49503_offline.sh",
+ sha256="09537329bdf6e105b0e164f75dc8ae122adc99a64441f6a52225509bcff3b848",
+ expand=False,
+ )
version(
"2023.2.0",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/dfae6f23-6c90-4b9f-80e2-fa2a5037fe36/l_oneapi_vtune_p_2023.2.0.49485_offline.sh",
@@ -80,6 +86,10 @@ class IntelOneapiVtune(IntelOneApiPackage):
expand=False,
)
+ @property
+ def v2_layout_versions(self):
+ return "@2024:"
+
@property
def component_dir(self):
return "vtune"
diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py
index 84810bacfa3370..50e7021de85d41 100644
--- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py
+++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py
@@ -536,8 +536,7 @@ class IntelParallelStudio(IntelPackage):
provides("ipp", when="+ipp")
provides("mkl", when="+mkl")
- provides("blas", when="+mkl")
- provides("lapack", when="+mkl")
+ provides("blas", "lapack", when="+mkl")
provides("scalapack", when="+mkl")
provides("fftw-api@3", when="+mkl@professional.2017:")
diff --git a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch
similarity index 92%
rename from var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch
rename to var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch
index e1e1b1116bf6f8..d1e87cd7c4d5ac 100644
--- a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch
+++ b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch
@@ -16,6 +16,8 @@ Signed-off-by: Sam James
diff --git a/test/common/utils_assert.h b/test/common/utils_assert.h
index 1df8ae72acc49fe38dac4d9bed4e9f4f26affcf5..0123ab881e124a800a5ebf8507050148038747d5 100644
+--- a/test/common/utils_assert.h
++++ b/test/common/utils_assert.h
@@ -20,6 +20,8 @@
#include "config.h"
#include "utils_report.h"
diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py
index 664471b0ecf3d6..9c10693aae25e3 100644
--- a/var/spack/repos/builtin/packages/intel-tbb/package.py
+++ b/var/spack/repos/builtin/packages/intel-tbb/package.py
@@ -125,7 +125,7 @@ class IntelTbb(CMakePackage, MakefilePackage):
patch("gcc_generic-pedantic-4.4.patch", level=1, when="@:2019.0")
# Patch and conflicts for GCC 13 support (#1031).
- patch("gcc_13-2021.patch", when="@2021.1:")
+ patch("gcc_13-2021-v2.patch", when="@2021.1:")
conflicts("%gcc@13", when="@:2021.3")
# Patch cmakeConfig.cmake.in to find the libraries where we install them.
diff --git a/var/spack/repos/builtin/packages/intel-xed/package.py b/var/spack/repos/builtin/packages/intel-xed/package.py
index 555d4154a220fd..7cda2358e20d37 100644
--- a/var/spack/repos/builtin/packages/intel-xed/package.py
+++ b/var/spack/repos/builtin/packages/intel-xed/package.py
@@ -21,6 +21,8 @@ class IntelXed(Package):
# Current versions now have actual releases and tags.
version("main", branch="main")
+ version("2023.10.11", tag="v2023.10.11", commit="d7d46c73fb04a1742e99c9382a4acb4ed07ae272")
+ version("2023.08.21", tag="v2023.08.21", commit="01a6da8090af84cd52f6c1070377ae6e885b078f")
version("2023.07.09", tag="v2023.07.09", commit="539a6a349cf7538a182ed3ee1f48bb9317eb185f")
version("2023.06.07", tag="v2023.06.07", commit="4dc77137f651def2ece4ac0416607b215c18e6e4")
version("2023.04.16", tag="v2023.04.16", commit="a3055cd0209f5c63c88e280bbff9579b1e2942e2")
@@ -40,7 +42,12 @@ class IntelXed(Package):
# Match xed more closely with the version of mbuild at the time.
resource(
- name="mbuild", placement=mdir, git=mbuild_git, tag="v2022.07.28", when="@2022.07:9999"
+ name="mbuild",
+ placement=mdir,
+ git=mbuild_git,
+ tag="v2022.07.28",
+ commit="75cb46e6536758f1a3cdb3d6bd83a4a9fd0338bb",
+ when="@2022.07:9999",
)
resource(
@@ -48,7 +55,7 @@ class IntelXed(Package):
placement=mdir,
git=mbuild_git,
tag="v2022.04.17",
- commit="ef19f00de14a9c2c253c1c9b1119e1617280e3f2",
+ commit="b41485956bf65d51b8c2379768de7eaaa7a4245b",
when="@:2022.06",
)
diff --git a/var/spack/repos/builtin/packages/interproscan/package.py b/var/spack/repos/builtin/packages/interproscan/package.py
index 82380135a76feb..4143dc6ff2899b 100644
--- a/var/spack/repos/builtin/packages/interproscan/package.py
+++ b/var/spack/repos/builtin/packages/interproscan/package.py
@@ -45,21 +45,21 @@ class Interproscan(Package):
)
resource(
- when="5.56-89.0 +databases",
+ when="@5.56-89.0 +databases",
name="databases",
url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.56-89.0/alt/interproscan-data-5.56-89.0.tar.gz",
sha256="49cd0c69711f9469f3b68857f4581b23ff12765ca2b12893d18e5a9a5cd8032d",
)
resource(
- when="5.38-76.0 +databases",
+ when="@5.38-76.0 +databases",
name="databases",
url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.38-76.0/alt/interproscan-data-5.38-76.0.tar.gz",
sha256="e05e15d701037504f92ecf849c20317e70df28e78ff1945826b3c1e16d9b9cce",
)
resource(
- when="5.36-75.0 +databases",
+ when="@5.36-75.0 +databases",
name="databases",
url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.36-75.0/alt/interproscan-data-5.36-75.0.tar.gz",
sha256="e9b1e6f2d1c20d06661a31a08c973bc8ddf039a4cf1e45ec4443200375e5d6a4",
diff --git a/var/spack/repos/builtin/packages/ipm/package.py b/var/spack/repos/builtin/packages/ipm/package.py
index 0c4b94c8c47b4f..654707a96abdf2 100644
--- a/var/spack/repos/builtin/packages/ipm/package.py
+++ b/var/spack/repos/builtin/packages/ipm/package.py
@@ -19,7 +19,7 @@ class Ipm(AutotoolsPackage):
maintainers("Christoph-TU")
version("master", branch="master", preferred=True)
- version("2.0.6", tag="2.0.6")
+ version("2.0.6", tag="2.0.6", commit="b008141ee16d39b33e20bffde615564afa107575")
variant("papi", default=False, description="Enable PAPI")
variant("cuda", default=False, description="Enable CUDA")
diff --git a/var/spack/repos/builtin/packages/ispc/package.py b/var/spack/repos/builtin/packages/ispc/package.py
index 1da8faa0fd6956..0a2d60a9108b11 100644
--- a/var/spack/repos/builtin/packages/ispc/package.py
+++ b/var/spack/repos/builtin/packages/ispc/package.py
@@ -25,6 +25,8 @@ class Ispc(CMakePackage):
executables = ["^ispc$"]
version("main", branch="main")
+ version("1.21.1", sha256="99bbb1d1f15bc4433d6a63b5bb35b321af3e3af753c3b28a61850d1748e8a89f")
+ version("1.21.0", sha256="023782f721bfb5893bac24bc2153a8214c916be82c290bf63a3ec6678949b5ef")
version("1.20.0", sha256="8bd30ded7f96859451ead1cecf6f58ac8e937288fe0e5b98c56f6eba4be370b4")
version("1.19.0", sha256="c1aeae4bdfb28004a6949394ea1b3daa3fdf12f646e17fcc0614861077dc8b6a")
version("1.18.1", sha256="fee76d42fc0129f81489b7c2b9143e22a44c281940693c1c13cf1e3dd2ab207f")
@@ -45,15 +47,17 @@ class Ispc(CMakePackage):
depends_on("tbb", type="link", when="platform=linux @1.20:")
depends_on("llvm+clang")
depends_on("llvm libcxx=none", when="platform=darwin")
- depends_on("llvm@13:15", when="@1.19:")
- depends_on("llvm@11.0:14.0", when="@1.18")
- depends_on("llvm@11:14", when="@1.17")
- depends_on("llvm@:12", when="@:1.16")
- depends_on("llvm@11:", when="@1.16")
- depends_on("llvm@10:11", when="@1.15.0:1.15")
- depends_on("llvm@10.0:10", when="@1.13:1.14")
depends_on("llvm targets=arm,aarch64", when="target=arm:")
depends_on("llvm targets=arm,aarch64", when="target=aarch64:")
+ depends_on("llvm@:17", when="@:1.21")
+ depends_on("llvm@:15", when="@:1.20")
+ depends_on("llvm@:14", when="@:1.18")
+ depends_on("llvm@:12", when="@:1.16")
+ depends_on("llvm@:11", when="@:1.15")
+ depends_on("llvm@:10", when="@:1.14")
+ depends_on("llvm@13:", when="@1.19:")
+ depends_on("llvm@11:", when="@1.16:")
+ depends_on("llvm@10:", when="@1.13:")
patch(
"don-t-assume-that-ncurses-zlib-are-system-libraries.patch",
@@ -67,10 +71,15 @@ class Ispc(CMakePackage):
sha256="d3ccf547d3ba59779fd375e10417a436318f2200d160febb9f830a26f0daefdc",
)
+ # Fix library lookup for NCurses in CMake
+ patch(
+ "https://patch-diff.githubusercontent.com/raw/ispc/ispc/pull/2638.patch?full_index=1",
+ when="@1.18:1.20",
+ sha256="3f7dae8d4a683fca2a6157bbcb7cbe9692ff2094b0f4afaf29be121c02b0b3ad",
+ )
+
def setup_build_environment(self, env):
if self.spec.satisfies("@1.18.0:"):
- env.append_flags("LDFLAGS", "-lcurses")
- env.append_flags("LDFLAGS", "-ltinfo")
env.append_flags("LDFLAGS", "-lz")
def patch(self):
diff --git a/var/spack/repos/builtin/packages/itk/package.py b/var/spack/repos/builtin/packages/itk/package.py
index bd20a28d3dc7c5..0a956f3dfdb30a 100644
--- a/var/spack/repos/builtin/packages/itk/package.py
+++ b/var/spack/repos/builtin/packages/itk/package.py
@@ -58,15 +58,20 @@ class Itk(CMakePackage):
depends_on("expat")
depends_on("fftw-api")
depends_on("googletest")
- depends_on("hdf5+cxx")
+ depends_on("hdf5+cxx+hl")
depends_on("jpeg")
depends_on("libpng")
depends_on("libtiff")
- depends_on("mpi")
depends_on("zlib-api")
+ patch(
+ "https://github.com/InsightSoftwareConsortium/ITK/commit/9a719a0d2f5f489eeb9351b0ef913c3693147a4f.patch?full_index=1",
+ sha256="ec1f7fa71f2b7f05d9632c6b0321e7d436fff86fca92c60c12839b13ea79bd70",
+ when="@5.2.0:5.3.0",
+ )
+
def cmake_args(self):
- use_mkl = "^mkl" in self.spec
+ use_mkl = self.spec["fftw-api"].name in INTEL_MATH_LIBRARIES
args = [
self.define("BUILD_SHARED_LIBS", True),
self.define("ITK_USE_SYSTEM_LIBRARIES", True),
diff --git a/var/spack/repos/builtin/packages/iwyu/package.py b/var/spack/repos/builtin/packages/iwyu/package.py
index 20a3c1c1223368..6aedb6a0acb686 100644
--- a/var/spack/repos/builtin/packages/iwyu/package.py
+++ b/var/spack/repos/builtin/packages/iwyu/package.py
@@ -17,6 +17,7 @@ class Iwyu(CMakePackage):
homepage = "https://include-what-you-use.org"
url = "https://include-what-you-use.org/downloads/include-what-you-use-0.13.src.tar.gz"
+ git = "https://github.com/include-what-you-use/include-what-you-use.git"
maintainers("sethrj")
@@ -24,6 +25,9 @@ class Iwyu(CMakePackage):
executables = ["^include-what-you-use$"]
+ version("0.21", sha256="6a351919ff89bda7c95c895472601868db3daab96a958b38e0362890d58760b6")
+ version("0.20", sha256="75fce1e6485f280f8f13f4c2d090b11d2fd2102b50857507c8413a919b7af899")
+ version("0.19", sha256="2b10157b60ea08adc08e3896b4921c73fcadd5ec4eb652b29a34129d501e5ee0")
version("0.18", sha256="9102fc8419294757df86a89ce6ec305f8d90a818d1f2598a139d15eb1894b8f3")
version("0.17", sha256="eca7c04f8b416b6385ed00e33669a7fa4693cd26cb72b522cde558828eb0c665")
version("0.16", sha256="8d6fc9b255343bc1e5ec459e39512df1d51c60e03562985e0076036119ff5a1c")
@@ -35,6 +39,9 @@ class Iwyu(CMakePackage):
patch("iwyu-013-cmake.patch", when="@0.13:0.14")
+ depends_on("llvm+clang@17.0:17", when="@0.21")
+ depends_on("llvm+clang@16.0:16", when="@0.20")
+ depends_on("llvm+clang@15.0:15", when="@0.19")
depends_on("llvm+clang@14.0:14", when="@0.18")
depends_on("llvm+clang@13.0:13", when="@0.17")
depends_on("llvm+clang@12.0:12", when="@0.16")
@@ -55,7 +62,11 @@ def determine_version(cls, exe):
match = re.search(r"include-what-you-use\s+(\S+)", output)
return match.group(1) if match else None
- @when("@0.14:")
+ @when("@0.19:")
+ def cmake_args(self):
+ return [self.define("CMAKE_CXX_STANDARD", 17), self.define("CMAKE_CXX_EXTENSIONS", False)]
+
+ @when("@0.14:0.18")
def cmake_args(self):
return [self.define("CMAKE_CXX_STANDARD", 14), self.define("CMAKE_CXX_EXTENSIONS", False)]
diff --git a/var/spack/repos/builtin/packages/jemalloc/package.py b/var/spack/repos/builtin/packages/jemalloc/package.py
index 646120ebb02913..516cdc1e4d23b2 100644
--- a/var/spack/repos/builtin/packages/jemalloc/package.py
+++ b/var/spack/repos/builtin/packages/jemalloc/package.py
@@ -13,8 +13,6 @@ class Jemalloc(AutotoolsPackage):
homepage = "http://jemalloc.net/"
url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2"
- maintainers("iarspider")
-
version("5.3.0", sha256="2db82d1e7119df3e71b7640219b6dfe84789bc0537983c3b7ac4f7189aecfeaa")
version("5.2.1", sha256="34330e5ce276099e2e8950d9335db5a875689a4c6a56751ef3b1d8c537f887f6")
version("5.2.0", sha256="74be9f44a60d2a99398e706baa921e4efde82bf8fd16e5c0643c375c5851e3b4")
diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py
index 99e71f0b9a63f3..55716116d577f2 100644
--- a/var/spack/repos/builtin/packages/julia/package.py
+++ b/var/spack/repos/builtin/packages/julia/package.py
@@ -26,6 +26,7 @@ class Julia(MakefilePackage):
maintainers("vchuravy", "haampie", "giordano")
version("master", branch="master")
+ version("1.9.3", sha256="8d7dbd8c90e71179e53838cdbe24ff40779a90d7360e29766609ed90d982081d")
version("1.9.2", sha256="015438875d591372b80b09d01ba899657a6517b7c72ed41222298fef9d4ad86b")
version("1.9.0", sha256="48f4c8a7d5f33d0bc6ce24226df20ab49e385c2d0c3767ec8dfdb449602095b2")
version("1.8.5", sha256="d31026cc6b275d14abce26fd9fd5b4552ac9d2ce8bde4291e494468af5743031")
@@ -163,9 +164,11 @@ class Julia(MakefilePackage):
)
# patchelf 0.13 is required because the rpath patch uses --add-rpath
- depends_on("patchelf@0.13:", type="build")
+ # patchelf 0.18 breaks (at least) libjulia-internal.so
+ depends_on("patchelf@0.13:0.17", type="build")
depends_on("perl", type="build")
depends_on("libwhich", type="build")
+ depends_on("python", type="build")
depends_on("blas") # note: for now openblas is fixed...
depends_on("curl tls=mbedtls +nghttp2 +libssh2")
diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py
index 2251535dc8bf2e..2b9a7ad4d5b6e1 100644
--- a/var/spack/repos/builtin/packages/justbuild/package.py
+++ b/var/spack/repos/builtin/packages/justbuild/package.py
@@ -22,6 +22,8 @@ class Justbuild(Package):
maintainers("asartori86")
version("master", branch="master")
+ version("1.2.3", tag="v1.2.3", commit="45e9c1c85399f00372ad8b72894979a0002d8f95")
+ version("1.2.2", tag="v1.2.2", commit="e1ee04684c34ae30ac3c91b6753e99a81a9dc51c")
version("1.2.1", tag="v1.2.1", commit="959cd90083d0c783389cd09e187c98322c16469f")
version("1.1.4", tag="v1.1.4", commit="32e96afd159f2158ca129fd00bf02c273d8e1e48")
version("1.1.3", tag="v1.1.3", commit="3aed5d450aec38be18edec822ac2efac6d49a938")
diff --git a/var/spack/repos/builtin/packages/knem/package.py b/var/spack/repos/builtin/packages/knem/package.py
index 2b229c93d28755..3e1bcd925c8b3b 100644
--- a/var/spack/repos/builtin/packages/knem/package.py
+++ b/var/spack/repos/builtin/packages/knem/package.py
@@ -32,8 +32,8 @@ class Knem(AutotoolsPackage):
variant("hwloc", default=True, description="Enable hwloc in the user-space tools")
patch(
- "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.patch",
- sha256="78885a02d6f031a793db6a7190549f8d64c8606b353051d65f8e3f802b801902",
+ "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.diff",
+ sha256="a422277f02247bde680d4a3c8ccb8c05498a79109ba1ade4a037bedd6efe3c79",
when="@1.1.4",
)
diff --git a/var/spack/repos/builtin/packages/lammps/package.py b/var/spack/repos/builtin/packages/lammps/package.py
index a44c7bd603cc6c..b2d3d111334b8e 100644
--- a/var/spack/repos/builtin/packages/lammps/package.py
+++ b/var/spack/repos/builtin/packages/lammps/package.py
@@ -791,7 +791,7 @@ def cmake_args(self):
# FFTW libraries are available and enable them by default.
if "^fftw" in spec or "^cray-fftw" in spec or "^amdfftw" in spec:
args.append(self.define("FFT", "FFTW3"))
- elif "^mkl" in spec:
+ elif spec["fftw-api"].name in INTEL_MATH_LIBRARIES:
args.append(self.define("FFT", "MKL"))
elif "^armpl-gcc" in spec or "^acfl" in spec:
args.append(self.define("FFT", "FFTW3"))
diff --git a/var/spack/repos/builtin/packages/lazygit/package.py b/var/spack/repos/builtin/packages/lazygit/package.py
new file mode 100644
index 00000000000000..81395262bff28a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lazygit/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class Lazygit(GoPackage):
+ """A simple terminal UI for git commands"""
+
+ homepage = "https://github.com/jesseduffield/lazygit"
+ url = "https://github.com/jesseduffield/lazygit/archive/refs/tags/v0.40.2.tar.gz"
+
+ maintainers("twrs")
+
+ license("MIT")
+
+ version("0.40.2", sha256="146bd63995fcf2f2373bbc2143b3565b7a2be49a1d4e385496265ac0f69e4128")
diff --git a/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch b/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch
new file mode 100644
index 00000000000000..3020af37b07d9d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch
@@ -0,0 +1,39 @@
+diff --git a/src/callbacks/memory_profiler.cpp b/src/callbacks/memory_profiler.cpp
+index 0d5cec5d2..6f40705af 100644
+--- a/src/callbacks/memory_profiler.cpp
++++ b/src/callbacks/memory_profiler.cpp
+@@ -158,7 +158,10 @@ struct MemUsage
+ size_t total_mem;
+
+ MemUsage(const std::string& r, size_t m) : report(r), total_mem(m) {}
+- bool operator<(const MemUsage& other) { return total_mem < other.total_mem; }
++ bool operator<(const MemUsage& other) const
++ {
++ return total_mem < other.total_mem;
++ }
+ };
+ } // namespace
+
+diff --git a/src/optimizers/adam.cpp b/src/optimizers/adam.cpp
+index d00dfbe7c..1d9ad3949 100644
+--- a/src/optimizers/adam.cpp
++++ b/src/optimizers/adam.cpp
+@@ -34,14 +34,12 @@
+
+ namespace lbann {
+
+-#if defined (LBANN_HAS_ROCM) && defined (LBANN_HAS_GPU_FP16)
++#if defined(LBANN_HAS_ROCM) && defined(LBANN_HAS_GPU_FP16)
+ namespace {
+-bool isfinite(fp16 const& x)
+-{
+- return std::isfinite(float(x));
+-}
+-}
++bool isfinite(fp16 const& x) { return std::isfinite(float(x)); }
++} // namespace
+ #endif
++using std::isfinite;
+
+ template <typename TensorDataType>
+ adam<TensorDataType>::adam(TensorDataType learning_rate,
diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py
index b5ed6df831503f..14f257a3415ffa 100644
--- a/var/spack/repos/builtin/packages/lbann/package.py
+++ b/var/spack/repos/builtin/packages/lbann/package.py
@@ -5,7 +5,6 @@
import os
import socket
-import sys
from spack.package import *
@@ -24,109 +23,42 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
maintainers("bvanessen")
version("develop", branch="develop")
- version("0.102", sha256="3734a76794991207e2dd2221f05f0e63a86ddafa777515d93d99d48629140f1a")
+ version("benchmarking", branch="benchmarking")
+ version("0.104", sha256="a847c7789082ab623ed5922ab1248dd95f5f89d93eed44ac3d6a474703bbc0bf")
+ version("0.103", sha256="9da1bf308f38323e30cb07f8ecf8efa05c7f50560e8683b9cd961102b1b3e25a")
version(
- "0.101",
- sha256="69d3fe000a88a448dc4f7e263bcb342c34a177bd9744153654528cd86335a1f7",
- deprecated=True,
- )
- version(
- "0.100",
- sha256="d1bab4fb6f1b80ae83a7286cc536a32830890f6e5b0c3107a17c2600d0796912",
- deprecated=True,
- )
- version(
- "0.99",
- sha256="3358d44f1bc894321ce07d733afdf6cb7de39c33e3852d73c9f31f530175b7cd",
- deprecated=True,
- )
- version(
- "0.98.1",
- sha256="9a2da8f41cd8bf17d1845edf9de6d60f781204ebd37bffba96d8872036c10c66",
- deprecated=True,
- )
- version(
- "0.98",
- sha256="8d64b9ac0f1d60db553efa4e657f5ea87e790afe65336117267e9c7ae6f68239",
- deprecated=True,
- )
- version(
- "0.97.1",
- sha256="2f2756126ac8bb993202cf532d72c4d4044e877f4d52de9fdf70d0babd500ce4",
- deprecated=True,
- )
- version(
- "0.97",
- sha256="9794a706fc7ac151926231efdf74564c39fbaa99edca4acb745ee7d20c32dae7",
- deprecated=True,
- )
- version(
- "0.96",
- sha256="97af78e9d3c405e963361d0db96ee5425ee0766fa52b43c75b8a5670d48e4b4a",
- deprecated=True,
- )
- version(
- "0.95",
- sha256="d310b986948b5ee2bedec36383a7fe79403721c8dc2663a280676b4e431f83c2",
- deprecated=True,
- )
- version(
- "0.94",
- sha256="567e99b488ebe6294933c98a212281bffd5220fc13a0a5cd8441f9a3761ceccf",
- deprecated=True,
- )
- version(
- "0.93",
- sha256="77bfd7fe52ee7495050f49bcdd0e353ba1730e3ad15042c678faa5eeed55fb8c",
- deprecated=True,
- )
- version(
- "0.92",
- sha256="9187c5bcbc562c2828fe619d53884ab80afb1bcd627a817edb935b80affe7b84",
- deprecated=True,
- )
- version(
- "0.91",
- sha256="b69f470829f434f266119a33695592f74802cff4b76b37022db00ab32de322f5",
+ "0.102",
+ sha256="3734a76794991207e2dd2221f05f0e63a86ddafa777515d93d99d48629140f1a",
deprecated=True,
)
- variant("al", default=True, description="Builds with support for Aluminum Library")
variant(
"build_type",
default="Release",
description="The build type to build",
values=("Debug", "Release"),
)
- variant(
- "conduit",
- default=True,
- description="Builds with support for Conduit Library "
- "(note that for v0.99 conduit is required)",
- )
variant(
"deterministic",
default=False,
description="Builds with support for deterministic execution",
)
- variant(
- "dihydrogen", default=True, description="Builds with support for DiHydrogen Tensor Library"
- )
variant(
"distconv",
default=False,
+ sticky=True,
description="Builds with support for spatial, filter, or channel "
"distributed convolutions",
)
variant(
"dtype",
default="float",
+ sticky=True,
description="Type for floating point representation of weights",
values=("float", "double"),
)
variant("fft", default=False, description="Support for FFT operations")
variant("half", default=False, description="Builds with support for FP16 precision data types")
- variant("hwloc", default=True, description="Add support for topology aware algorithms")
variant("nvprof", default=False, description="Build with region annotations for NVPROF")
variant(
"numpy", default=False, description="Builds with support for processing NumPy data files"
@@ -139,7 +71,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
variant("vtune", default=False, description="Builds with support for Intel VTune")
variant("onednn", default=False, description="Support for OneDNN")
variant("onnx", default=False, description="Support for exporting models into ONNX format")
- variant("nvshmem", default=False, description="Support for NVSHMEM")
+ variant("nvshmem", default=False, description="Support for NVSHMEM", when="+distconv")
variant(
"python",
default=True,
@@ -156,6 +88,9 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
variant("asan", default=False, description="Build with support for address-sanitizer")
variant("unit_tests", default=False, description="Support for unit testing")
variant("caliper", default=False, description="Support for instrumentation with caliper")
+ variant(
+ "shared", default=True, sticky=True, description="Enables the build of shared libraries"
+ )
# LBANN benefits from high performance linkers, but passing these in as command
# line options forces the linker flags to unnecessarily propagate to all
@@ -165,20 +100,13 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
# Don't expose this a dependency until Spack can find the external properly
# depends_on('binutils+gold', type='build', when='+gold')
+ patch("lbann_v0.104_build_cleanup.patch", when="@0.104:")
+
# Variant Conflicts
- conflicts("@:0.90,0.99:", when="~conduit")
- conflicts("@0.90:0.101", when="+fft")
- conflicts("@:0.90,0.102:", when="~dihydrogen")
conflicts("~cuda", when="+nvprof")
- conflicts("~hwloc", when="+al")
conflicts("~cuda", when="+nvshmem")
conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive")
- conflicts("~vision", when="@0.91:0.101")
- conflicts("~numpy", when="@0.91:0.101")
- conflicts("~python", when="@0.91:0.101")
- conflicts("~pfe", when="@0.91:0.101")
-
requires("%clang", when="+lld")
conflicts("+lld", when="+gold")
@@ -188,82 +116,56 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
depends_on("cmake@3.17.0:", type="build")
depends_on("cmake@3.21.0:", type="build", when="@0.103:")
- # Specify the correct versions of Hydrogen
- depends_on("hydrogen@:1.3.4", when="@0.95:0.100")
- depends_on("hydrogen@1.4.0:1.4", when="@0.101:0.101.99")
- depends_on("hydrogen@1.5.0:", when="@:0.90,0.102:")
+ # Specify the core libraries: Hydrogen, DiHydrogen, Aluminum
+ depends_on("hydrogen@1.5.3:")
+ depends_on("aluminum@1.4.1:")
+ depends_on("dihydrogen@0.2.0:")
+
+ # Align the following variants across Hydrogen and DiHydrogen
+ forwarded_variants = ["cuda", "rocm", "half", "nvshmem"]
+ for v in forwarded_variants:
+ if v != "nvshmem":
+ depends_on("hydrogen +{0}".format(v), when="+{0}".format(v))
+ depends_on("hydrogen ~{0}".format(v), when="~{0}".format(v))
+ if v != "al" and v != "half":
+ depends_on("dihydrogen +{0}".format(v), when="+{0}".format(v))
+ depends_on("dihydrogen ~{0}".format(v), when="~{0}".format(v))
+ if v == "cuda" or v == "rocm":
+ depends_on("aluminum +{0} +nccl".format(v), when="+{0}".format(v))
# Add Hydrogen variants
depends_on("hydrogen +openmp +shared +int64")
- depends_on("hydrogen +openmp_blas", when=sys.platform != "darwin")
- depends_on("hydrogen ~al", when="~al")
- depends_on("hydrogen +al", when="+al")
- depends_on("hydrogen ~cuda", when="~cuda")
- depends_on("hydrogen +cuda", when="+cuda")
- depends_on("hydrogen ~half", when="~half")
- depends_on("hydrogen +half", when="+half")
- depends_on("hydrogen ~rocm", when="~rocm")
- depends_on("hydrogen +rocm", when="+rocm")
depends_on("hydrogen build_type=Debug", when="build_type=Debug")
- # Older versions depended on Elemental not Hydrogen
- depends_on("elemental +openmp_blas +shared +int64", when="@0.91:0.94")
- depends_on(
- "elemental +openmp_blas +shared +int64 build_type=Debug",
- when="build_type=Debug @0.91:0.94",
- )
-
- # Specify the correct version of Aluminum
- depends_on("aluminum@:0.3", when="@0.95:0.100 +al")
- depends_on("aluminum@0.4.0:0.4", when="@0.101:0.101.99 +al")
- depends_on("aluminum@0.5.0:", when="@:0.90,0.102: +al")
+ # Add DiHydrogen variants
+ depends_on("dihydrogen +distconv", when="+distconv")
+ depends_on("dihydrogen@develop", when="@develop")
# Add Aluminum variants
- depends_on("aluminum +cuda +nccl", when="+al +cuda")
- depends_on("aluminum +rocm +rccl", when="+al +rocm")
-
- depends_on("dihydrogen@0.2.0:", when="@:0.90,0.102:")
- depends_on("dihydrogen +openmp", when="+dihydrogen")
- depends_on("dihydrogen +openmp_blas", when=sys.platform != "darwin")
- depends_on("dihydrogen ~cuda", when="+dihydrogen ~cuda")
- depends_on("dihydrogen +cuda", when="+dihydrogen +cuda")
- depends_on("dihydrogen ~al", when="+dihydrogen ~al")
- depends_on("dihydrogen +al", when="+dihydrogen +al")
- depends_on("dihydrogen +distconv +cuda", when="+distconv +cuda")
- depends_on("dihydrogen +distconv +rocm", when="+distconv +rocm")
- depends_on("dihydrogen ~half", when="+dihydrogen ~half")
- depends_on("dihydrogen +half", when="+dihydrogen +half")
- depends_on("dihydrogen ~nvshmem", when="+dihydrogen ~nvshmem")
- depends_on("dihydrogen +nvshmem", when="+dihydrogen +nvshmem")
- depends_on("dihydrogen ~rocm", when="+dihydrogen ~rocm")
- depends_on("dihydrogen +rocm", when="+dihydrogen +rocm")
- depends_on("dihydrogen@0.1", when="@0.101:0.101.99 +dihydrogen")
- depends_on("dihydrogen@:0.0,0.2:", when="@:0.90,0.102: +dihydrogen")
- conflicts("~dihydrogen", when="+distconv")
+ depends_on("aluminum@master", when="@develop")
+
+ depends_on("hdf5+mpi", when="+distconv")
for arch in CudaPackage.cuda_arch_values:
depends_on("hydrogen cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch)
- depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch)
- depends_on("dihydrogen cuda_arch=%s" % arch, when="+dihydrogen +cuda cuda_arch=%s" % arch)
+ depends_on("aluminum cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch)
+ depends_on("dihydrogen cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch)
depends_on("nccl cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch)
# variants +rocm and amdgpu_targets are not automatically passed to
# dependencies, so do it manually.
for val in ROCmPackage.amdgpu_targets:
depends_on("hydrogen amdgpu_target=%s" % val, when="amdgpu_target=%s" % val)
- depends_on("aluminum amdgpu_target=%s" % val, when="+al amdgpu_target=%s" % val)
- depends_on("dihydrogen amdgpu_target=%s" % val, when="+dihydrogen amdgpu_target=%s" % val)
+ depends_on("aluminum amdgpu_target=%s" % val, when="amdgpu_target=%s" % val)
+ depends_on("dihydrogen amdgpu_target=%s" % val, when="amdgpu_target=%s" % val)
depends_on("roctracer-dev", when="+rocm +distconv")
- depends_on("cudnn", when="@0.90:0.100 +cuda")
- depends_on("cudnn@8.0.2:", when="@:0.90,0.101: +cuda")
- depends_on("cub", when="@0.94:0.98.2 +cuda ^cuda@:10")
- depends_on("cutensor", when="@:0.90,0.102: +cuda")
+ depends_on("cudnn@8.0.2:", when="+cuda")
+ depends_on("cutensor", when="+cuda")
depends_on("hipcub", when="+rocm")
depends_on("mpi")
- depends_on("hwloc@1.11:", when="@:0.90,0.102: +hwloc")
- depends_on("hwloc@1.11.0:1.11", when="@0.95:0.101 +hwloc")
+ depends_on("hwloc@1.11:")
depends_on("hwloc +cuda +nvml", when="+cuda")
depends_on("hwloc@2.3.0:", when="+rocm")
depends_on("hiptt", when="+rocm")
@@ -291,9 +193,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
# Note that conduit defaults to +fortran +parmetis +python, none of which are
# necessary by LBANN: you may want to disable those options in your
# packages.yaml
- depends_on("conduit@0.4.0: +hdf5", when="@0.94:0 +conduit")
- depends_on("conduit@0.5.0:0.6 +hdf5", when="@0.100:0.101 +conduit")
- depends_on("conduit@0.6.0: +hdf5", when="@:0.90,0.99:")
+ depends_on("conduit@0.6.0: +hdf5")
# LBANN can use Python in two modes 1) as part of an extensible framework
# and 2) to drive the front end model creation and launch
@@ -303,13 +203,13 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
extends("python", when="+python")
# Python front end and possible extra packages
- depends_on("python@3: +shared", type=("build", "run"), when="@:0.90,0.99: +pfe")
+ depends_on("python@3: +shared", type=("build", "run"), when="+pfe")
extends("python", when="+pfe")
depends_on("py-setuptools", type="build", when="+pfe")
- depends_on("py-protobuf+cpp@3.10.0:", type=("build", "run"), when="@:0.90,0.99: +pfe")
+ depends_on("py-protobuf+cpp@3.10.0:4.21.12", type=("build", "run"), when="+pfe")
- depends_on("protobuf+shared@3.10.0:", when="@:0.90,0.99:")
- depends_on("zlib-api", when="protobuf@3.11.0:")
+ depends_on("protobuf+shared@3.10.0:3.21.12")
+ depends_on("zlib-api", when="^protobuf@3.11.0:")
# using cereal@1.3.1 and above requires changing the
# find_package call to lowercase, so stick with :1.3.0
@@ -323,7 +223,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
depends_on("onnx", when="+onnx")
depends_on("nvshmem", when="+nvshmem")
- depends_on("spdlog", when="@:0.90,0.102:")
+ depends_on("spdlog@1.11.0")
depends_on("zstr")
depends_on("caliper+adiak+mpi", when="+caliper")
@@ -331,6 +231,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
generator("ninja")
def setup_build_environment(self, env):
+ env.append_flags("CXXFLAGS", "-fno-omit-frame-pointer")
if self.spec.satisfies("%apple-clang"):
env.append_flags("CPPFLAGS", self.compiler.openmp_flag)
env.append_flags("CFLAGS", self.spec["llvm-openmp"].headers.include_flags)
@@ -343,11 +244,16 @@ def _get_sys_type(self, spec):
sys_type = env["SYS_TYPE"]
return sys_type
+ @property
+ def libs(self):
+ shared = True if "+shared" in self.spec else False
+ return find_libraries("liblbann", root=self.prefix, shared=shared, recursive=True)
+
@property
def cache_name(self):
hostname = socket.gethostname()
# Get a hostname that has no node identifier
- hostname = hostname.rstrip("1234567890")
+ hostname = hostname.rstrip("1234567890-")
return "LBANN_{0}_{1}-{2}-{3}@{4}.cmake".format(
hostname,
self.spec.version,
@@ -360,6 +266,7 @@ def initconfig_compiler_entries(self):
spec = self.spec
entries = super().initconfig_compiler_entries()
entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17"))
+ entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec))
if not spec.satisfies("^cmake@3.23.0"):
# There is a bug with using Ninja generator in this version
# of CMake
@@ -429,12 +336,9 @@ def initconfig_package_entries(self):
cmake_variant_fields = [
("LBANN_WITH_CNPY", "numpy"),
("LBANN_DETERMINISTIC", "deterministic"),
- ("LBANN_WITH_HWLOC", "hwloc"),
- ("LBANN_WITH_ALUMINUM", "al"),
("LBANN_WITH_ADDRESS_SANITIZER", "asan"),
("LBANN_WITH_BOOST", "boost"),
("LBANN_WITH_CALIPER", "caliper"),
- ("LBANN_WITH_CONDUIT", "conduit"),
("LBANN_WITH_NVSHMEM", "nvshmem"),
("LBANN_WITH_FFT", "fft"),
("LBANN_WITH_ONEDNN", "onednn"),
@@ -449,6 +353,9 @@ def initconfig_package_entries(self):
for opt, val in cmake_variant_fields:
entries.append(self.define_cmake_cache_from_variant(opt, val))
+ entries.append(cmake_cache_option("LBANN_WITH_ALUMINUM", True))
+ entries.append(cmake_cache_option("LBANN_WITH_CONDUIT", True))
+ entries.append(cmake_cache_option("LBANN_WITH_HWLOC", True))
entries.append(cmake_cache_option("LBANN_WITH_ROCTRACER", "+rocm +distconv" in spec))
entries.append(cmake_cache_option("LBANN_WITH_TBINF", False))
entries.append(
@@ -481,7 +388,7 @@ def initconfig_package_entries(self):
)
)
- entries.append(self.define_cmake_cache_from_variant("LBANN_WITH_DIHYDROGEN", "dihydrogen"))
+ entries.append(cmake_cache_option("LBANN_WITH_DIHYDROGEN", True))
entries.append(self.define_cmake_cache_from_variant("LBANN_WITH_DISTCONV", "distconv"))
# IF IBM ESSL is used it needs help finding the proper LAPACK libraries
diff --git a/var/spack/repos/builtin/packages/lcio/package.py b/var/spack/repos/builtin/packages/lcio/package.py
index 8f02063af4c760..6b42bb41751fbe 100644
--- a/var/spack/repos/builtin/packages/lcio/package.py
+++ b/var/spack/repos/builtin/packages/lcio/package.py
@@ -19,6 +19,7 @@ class Lcio(CMakePackage):
maintainers("gaede", "vvolkl", "jmcarcell")
version("master", branch="master")
+ version("2.20.1", sha256="125f657297de12b40694cb0dddec1d1ce3379058492f2a6a2a6f992ee51604d6")
version("2.20", sha256="5ef92c9ef04ce468ffb48be0ec6010377a400b064e352cb50f9f4c9599e7e990")
version("2.19", sha256="2d6b37094d8d556ab0ba0efa632f10d8b851f533ca5c767e436397df18cb57c7")
version("2.18", sha256="e722df7f4a6adcc2459ea1c6488a2a6e40bb04f7ee99536fdc60b51e6c80f565")
diff --git a/var/spack/repos/builtin/packages/lcov/package.py b/var/spack/repos/builtin/packages/lcov/package.py
index dc7d3aa1b637c3..0c71a9b7aeda63 100644
--- a/var/spack/repos/builtin/packages/lcov/package.py
+++ b/var/spack/repos/builtin/packages/lcov/package.py
@@ -13,14 +13,42 @@ class Lcov(MakefilePackage):
supports statement, function and branch coverage measurement."""
homepage = "http://ltp.sourceforge.net/coverage/lcov.php"
- url = "https://github.com/linux-test-project/lcov/releases/download/v1.14/lcov-1.14.tar.gz"
+ url = "https://github.com/linux-test-project/lcov/releases/download/v2.0/lcov-2.0.tar.gz"
maintainers("KineticTheory")
+ version("2.0", sha256="1857bb18e27abe8bcec701a907d5c47e01db4d4c512fc098d1a6acd29267bf46")
version("1.16", sha256="987031ad5528c8a746d4b52b380bc1bffe412de1f2b9c2ba5224995668e3240b")
version("1.15", sha256="c1cda2fa33bec9aa2c2c73c87226cfe97de0831887176b45ee523c5e30f8053a")
version("1.14", sha256="14995699187440e0ae4da57fe3a64adc0a3c5cf14feab971f8db38fb7d8f071a")
- depends_on("perl")
+ # dependencies from
+ # https://github.com/linux-test-project/lcov/blob/02ece21d54ccd16255d74f8b00f8875b6c15653a/README#L91-L111
+ depends_on("perl", type=("build", "run"))
+ depends_on("perl-b-hooks-endofscope", type=("run"))
+ depends_on("perl-capture-tiny", type=("run"))
+ depends_on("perl-class-inspector", type=("run"))
+ depends_on("perl-class-singleton", type=("run"))
+ depends_on("perl-datetime", type=("run"))
+ depends_on("perl-datetime-locale", type=("run"))
+ depends_on("perl-datetime-timezone", type=("run"))
+ depends_on("perl-devel-cover", type=("run"))
+ depends_on("perl-devel-stacktrace", type=("run"))
+ depends_on("perl-digest-md5", type=("run"))
+ depends_on("perl-eval-closure", type=("run"))
+ depends_on("perl-exception-class", type=("run"))
+ depends_on("perl-file-sharedir", type=("run"))
+ depends_on("perl-file-spec", type=("run"))
+ depends_on("perl-json", type=("run"))
+ depends_on("perl-memory-process", type=("run"))
+ depends_on("perl-module-implementation", type=("run"))
+ depends_on("perl-mro-compat", type=("run"))
+ depends_on("perl-namespace-clean", type=("run"))
+ depends_on("perl-package-stash", type=("run"))
+ depends_on("perl-params-validationcompiler", type=("run"))
+ depends_on("perl-role-tiny", type=("run"))
+ depends_on("perl-specio", type=("run"))
+ depends_on("perl-sub-identify", type=("run"))
+ depends_on("perl-time-hires", type=("run"))
def install(self, spec, prefix):
make(
diff --git a/var/spack/repos/builtin/packages/ldak/package.py b/var/spack/repos/builtin/packages/ldak/package.py
index 022a3beacbe10e..d074d90ea6830b 100644
--- a/var/spack/repos/builtin/packages/ldak/package.py
+++ b/var/spack/repos/builtin/packages/ldak/package.py
@@ -12,31 +12,68 @@ class Ldak(Package):
homepage = "https://dougspeed.com/ldak/"
url = "https://dougspeed.com/wp-content/uploads/source.zip"
- version("5.1", sha256="ae3eb8c2ef31af210e138336fd6edcd0e3a26ea9bae89fd6c0c6ea33e3a1517e")
+ maintainers("snehring")
- variant("mkl", default=False, description="Use MKL")
+ version("5.2", sha256="ba3de4eb4f2d664b3c2a54bef2eb66d1a498ac423179e97a5795d010161b1805")
+ version(
+ "5.1",
+ sha256="ae3eb8c2ef31af210e138336fd6edcd0e3a26ea9bae89fd6c0c6ea33e3a1517e",
+ deprecated=True,
+ )
+
+ variant("glpk", default=False, description="Use glpk instead of vendored qsopt")
depends_on("zlib-api")
depends_on("blas")
depends_on("lapack")
- depends_on("mkl", when="+mkl")
-
- for t in ["aarch64", "arm", "ppc", "ppc64", "ppc64le", "ppcle", "sparc", "sparc64", "x86"]:
- conflicts("target={0}:".format(t), msg="libspot is available linux x86_64 only")
-
- def setup_build_environment(self, env):
- env.append_flags("LDLIBS", "-lm")
- env.append_flags("LDLIBS", "-lz")
- libs = (self.spec["lapack"].libs + self.spec["blas"].libs).ld_flags
- env.append_flags("LDLIBS", libs)
- if self.spec.platform == "darwin":
- env.append_flags("LDLIBS", "libqsopt.mac.a")
+ depends_on("openblas threads=openmp", when="^openblas")
+ depends_on("intel-mkl threads=openmp", when="^intel-mkl")
+ depends_on("intel-oneapi-mkl threads=openmp", when="^intel-oneapi-mkl")
+ depends_on("glpk", when="+glpk")
+
+ requires("target=x86_64:", when="~glpk", msg="bundled qsopt is only for x86_64")
+ requires(
+ "^openblas",
+ *[f"^{intel_pkg}" for intel_pkg in INTEL_MATH_LIBRARIES],
+ policy="one_of",
+ msg="Only mkl or openblas are supported for blas/lapack with ldak",
+ )
+ conflicts("platform=cray", when="~glpk", msg="bundled qsopt only for linux or mac")
+
+ phases = ["build", "install"]
+
+ def build(self, spec, prefix):
+ libs = [
+ "-lm",
+ (self.spec["lapack"].libs + self.spec["blas"].libs).link_flags,
+ self.spec["zlib-api"].libs.link_flags,
+ ]
+ includes = [
+ (self.spec["lapack"].headers + self.spec["blas"].headers).include_flags,
+ self.spec["zlib-api"].headers.include_flags,
+ ]
+
+ if self.spec.satisfies("~glpk"):
+ if self.spec.satisfies("platform=darwin"):
+ libs.append("libqsopt.mac.a")
+ else:
+ libs.append("libqsopt.linux.a")
else:
- env.append_flags("LDLIBS", "libqsopt.linux.a")
+ includes.append(self.spec["glpk"].headers.include_flags)
+ libs.append(self.spec["glpk"].libs.link_flags)
+ if self.spec.satisfies("^mkl"):
+ filter_file("#define MKL.*", "#define MKL 1", "ldak.c")
+ if self.spec.satisfies("^openblas"):
+ filter_file("#define MKL.*", "#define MKL 2", "ldak.c")
+ filter_file("#if MKL==2", "#if MKL==2\n#include <cblas.h>\n", "ldak.c")
+ if self.spec.satisfies("+glpk"):
+ filter_file("#define MET.*", "#define MET 1", "ldak.c")
+ filter_file('#include"glpk.h"', "#include<glpk.h>", "ldak.c")
+ filter_file(r"weights\[", "tally3[", "weightfuns.c")
+ cc = Executable(spack_cc)
+ args = ["ldak.c", self.compiler.openmp_flag, "-o", "ldak"] + includes + libs
+ cc(*args)
def install(self, spec, prefix):
- if self.spec.satisfies("~mkl"):
- filter_file("#define MKL.*", "#define MKL 0", "ldak.c")
- make("ldak")
mkdirp(prefix.bin)
install("ldak", prefix.bin.ldak)
diff --git a/var/spack/repos/builtin/packages/lemon/package.py b/var/spack/repos/builtin/packages/lemon/package.py
index bcf759ea198ec7..61a7fc46e3dce9 100644
--- a/var/spack/repos/builtin/packages/lemon/package.py
+++ b/var/spack/repos/builtin/packages/lemon/package.py
@@ -16,3 +16,30 @@ class Lemon(CMakePackage):
url = "https://lemon.cs.elte.hu/pub/sources/lemon-1.3.1.tar.gz"
version("1.3.1", sha256="71b7c725f4c0b4a8ccb92eb87b208701586cf7a96156ebd821ca3ed855bad3c8")
+
+ # variant("coin", default=False, description="Enable Coin solver backend") #TODO build fails
+ variant("ilog", default=False, description="Enable ILOG (CPLEX) solver backend")
+ variant("glpk", default=True, description="Enable GLPK solver backend")
+ # soplex not mentioned in docs but shown in cmakecache
+ # variant("soplex", default=False, description="Enable SOPLEX solver backend") #TODO
+
+ depends_on("glpk", when="+glpk")
+ depends_on("cplex", when="+ilog")
+ # depends_on("coinutils", when="+coin") # just a guess
+ # depends_on("cbc", when="+coin")
+ # depends_on("clp", when="+coin")
+ # depends_on("bzip2", when="+coin")
+ # depends_on("soplex", when="+soplex") # no such package in Spack yet. TODO
+
+ def cmake_args(self):
+ spec = self.spec
+ args = []
+ args.extend(
+ [
+ # f"-DLEMON_ENABLE_COIN={spec.variants['coin'].value}", #TODO
+ f"-DLEMON_ENABLE_ILOG={spec.variants['ilog'].value}",
+ f"-DLEMON_ENABLE_GLPK={spec.variants['glpk'].value}",
+ # f"-DLEMON_ENABLE_SOPLEX={spec.variants['soplex'].value}", #TODO
+ ]
+ )
+ return args
diff --git a/var/spack/repos/builtin/packages/libceed/package.py b/var/spack/repos/builtin/packages/libceed/package.py
index 43397349802a52..e74468646b824c 100644
--- a/var/spack/repos/builtin/packages/libceed/package.py
+++ b/var/spack/repos/builtin/packages/libceed/package.py
@@ -15,6 +15,7 @@ class Libceed(MakefilePackage, CudaPackage, ROCmPackage):
maintainers("jedbrown", "v-dobrev", "tzanio", "jeremylt")
version("develop", branch="main")
+ version("0.12.0", tag="v0.12.0", commit="60ef3feef7f5137af55ea7336903743d94ee71a8")
version("0.11.0", tag="v0.11.0", commit="8ec64e9ae9d5df169dba8c8ee61d8ec8907b8f80")
version("0.10.1", tag="v0.10.1", commit="74532b27052d94e943eb8bc76257fbd710103614")
version("0.9", tag="v0.9.0", commit="d66340f5aae79e564186ab7514a1cd08b3a1b06b")
diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py
index c0ece0386c5a5a..dc0c34191c8387 100644
--- a/var/spack/repos/builtin/packages/libevent/package.py
+++ b/var/spack/repos/builtin/packages/libevent/package.py
@@ -41,6 +41,10 @@ class Libevent(AutotoolsPackage):
depends_on("openssl@:1.0", when="@:2.0+openssl")
depends_on("openssl", when="+openssl")
+ depends_on("autoconf", type="build")
+ depends_on("automake", type="build")
+ depends_on("libtool", type="build")
+
def url_for_version(self, version):
if version >= Version("2.0.22"):
url = "https://github.com/libevent/libevent/releases/download/release-{0}-stable/libevent-{0}-stable.tar.gz"
@@ -54,6 +58,9 @@ def libs(self):
libs = find_libraries("libevent", root=self.prefix, shared=True, recursive=True)
return LibraryList(libs)
+ def autoreconf(self, spec, prefix):
+ autoreconf("--force", "--install", "--symlink")
+
def configure_args(self):
spec = self.spec
configure_args = []
diff --git a/var/spack/repos/builtin/packages/libfabric/package.py b/var/spack/repos/builtin/packages/libfabric/package.py
index fe0c9c6a40e70d..621e2b31f7cecb 100644
--- a/var/spack/repos/builtin/packages/libfabric/package.py
+++ b/var/spack/repos/builtin/packages/libfabric/package.py
@@ -22,6 +22,7 @@ class Libfabric(AutotoolsPackage):
executables = ["^fi_info$"]
version("main", branch="main")
+ version("1.20.0", sha256="7fbbaeb0e15c7c4553c0ac5f54e4ef7aecaff8a669d4ba96fa04b0fc780b9ddc")
version("1.19.0", sha256="f14c764be9103e80c46223bde66e530e5954cb28b3835b57c8e728479603ef9e")
version("1.18.2", sha256="64d7837853ca84d2a413fdd96534b6a81e6e777cc13866e28cf86cd0ccf1b93e")
version("1.18.1", sha256="4615ae1e22009e59c72ae03c20adbdbd4a3dce95aeefbc86cc2bf1acc81c9e38")
diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py
index c67bd82ddd2f52..d32400c3cfac5f 100644
--- a/var/spack/repos/builtin/packages/libffi/package.py
+++ b/var/spack/repos/builtin/packages/libffi/package.py
@@ -32,6 +32,11 @@ class Libffi(AutotoolsPackage):
patch("clang-powerpc-3.2.1.patch", when="@3.2.1%clang platform=linux")
# ref.: https://github.com/libffi/libffi/pull/561
patch("powerpc-3.3.patch", when="@3.3")
+ patch(
+ "https://github.com/libffi/libffi/commit/ce077e5565366171aa1b4438749b0922fce887a4.patch?full_index=1",
+ sha256="070b1f3aa87f2b56f83aff38afc42157e1692bfaa580276ecdbad2048b818ed7",
+ when="@3.4.3:3.4.4",
+ )
@property
def headers(self):
diff --git a/var/spack/repos/builtin/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py
index cd207db083c0c2..aae41faa590111 100644
--- a/var/spack/repos/builtin/packages/libgcrypt/package.py
+++ b/var/spack/repos/builtin/packages/libgcrypt/package.py
@@ -14,6 +14,7 @@ class Libgcrypt(AutotoolsPackage):
maintainers("alalazo")
+ version("1.10.3", sha256="8b0870897ac5ac67ded568dcfadf45969cfa8a6beb0fd60af2a9eadc2a3272aa")
version("1.10.2", sha256="3b9c02a004b68c256add99701de00b383accccf37177e0d6c58289664cce0c03")
version("1.10.1", sha256="ef14ae546b0084cd84259f61a55e07a38c3b53afc0f546bffcef2f01baffe9de")
version("1.10.0", sha256="6a00f5c05caa4c4acc120c46b63857da0d4ff61dc4b4b03933fa8d46013fae81")
diff --git a/var/spack/repos/builtin/packages/libgit2/package.py b/var/spack/repos/builtin/packages/libgit2/package.py
index dd09fd8e1e843c..20410cf1c6d814 100644
--- a/var/spack/repos/builtin/packages/libgit2/package.py
+++ b/var/spack/repos/builtin/packages/libgit2/package.py
@@ -83,6 +83,7 @@ class Libgit2(CMakePackage):
depends_on("cmake@2.8:", type="build", when="@:0.28")
depends_on("cmake@3.5:", type="build", when="@0.99:")
depends_on("pkgconfig", type="build")
+ depends_on("python", type="test")
# Runtime Dependencies
depends_on("libssh2", when="+ssh")
@@ -123,5 +124,6 @@ def cmake_args(self):
# Control tests
args.append(self.define("BUILD_CLAR", self.run_tests))
+ args.append(self.define("BUILD_TESTS", self.run_tests))
return args
diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
index a30e29b138d574..25425557854651 100644
--- a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
+++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
@@ -59,6 +59,22 @@ class LibjpegTurbo(CMakePackage, AutotoolsPackage):
variant("shared", default=True, description="Build shared libs")
variant("static", default=True, description="Build static libs")
variant("jpeg8", default=False, description="Emulate libjpeg v8 API/ABI")
+ variant(
+ "partial_decoder",
+ default=False,
+ description="add partial_decode_scale functionality required for rocAL",
+ )
+
+ patch(
+ "https://github.com/libjpeg-turbo/libjpeg-turbo/commit/09c71da06a6346dca132db66f26f959f7e4dd5ad.patch?full_index=1",
+ sha256="4d5bdfb5de5b04399144254ea383f5357ab7beb830b398aeb35b65f21dd6b4b0",
+ when="@2.0.6 +partial_decoder",
+ )
+ patch(
+ "https://github.com/libjpeg-turbo/libjpeg-turbo/commit/640d7ee1917fcd3b6a5271aa6cf4576bccc7c5fb.patch?full_index=1",
+ sha256="dc1ec567c2356b652100ecdc28713bbf25f544e46f7d2947f31a2395c362cc48",
+ when="@2.0.6 +partial_decoder",
+ )
# Can use either of these. But in the current version of the package
# only nasm is used. In order to use yasm an environmental variable
diff --git a/var/spack/repos/builtin/packages/libksba/package.py b/var/spack/repos/builtin/packages/libksba/package.py
index 5230bcb6a5bf42..cd2183f841f014 100644
--- a/var/spack/repos/builtin/packages/libksba/package.py
+++ b/var/spack/repos/builtin/packages/libksba/package.py
@@ -17,6 +17,7 @@ class Libksba(AutotoolsPackage):
maintainers("alalazo")
+ version("1.6.5", sha256="a564628c574c99287998753f98d750babd91a4e9db451f46ad140466ef2a6d16")
version("1.6.4", sha256="bbb43f032b9164d86c781ffe42213a83bf4f2fee91455edfa4654521b8b03b6b")
version("1.6.3", sha256="3f72c68db30971ebbf14367527719423f0a4d5f8103fc9f4a1c01a9fa440de5c")
diff --git a/var/spack/repos/builtin/packages/libluv/package.py b/var/spack/repos/builtin/packages/libluv/package.py
index abf42d47f08ee5..ff9a9db5e62d09 100644
--- a/var/spack/repos/builtin/packages/libluv/package.py
+++ b/var/spack/repos/builtin/packages/libluv/package.py
@@ -14,17 +14,26 @@ class Libluv(CMakePackage):
homepage = "https://github.com/luvit/luv"
url = "https://github.com/luvit/luv/releases/download/1.36.0-0/luv-1.36.0-0.tar.gz"
+ version("1.45.0-0", sha256="fa6c46fb09f88320afa7f88017efd7b0d2b3a0158c5ba5b6851340b0332a2b81")
version("1.44.2-1", sha256="3eb5c7bc44f61fbc4148ea30e3221d410263e0ffa285672851fc19debf9e5c30")
+ version("1.44.2-0", sha256="30639f8e0fac7fb0c3a04b94a00f73c6d218c15765347ceb0998a6b72464b6cf")
version("1.43.0-0", sha256="567a6f3dcdcf8a9b54ddc57ffef89d1e950d72832b85ee81c8c83a9d4e0e9de2")
version("1.42.0-1", sha256="4b6fbaa89d2420edf6070ad9e522993e132bd7eb2540ff754c2b9f1497744db2")
version("1.42.0-0", sha256="b5228a9d0eaacd9f862b6270c732d5c90773a28ce53b6d9e32a14050e7947f36")
version("1.36.0-0", sha256="f2e7eb372574f25c6978c1dc74280d22efdcd7df2dda4a286c7fe7dceda26445")
+    # https://github.com/neovim/neovim/issues/25770
+    # Up to 1.45 (inclusive), the dynamic library on macOS did not have the @rpath prefix,
+    # making it unusable on this platform.
+    # From 1.46, by requiring a newer CMake version, CMP0042 is in effect and it works correctly.
+ depends_on("cmake@3:", type="build")
+
depends_on("lua-lang", type="link")
depends_on("libuv", type="link")
def cmake_args(self):
args = [
+ self.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"),
"-DLUA_BUILD_TYPE=System",
"-DBUILD_STATIC_LIBS=ON",
"-DBUILD_SHARED_LIBS=ON",
diff --git a/var/spack/repos/builtin/packages/libpressio-sperr/package.py b/var/spack/repos/builtin/packages/libpressio-sperr/package.py
index cd636f2fc9c070..35576e33a7a08a 100644
--- a/var/spack/repos/builtin/packages/libpressio-sperr/package.py
+++ b/var/spack/repos/builtin/packages/libpressio-sperr/package.py
@@ -10,17 +10,19 @@ class LibpressioSperr(CMakePackage):
"""A LibPressio plugin for Sperr"""
homepage = "https://github.com/robertu94/libpressio-sperr"
- url = "https://github.com/robertu94/libpressio-sperr/archive/refs/tags/0.0.1.tar.gz"
+ url = "https://github.com/robertu94/libpressio-sperr/archive/refs/tags/0.0.4.tar.gz"
git = homepage
maintainers("robertu94")
- depends_on("libpressio@0.88.0:", when="@0.0.3:")
- depends_on("libpressio@:0.88.0", when="@:0.0.2")
- depends_on("sperr")
- depends_on("pkgconfig", type="build")
-
version("master", branch="master")
+ version("0.0.4", sha256="97f2879460b1a28ed8ebf0c300c1cf7ceeb2c7aa7b8a1307ed19bf8cce0b7941")
version("0.0.3", sha256="e0d1fd083419aaaa243cbf780b7de17aeb96533000071088aa21ec238d358ecc")
version("0.0.2", sha256="61995d687f9e7e798e17ec7238d19d917890dc0ff5dec18293b840c4d6f8c115")
version("0.0.1", sha256="e2c164822708624b97654046b42abff704594cba6537d6d0646d485bdf2d03ca")
+
+ depends_on("libpressio@0.88.0:", when="@0.0.3:")
+ depends_on("libpressio@:0.88.0", when="@:0.0.2")
+ depends_on("sperr@:0.6.2", when="@:0.0.3")
+ depends_on("sperr@0.7.1:", when="@0.0.4:")
+ depends_on("pkgconfig", type="build")
diff --git a/var/spack/repos/builtin/packages/libpsm3/package.py b/var/spack/repos/builtin/packages/libpsm3/package.py
index 620777fc17af5a..4a1f9e039f3bab 100644
--- a/var/spack/repos/builtin/packages/libpsm3/package.py
+++ b/var/spack/repos/builtin/packages/libpsm3/package.py
@@ -16,10 +16,11 @@ class Libpsm3(AutotoolsPackage):
git = "https://github.com/intel/eth-psm3-fi.git"
version(
- "11.4.1.0",
- sha256="272adb9ec10edf709bfcfccc6b6e9296d25d892c36b845ad577caeb82b70c9ac",
+ "11.5.1.1",
+ sha256="59fe731f4dd2cfcd90c8274df1c6ca9014a45cdebfdf1f1a830fcb8fcb65bb79",
preferred=True,
)
+ version("11.4.1.0", sha256="272adb9ec10edf709bfcfccc6b6e9296d25d892c36b845ad577caeb82b70c9ac")
variant("atomics", default=True, description="Enable atomics")
variant("debug", default=False, description="Enable debugging")
diff --git a/var/spack/repos/builtin/packages/libtheora/exit-prior-to-running-configure.patch b/var/spack/repos/builtin/packages/libtheora/exit-prior-to-running-configure.patch
deleted file mode 100644
index 99992c39c28dc1..00000000000000
--- a/var/spack/repos/builtin/packages/libtheora/exit-prior-to-running-configure.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-From 0060fd48c12a59a080974ca3754bf0eab9ab6d35 Mon Sep 17 00:00:00 2001
-From: Howard Pritchard
-Date: Tue, 24 Nov 2020 15:14:41 -0700
-Subject: [PATCH] exit prior to running configure
-
-Signed-off-by: Howard Pritchard
-
-diff --git a/autogen.sh b/autogen.sh
-index bbca69dc..4de1e783 100755
---- a/autogen.sh
-+++ b/autogen.sh
-@@ -112,6 +112,8 @@ if test -z "$*"; then
- echo "to pass any to it, please specify them on the $0 command line."
- fi
-
-+exit 0
-+
- echo "Generating configuration files for $package, please wait...."
-
- echo " $ACLOCAL $ACLOCAL_FLAGS"
---
-2.18.2
-
diff --git a/var/spack/repos/builtin/packages/libtheora/package.py b/var/spack/repos/builtin/packages/libtheora/package.py
index 6386da3497d85f..6ec88aa91ccf24 100644
--- a/var/spack/repos/builtin/packages/libtheora/package.py
+++ b/var/spack/repos/builtin/packages/libtheora/package.py
@@ -17,7 +17,10 @@ class Libtheora(AutotoolsPackage, MSBuildPackage):
homepage = "https://www.theora.org"
url = "http://downloads.xiph.org/releases/theora/libtheora-1.1.1.tar.xz"
+ git = "https://gitlab.xiph.org/xiph/theora.git"
+ version("master", branch="master")
+ version("stable", branch="theora-1.1")
version("1.1.1", sha256="f36da409947aa2b3dcc6af0a8c2e3144bc19db2ed547d64e9171c59c66561c61")
version("1.1.0", sha256="3d7b4fb1c115f1a530afd430eed2e8861fa57c8b179ec2d5a5d8f1cd0c7a4268")
@@ -43,12 +46,23 @@ class Libtheora(AutotoolsPackage, MSBuildPackage):
"msbuild", "autotools", default="autotools" if sys.platform != "win32" else "msbuild"
)
- patch("exit-prior-to-running-configure.patch", when="@1.1.1")
patch("fix_encoding.patch", when="@1.1:")
patch(
- "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.patch",
- sha256="8b1f256fa6bfb4ce1355c5be1104e8cfe695c8484d8ea19db06c006880a02298",
- when="^libpng@1.6:",
+ "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.diff",
+ sha256="e01ef71a1c19783a0b323b90a625e5c360ddb7ee03d2b6c201f1519f1704ea11",
+ when="@:1.1.1 ^libpng@1.6:",
+ )
+ # add -no-undefined
+ patch(
+ "https://gitlab.xiph.org/xiph/theora/-/commit/391ab0e99f2ad730231dbe5fc1154b990087f17d.diff",
+ sha256="d9bb5a9573819a27b3a925b1b66c33b36d9bca11b05d8aef88566eb6c8700690",
+ when="@:1.1.1",
+ )
+ # link theoraenc to theoradec
+ patch(
+ "https://gitlab.xiph.org/xiph/theora/-/commit/133b951b60fd845eabbc38bf7acd998bb9be75fc.diff",
+ sha256="e01511aff0130a40c889868d3713a56458744f39d1bb5ad98c8058da50233aa7",
+ when="@:1.1.1",
)
patch("libtheora-inc-external-ogg.patch", when="platform=windows")
@@ -62,10 +76,9 @@ def configure_args(self):
def autoreconf(self, pkg, spec, prefix):
sh = which("sh")
- if self.spec.satisfies("target=aarch64:"):
- sh("./autogen.sh", "prefix={0}".format(prefix), "--build=arm-linux")
- else:
- sh("./autogen.sh", "prefix={0}".format(prefix))
+        # Arguments are passed on to configure; let autogen.sh just print its version
+        # and exit, so that configure can run in the configure build phase.
+ sh("./autogen.sh", "-V")
class MSBuildBuilder(MSBuildBuilder):
diff --git a/var/spack/repos/builtin/packages/libtree/package.py b/var/spack/repos/builtin/packages/libtree/package.py
index 8cc847ae1f81e5..d79243c45899bb 100644
--- a/var/spack/repos/builtin/packages/libtree/package.py
+++ b/var/spack/repos/builtin/packages/libtree/package.py
@@ -54,12 +54,6 @@ def url_for_version(self, version):
with when("build_system=cmake"):
variant("chrpath", default=False, description="Use chrpath for deployment")
variant("strip", default=False, description="Use binutils strip for deployment")
- variant(
- "build_type",
- default="RelWithDebInfo",
- description="CMake build type",
- values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
- )
depends_on("googletest", type="test")
depends_on("cmake@3:", type="build")
depends_on("chrpath", when="+chrpath", type="run")
diff --git a/var/spack/repos/builtin/packages/libvips/package.py b/var/spack/repos/builtin/packages/libvips/package.py
index 95f7cf08ae83e5..6a6596dadd611d 100644
--- a/var/spack/repos/builtin/packages/libvips/package.py
+++ b/var/spack/repos/builtin/packages/libvips/package.py
@@ -32,6 +32,7 @@ class Libvips(AutotoolsPackage):
# TODO: Add more variants!
+ depends_on("pkgconfig", type="build")
depends_on("glib")
depends_on("expat")
diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py
index f0de744c590f41..ca92e6994a682b 100644
--- a/var/spack/repos/builtin/packages/libxml2/package.py
+++ b/var/spack/repos/builtin/packages/libxml2/package.py
@@ -74,8 +74,8 @@ def url_for_version(self, version):
# Use NAN/INFINITY if available to avoid SIGFPE
# See https://gitlab.gnome.org/GNOME/libxml2/-/merge_requests/186
patch(
- "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.patch",
- sha256="3e06d42596b105839648070a5921157fe284b932289ffdbfa304ddc3457e5637",
+ "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.diff",
+ sha256="5dc43fed02b443d2563a502a52caafe39477c06fc30b70f786d5ed3eb5aea88d",
when="@2.9.11:2.9.14",
)
build_system(conditional("nmake", when="platform=windows"), "autotools", default="autotools")
diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py
index 4de81dace46dd2..d7aa1a535ba56a 100644
--- a/var/spack/repos/builtin/packages/libxsmm/package.py
+++ b/var/spack/repos/builtin/packages/libxsmm/package.py
@@ -20,7 +20,12 @@ class Libxsmm(MakefilePackage):
maintainers("hfp")
+    # The 2.0 release is planned for Jan / Feb 2024. This commit from main is added
+    # as a stable version that supports targets other than x86. Remove this
+    # after the 2.0 release.
+ version("main-2023-11", commit="0d9be905527ba575c14ca5d3b4c9673916c868b2")
version("main", branch="main")
+
version("1.17", sha256="8b642127880e92e8a75400125307724635ecdf4020ca4481e5efe7640451bb92")
version("1.16.3", sha256="e491ccadebc5cdcd1fc08b5b4509a0aba4e2c096f53d7880062a66b82a0baf84")
version("1.16.2", sha256="bdc7554b56b9e0a380fc9c7b4f4394b41be863344858bc633bc9c25835c4c64e")
@@ -82,8 +87,9 @@ class Libxsmm(MakefilePackage):
# ().
depends_on("binutils+ld+gas@2.33:", type="build", when="@:1.17")
- # Intel Architecture or compatible CPU required
- requires("target=x86_64:")
+ # Version 2.0 supports both x86_64 and aarch64
+ requires("target=x86_64:", "target=aarch64:")
+ requires("target=x86_64:", when="@:1")
@property
def libs(self):
diff --git a/var/spack/repos/builtin/packages/liggghts/cpp-17.patch b/var/spack/repos/builtin/packages/liggghts/cpp-17.patch
new file mode 100644
index 00000000000000..73c4bffdd0f0de
--- /dev/null
+++ b/var/spack/repos/builtin/packages/liggghts/cpp-17.patch
@@ -0,0 +1,75 @@
+diff --git a/src/math_vector.h b/src/math_vector.h
+index 2b8704af..79c0cedd 100644
+--- a/src/math_vector.h
++++ b/src/math_vector.h
+@@ -94,7 +94,7 @@ inline void vec_neg(vector &dest) { // -a
+ dest[2] = -dest[2]; }
+
+ inline void vec_norm(vector &dest) { // a/|a|
+- register double f = sqrt(vec_dot(dest, dest));
++ double f = sqrt(vec_dot(dest, dest));
+ dest[0] /= f;
+ dest[1] /= f;
+ dest[2] /= f; }
+@@ -222,7 +222,7 @@ inline void form_subtr(shape &dest, form &src) { // m_a-m_b
+ dest[3] -= src[3]; dest[4] -= src[4]; dest[5] -= src[5]; }
+
+ inline int form_inv(form &m_inv, form &m) { // m^-1
+- register double det = form_det(m);
++ double det = form_det(m);
+ if (fzero(det)) return 0;
+ m_inv[0] = (m[1]*m[2]-m[3]*m[3])/det;
+ m_inv[1] = (m[0]*m[2]-m[4]*m[4])/det;
+@@ -377,7 +377,7 @@ inline void form4_unit(form4 &dest) {
+ dest[0] = dest[1] = dest[2] = dest[3] = 1.0; }
+
+ inline double form4_det(form4 &m) {
+- register double f = m[6]*m[7]-m[5]*m[8];
++ double f = m[6]*m[7]-m[5]*m[8];
+ return m[0]*(
+ m[1]*(m[2]*m[3]-m[4]*m[4])+
+ m[5]*(2.0*m[4]*m[7]-m[2]*m[5])-m[3]*m[7]*m[7])+f*f+
+@@ -387,7 +387,7 @@ inline double form4_det(form4 &m) {
+ m[9]*(m[4]*m[4]-m[2]*m[3])); }
+
+ inline int form4_inv(form4 &m_inv, form4 &m) {
+- register double det = form4_det(m);
++ double det = form4_det(m);
+ if (fzero(det)) return 0;
+ m_inv[0] = (m[1]*(m[2]*m[3]-m[4]*m[4])+
+ m[5]*(2.0*m[4]*m[7]-m[2]*m[5])-m[3]*m[7]*m[7])/det;
+diff --git a/src/pair.cpp b/src/pair.cpp
+index c0889f72..8c212715 100644
+--- a/src/pair.cpp
++++ b/src/pair.cpp
+@@ -566,7 +566,7 @@ void Pair::init_tables_disp(double cut_lj_global)
+ }
+
+ rsq = rsq_lookup.f;
+- register double x2 = g2*rsq, a2 = 1.0/x2;
++ double x2 = g2*rsq, a2 = 1.0/x2;
+ x2 = a2*exp(-x2);
+
+ rdisptable[i] = rsq_lookup.f;
+@@ -612,7 +612,7 @@ void Pair::init_tables_disp(double cut_lj_global)
+ if (rsq_lookup.f < (cut_lj_globalsq = cut_lj_global * cut_lj_global)) {
+ rsq_lookup.f = cut_lj_globalsq;
+
+- register double x2 = g2*rsq, a2 = 1.0/x2;
++ double x2 = g2*rsq, a2 = 1.0/x2;
+ x2 = a2*exp(-x2);
+ f_tmp = g8*(((6.0*a2+6.0)*a2+3.0)*a2+1.0)*x2*rsq;
+ e_tmp = g6*((a2+1.0)*a2+0.5)*x2;
+diff --git a/src/utils.h b/src/utils.h
+index fab00e9b..5a122627 100644
+--- a/src/utils.h
++++ b/src/utils.h
+@@ -67,7 +67,7 @@ namespace Utils {
+
+ inline std::string int_to_string(int a)
+ {
+- return static_cast< std::ostringstream & >(( std::ostringstream() << std::dec << a ) ).str();
++ return static_cast< std::ostringstream & >(( std::ostringstream().flush() << std::dec << a ) ).str();
+ }
+
+ inline std::string double_to_string(double dbl)
diff --git a/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch b/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch
new file mode 100644
index 00000000000000..a5c26300a34392
--- /dev/null
+++ b/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch
@@ -0,0 +1,21 @@
+diff --git a/src/MAKE/Makefile.auto b/src/MAKE/Makefile.auto
+index 239f886..8f42e73 100644
+--- a/src/MAKE/Makefile.auto
++++ b/src/MAKE/Makefile.auto
+@@ -816,12 +816,14 @@ ifeq ($(USE_VTK), "ON")
+ endif
+ endif
+ endif
+- open_bracket := (
+- close_bracket := )
++ open_bracket := ("
++ close_bracket := ")
++ message := message
+ space :=
+ space +=
+ VTK_TMP := $(subst $(open_bracket),$(space),$(VTK_TMP))
+ VTK_TMP := $(subst $(close_bracket),$(space),$(VTK_TMP))
++ VTK_TMP := $(subst $(message),$(space),$(VTK_TMP))
+ VTK_MAJOR_VERSION := $(patsubst "%",%,$(word $(words $(VTK_TMP)),$(VTK_TMP)))
+ ifeq ($(AUTO_DEBUG),1)
+ $(shell $(ECHO) "#vtk_major_version: $(VTK_MAJOR_VERSION)" >> $(AUTO_LOG_FILE))
diff --git a/var/spack/repos/builtin/packages/liggghts/makefile.patch b/var/spack/repos/builtin/packages/liggghts/makefile.patch
new file mode 100644
index 00000000000000..370e4b8dad21f0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/liggghts/makefile.patch
@@ -0,0 +1,240 @@
+diff --git a/src/MAKE/Makefile.auto b/src/MAKE/Makefile.auto
+index dde9e72..239f886 100644
+--- a/src/MAKE/Makefile.auto
++++ b/src/MAKE/Makefile.auto
+@@ -440,12 +440,12 @@ ifeq ($(USE_MPI), "ON")
+ TMP_INC = -I$(MPI_INC)
+ endif
+ # We assume that the compiler supports #pragma message
+- TMP := $(shell $(ECHO) '\#include \n \#if defined(MPICH) \n \#pragma message "MPICH" \n \#elif defined(OPEN_MPI) \n \#pragma message "OpenMPI" \n \#else \n \#pragma message "Unknown" \n \#endif' > $(TMPFILE) && $(MPICXX) $(OPT_LVL) $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1)
++ TMP := $(shell $(ECHO) '#include \n #if defined(MPICH) \n #pragma message "MPICH" \n #elif defined(OPEN_MPI) \n #pragma message "OpenMPI" \n #else \n #pragma message "Unknown" \n #endif' > $(TMPFILE) && $(MPICXX) $(OPT_LVL) $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1)
+ # See if compilation has worked out
+ ifeq ($(TMP), -1)
+ # Maybe it failed because of the optimization as -Og is not known
+ ifeq ($(USE_DEBUG), "ON")
+- TMP := $(shell $(ECHO) '\#include \n \#if defined(MPICH) \n \#pragma message "MPICH" \n \#elif defined(OPEN_MPI) \n \#pragma message "OpenMPI" \n \#else \n \#pragma message "Unknown" \n \#endif' > $(TMPFILE) && $(MPICXX) -O0 -g $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1)
++ TMP := $(shell $(ECHO) '#include \n #if defined(MPICH) \n #pragma message "MPICH" \n #elif defined(OPEN_MPI) \n #pragma message "OpenMPI" \n #else \n #pragma message "Unknown" \n #endif' > $(TMPFILE) && $(MPICXX) -O0 -g $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile a simple MPI example (testing with -Og and -O0). Test was done with MPI_INC="$(TMP_INC)" and MPICXX="$(MPICXX)"')
+ else
+@@ -566,7 +566,7 @@ else
+ $(shell $(ECHO) "#Compiling with mpi stubs" >> $(AUTO_LOG_FILE))
+ $(shell $(ECHO) "#Command: $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE)")
+ endif
+- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile a simple c++ example. Please make sure that you have run "make stubs" before compiling LIGGGHTS itself. Test was done with CXX=$(CXX), EXTRA_INC=$(EXTRA_INC), EXTRA_LIB=$(EXTRA_LIB) and EXTRA_ADDLIBS=$(EXTRA_ADDLIBS).')
+ endif
+@@ -595,7 +595,7 @@ endif
+ HAVE_MATH_SPECIAL_FUNCS = 0
+ # For c++17 this is included without any further defines
+ ifeq ($(CXXVERSION),17)
+- TMP := $(shell $(ECHO) '\#include \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP),0)
+ HAVE_MATH_SPECIAL_FUNCS = 1
+ endif
+@@ -604,14 +604,14 @@ ifeq ($(CXXVERSION),17)
+ else
+ # For c++11 we need to check if ISO 29124:2010 is supported
+ ifeq ($(CXXVERSION),11)
+- TMP := $(shell $(ECHO) '\#define __STDCPP_WANT_MATH_SPEC_FUNCS__ 1 \n \#include \n \#if !defined(__STDCPP_MATH_SPEC_FUNCS__) || __STDCPP_MATH_SPEC_FUNCS__ < 201003L \n \#error "STOP" \n \#endif \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#define __STDCPP_WANT_MATH_SPEC_FUNCS__ 1 \n #include \n #if !defined(__STDCPP_MATH_SPEC_FUNCS__) || __STDCPP_MATH_SPEC_FUNCS__ < 201003L \n #error "STOP" \n #endif \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP),0)
+ HAVE_MATH_SPECIAL_FUNCS = 1
+ endif
+ endif
+ endif
+ ifeq ($(HAVE_MATH_SPECIAL_FUNCS),0)
+- TMP := $(shell $(ECHO) '\#include \n int main(){ std::tr1::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){ std::tr1::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ HAVE_TR1_CMATH = 0
+ ifeq ($(TMP),0)
+ HAVE_TR1_CMATH = 1
+@@ -729,7 +729,7 @@ ifeq ($(USE_VTK), "ON")
+ $(shell $(ECHO) "#vtk major version detection" >> $(AUTO_LOG_FILE))
+ endif
+ # note we assume here that our compiler supports #pragma message
+- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
+ ifeq ($(AUTO_DEBUG),1)
+ $(shell $(ECHO) "#vtk major version detection result: $(VTK_TMP)" >> $(AUTO_LOG_FILE))
+ endif
+@@ -744,7 +744,7 @@ ifeq ($(USE_VTK), "ON")
+ ifeq ($(VTK_INC),-I)
+ VTK_INC =
+ endif
+- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
+ ifeq ($(AUTO_DEBUG),1)
+ $(shell $(ECHO) "#vtk major version detection result (lib): $(VTK_TMP)" >> $(AUTO_LOG_FILE))
+ endif
+@@ -797,7 +797,7 @@ ifeq ($(USE_VTK), "ON")
+ # At this stage we now have VTK downloaded. Next we need to compile it
+ $(info VTK has been downloaded and will be compiled now. This can take several minutes.)
+ OBJDIR := $(PWD)
+- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/vtk" \n mkdir -p build \n cd src \n git checkout $(VTK_VERSION_TAG) &>> $(AUTO_LOG_FILE) \n cd ../build \n cmake -DBUILD_TESTING:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_INSTALL_PREFIX=../install -DModule_vtkIOMPIParallel:BOOL=ON -DVTK_Group_MPI:BOOL=ON -DVTK_Group_Rendering:BOOL=OFF -DVTK_RENDERING_BACKEND:STRING=None -DVTK_USE_X:BOOL=OFF -DModule_vtkIOMPIImage:BOOL=ON -DModule_vtkParallelMPI:BOOL=ON ../src &>> $(AUTO_LOG_FILE) \n make &>> $(AUTO_LOG_FILE) \n make install &>> $(AUTO_LOG_FILE)' > $(TMPFILE))
++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/vtk" \n mkdir -p build \n cd src \n git checkout $(VTK_VERSION_TAG) &>> $(AUTO_LOG_FILE) \n cd ../build \n cmake -DBUILD_TESTING:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_INSTALL_PREFIX=../install -DModule_vtkIOMPIParallel:BOOL=ON -DVTK_Group_MPI:BOOL=ON -DVTK_Group_Rendering:BOOL=OFF -DVTK_RENDERING_BACKEND:STRING=None -DVTK_USE_X:BOOL=OFF -DModule_vtkIOMPIImage:BOOL=ON -DModule_vtkParallelMPI:BOOL=ON ../src &>> $(AUTO_LOG_FILE) \n make &>> $(AUTO_LOG_FILE) \n make install &>> $(AUTO_LOG_FILE)' > $(TMPFILE))
+ TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Compilation of vtk failed. Please install it manually')
+@@ -807,7 +807,7 @@ ifeq ($(USE_VTK), "ON")
+ ifeq ($(VTK_INC),-I)
+ VTK_INC =
+ endif
+- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
+ ifeq ($(AUTO_DEBUG),1)
+ $(shell $(ECHO) "#vtk major version detection result (lib): $(VTK_TMP)" >> $(AUTO_LOG_FILE))
+ endif
+@@ -826,7 +826,7 @@ ifeq ($(USE_VTK), "ON")
+ ifeq ($(AUTO_DEBUG),1)
+ $(shell $(ECHO) "#vtk_major_version: $(VTK_MAJOR_VERSION)" >> $(AUTO_LOG_FILE))
+ endif
+- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MINOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MINOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1)
+ ifeq ($(VTK_TMP), -1)
+ $(error Could not obtain VTK_MINOR_VERSION)
+ endif
+@@ -885,7 +885,7 @@ ifeq ($(USE_VTK), "ON")
+ VTK_LIB =
+ endif
+ endif
+- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon$(VTK_APPENDIX_5) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon$(VTK_APPENDIX_5) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ ifeq ($(VTK_LIB_SET), 0)
+ VTK_LIB := -L$(dir $(shell find $(VTK_BASE_PATH)/lib* -name 'libvtkCommon.so' | tail -n 1))
+@@ -893,7 +893,7 @@ ifeq ($(USE_VTK), "ON")
+ VTK_LIB =
+ endif
+ endif
+- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not determine suitable appendix of VTK library with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_APPENDIX="$(VTK_APPENDIX)"')
+ else
+@@ -924,7 +924,7 @@ ifeq ($(USE_VTK), "ON")
+ $(shell $(ECHO) "#vtk_lib: $(VTK_LIB)" >> $(AUTO_LOG_FILE))
+ $(shell $(ECHO) "#appendix command: $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE)" >> $(AUTO_LOG_FILE))
+ endif
+- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ ifeq ($(AUTO_DEBUG),1)
+ $(shell $(ECHO) "#attempting without appendix" >> $(AUTO_LOG_FILE))
+@@ -935,7 +935,7 @@ ifeq ($(USE_VTK), "ON")
+ VTK_LIB =
+ endif
+ endif
+- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not determine suitable appendix of VTK library with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_APPENDIX="$(VTK_APPENDIX)"')
+ else
+@@ -1025,9 +1025,9 @@ ifeq ($(USE_VTK), "ON")
+ $(shell $(ECHO) "#vtk_addlibs: $(VTK_ADDLIBS)" >> $(AUTO_LOG_FILE))
+ $(shell $(ECHO) "#vtk_rpath: $(VTK_RPATH)" >> $(AUTO_LOG_FILE))
+ $(shell $(ECHO) "#vtk compile test:" >> $(AUTO_LOG_FILE))
+- TMP := $(shell $(ECHO) "\#include \n int main(){}" > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) &>> $(AUTO_LOG_FILE))
++ TMP := $(shell $(ECHO) "#include \n int main(){}" > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) &>> $(AUTO_LOG_FILE))
+ endif
+- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile VTK example with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_ADDLIBS="$(VTK_ADDLIBS)"')
+ endif
+@@ -1057,7 +1057,7 @@ ifeq ($(USE_SUPERQUADRICS), "ON")
+ ifeq ($(REQUIRE_BOOST),1)
+ BOOST_INC ?= $(BOOST_INC_USR)
+ # Include test
+- TMP := $(shell $(ECHO) '\#include "boost/math/special_functions/beta.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(BOOST_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include "boost/math/special_functions/beta.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(BOOST_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile boost example with BOOST_INC="$(BOOST_INC)" as boost/math/special_functions/beta.hpp could not be found')
+ endif
+@@ -1082,7 +1082,7 @@ ifeq ($(USE_JPG), "ON")
+ $(shell $(ECHO) "#JPG_ADDLIBS: $(JPG_ADDLIBS)" >> $(AUTO_LOG_FILE))
+ $(shell $(ECHO) "jpg compile test:" >> $(AUTO_LOG_FILE))
+ endif
+- TMP := $(shell $(ECHO) '\#include \n \#include \n \#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n #include \n #include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile jpg example with JPG_INC="$(JPG_INC)"')
+ endif
+@@ -1090,7 +1090,7 @@ ifeq ($(USE_JPG), "ON")
+ $(shell $(ECHO) "jpg link test:" >> $(AUTO_LOG_FILE))
+ endif
+ # Linking test
+- TMP := $(shell $(ECHO) '\#include \n \#include \n \#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(JPG_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include \n #include \n #include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(JPG_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile and link jpg example with JPG_INC="$(JPG_INC)", JPG_LIB="$(JPG_LIB)" and JPG_ADDLIBS="$(JPG_ADDLIBS)"')
+ endif
+@@ -1119,7 +1119,7 @@ ifeq ($(USE_CONVEX), "ON")
+ CONVEX_ADDLIBS += -lccd
+ # Test settings
+ # Link test
+- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ # Automatic download and compilation if AUTODOWNLOAD_CONVEX is set
+ ifeq ($(TMP), -1)
+ ifeq ($(AUTOINSTALL_CONVEX), "ON")
+@@ -1168,7 +1168,7 @@ ifeq ($(USE_CONVEX), "ON")
+ endif
+ # At this stage we now have libccd downloaded. Next we need to compile it
+ OBJDIR := $(PWD)
+- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/libccd/src" \n make PREFIX="$(PWD)/../../" USE_DOUBLE=yes &> /dev/null' > $(TMPFILE))
++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/libccd/src" \n make PREFIX="$(PWD)/../../" USE_DOUBLE=yes &> /dev/null' > $(TMPFILE))
+ TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Compilation of libccd failed. Please install it manually')
+@@ -1178,12 +1178,12 @@ ifeq ($(USE_CONVEX), "ON")
+ endif
+ endif
+ # Include test
+- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -E $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -E $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile Convex (libccd) example with CONVEX_INC="$(CONVEX_INC)"')
+ endif
+ # Link test
+- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile and link Convex (libccd) example with CONVEX_INC="$(CONVEX_INC)", CONVEX_LIB="$(CONVEX_LIB)" and CONVEX_ADDLIBS="$(CONVEX_ADDLIBS)"')
+ endif
+@@ -1210,7 +1210,7 @@ ifeq ($(USE_MFEM), "ON")
+ MFEM_LIB ?= -L$(LIB_PATH)/mfem
+ MFEM_ADDLIBS += -lmfem
+ # Link test
+- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ ifeq ($(AUTOINSTALL_MFEM), "ON")
+ $(info 'Could not compile MFEM example. As AUTOINSTALL_MFEM is set to "ON". MFEM will now be automatically downloaded to ../lib/mfem')
+@@ -1257,7 +1257,7 @@ ifeq ($(USE_MFEM), "ON")
+ # At this stage we now have MFEM downloaded. Next we need to compile it
+ TMP := $(shell ls $(LIB_PATH)/mfem/libmfem.a && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd $(LIB_PATH)/mfem \n make config \n make all -j 4' > $(TMPFILE))
++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd $(LIB_PATH)/mfem \n make config \n make all -j 4' > $(TMPFILE))
+ TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Compilation of MFEM failed. Please install it manually')
+@@ -1270,12 +1270,12 @@ ifeq ($(USE_MFEM), "ON")
+
+
+ # Include test
+- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile MFEM example with MFEM_INC="$(MFEM_INC)"')
+ endif
+ # Link test
+- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1)
+ ifeq ($(TMP), -1)
+ $(error 'Could not compile and link MFEM example. Test was done with MFEM_INC="$(MFEM_INC)", MFEM_LIB="$(MFEM_LIB)" and MFEM_ADDLIBS="$(MFEM_ADDLIBS)"')
+ endif
diff --git a/var/spack/repos/builtin/packages/liggghts/package.py b/var/spack/repos/builtin/packages/liggghts/package.py
index d9487bea4553ff..dac43ff3655115 100644
--- a/var/spack/repos/builtin/packages/liggghts/package.py
+++ b/var/spack/repos/builtin/packages/liggghts/package.py
@@ -16,6 +16,8 @@ class Liggghts(MakefilePackage):
url = "https://github.com/CFDEMproject/LIGGGHTS-PUBLIC/archive/3.8.0.tar.gz"
git = "ssh://git@github.com/CFDEMproject/LIGGGHTS-PUBLIC.git"
+ maintainers("SofiaXu")
+
version("3.8.0", sha256="9cb2e6596f584463ac2f80e3ff7b9588b7e3638c44324635b6329df87b90ab03")
variant("mpi", default=True, description="Enable MPI support")
@@ -28,7 +30,13 @@ class Liggghts(MakefilePackage):
depends_on("mpi", when="+mpi")
depends_on("jpeg", when="+jpeg")
depends_on("zlib-api", when="+gzip")
-
+ # patch for makefile test code
+ patch("makefile.patch")
+ # patch for clang and oneapi
+ patch("makefile-llvm-based-compiler.patch", when="%clang")
+ patch("makefile-llvm-based-compiler.patch", when="%oneapi")
+ # C++17 support
+ patch("cpp-17.patch")
build_directory = "src"
build_targets = ["auto"]
@@ -55,9 +63,9 @@ def edit(self, spec, prefix):
if "+mpi" in spec:
mpi = spec["mpi"]
- makefile.filter(r"^#(MPICXX_USER=).*", r"\1{0}".format(mpi.mpicxx))
- makefile.filter(r"^#(MPI_INC_USER=).*", r"\1{0}".format(mpi.prefix.include))
- makefile.filter(r"^#(MPI_LIB_USER=).*", r"\1{0}".format(mpi.prefix.lib))
+ makefile.filter(r"^#(MPICXX_USR=).*", r"\1{0}".format(mpi.mpicxx))
+ makefile.filter(r"^#(MPI_INC_USR=).*", r"\1{0}".format(mpi.prefix.include))
+ makefile.filter(r"^#(MPI_LIB_USR=).*", r"\1{0}".format(mpi.prefix.lib))
else:
makefile.filter(r"^(USE_MPI = ).*", r'\1"OFF"')
# Set path to C++ compiler.
@@ -70,8 +78,8 @@ def edit(self, spec, prefix):
if "+jpeg" in spec:
jpeg = spec["jpeg"]
makefile.filter(r"^(USE_JPG = ).*", r'\1"ON"')
- makefile.filter(r"^#(JPG_INC_USER=-I).*", r"\1{0}".format(jpeg.prefix.include))
- makefile.filter(r"^#(JPG_LIB_USER=-L).*", r"\1{0}".format(jpeg.prefix.lib))
+ makefile.filter(r"^#(JPG_INC_USR=-I).*", r"\1{0}".format(jpeg.prefix.include))
+ makefile.filter(r"^#(JPG_LIB_USR=-L).*", r"\1{0}".format(jpeg.prefix.lib))
if "+gzip" in spec:
makefile.filter(r"^(USE_GZIP = ).*", r'\1"ON"')
diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py
index f7ebb21048c4c4..6dd5b420302dc5 100644
--- a/var/spack/repos/builtin/packages/likwid/package.py
+++ b/var/spack/repos/builtin/packages/likwid/package.py
@@ -24,6 +24,7 @@ class Likwid(Package):
git = "https://github.com/RRZE-HPC/likwid.git"
maintainers("TomTheBear")
+ version("5.3.0", sha256="c290e554c4253124ac2ab8b056e14ee4d23966b8c9fbfa10ba81f75ae543ce4e")
version("5.2.2", sha256="7dda6af722e04a6c40536fc9f89766ce10f595a8569b29e80563767a6a8f940e")
version("5.2.1", sha256="1b8e668da117f24302a344596336eca2c69d2bc2f49fa228ca41ea0688f6cbc2")
version("5.2.0", sha256="aa6dccacfca59e52d8f3be187ffcf292b2a2fa1f51a81bf8912b9d48e5a257e0")
@@ -65,6 +66,7 @@ class Likwid(Package):
)
variant("fortran", default=True, description="with fortran interface")
variant("cuda", default=False, description="with Nvidia GPU profiling support")
+ variant("rocm", default=False, description="with AMD GPU profiling support")
variant(
"accessmode",
@@ -83,6 +85,10 @@ class Likwid(Package):
depends_on("lua", when="@5.0.2:")
depends_on("cuda", when="@5: +cuda")
depends_on("hwloc", when="@5.2.0:")
+ depends_on("rocprofiler-dev", when="@5.3: +rocm")
+ depends_on("rocm-core", when="@5.3: +rocm")
+ depends_on("rocm-smi", when="@5.3: +rocm")
+ depends_on("rocm-smi-lib", when="@5.3: +rocm")
# TODO: check
# depends_on('gnuplot', type='run')
@@ -103,6 +109,31 @@ def setup_run_environment(self, env):
)
for lib in libs.directories:
env.append_path("LD_LIBRARY_PATH", lib)
+ if "+rocm" in self.spec:
+ libs = find_libraries(
+ "librocprofiler64.so.1",
+ root=self.spec["rocprofiler-dev"].prefix,
+ shared=True,
+ recursive=True,
+ )
+ for lib in libs.directories:
+ env.append_path("LD_LIBRARY_PATH", lib)
+ libs = find_libraries(
+ "libhsa-runtime64.so",
+ root=self.spec["rocm-core"].prefix,
+ shared=True,
+ recursive=True,
+ )
+ for lib in libs.directories:
+ env.append_path("LD_LIBRARY_PATH", lib)
+ libs = find_libraries(
+ "librocm_smi64.so",
+ root=self.spec["rocm-smi-lib"].prefix,
+ shared=True,
+ recursive=True,
+ )
+ for lib in libs.directories:
+ env.append_path("LD_LIBRARY_PATH", lib)
@run_before("install")
def filter_sbang(self):
@@ -170,6 +201,13 @@ def install(self, spec, prefix):
else:
filter_file("^NVIDIA_INTERFACE.*", "NVIDIA_INTERFACE = false", "config.mk")
+ if "+rocm" in self.spec:
+ env["ROCM_HOME"] = spec["rocm-core"].prefix
+ filter_file("^ROCM_INTERFACE.*", "ROCM_INTERFACE = true", "config.mk")
+ filter_file("^BUILDAPPDAEMON.*", "BUILDAPPDAEMON = true", "config.mk")
+ else:
+ filter_file("^ROCM_INTERFACE.*", "ROCM_INTERFACE = false", "config.mk")
+
if spec.satisfies("^lua"):
filter_file(
"^#LUA_INCLUDE_DIR.*",
diff --git a/var/spack/repos/builtin/packages/linaro-forge/package.py b/var/spack/repos/builtin/packages/linaro-forge/package.py
index 7724f87a29ce6d..f13de03dfc15d8 100644
--- a/var/spack/repos/builtin/packages/linaro-forge/package.py
+++ b/var/spack/repos/builtin/packages/linaro-forge/package.py
@@ -23,6 +23,10 @@ class LinaroForge(Package):
maintainers("kenche-linaro")
if platform.machine() in ["aarch64", "arm64"]:
+ version("23.1", sha256="c9889b95729f97bcffaf0f15b930efbd27081b7cf2ebc958eede3a186cc4d93a")
+ version(
+ "23.0.4", sha256="a19e6b247badaa52f78815761f71fb95a565024b7f79bdfb2f602f18b47a881c"
+ )
version(
"23.0.3", sha256="a7e23ef2a187f8e2d6a6692cafb931c9bb614abf58e45ea9c2287191c4c44f02"
)
@@ -40,6 +44,10 @@ class LinaroForge(Package):
"21.1.3", sha256="4a4ff7372aad5a31fc9e18b7b6c493691ab37d8d44a3158584e62d1ab82b0eeb"
)
elif platform.machine() == "ppc64le":
+ version("23.1", sha256="39a522c1d9a29f0a35bba5201f3e23c56d87543410505df30c85128816dd455b")
+ version(
+ "23.0.4", sha256="927c1ba733cf63027243060586b196f8262e545d898712044c359a6af6fc5795"
+ )
version(
"23.0.3", sha256="5ff9770f4bc4a2df4bac8a2544a9d6bad9fba2556420fa2e659e5c21e741caf7"
)
@@ -60,6 +68,10 @@ class LinaroForge(Package):
"21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e"
)
elif platform.machine() == "x86_64":
+ version("23.1", sha256="31185d5f9855fd03701089907cdf7b38eb72c484ee730f8341decbbd8f9b5930")
+ version(
+ "23.0.4", sha256="41a81840a273ea9a232efb4f031149867c5eff7a6381d787e18195f1171caac4"
+ )
version(
"23.0.3", sha256="f2a010b94838f174f057cd89d12d03a89ca946163536eab178dd1ec877cdc27f"
)
diff --git a/var/spack/repos/builtin/packages/linux-headers/package.py b/var/spack/repos/builtin/packages/linux-headers/package.py
index 1236a25ce691ee..8e1d995695b069 100644
--- a/var/spack/repos/builtin/packages/linux-headers/package.py
+++ b/var/spack/repos/builtin/packages/linux-headers/package.py
@@ -20,6 +20,10 @@ class LinuxHeaders(Package):
version("6.2.8", sha256="fed0ad87d42f83a70ce019ff2800bc30a855e672e72bf6d54a014d98d344f665")
version("4.9.10", sha256="bd6e05476fd8d9ea4945e11598d87bc97806bbc8d03556abbaaf809707661525")
+ def url_for_version(self, version):
+ url = "https://www.kernel.org/pub/linux/kernel/v{0}.x/linux-{1}.tar.xz"
+ return url.format(version.up_to(1), version)
+
def setup_build_environment(self, env):
# This variable is used in the Makefile. If it is defined on the
# system, it can break the build if there is no build recipe for
diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py
index 905cfe1f831fff..8f975b70720c19 100644
--- a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py
+++ b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py
@@ -161,7 +161,7 @@ class LlvmAmdgpu(CMakePackage):
# OpenMP clang toolchain looks for bitcode files in llvm/bin/../lib
# as per 5.2.0 llvm code. It used to be llvm/bin/../lib/libdevice.
# Below patch is to look in the old path.
- patch("adjust-openmp-bitcode-directory-for-llvm-link.patch", when="@5.2.0:")
+ patch("adjust-openmp-bitcode-directory-for-llvm-link.patch", when="@5.2.0:5.6")
# Below patch is to set the flag -mcode-object-version=none until
# the below fix is available in device-libs release code.
@@ -227,6 +227,44 @@ class LlvmAmdgpu(CMakePackage):
when="@master +rocm-device-libs",
)
+ for d_version, d_shasum in [
+ ("5.6.1", "4de9a57c2092edf9398d671c8a2c60626eb7daf358caf710da70d9c105490221"),
+ ("5.6.0", "30875d440df9d8481ffb24d87755eae20a0efc1114849a72619ea954f1e9206c"),
+ ]:
+ resource(
+ name="hsa-runtime",
+ placement="hsa-runtime",
+ url=f"https://github.com/RadeonOpenCompute/ROCR-Runtime/archive/rocm-{d_version}.tar.gz",
+ sha256=d_shasum,
+ when="@{0}".format(d_version),
+ )
+ resource(
+ name="hsa-runtime",
+ placement="hsa-runtime",
+ git="https://github.com/RadeonOpenCompute/ROCR-Runtime.git",
+ branch="master",
+ when="@master",
+ )
+
+ for d_version, d_shasum in [
+ ("5.6.1", "0a85d84619f98be26ca7a32c71f94ed3c4e9866133789eabb451be64ce739300"),
+ ("5.6.0", "9396a7238b547ee68146c669b10b9d5de8f1d76527c649133c75d8076a185a72"),
+ ]:
+ resource(
+ name="comgr",
+ placement="comgr",
+ url=f"https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/archive/rocm-{d_version}.tar.gz",
+ sha256=d_shasum,
+ when="@{0}".format(d_version),
+ )
+ resource(
+ name="comgr",
+ placement="comgr",
+ git="https://github.com/RadeonOpenCompute/ROCm-CompilerSupport.git",
+ branch="amd-stg-open",
+ when="@master",
+ )
+
def cmake_args(self):
llvm_projects = ["clang", "lld", "clang-tools-extra", "compiler-rt"]
llvm_runtimes = []
@@ -292,6 +330,12 @@ def cmake_args(self):
if self.spec.satisfies("@5.5.0:"):
args.append("-DCLANG_DEFAULT_RTLIB=compiler-rt")
args.append("-DCLANG_DEFAULT_UNWINDLIB=libgcc")
+ if self.spec.satisfies("@5.6.0:"):
+ hsainc_path = os.path.join(self.stage.source_path, "hsa-runtime/src/inc")
+ comgrinc_path = os.path.join(self.stage.source_path, "comgr/lib/comgr/include")
+ args.append("-DSANITIZER_HSA_INCLUDE_PATH={0}".format(hsainc_path))
+ args.append("-DSANITIZER_COMGR_INCLUDE_PATH={0}".format(comgrinc_path))
+ args.append("-DSANITIZER_AMDGPU:Bool=ON")
return args
@run_after("install")
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index 7e110a248ecf0a..abf3dee6b49a36 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -35,6 +35,9 @@ class Llvm(CMakePackage, CudaPackage):
family = "compiler" # Used by lmod
version("main", branch="main")
+ version("17.0.4", sha256="46200b79f52a02fe26d0a43fd856ab6ceff49ab2a0b7c240ac4b700a6ada700c")
+ version("17.0.3", sha256="1e3d9d04fb5fbd8d0080042ad72c7e2a5c68788b014b186647a604dbbdd625d2")
+ version("17.0.2", sha256="dcba3eb486973dce45b6edfe618f3f29b703ae7e6ef9df65182fb50fb6fe4235")
version("17.0.1", sha256="d51b10be66c10a6a81f4c594b554ffbf1063ffbadcb810af37d1f88d6e0b49dd")
version("16.0.6", sha256="56b2f75fdaa95ad5e477a246d3f0d164964ab066b4619a01836ef08e475ec9d5")
version("16.0.5", sha256="e0fbca476693fcafa125bc71c8535587b6d9950293122b66b262bb4333a03942")
@@ -425,6 +428,12 @@ class Llvm(CMakePackage, CudaPackage):
when="@14:15",
)
+ # missing include
+ patch(
+ "https://github.com/llvm/llvm-project/commit/ff1681ddb303223973653f7f5f3f3435b48a1983.patch?full_index=1",
+ sha256="c6ca6b925f150e8644ce756023797b7f94c9619c62507231f979edab1c09af78",
+ when="@6:13",
+ )
# fix building of older versions of llvm with newer versions of glibc
for compiler_rt_as in ["project", "runtime"]:
with when("compiler-rt={0}".format(compiler_rt_as)):
@@ -974,7 +983,10 @@ def post_install(self):
ninja()
ninja("install")
if "+python" in self.spec:
- install_tree("llvm/bindings/python", python_platlib)
+ if spec.version < Version("17.0.0"):
+ # llvm bindings were removed in v17:
+ # https://releases.llvm.org/17.0.1/docs/ReleaseNotes.html#changes-to-the-python-bindings
+ install_tree("llvm/bindings/python", python_platlib)
if "+clang" in self.spec:
install_tree("clang/bindings/python", python_platlib)
diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py
index f4ca15a3a3eaee..af3b8a7b6cbf3b 100644
--- a/var/spack/repos/builtin/packages/lmod/package.py
+++ b/var/spack/repos/builtin/packages/lmod/package.py
@@ -23,6 +23,7 @@ class Lmod(AutotoolsPackage):
version("8.7.24", sha256="8451267652059b6507b652e1b563929ecf9b689ffb20830642085eb6a55bd539")
version("8.7.20", sha256="c04deff7d2ca354610a362459a7aa9a1c642a095e45a4b0bb2471bb3254e85f4")
+ version("8.7.18", sha256="b9912caca1557dd0c17113bceb1a4952e0ae75331d38df6361601db3f80366af")
version("8.7.2", sha256="5f44f3783496d2d597ced7531e1714c740dbb2883a7d16fde362135fb0b0fd96")
version("8.6.18", sha256="3db1c665c35fb8beb78c02e40d56accd361d82b715df70b2a995bcb10fbc2c80")
version("8.6.5", sha256="4a1823264187340be11104d82f8226905daa8149186fa8615dfc742b6d19c2ce")
diff --git a/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py b/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py
index fbcc63cdedc7f1..081e07fe6c2e10 100644
--- a/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py
+++ b/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py
@@ -28,8 +28,7 @@ class LuaLuajitOpenresty(LuaImplPackage):
description="add symlinks to make lua-luajit a drop-in lua replacement",
)
- provides("lua-lang@5.1", when="+lualinks")
- provides("luajit")
+ provides("luajit", "lua-lang@5.1", when="+lualinks")
lua_version_override = "5.1"
@run_after("install")
diff --git a/var/spack/repos/builtin/packages/lua-luajit/package.py b/var/spack/repos/builtin/packages/lua-luajit/package.py
index e8a1c124e09d63..dfe9f51cd0bd8b 100644
--- a/var/spack/repos/builtin/packages/lua-luajit/package.py
+++ b/var/spack/repos/builtin/packages/lua-luajit/package.py
@@ -33,8 +33,7 @@ class LuaLuajit(LuaImplPackage):
description="add symlinks to make lua-luajit a drop-in lua replacement",
)
- provides("lua-lang@5.1", when="+lualinks")
- provides("luajit")
+ provides("luajit", "lua-lang@5.1", when="+lualinks")
lua_version_override = "5.1"
conflicts("platform=darwin", msg="luajit not supported on MacOS, see lua-luajit-openresty")
diff --git a/var/spack/repos/builtin/packages/mapl/package.py b/var/spack/repos/builtin/packages/mapl/package.py
index 54cef1e40e96da..299a203f372b4d 100644
--- a/var/spack/repos/builtin/packages/mapl/package.py
+++ b/var/spack/repos/builtin/packages/mapl/package.py
@@ -36,6 +36,11 @@ class Mapl(CMakePackage):
version("develop", branch="develop")
version("main", branch="main")
+ version("2.42.0", sha256="9b6c3434919c14ef79004db5f76cb3dd8ef375584227101c230a372bb0470fdd")
+ version("2.41.2", sha256="73e1f0961f1b70e8159c0a2ce3499eb5158f3ca6d081f4c7826af7854ebfb44d")
+ version("2.41.1", sha256="2b384bd4fbaac1bff4ef009922c436c4ab54832172a5cd4d312ea44e32c1ae7c")
+ version("2.41.0", sha256="1142f9395e161174e3ec1654fba8bda1d0bd93edc7438b1927d8f5d7b42a0a86")
+ version("2.40.4", sha256="fb843b118d6e56cd4fc4b114c4d6f91956d5c8b3d9389ada56da1dfdbc58904f")
version("2.40.3", sha256="4b82a314c88a035fc2b91395750aa7950d6bee838786178ed16a3f39a1e45519")
version("2.40.2", sha256="7327f6f5bce6e09e7f7b930013fba86ee7cbfe8ed4c7c087fc9ab5acbf6640fd")
version("2.40.1", sha256="6f40f946fabea6ba73b0764092e495505d220455b191b4e454736a0a25ee058c")
@@ -116,6 +121,12 @@ class Mapl(CMakePackage):
# Versions later than 3.14 remove FindESMF.cmake
# from ESMA_CMake.
+ resource(
+ name="esma_cmake",
+ git="https://github.com/GEOS-ESM/ESMA_cmake.git",
+ tag="v3.36.0",
+ when="@2.42.0:",
+ )
resource(
name="esma_cmake",
git="https://github.com/GEOS-ESM/ESMA_cmake.git",
@@ -159,6 +170,12 @@ class Mapl(CMakePackage):
# Patch to add missing MPI Fortran target to top-level CMakeLists.txt
patch("mapl-2.12.3-mpi-fortran.patch", when="@:2.12.3")
+ # MAPL only compiles with MPICH from version 2.42.0 and higher so we conflict
+ # with older versions. Also, it's only been tested with MPICH 4, so we don't
+ # allow older MPICH
+ conflicts("mpich@:3")
+ conflicts("mpich@4", when="@:2.41")
+
variant("flap", default=False, description="Build with FLAP support", when="@:2.39")
variant("pflogger", default=True, description="Build with pFlogger support")
variant("fargparse", default=True, description="Build with fArgParse support")
diff --git a/var/spack/repos/builtin/packages/mercury/package.py b/var/spack/repos/builtin/packages/mercury/package.py
index 1c531277d79ce9..a2cdad6f1b6874 100644
--- a/var/spack/repos/builtin/packages/mercury/package.py
+++ b/var/spack/repos/builtin/packages/mercury/package.py
@@ -17,6 +17,7 @@ class Mercury(CMakePackage):
tags = ["e4s"]
version("master", branch="master", submodules=True)
+ version("2.3.1", sha256="36182d49f2db7e2b075240cab4aaa1d4ec87a7756450c87643ededd1e6f16104")
version("2.3.0", sha256="e9e62ce1bb2fd482f0e85ad75fa255d9750c6fed50ba441a03de93b3b8eae742")
version("2.2.0", sha256="e66490cf63907c3959bbb2932b5aaf51d96a481b17f0935f409f3a862eff97f6")
version("2.1.0", sha256="9a58437161e9273b1b1c484d2f1a477a89eea9afe84575415025d47656f3761b")
diff --git a/var/spack/repos/builtin/packages/metkit/package.py b/var/spack/repos/builtin/packages/metkit/package.py
index 784e028068daff..41246cf6da9ab6 100644
--- a/var/spack/repos/builtin/packages/metkit/package.py
+++ b/var/spack/repos/builtin/packages/metkit/package.py
@@ -15,6 +15,8 @@ class Metkit(CMakePackage):
maintainers("skosukhin")
+ version("1.10.17", sha256="1c525891d77ed28cd4c87b065ba4d1aea24d0905452c18d885ccbd567bbfc9b1")
+ version("1.10.2", sha256="a038050962aecffda27b755c40b0a6ed0db04a2c22cad3d8c93e6109c8ab4b34")
version("1.9.2", sha256="35d5f67196197cc06e5c2afc6d1354981e7c85a441df79a2fbd774e0c343b0b4")
version("1.7.0", sha256="8c34f6d8ea5381bd1bcfb22462349d03e1592e67d8137e76b3cecf134a9d338c")
@@ -26,8 +28,10 @@ class Metkit(CMakePackage):
depends_on("ecbuild@3.4:", type="build")
depends_on("eckit@1.16:")
+ depends_on("eckit@1.21:", when="@1.10:")
depends_on("eccodes@2.5:", when="+grib")
+ depends_on("eccodes@2.27:", when="@1.10.2: +grib")
depends_on("odc", when="+odb")
diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py
index 5fac0860ea1040..75eeda7b1fd51b 100644
--- a/var/spack/repos/builtin/packages/mfem/package.py
+++ b/var/spack/repos/builtin/packages/mfem/package.py
@@ -309,15 +309,19 @@ class Mfem(Package, CudaPackage, ROCmPackage):
depends_on("gslib@1.0.7:", when="@4.3.0:+gslib")
depends_on("suite-sparse", when="+suite-sparse")
depends_on("superlu-dist", when="+superlu-dist")
+ # Propagate 'cuda_arch' to 'superlu-dist' without propagating the '+cuda'
+ # variant so we can build 'mfem+cuda+superlu-dist ^superlu-dist~cuda':
for sm_ in CudaPackage.cuda_arch_values:
depends_on(
"superlu-dist+cuda cuda_arch={0}".format(sm_),
- when="+superlu-dist+cuda cuda_arch={0}".format(sm_),
+ when="+superlu-dist+cuda cuda_arch={0} ^superlu-dist+cuda".format(sm_),
)
+ # Propagate 'amdgpu_target' to 'superlu-dist' without propagating the '+rocm'
+ # variant so we can build 'mfem+rocm+superlu-dist ^superlu-dist~rocm':
for gfx in ROCmPackage.amdgpu_targets:
depends_on(
"superlu-dist+rocm amdgpu_target={0}".format(gfx),
- when="+superlu-dist+rocm amdgpu_target={0}".format(gfx),
+ when="+superlu-dist+rocm amdgpu_target={0} ^superlu-dist+rocm".format(gfx),
)
depends_on("strumpack@3.0.0:", when="+strumpack~shared")
depends_on("strumpack@3.0.0:+shared", when="+strumpack+shared")
@@ -919,10 +923,27 @@ def find_optional_library(name, prefix):
"CUDA_CXX=%s" % join_path(spec["cuda"].prefix, "bin", "nvcc"),
"CUDA_ARCH=sm_%s" % cuda_arch,
]
+ # Check if we are using a CUDA installation where the math libs are
+ # in a separate directory:
+ culibs = ["libcusparse"]
+ cuda_libs = find_optional_library(culibs, spec["cuda"].prefix)
+ if not cuda_libs:
+ p0 = os.path.realpath(join_path(spec["cuda"].prefix, "bin", "nvcc"))
+ p0 = os.path.dirname(p0)
+ p1 = os.path.dirname(p0)
+ while p1 != p0:
+ cuda_libs = find_optional_library(culibs, join_path(p1, "math_libs"))
+ if cuda_libs:
+ break
+ p0, p1 = p1, os.path.dirname(p1)
+ if not cuda_libs:
+ raise InstallError("Required CUDA libraries not found: %s" % culibs)
+ options += ["CUDA_LIB=%s" % ld_flags_from_library_list(cuda_libs)]
if "+rocm" in spec:
amdgpu_target = ",".join(spec.variants["amdgpu_target"].value)
options += ["HIP_CXX=%s" % spec["hip"].hipcc, "HIP_ARCH=%s" % amdgpu_target]
+ hip_headers = HeaderList([])
hip_libs = LibraryList([])
# To use a C++ compiler that supports -xhip flag one can use
# something like this:
@@ -933,7 +954,7 @@ def find_optional_library(name, prefix):
# hip_libs += find_libraries("libamdhip64", spec["hip"].prefix.lib)
if "^hipsparse" in spec: # hipsparse is needed @4.4.0:+rocm
hipsparse = spec["hipsparse"]
- options += ["HIP_OPT=%s" % hipsparse.headers.cpp_flags]
+ hip_headers += hipsparse.headers
hip_libs += hipsparse.libs
# Note: MFEM's defaults.mk wants to find librocsparse.* in
# $(HIP_DIR)/lib, so we set HIP_DIR to be $ROCM_PATH when using
@@ -943,11 +964,19 @@ def find_optional_library(name, prefix):
options += ["HIP_DIR=%s" % env["ROCM_PATH"]]
else:
options += ["HIP_DIR=%s" % hipsparse["rocsparse"].prefix]
+ if "^rocthrust" in spec and not spec["hip"].external:
+ # petsc+rocm needs the rocthrust header path
+ hip_headers += spec["rocthrust"].headers
+ if "^hipblas" in spec and not spec["hip"].external:
+ # superlu-dist+rocm needs the hipblas header path
+ hip_headers += spec["hipblas"].headers
if "%cce" in spec:
# We assume the proper Cray CCE module (cce) is loaded:
craylibs_path = env["CRAYLIBS_" + machine().upper()]
craylibs = ["libmodules", "libfi", "libcraymath", "libf", "libu", "libcsup"]
hip_libs += find_libraries(craylibs, craylibs_path)
+ if hip_headers:
+ options += ["HIP_OPT=%s" % hip_headers.cpp_flags]
if hip_libs:
options += ["HIP_LIB=%s" % ld_flags_from_library_list(hip_libs)]
diff --git a/var/spack/repos/builtin/packages/mfem/test_builds.sh b/var/spack/repos/builtin/packages/mfem/test_builds.sh
index 787f936be132d2..cb658dd59cc468 100755
--- a/var/spack/repos/builtin/packages/mfem/test_builds.sh
+++ b/var/spack/repos/builtin/packages/mfem/test_builds.sh
@@ -31,6 +31,9 @@ petsc_spec_rocm='^petsc+rocm+mumps'
strumpack_spec='^strumpack~slate~openmp~cuda'
strumpack_cuda_spec='^strumpack+cuda~slate~openmp'
strumpack_rocm_spec='^strumpack+rocm~slate~openmp~cuda'
+# superlu specs with cuda and rocm
+superlu_cuda_spec='^superlu-dist+cuda'
+superlu_rocm_spec='^superlu-dist+rocm'
builds=(
# preferred version:
@@ -136,7 +139,7 @@ builds_cuda=(
+superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \
+sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \
^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \
- '"$petsc_spec_cuda $conduit_spec"
+ '"$superlu_cuda_spec $petsc_spec_cuda $conduit_spec"
# hypre with cuda:
# TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
@@ -148,7 +151,7 @@ builds_cuda=(
+superlu-dist+strumpack+suite-sparse+gslib \
+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \
^raja+cuda+openmp ^hiop+shared ^hypre+cuda \
- '" $strumpack_cuda_spec $conduit_spec"
+ '" $strumpack_cuda_spec $superlu_cuda_spec $conduit_spec"
#
# same builds as above with ${mfem_dev}
@@ -173,7 +176,7 @@ builds_cuda=(
+superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \
+sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \
^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \
- '"$petsc_spec_cuda $conduit_spec"
+ '"$superlu_cuda_spec $petsc_spec_cuda $conduit_spec"
# hypre with cuda:
# TODO: restore '+libceed' when the libCEED CUDA unit tests take less time.
@@ -185,7 +188,7 @@ builds_cuda=(
+superlu-dist+strumpack+suite-sparse+gslib \
+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \
^raja+cuda+openmp ^hiop+shared ^hypre+cuda \
- '"$strumpack_cuda_spec $conduit_spec"
+ '"$strumpack_cuda_spec $superlu_cuda_spec $conduit_spec"
)
@@ -208,7 +211,7 @@ builds_rocm=(
+superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \
+sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
^raja+rocm~openmp ^occa~cuda'" $strumpack_rocm_spec"' \
- '"$petsc_spec_rocm $conduit_spec"
+ '"$superlu_rocm_spec $petsc_spec_rocm $conduit_spec"
# hypre with rocm:
# TODO: add back "+petsc+slepc $petsc_spec_rocm" when it works.
@@ -220,7 +223,7 @@ builds_rocm=(
+superlu-dist+strumpack+suite-sparse+gslib \
+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \
^raja+rocm~openmp ^occa~cuda ^hypre+rocm \
- '"$strumpack_rocm_spec $conduit_spec"
+ '"$strumpack_rocm_spec $superlu_rocm_spec $conduit_spec"
#
# same builds as above with ${mfem_dev}
diff --git a/var/spack/repos/builtin/packages/mgard/package.py b/var/spack/repos/builtin/packages/mgard/package.py
index b58f4c0bba0aa6..411dd0c9b9c6e2 100644
--- a/var/spack/repos/builtin/packages/mgard/package.py
+++ b/var/spack/repos/builtin/packages/mgard/package.py
@@ -59,6 +59,12 @@ class Mgard(CMakePackage, CudaPackage):
)
conflicts("%gcc@:7", when="@2022-11-18:", msg="requires std::optional and other c++17 things")
+ def flag_handler(self, name, flags):
+ if name == "cxxflags":
+ if self.spec.satisfies("@2020-10-01 %oneapi@2023:"):
+ flags.append("-Wno-error=c++11-narrowing")
+ return (flags, None, None)
+
def cmake_args(self):
spec = self.spec
args = ["-DBUILD_TESTING=OFF"]
diff --git a/var/spack/repos/builtin/packages/migraphx/package.py b/var/spack/repos/builtin/packages/migraphx/package.py
index 81bf1bff2b3818..5373703807223b 100644
--- a/var/spack/repos/builtin/packages/migraphx/package.py
+++ b/var/spack/repos/builtin/packages/migraphx/package.py
@@ -131,6 +131,7 @@ def url_for_version(self, version):
depends_on("py-pybind11", type="build", when="@:4.0.0")
depends_on("py-pybind11@2.6:", type="build", when="@4.1.0:")
depends_on("pkgconfig", type="build", when="@5.3.0:")
+ depends_on("abseil-cpp")
for ver in [
"3.5.0",
@@ -198,6 +199,7 @@ def cmake_args(self):
args += self.cmake_python_hints
if "@5.5.0:" in self.spec:
args.append(self.define("CMAKE_CXX_FLAGS", "-I{0}".format(abspath)))
+ args.append(self.define("MIGRAPHX_ENABLE_PYTHON", "OFF"))
return args
def test(self):
diff --git a/var/spack/repos/builtin/packages/millepede/package.py b/var/spack/repos/builtin/packages/millepede/package.py
index 6cc9b5a9ada53d..d086f8a75993f6 100644
--- a/var/spack/repos/builtin/packages/millepede/package.py
+++ b/var/spack/repos/builtin/packages/millepede/package.py
@@ -14,8 +14,6 @@ class Millepede(MakefilePackage):
homepage = "https://gitlab.desy.de/claus.kleinwort/millepede-ii"
url = "https://gitlab.desy.de/claus.kleinwort/millepede-ii/-/archive/V04-11-01/millepede-ii-V04-11-01.tar.gz"
- maintainers("iarspider")
-
parallel = False
version("04-13-03", sha256="669a6e46a6f02ba3c78b2760e2ffb2c90d25b582ccd1a5c0770eef81c7bcbbe9")
diff --git a/var/spack/repos/builtin/packages/mimalloc/package.py b/var/spack/repos/builtin/packages/mimalloc/package.py
index fed6e5bf783543..a88aaed0db8463 100644
--- a/var/spack/repos/builtin/packages/mimalloc/package.py
+++ b/var/spack/repos/builtin/packages/mimalloc/package.py
@@ -14,6 +14,8 @@ class Mimalloc(CMakePackage):
git = "https://github.com/microsoft/mimalloc.git"
maintainers("msimberg")
+ license("MIT")
+
version("dev-slice", branch="dev-slice")
version("dev", branch="dev")
version("master", branch="master")
diff --git a/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch
new file mode 100644
index 00000000000000..364a4a403651c4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch
@@ -0,0 +1,13 @@
+diff --git a/rocAL/rocAL/CMakeLists.txt b/rocAL/rocAL/CMakeLists.txt
+index 7ae8cb8..195f387 100644
+--- a/rocAL/rocAL/CMakeLists.txt
++++ b/rocAL/rocAL/CMakeLists.txt
+@@ -122,6 +122,8 @@ if(NOT Threads_FOUND)
+ endif()
+
+ if(${BUILD_ROCAL})
++ find_path(HALF_INCLUDE_DIR half.hpp)
++ include_directories(${HALF_INCLUDE_DIR})
+ # AMD OpenVX & VX_RPP
+ set(LINK_LIBRARY_LIST ${LINK_LIBRARY_LIST} openvx vx_rpp)
+ # AMD RPP
diff --git a/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch
new file mode 100644
index 00000000000000..2e935e9cee2417
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch
@@ -0,0 +1,21 @@
+diff --git a/rocAL/rocAL/CMakeLists.txt b/rocAL/rocAL/CMakeLists.txt
+index bb28810..3c97eab 100644
+--- a/rocAL/rocAL/CMakeLists.txt
++++ b/rocAL/rocAL/CMakeLists.txt
+@@ -39,6 +39,8 @@ find_package(Boost COMPONENTS ${BOOST_COMPONENTS} QUIET)
+ set(THREADS_PREFER_PTHREAD_FLAG ON)
+ find_package(Threads QUIET)
+
++find_path(HALF_INCLUDE_DIR half.hpp)
++
+ if( GPU_SUPPORT AND "${BACKEND}" STREQUAL "HIP")
+ if(NOT DEFINED HIP_PATH)
+ if(NOT DEFINED ENV{HIP_PATH})
+@@ -120,6 +122,7 @@ if(NOT Threads_FOUND)
+ endif()
+
+ if(${BUILD_ROCAL})
++ include_directories(${HALF_INCLUDE_DIR})
+ # AMD OpenVX & RPP
+ include_directories(${AMDRPP_INCLUDE_DIRS})
+ set(LINK_LIBRARY_LIST ${LINK_LIBRARY_LIST} openvx vx_rpp)
diff --git a/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch b/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch
new file mode 100644
index 00000000000000..c3aec5597982bc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch
@@ -0,0 +1,62 @@
+diff --git a/model_compiler/python/nnir_to_clib.py b/model_compiler/python/nnir_to_clib.py
+index b688094..26fcfe3 100644
+--- a/model_compiler/python/nnir_to_clib.py
++++ b/model_compiler/python/nnir_to_clib.py
+@@ -151,6 +151,10 @@ if (OPENVX_BACKEND_OPENCL_FOUND)
+ include_directories (${OpenCL_INCLUDE_DIRS} ${OpenCL_INCLUDE_DIRS}/Headers )
+ endif()
+
++find_path(HALF_INCLUDE_DIR half.hpp)
++message(STATUS "HALF_INCLUDE_DIR: ${HALF_INCLUDE_DIR}")
++include_directories(${HALF_INCLUDE_DIR})
++
+ find_package(OpenCV QUIET)
+ include_directories (/opt/rocm/include/mivisionx)
+ include_directories (${PROJECT_SOURCE_DIR}/lib)
+diff --git a/samples/inference/mv_objdetect/CMakeLists.txt b/samples/inference/mv_objdetect/CMakeLists.txt
+index 9b92b84..d82b71e 100644
+--- a/samples/inference/mv_objdetect/CMakeLists.txt
++++ b/samples/inference/mv_objdetect/CMakeLists.txt
+@@ -50,7 +50,10 @@ if (OPENVX_BACKEND_OPENCL_FOUND)
+ include_directories (${OpenCL_INCLUDE_DIRS} ${OpenCL_INCLUDE_DIRS}/Headers )
+ endif()
+
+-include_directories (${ROCM_PATH}/include/mivisionx ${PROJECT_SOURCE_DIR} )
++find_path(HALF_INCLUDE_DIR half.hpp)
++message(STATUS "HALF_INCLUDE_DIR: ${HALF_INCLUDE_DIR}")
++
++include_directories (${ROCM_PATH}/include/mivisionx ${PROJECT_SOURCE_DIR} ${HALF_INCLUDE_DIR} )
+ link_directories (${ROCM_PATH}/lib ${PROJECT_SOURCE_DIR}/lib)
+ option (USE_POSTPROC "Use postprocessing module implementation" ON)
+ set(SOURCES mvobjdetect.cpp mvdeploy_api.cpp visualize.cpp)
+diff --git a/utilities/rocAL/rocAL_unittests/CMakeLists.txt b/utilities/rocAL/rocAL_unittests/CMakeLists.txt
+index 6500003..20de035 100644
+--- a/utilities/rocAL/rocAL_unittests/CMakeLists.txt
++++ b/utilities/rocAL/rocAL_unittests/CMakeLists.txt
+@@ -43,9 +43,10 @@ include(GNUInstallDirs)
+
+ list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../amd_openvx/cmake)
+
++find_path(HALF_INCLUDE_DIR half.hpp)
+ find_package(OpenCV QUIET)
+ find_package(AMDRPP QUIET)
+-include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal)
++include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal ${HALF_INCLUDE_DIR})
+ link_directories(${ROCM_PATH}/lib/)
+ file(GLOB My_Source_Files ./*.cpp)
+ add_executable(${PROJECT_NAME} ${My_Source_Files})
+diff --git a/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt b/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt
+index bd64a5b..3aa6172 100644
+--- a/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt
++++ b/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt
+@@ -46,8 +46,8 @@ list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../amd_openvx/cmake)
+
+ find_package(OpenCV QUIET)
+ find_package(AMDRPP QUIET)
+-
+-include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal)
++find_path(HALF_INCLUDE_DIR half.hpp)
++include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal ${HALF_INCLUDE_DIR})
+ link_directories(${ROCM_PATH}/lib/)
+ file(GLOB My_Source_Files ./*.cpp)
+ add_executable(${PROJECT_NAME} ${My_Source_Files})
diff --git a/var/spack/repos/builtin/packages/mivisionx/package.py b/var/spack/repos/builtin/packages/mivisionx/package.py
index bd1a40a8726aeb..f30ae2a9c25599 100644
--- a/var/spack/repos/builtin/packages/mivisionx/package.py
+++ b/var/spack/repos/builtin/packages/mivisionx/package.py
@@ -116,8 +116,19 @@ def url_for_version(self, version):
variant("opencl", default=False, description="Use OPENCL as the backend")
variant("hip", default=True, description="Use HIP as backend")
+ variant("add_tests", default=False, description="add tests and samples folder")
+ patch("0001-add-half-include-path.patch", when="@5.5")
+ patch("0001-add-half-include-path-5.6.patch", when="@5.6:")
+ patch("0002-add-half-include-path-for-tests.patch", when="@5.5: +add_tests")
+
+ patch(
+ "https://github.com/GPUOpen-ProfessionalCompute-Libraries/MIVisionX/commit/da24882438b91a0ae1feee23206b75c1a1256887.patch?full_index=1",
+ sha256="41caff199224f904ef5dc2cd9c5602d6cfa41eba6af0fcc782942a09dd202ab4",
+ when="@5.6",
+ )
conflicts("+opencl", when="@5.6.0:")
+ conflicts("+add_tests", when="@:5.4")
def patch(self):
if self.spec.satisfies("@4.2.0"):
@@ -179,10 +190,90 @@ def patch(self):
"amd_openvx_extensions/amd_nn/nn_hip/CMakeLists.txt",
string=True,
)
+ if self.spec.satisfies("@5.5.0: + hip"):
+ filter_file(
+ "${ROCM_PATH}/llvm/bin/clang++",
+ "{0}/bin/clang++".format(self.spec["llvm-amdgpu"].prefix),
+ "rocAL/rocAL/rocAL_hip/CMakeLists.txt",
+ string=True,
+ )
+ if self.spec.satisfies("+add_tests"):
+ filter_file(
+ "${ROCM_PATH}/include/mivisionx",
+ "{0}/include/mivisionx".format(self.spec.prefix),
+ "tests/amd_migraphx_tests/mnist/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/lib",
+ "{0}/lib".format(self.spec.prefix),
+ "tests/amd_migraphx_tests/mnist/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/include/mivisionx",
+ "{0}/include/mivisionx".format(self.spec.prefix),
+ "tests/amd_migraphx_tests/resnet50/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/lib",
+ "{0}/lib".format(self.spec.prefix),
+ "tests/amd_migraphx_tests/resnet50/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/include/mivisionx",
+ "{0}/include/mivisionx".format(self.spec.prefix),
+ "samples/inference/mv_objdetect/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/lib",
+ "{0}/lib".format(self.spec.prefix),
+ "samples/inference/mv_objdetect/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/include/mivisionx",
+ "{0}/include/mivisionx".format(self.spec.prefix),
+ "model_compiler/python/nnir_to_clib.py",
+ string=True,
+ )
+ filter_file(
+ "/opt/rocm",
+ "{0}".format(self.spec.prefix),
+ "model_compiler/python/nnir_to_clib.py",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal",
+ "{0}/include/mivisionx/rocal".format(self.spec.prefix),
+ "utilities/rocAL/rocAL_unittests/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/lib",
+ "{0}/lib".format(self.spec.prefix),
+ "utilities/rocAL/rocAL_unittests/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal",
+ "{0}/include/mivisionx/rocal".format(self.spec.prefix),
+ "utilities/rocAL/rocAL_video_unittests/CMakeLists.txt",
+ string=True,
+ )
+ filter_file(
+ "${ROCM_PATH}/lib",
+ "{0}/lib".format(self.spec.prefix),
+ "utilities/rocAL/rocAL_video_unittests/CMakeLists.txt",
+ string=True,
+ )
depends_on("cmake@3.5:", type="build")
depends_on("ffmpeg@:4", type="build", when="@:5.3")
- depends_on("ffmpeg@4.4:", type="build", when="@5.4:")
+ depends_on("ffmpeg@4.4", type="build", when="@5.4:")
depends_on("protobuf@:3", type="build")
depends_on(
"opencv@:3.4"
@@ -203,7 +294,17 @@ def patch(self):
depends_on("miopen-opencl@3.5.0", when="@1.7+opencl")
depends_on("miopengemm@1.1.6", when="@1.7+opencl")
depends_on("openssl", when="@4.0.0:")
- depends_on("libjpeg-turbo", type="build")
+ depends_on("libjpeg-turbo@2.0.6+partial_decoder", type="build")
+ depends_on("rpp", when="@5.5:")
+ depends_on("lmdb", when="@5.5:")
+ depends_on("py-setuptools", when="@5.6:")
+ depends_on("py-wheel", when="@5.6:")
+ depends_on("py-pybind11", when="@5.6:")
+ depends_on("py-google-api-python-client", when="+add_tests")
+ depends_on("py-protobuf@3.20.3", type=("build", "run"), when="+add_tests")
+ depends_on("py-future", when="+add_tests")
+ depends_on("py-numpy", when="+add_tests")
+ depends_on("py-pytz", when="+add_tests")
conflicts("^cmake@3.22:", when="@:5.0.0")
# need to choose atleast one backend and both cannot be set
@@ -265,11 +366,15 @@ def patch(self):
depends_on("miopen-hip@" + ver, when="@" + ver)
for ver in ["5.3.3", "5.4.0", "5.4.3", "5.5.0", "5.5.1", "5.6.0", "5.6.1"]:
depends_on("migraphx@" + ver, when="@" + ver)
+ depends_on("hip@" + ver, when="@" + ver)
for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]:
depends_on("rocm-core@" + ver, when="@" + ver)
depends_on("python@3.5:", type="build")
+ def setup_run_environment(self, env):
+ env.set("MIVISIONX_MODEL_COMPILER_PATH", self.spec.prefix.libexec.mivisionx.model_compiler)
+
def flag_handler(self, name, flags):
spec = self.spec
protobuf = spec["protobuf"].prefix.include
@@ -290,4 +395,24 @@ def cmake_args(self):
args.append(self.define("HIP_PATH", spec["hip"].prefix))
if self.spec.satisfies("~hip~opencl"):
args.append(self.define("BACKEND", "CPU"))
+ if self.spec.satisfies("@5.5:"):
+ args.append(
+ self.define("AMDRPP_LIBRARIES", "{0}/lib/librpp.so".format(spec["rpp"].prefix))
+ )
+ args.append(
+ self.define("AMDRPP_INCLUDE_DIRS", "{0}/include/rpp".format(spec["rpp"].prefix))
+ )
+ args.append(
+ self.define(
+ "TurboJpeg_LIBRARIES_DIRS", "{0}/lib64".format(spec["libjpeg-turbo"].prefix)
+ )
+ )
+ args.append(self.define("CMAKE_INSTALL_PREFIX_PYTHON", spec.prefix))
return args
+
+ @run_after("install")
+ def add_tests(self):
+ if self.spec.satisfies("+add_tests"):
+ install_tree("tests", self.spec.prefix.tests)
+ install_tree("samples", self.spec.prefix.samples)
+ install_tree("utilities", self.spec.prefix.utilities)
diff --git a/var/spack/repos/builtin/packages/molgw/package.py b/var/spack/repos/builtin/packages/molgw/package.py
index 17e5283c920558..91026c7abe0b00 100644
--- a/var/spack/repos/builtin/packages/molgw/package.py
+++ b/var/spack/repos/builtin/packages/molgw/package.py
@@ -78,7 +78,7 @@ def edit(self, spec, prefix):
flags["PREFIX"] = prefix
# Set LAPACK and SCALAPACK
- if "^mkl" in spec:
+ if spec["lapack"].name not in INTEL_MATH_LIBRARIES:
flags["LAPACK"] = self._get_mkl_ld_flags(spec)
else:
flags["LAPACK"] = spec["lapack"].libs.ld_flags + " " + spec["blas"].libs.ld_flags
@@ -105,7 +105,7 @@ def edit(self, spec, prefix):
if "+scalapack" in spec:
flags["CPPFLAGS"] = flags.get("CPPFLAGS", "") + " -DHAVE_SCALAPACK -DHAVE_MPI "
- if "^mkl" in spec:
+ if spec["lapack"].name in INTEL_MATH_LIBRARIES:
flags["CPPFLAGS"] = flags.get("CPPFLAGS", "") + " -DHAVE_MKL "
# Write configuration file
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index a68c7292b5d687..aa2d89e5b5e9aa 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -55,7 +55,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage):
variant("hydra", default=True, description="Build the hydra process manager")
variant("romio", default=True, description="Enable ROMIO MPI I/O implementation")
variant("verbs", default=False, description="Build support for OpenFabrics verbs.")
- variant("slurm", default=False, description="Enable SLURM support")
+ variant("slurm", default=False, description="Enable Slurm support")
variant("wrapperrpath", default=True, description="Enable wrapper rpath")
variant(
"pmi",
@@ -70,16 +70,14 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage):
description="""Abstract Device Interface (ADI)
implementation. The ch4 device is in experimental state for versions
before 3.4.""",
- values=("ch3", "ch4"),
+ values=("ch3", "ch4", "ch3:sock"),
multi=False,
)
variant(
"netmod",
default="ofi",
description="""Network module. Only single netmod builds are
-supported. For ch3 device configurations, this presumes the
-ch3:nemesis communication channel. ch3:sock is not supported by this
-spack package at this time.""",
+supported, and netmod is ignored if device is ch3:sock.""",
values=("tcp", "mxm", "ofi", "ucx"),
multi=False,
)
@@ -121,6 +119,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage):
depends_on("yaksa+cuda", when="+cuda ^yaksa")
depends_on("yaksa+rocm", when="+rocm ^yaksa")
conflicts("datatype-engine=yaksa", when="device=ch3")
+ conflicts("datatype-engine=yaksa", when="device=ch3:sock")
variant(
"hcoll",
@@ -135,8 +134,10 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage):
# overriding the variant from CudaPackage.
conflicts("+cuda", when="@:3.3")
conflicts("+cuda", when="device=ch3")
+ conflicts("+cuda", when="device=ch3:sock")
conflicts("+rocm", when="@:4.0")
conflicts("+rocm", when="device=ch3")
+ conflicts("+rocm", when="device=ch3:sock")
conflicts("+cuda", when="+rocm", msg="CUDA must be disabled to support ROCm")
provides("mpi@:4.0")
@@ -164,7 +165,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage):
patch(
"https://github.com/pmodels/mpich/commit/8a851b317ee57366cd15f4f28842063d8eff4483.patch?full_index=1",
sha256="d2dafc020941d2d8cab82bc1047e4a6a6d97736b62b06e2831d536de1ac01fd0",
- when="@3.3:3.3.99 +hwloc",
+ when="@3.3 +hwloc",
)
# fix MPI_Barrier segmentation fault
@@ -249,14 +250,14 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage):
# building from git requires regenerating autotools files
depends_on("automake@1.15:", when="@develop", type="build")
depends_on("libtool@2.4.4:", when="@develop", type="build")
- depends_on("m4", when="@develop", type="build"),
+ depends_on("m4", when="@develop", type="build")
depends_on("autoconf@2.67:", when="@develop", type="build")
# building with "+hwloc' also requires regenerating autotools files
- depends_on("automake@1.15:", when="@3.3:3.3.99 +hwloc", type="build")
- depends_on("libtool@2.4.4:", when="@3.3:3.3.99 +hwloc", type="build")
- depends_on("m4", when="@3.3:3.3.99 +hwloc", type="build"),
- depends_on("autoconf@2.67:", when="@3.3:3.3.99 +hwloc", type="build")
+ depends_on("automake@1.15:", when="@3.3 +hwloc", type="build")
+ depends_on("libtool@2.4.4:", when="@3.3 +hwloc", type="build")
+ depends_on("m4", when="@3.3 +hwloc", type="build")
+ depends_on("autoconf@2.67:", when="@3.3 +hwloc", type="build")
# MPICH's Yaksa submodule requires python to configure
depends_on("python@3.0:", when="@develop", type="build")
@@ -271,6 +272,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage):
conflicts("netmod=tcp", when="device=ch4")
conflicts("pmi=pmi2", when="device=ch3 netmod=ofi")
conflicts("pmi=pmix", when="device=ch3")
+ conflicts("pmi=pmix", when="device=ch3:sock")
conflicts("pmi=pmix", when="+hydra")
conflicts("pmi=cray", when="+hydra")
@@ -462,7 +464,7 @@ def setup_dependent_package(self, module, dependent_spec):
def autoreconf(self, spec, prefix):
"""Not needed usually, configure should be already there"""
# If configure exists nothing needs to be done
- if os.path.exists(self.configure_abs_path) and not spec.satisfies("@3.3:3.3.99 +hwloc"):
+ if os.path.exists(self.configure_abs_path) and not spec.satisfies("@3.3 +hwloc"):
return
# Else bootstrap with autotools
bash = which("bash")
@@ -556,7 +558,10 @@ def configure_args(self):
elif "device=ch3" in spec:
device_config = "--with-device=ch3:nemesis:"
- if "netmod=ucx" in spec:
+ # Do not apply any netmod if device is ch3:sock
+ if "device=ch3:sock" in spec:
+ device_config = "--with-device=ch3:sock"
+ elif "netmod=ucx" in spec:
device_config += "ucx"
elif "netmod=ofi" in spec:
device_config += "ofi"
diff --git a/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch b/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch
new file mode 100644
index 00000000000000..667e412acc1cee
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch
@@ -0,0 +1,26 @@
+--- ./configure.orig 2023-11-12 14:48:25.802025918 -0800
++++ ./configure 2023-11-12 14:48:56.177057419 -0800
+@@ -571,10 +571,7 @@
+ try:
+ flags = []
+ for flag in shlex.split (execute ([ 'pkg-config' ] + pkg_config_flags.split(), RunError)[1]):
+- if flag.startswith ('-I'):
+- flags += [ '-idirafter', flag[2:] ]
+- else:
+- flags += [ flag ]
++ flags += [ flag ]
+ return flags
+ except Exception:
+ log('error running "pkg-config ' + pkg_config_flags + '"\n\n')
+@@ -1323,10 +1320,7 @@
+ for entry in qt:
+ if entry[0] != '$' and not entry == '-I.':
+ entry = entry.replace('\"','').replace("'",'')
+- if entry.startswith('-I'):
+- qt_cflags += [ '-idirafter', entry[2:] ]
+- else:
+- qt_cflags += [ entry ]
++ qt_cflags += [ entry ]
+
+ qt = qt_ldflags + qt_libs
+ qt_ldflags = []
diff --git a/var/spack/repos/builtin/packages/mrtrix3/package.py b/var/spack/repos/builtin/packages/mrtrix3/package.py
index 2a59d7ec22a8e9..53bf19ae53065d 100644
--- a/var/spack/repos/builtin/packages/mrtrix3/package.py
+++ b/var/spack/repos/builtin/packages/mrtrix3/package.py
@@ -17,21 +17,26 @@ class Mrtrix3(Package):
git = "https://github.com/MRtrix3/mrtrix3.git"
version(
- "3.0.3",
- sha256="6ec7d5a567d8d7338e85575a74565189a26ec8971cbe8fb24a49befbc446542e",
+ "3.0.4",
+ sha256="f1d1aa289cfc3e46e3a8eca93594b23d061c6d50a0cd03727433a7e2cd14f71a",
preferred=True,
)
+ version("3.0.3", sha256="6ec7d5a567d8d7338e85575a74565189a26ec8971cbe8fb24a49befbc446542e")
version("2017-09-25", commit="72aca89e3d38c9d9e0c47104d0fb5bd2cbdb536d")
depends_on("python@2.7:", type=("build", "run"))
depends_on("py-numpy", type=("build", "run"))
depends_on("glu")
depends_on("qt+opengl@4.7:")
- depends_on("eigen")
+ # MRTrix <= 3.0.3 can't build with eigen >= 3.4 due to conflicting declarations
+ depends_on("eigen@3.3", when="@3.0.3")
+ depends_on("eigen@3.4:", when="@3.0.4:")
depends_on("zlib-api")
depends_on("libtiff")
depends_on("fftw")
+ patch("fix_includes.patch", when="@3.0.3:3.0.4")
+
conflicts("%gcc@7:", when="@2017-09-25") # MRtrix3/mrtrix3#1041
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py
index 32bcaf3a812ac1..1befb5acef9b84 100644
--- a/var/spack/repos/builtin/packages/mumps/package.py
+++ b/var/spack/repos/builtin/packages/mumps/package.py
@@ -16,6 +16,8 @@ class Mumps(Package):
homepage = "https://graal.ens-lyon.fr/MUMPS/index.php"
url = "https://graal.ens-lyon.fr/MUMPS/MUMPS_5.5.1.tar.gz"
+ maintainers("jcortial-safran")
+
version("5.5.1", sha256="1abff294fa47ee4cfd50dfd5c595942b72ebfcedce08142a75a99ab35014fa15")
version("5.5.0", sha256="e54d17c5e42a36c40607a03279e0704d239d71d38503aab68ef3bfe0a9a79c13")
version("5.4.1", sha256="93034a1a9fe0876307136dcde7e98e9086e199de76f1c47da822e7d4de987fa8")
@@ -223,7 +225,7 @@ def write_makefile_inc(self):
# As of version 5.2.0, MUMPS is able to take advantage
# of the GEMMT BLAS extension. MKL and amdblis are the only
# known BLAS implementation supported.
- if "@5.2.0: ^mkl" in self.spec:
+ if self.spec["blas"].name in INTEL_MATH_LIBRARIES and self.spec.satisfies("@5.2.0:"):
optf.append("-DGEMMT_AVAILABLE")
if "@5.2.0: ^amdblis@3.0:" in self.spec:
diff --git a/var/spack/repos/builtin/packages/must/package.py b/var/spack/repos/builtin/packages/must/package.py
index 1b3b0b152dc52b..5d6c36bb044e84 100644
--- a/var/spack/repos/builtin/packages/must/package.py
+++ b/var/spack/repos/builtin/packages/must/package.py
@@ -21,11 +21,8 @@ class Must(CMakePackage):
version("1.9.0", sha256="24998f4ca6bce718d69347de90798600f2385c21266c2d1dd39a87dd8bd1fba4")
version("1.8.0", sha256="9754fefd2e4c8cba812f8b56a5dd929bc84aa599b2509305e1eb8518be0a8a39")
- version("1.8.0-rc1", sha256="49fd2487fbd1aa41f4252c7e37efebd3f6ff48218c88e82f34b88d59348fe406")
- version(
- "1.8-preview", sha256="67b4b061db7a893e22a6610e2085072716d11738bc6cc3cb3ffd60d6833e8bad"
- )
version("1.7.2", sha256="616c54b7487923959df126ac4b47ae8c611717d679fe7ec29f57a89bf0e2e0d0")
+
variant("test", default=False, description="Enable must internal tests")
variant("tsan", default=True, description="Enable thread sanitizer")
variant("graphviz", default=False, description="Use to generate graphs")
diff --git a/var/spack/repos/builtin/packages/nccl/package.py b/var/spack/repos/builtin/packages/nccl/package.py
index 21db9dad2cb739..51f10ca7eec3a7 100644
--- a/var/spack/repos/builtin/packages/nccl/package.py
+++ b/var/spack/repos/builtin/packages/nccl/package.py
@@ -17,6 +17,7 @@ class Nccl(MakefilePackage, CudaPackage):
maintainers("adamjstewart")
libraries = ["libnccl.so"]
+ version("2.19.3-1", sha256="1c5474553afedb88e878c772f13d6f90b9226b3f2971dfa6f873adb9443100c2")
version("2.18.5-1", sha256="16ac98f3e926c024ce48e10ab220e19ce734adc48c423cfd55ad6f509bd1179f")
version("2.18.3-1", sha256="6477d83c9edbb34a0ebce6d751a1b32962bc6415d75d04972b676c6894ceaef9")
version("2.18.1-1", sha256="0e4ede5cf8df009bff5aeb3a9f194852c03299ae5664b5a425b43358e7a9eef2")
diff --git a/var/spack/repos/builtin/packages/ncview/package.py b/var/spack/repos/builtin/packages/ncview/package.py
index 8526b7807020a8..c706245e94d491 100644
--- a/var/spack/repos/builtin/packages/ncview/package.py
+++ b/var/spack/repos/builtin/packages/ncview/package.py
@@ -26,9 +26,10 @@ def patch(self):
patched_file = "configure"
with keep_modification_time(patched_file):
filter_file(
- "if test x\$CC_TEST_SAME != x\$NETCDF_CC_TEST_SAME; then", # noqa: W605
+ "if test x$CC_TEST_SAME != x$NETCDF_CC_TEST_SAME; then",
"if false; then",
patched_file,
+ string=True,
)
def url_for_version(self, version):
diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py
index db8bd4a66c63ef..ff59c4f539f0f2 100644
--- a/var/spack/repos/builtin/packages/neovim/package.py
+++ b/var/spack/repos/builtin/packages/neovim/package.py
@@ -16,7 +16,9 @@ class Neovim(CMakePackage):
maintainers("albestro", "trws")
version("master", branch="master")
- version("stable", tag="stable", commit="7d4bba7aa7a4a3444919ea7a3804094c290395ef")
+ version("stable", tag="stable", commit="d772f697a281ce9c58bf933997b87c7f27428a60")
+ version("0.9.4", sha256="148356027ee8d586adebb6513a94d76accc79da9597109ace5c445b09d383093")
+ version("0.9.2", sha256="06b8518bad4237a28a67a4fbc16ec32581f35f216b27f4c98347acee7f5fb369")
version("0.9.1", sha256="8db17c2a1f4776dcda00e59489ea0d98ba82f7d1a8ea03281d640e58d8a3a00e")
version("0.9.0", sha256="39d79107c54d2f3babcad2cd157c399241c04f6e75e98c18e8afaf2bb5e82937")
version("0.8.3", sha256="adf45ff160e1d89f519b6114732eba03485ae469beb27919b0f7a4f6b44233c1")
@@ -136,7 +138,7 @@ class Neovim(CMakePackage):
# Support for `libvterm@0.2:` has been added in neovim@0.8.0
# term: Add support for libvterm >= 0.2 (https://github.com/neovim/neovim/releases/tag/v0.8.0)
# https://github.com/neovim/neovim/issues/16217#issuecomment-958590493
- conflicts("^libvterm@0.2:", when="@:0.7")
+ conflicts("libvterm@0.2:", when="@:0.7")
@when("^lua")
def cmake_args(self):
diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py
index b00ad8938127e3..35cdb368819dfb 100644
--- a/var/spack/repos/builtin/packages/nettle/package.py
+++ b/var/spack/repos/builtin/packages/nettle/package.py
@@ -24,6 +24,7 @@ class Nettle(AutotoolsPackage, GNUMirrorPackage):
depends_on("gmp")
depends_on("m4", type="build")
+ depends_on("openssl")
def configure_args(self):
return ["CFLAGS={0}".format(self.compiler.c99_flag)]
diff --git a/var/spack/repos/builtin/packages/nghttp2/package.py b/var/spack/repos/builtin/packages/nghttp2/package.py
index fe9d4f94e38a8e..2de551d8b5fa49 100644
--- a/var/spack/repos/builtin/packages/nghttp2/package.py
+++ b/var/spack/repos/builtin/packages/nghttp2/package.py
@@ -13,6 +13,7 @@ class Nghttp2(AutotoolsPackage):
homepage = "https://nghttp2.org/"
url = "https://github.com/nghttp2/nghttp2/releases/download/v1.26.0/nghttp2-1.26.0.tar.gz"
+ version("1.57.0", sha256="1e3258453784d3b7e6cc48d0be087b168f8360b5d588c66bfeda05d07ad39ffd")
version("1.52.0", sha256="9877caa62bd72dde1331da38ce039dadb049817a01c3bdee809da15b754771b8")
version("1.51.0", sha256="2a0bef286f65b35c24250432e7ec042441a8157a5b93519412d9055169d9ce54")
version("1.50.0", sha256="d162468980dba58e54e31aa2cbaf96fd2f0890e6dd141af100f6bd1b30aa73c6")
diff --git a/var/spack/repos/builtin/packages/ngspice/package.py b/var/spack/repos/builtin/packages/ngspice/package.py
index c826b24052d635..08bbbd712f49e5 100644
--- a/var/spack/repos/builtin/packages/ngspice/package.py
+++ b/var/spack/repos/builtin/packages/ngspice/package.py
@@ -18,6 +18,7 @@ class Ngspice(AutotoolsPackage):
# Master version by default adds the experimental adms feature
version("master", branch="master")
+ version("41", sha256="1ce219395d2f50c33eb223a1403f8318b168f1e6d1015a7db9dbf439408de8c4")
version("40", sha256="e303ca7bc0f594e2d6aa84f68785423e6bf0c8dad009bb20be4d5742588e890d")
version("39", sha256="bf94e811eaad8aaf05821d036a9eb5f8a65d21d30e1cab12701885e09618d771")
version("38", sha256="2c3e22f6c47b165db241cf355371a0a7558540ab2af3f8b5eedeeb289a317c56")
@@ -52,6 +53,7 @@ class Ngspice(AutotoolsPackage):
variant("openmp", default=False, description="Compile with multi-threading support")
variant("readline", default=True, description="Build readline support (for bin)")
variant("fft", default=True, description="Use external fftw lib")
+ variant("osdi", default=False, description="Use osdi/OpenVAF")
depends_on("fftw-api@3:~mpi~openmp", when="+fft~openmp")
depends_on("fftw-api@3:~mpi+openmp", when="+fft+openmp")
@@ -120,6 +122,8 @@ def configure_args(self):
args.append("--enable-openmp")
if "~fft" in spec:
args.append("--with-fftw3=no")
+ if "+osdi" in spec:
+ args.append("--enable-osdi")
if "darwin" in spec.architecture:
args.append("--enable-pss")
if "@master" in spec:
diff --git a/var/spack/repos/builtin/packages/nlcglib/package.py b/var/spack/repos/builtin/packages/nlcglib/package.py
index 5d899b17ed10a6..4817606b8f155e 100644
--- a/var/spack/repos/builtin/packages/nlcglib/package.py
+++ b/var/spack/repos/builtin/packages/nlcglib/package.py
@@ -49,9 +49,12 @@ class Nlcglib(CMakePackage, CudaPackage, ROCmPackage):
depends_on("rocblas")
depends_on("rocsolver")
- with when("+cuda"):
- depends_on("kokkos+cuda+cuda_lambda+wrapper", when="%gcc")
- depends_on("kokkos+cuda")
+ for arch in CudaPackage.cuda_arch_values:
+ depends_on(
+ f"kokkos+cuda+cuda_lambda+wrapper cuda_arch={arch}",
+ when=f"%gcc +cuda cuda_arch={arch}",
+ )
+ depends_on(f"kokkos+cuda cuda_arch={arch}", when=f"+cuda cuda_arch={arch}")
def cmake_args(self):
options = [
diff --git a/var/spack/repos/builtin/packages/npm/package.py b/var/spack/repos/builtin/packages/npm/package.py
index 0d6480f210428b..c8c544c1179381 100644
--- a/var/spack/repos/builtin/packages/npm/package.py
+++ b/var/spack/repos/builtin/packages/npm/package.py
@@ -13,49 +13,16 @@ class Npm(Package):
"""npm: A package manager for javascript."""
homepage = "https://github.com/npm/cli"
- # base https://www.npmjs.com/
-
- git = "https://github.com/npm/cli.git"
url = "https://registry.npmjs.org/npm/-/npm-9.3.1.tgz"
+ git = "https://github.com/npm/cli.git"
version("9.3.1", sha256="41caa26a340b0562bc5429d28792049c980fe3e872b42b82cad94e8f70e37f40")
version("8.19.3", sha256="634bf4e0dc87be771ebf48a058629960e979a209c20a51ebdbc4897ca6a25260")
version("7.24.2", sha256="5b9eeea011f8bc3b76e55cc33339e87213800677f37e0756ad13ef0e9eaccd64")
version("6.14.18", sha256="c9b15f277e2a0b1b57e05bad04504296a27024555d56c2aa967f862e957ad2ed")
- version(
- "6.14.9",
- sha256="1e0e880ce0d5adf0120fb3f92fc8e5ea5bac73681d37282615d074ff670f7703",
- deprecated=True,
- )
- version(
- "6.14.8",
- sha256="fe8e873cb606c06f67f666b4725eb9122c8927f677c8c0baf1477f0ff81f5a2c",
- deprecated=True,
- )
- version(
- "6.13.7",
- sha256="6adf71c198d61a5790cf0e057f4ab72c6ef6c345d72bed8bb7212cb9db969494",
- deprecated=True,
- )
- version(
- "6.13.4",
- sha256="a063290bd5fa06a8753de14169b7b243750432f42d01213fbd699e6b85916de7",
- deprecated=True,
- )
- version(
- "3.10.9",
- sha256="fb0871b1aebf4b74717a72289fade356aedca83ee54e7386e38cb51874501dd6",
- deprecated=True,
- )
- version(
- "3.10.5",
- sha256="ff019769e186152098841c1fa6325e5a79f7903a45f13bd0046a4dc8e63f845f",
- deprecated=True,
- )
-
depends_on("node-js", type=("build", "run"))
- depends_on("libvips")
+ depends_on("libvips", when="@:7")
# npm 6.13.4 ships with node-gyp 5.0.5, which contains several Python 3
# compatibility issues on macOS. Manually update to node-gyp 6.0.1 for
diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py
index 1098a0332db448..90dbdb44786ee2 100644
--- a/var/spack/repos/builtin/packages/octave/package.py
+++ b/var/spack/repos/builtin/packages/octave/package.py
@@ -167,7 +167,7 @@ def configure_args(self):
config_args = []
# Required dependencies
- if "^mkl" in spec and "gfortran" in self.compiler.fc:
+ if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc:
mkl_re = re.compile(r"(mkl_)intel(_i?lp64\b)")
config_args.extend(
[
diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py
index 5ce974edfa63e0..3ccd8719a1758b 100644
--- a/var/spack/repos/builtin/packages/octopus/package.py
+++ b/var/spack/repos/builtin/packages/octopus/package.py
@@ -93,13 +93,14 @@ class Octopus(AutotoolsPackage, CudaPackage):
depends_on("libxc@2:4", when="@8:9")
depends_on("libxc@5.1.0:", when="@10:")
depends_on("libxc@5.1.0:", when="@develop")
+ depends_on("netcdf-fortran", when="+netcdf") # NetCDF fortran lib without mpi variant
with when("+mpi"): # list all the parallel dependencies
depends_on("fftw@3:+mpi+openmp", when="@8:9") # FFT library
depends_on("fftw-api@3:+mpi+openmp", when="@10:")
depends_on("libvdwxc+mpi", when="+libvdwxc")
depends_on("arpack-ng+mpi", when="+arpack")
depends_on("elpa+mpi", when="+elpa")
- depends_on("netcdf-fortran ^netcdf-c+mpi", when="+netcdf")
+ depends_on("netcdf-c+mpi", when="+netcdf") # Link dependency of NetCDF fortran lib
depends_on("berkeleygw@2.1+mpi", when="+berkeleygw")
with when("~mpi"): # list all the serial dependencies
@@ -108,7 +109,7 @@ class Octopus(AutotoolsPackage, CudaPackage):
depends_on("libvdwxc~mpi", when="+libvdwxc")
depends_on("arpack-ng~mpi", when="+arpack")
depends_on("elpa~mpi", when="+elpa")
- depends_on("netcdf-fortran ^netcdf-c~~mpi", when="+netcdf")
+ depends_on("netcdf-c~~mpi", when="+netcdf") # Link dependency of NetCDF fortran lib
depends_on("berkeleygw@2.1~mpi", when="+berkeleygw")
depends_on("etsf-io", when="+etsf-io")
@@ -158,7 +159,7 @@ def configure_args(self):
if "^fftw" in spec:
args.append("--with-fftw-prefix=%s" % spec["fftw"].prefix)
- elif "^mkl" in spec:
+ elif spec["fftw-api"].name in INTEL_MATH_LIBRARIES:
# As of version 10.0, Octopus depends on fftw-api instead
# of FFTW. If FFTW is not in the dependency tree, then
# it ought to be MKL as it is currently the only providers
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index 779aa94c5afe34..1a0cdb2e7507b2 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -24,6 +24,7 @@ class Openblas(CMakePackage, MakefilePackage):
libraries = ["libopenblas", "openblas"]
version("develop", branch="develop")
+ version("0.3.25", sha256="4c25cb30c4bb23eddca05d7d0a85997b8db6144f5464ba7f8c09ce91e2f35543")
version("0.3.24", sha256="ceadc5065da97bd92404cac7254da66cc6eb192679cf1002098688978d4d5132")
version("0.3.23", sha256="5d9491d07168a5d00116cdc068a40022c3455bf9293c7cb86a65b1054d7e5114")
version("0.3.22", sha256="7fa9685926ba4f27cfe513adbf9af64d6b6b63f9dcabb37baefad6a65ff347a7")
@@ -88,8 +89,7 @@ class Openblas(CMakePackage, MakefilePackage):
)
# virtual dependency
- provides("blas")
- provides("lapack")
+ provides("blas", "lapack")
provides("lapack@3.9.1:", when="@0.3.15:")
provides("lapack@3.7.0", when="@0.2.20")
@@ -192,6 +192,13 @@ class Openblas(CMakePackage, MakefilePackage):
when="@0.3.21 %gcc@:9",
)
+ # Fix build on A64FX for OpenBLAS v0.3.24
+ patch(
+ "https://github.com/OpenMathLib/OpenBLAS/commit/90231bfc4e4afc51f67c248328fbef0cecdbd2c2.patch?full_index=1",
+ sha256="139e314f3408dc5c080d28887471f382e829d1bd06c8655eb72593e4e7b921cc",
+ when="@0.3.24 target=a64fx",
+ )
+
# See https://github.com/spack/spack/issues/19932#issuecomment-733452619
# Notice: fixed on Amazon Linux GCC 7.3.1 (which is an unofficial version
# as GCC only has major.minor releases. But the bound :7.3.0 doesn't hurt)
@@ -372,6 +379,14 @@ def _microarch_target_args(self):
# case can go away.
args.append("TARGET=" + "RISCV64_GENERIC")
+ elif self.spec.satisfies("@0.3.19: target=a64fx"):
+ # Special case for Fujitsu's A64FX
+ if any(self.spec.satisfies(i) for i in ["%gcc@11:", "%clang", "%fj"]):
+ args.append("TARGET=A64FX")
+ else:
+ # fallback to armv8-a+sve without -mtune=a64fx flag
+ args.append("TARGET=ARMV8SVE")
+
else:
args.append("TARGET=" + microarch.name.upper())
diff --git a/var/spack/repos/builtin/packages/openimagedenoise/package.py b/var/spack/repos/builtin/packages/openimagedenoise/package.py
index 9ccce30a86c266..e98d6d2baaa32c 100644
--- a/var/spack/repos/builtin/packages/openimagedenoise/package.py
+++ b/var/spack/repos/builtin/packages/openimagedenoise/package.py
@@ -17,6 +17,7 @@ class Openimagedenoise(CMakePackage):
# maintainers("github_user1", "github_user2")
+ version("2.1.0", sha256="ce144ba582ff36563d9442ee07fa2a4d249bc85aa93e5b25fc527ff4ee755ed6")
version("2.0.1", sha256="328eeb9809d18e835dca7203224af3748578794784c026940c02eea09c695b90")
version("1.4.3", sha256="3276e252297ebad67a999298d8f0c30cfb221e166b166ae5c955d88b94ad062a")
version("1.4.2", sha256="e70d27ce24b41364782376c1b3b4f074f77310ccfe5f8ffec4a13a347e48a0ea")
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index 87e5bc4f2bfee5..f2347d01ecb0b2 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -555,11 +555,14 @@ class Openmpi(AutotoolsPackage, CudaPackage):
# PMIx is unavailable for @1, and required for @2:
# OpenMPI @2: includes a vendored version:
- # depends_on('pmix@1.1.2', when='@2.1.6')
- # depends_on('pmix@3.2.3', when='@4.1.2')
- depends_on("pmix@1.0:1", when="@2.0:2 ~internal-pmix")
- depends_on("pmix@3.2:", when="@4.0:4 ~internal-pmix")
- depends_on("pmix@4.2:", when="@5.0:5 ~internal-pmix")
+ with when("~internal-pmix"):
+ depends_on("pmix@1", when="@2")
+ depends_on("pmix@3.2:", when="@4:")
+ depends_on("pmix@4.2:", when="@5:")
+
+ # pmix@4.2.3 contains a breaking change, compat fixed in openmpi@4.1.6
+ # See https://www.mail-archive.com/announce@lists.open-mpi.org//msg00158.html
+ depends_on("pmix@:4.2.2", when="@:4.1.5")
# Libevent is required when *vendored* PMIx is used
depends_on("libevent@2:", when="@main")
@@ -592,7 +595,7 @@ class Openmpi(AutotoolsPackage, CudaPackage):
conflicts(
"schedulers=slurm ~pmi",
when="@1.5.4",
- msg="+pmi is required for openmpi to work with SLURM.",
+ msg="+pmi is required for openmpi to work with Slurm.",
)
conflicts(
"schedulers=loadleveler",
diff --git a/var/spack/repos/builtin/packages/openscenegraph/openscenegraph-3.6.5-openexr3.patch b/var/spack/repos/builtin/packages/openscenegraph/openscenegraph-3.6.5-openexr3.patch
new file mode 100644
index 00000000000000..6a6aa57950d60a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openscenegraph/openscenegraph-3.6.5-openexr3.patch
@@ -0,0 +1,68 @@
+https://bugs.gentoo.org/833491
+
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -752,7 +752,6 @@ ELSE()
+- FIND_PACKAGE(ilmbase)
+ FIND_PACKAGE(Inventor)
+ FIND_PACKAGE(Jasper)
+- FIND_PACKAGE(OpenEXR)
++ FIND_PACKAGE(OpenEXR CONFIG)
+ FIND_PACKAGE(OpenCascade)
+ FIND_PACKAGE(COLLADA)
+ FIND_PACKAGE(FBX)
+--- a/src/osgPlugins/CMakeLists.txt
++++ b/src/osgPlugins/CMakeLists.txt
+@@ -105,7 +105,7 @@ ENDIF()
+ IF(JASPER_FOUND)
+ ADD_PLUGIN_DIRECTORY(jp2)
+ ENDIF()
+-IF(OPENEXR_FOUND AND ZLIB_FOUND AND OSG_CPP_EXCEPTIONS_AVAILABLE)
++IF(OpenEXR_FOUND AND ZLIB_FOUND AND OSG_CPP_EXCEPTIONS_AVAILABLE)
+ ADD_PLUGIN_DIRECTORY(exr)
+ ENDIF()
+ IF(GIFLIB_FOUND)
+--- a/src/osgPlugins/exr/CMakeLists.txt
++++ b/src/osgPlugins/exr/CMakeLists.txt
+@@ -1,9 +1,7 @@
+-INCLUDE_DIRECTORIES( ${ILMBASE_INCLUDE_DIR}/OpenEXR )
+-INCLUDE_DIRECTORIES( ${OPENEXR_INCLUDE_DIR}/OpenEXR )
+-
+ SET(TARGET_SRC ReaderWriterEXR.cpp )
+
+-SET(TARGET_LIBRARIES_VARS ${OPENEXR_LIBRARIES_VARS} ${ILMBASE_LIBRARIES_VARS} ZLIB_LIBRARIES)
++SET(OPENEXR_LIBRARIES_VARS OpenEXR::OpenEXR)
++SET(TARGET_LIBRARIES_VARS OPENEXR_LIBRARIES_VARS ZLIB_LIBRARIES)
+
+ IF(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
+ REMOVE_CXX_FLAG(-Wshadow)
+--- a/src/osgPlugins/exr/ReaderWriterEXR.cpp
++++ b/src/osgPlugins/exr/ReaderWriterEXR.cpp
+@@ -41,11 +41,11 @@ public:
+ {
+ return _inStream->read(c,n).good();
+ };
+- virtual Int64 tellg ()
++ virtual uint64_t tellg ()
+ {
+ return _inStream->tellg();
+ };
+- virtual void seekg (Int64 pos)
++ virtual void seekg (uint64_t pos)
+ {
+ _inStream->seekg(pos);
+ };
+@@ -69,11 +69,11 @@ public:
+ {
+ _outStream->write(c,n);
+ };
+- virtual Int64 tellp ()
++ virtual uint64_t tellp ()
+ {
+ return _outStream->tellp();
+ };
+- virtual void seekp (Int64 pos)
++ virtual void seekp (uint64_t pos)
+ {
+ _outStream->seekp(pos);
+ };
diff --git a/var/spack/repos/builtin/packages/openscenegraph/package.py b/var/spack/repos/builtin/packages/openscenegraph/package.py
index bb4e2186a122a4..bce48ff1c13868 100644
--- a/var/spack/repos/builtin/packages/openscenegraph/package.py
+++ b/var/spack/repos/builtin/packages/openscenegraph/package.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import sys
+
from spack.package import *
@@ -14,6 +16,10 @@ class Openscenegraph(CMakePackage):
git = "https://github.com/openscenegraph/OpenSceneGraph.git"
url = "https://github.com/openscenegraph/OpenSceneGraph/archive/OpenSceneGraph-3.6.4.tar.gz"
+ maintainers("aumuell")
+
+ version("master", branch="master")
+ version("stable", branch="OpenSceneGraph-3.6")
version("3.6.5", sha256="aea196550f02974d6d09291c5d83b51ca6a03b3767e234a8c0e21322927d1e12")
version("3.6.4", sha256="81394d1b484c631028b85d21c5535280c21bbd911cb058e8746c87e93e7b9d33")
version("3.4.1", sha256="930eb46f05781a76883ec16c5f49cfb29a059421db131005d75bec4d78401fd5")
@@ -22,11 +28,25 @@ class Openscenegraph(CMakePackage):
version("3.1.5", sha256="dddecf2b33302076712100af59b880e7647bc595a9a7cc99186e98d6e0eaeb5c")
variant("shared", default=True, description="Builds a shared version of the library")
+ variant("apps", default=False, description="Build OpenSceneGraph tools")
+ variant("dcmtk", default=False, description="Build support for DICOM files using DCMTK")
variant(
"ffmpeg", default=False, description="Builds ffmpeg plugin for audio encoding/decoding"
)
+ variant("gdal", default=False, description="Build support for geospatial files using GDAL")
+ variant("gta", default=False, description="Build support for Generic Tagged Array (GTA) files")
+ variant(
+ "inventor", default=False, description="Build support for Open Inventor files using Coin3D"
+ )
+ variant(
+ "opencascade", default=False, description="Build support for CAD files using Open CASCADE"
+ )
+ variant("openexr", default=False, description="Build support for OpenEXR files")
+ variant("pdf", default=False, description="Build support for PDF files using Poppler")
+ variant("svg", default=False, description="Build support for SVG files using librsvg")
depends_on("cmake@2.8.7:", type="build")
+ depends_on("pkgconfig", type="build")
depends_on("gl")
depends_on(
"qt+opengl", when="@:3.5.4"
@@ -42,39 +62,58 @@ class Openscenegraph(CMakePackage):
depends_on("zlib-api")
depends_on("fontconfig")
- depends_on("ffmpeg+avresample", when="+ffmpeg")
+ depends_on("dcmtk+pic", when="+dcmtk")
+ depends_on("gdal", when="+gdal")
+ depends_on("libgta", when="+gta")
+ depends_on("coin3d", when="+inventor")
+ depends_on("opencascade@:7.5", when="+opencascade")
+ depends_on("openexr", when="+openexr")
+ depends_on("ilmbase", when="+openexr ^openexr@:2")
+ depends_on("poppler+glib", when="+pdf")
+ depends_on("librsvg", when="+svg")
+
+ depends_on("ffmpeg@:4", when="+ffmpeg")
+ depends_on("ffmpeg+avresample", when="^ffmpeg@:4")
# https://github.com/openscenegraph/OpenSceneGraph/issues/167
depends_on("ffmpeg@:2", when="@:3.4.0+ffmpeg")
patch("glibc-jasper.patch", when="@3.4%gcc")
+ # from gentoo: https://raw.githubusercontent.com/gentoo/gentoo/9523b20c27d12dd72d1fd5ced3ba4995099925a2/dev-games/openscenegraph/files/openscenegraph-3.6.5-openexr3.patch
+ patch("openscenegraph-3.6.5-openexr3.patch", when="@3.6:")
+
+ def patch(self):
+ # pkgconfig does not work for GTA on macos
+ if sys.platform == "darwin":
+ filter_file("PKG_CHECK_MODULES\\(GTA gta\\)", "", "CMakeModules/FindGTA.cmake")
def cmake_args(self):
spec = self.spec
- shared_status = "ON" if "+shared" in spec else "OFF"
- opengl_profile = "GL{0}".format(spec["gl"].version.up_to(1))
-
args = [
# Variant Options #
- "-DDYNAMIC_OPENSCENEGRAPH={0}".format(shared_status),
- "-DDYNAMIC_OPENTHREADS={0}".format(shared_status),
- "-DOPENGL_PROFILE={0}".format(opengl_profile),
+ self.define_from_variant("DYNAMIC_OPENSCENEGRAPH", "shared"),
+ self.define_from_variant("DYNAMIC_OPENTHREADS", "shared"),
+ self.define_from_variant("BUILD_OSG_APPLICATIONS", "apps"),
# General Options #
- "-DBUILD_OSG_APPLICATIONS=OFF",
- "-DOSG_NOTIFY_DISABLED=ON",
- "-DLIB_POSTFIX=",
- "-DCMAKE_RELWITHDEBINFO_POSTFIX=",
- "-DCMAKE_MINSIZEREL_POSTFIX=",
+ self.define("OPENGL_PROFILE", f"GL{spec['gl'].version.up_to(1)}"),
+ self.define("OSG_NOTIFY_DISABLED", True),
+ self.define("LIB_POSTFIX", ""),
+ self.define("CMAKE_RELWITHDEBINFO_POSTFIX", ""),
+ self.define("CMAKE_MINSIZEREL_POSTFIX", ""),
]
- if spec.satisfies("~ffmpeg"):
- for ffmpeg_lib in ["libavcodec", "libavformat", "libavutil"]:
- args.extend(
- [
- "-DFFMPEG_{0}_INCLUDE_DIRS=".format(ffmpeg_lib.upper()),
- "-DFFMPEG_{0}_LIBRARIES=".format(ffmpeg_lib.upper()),
- ]
- )
+ # explicitly disable or enable plugins depending on variants
+ # CMake will still search for the packages, but won't build the plugins requiring them
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_DICOM", "dcmtk"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_EXR", "openexr"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_FFMPEG", "ffmpeg"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_GDAL", "gdal"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_OGR", "gdal"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_GTA", "gta"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_INVENTOR", "inventor"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_OPENCASCADE", "opencascade"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_PDF", "pdf"))
+ args.append(self.define_from_variant("BUILD_OSG_PLUGIN_SVG", "svg"))
# NOTE: This is necessary in order to allow OpenSceneGraph to compile
# despite containing a number of implicit bool to int conversions.
diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py
index 358a008088883a..8e2cd947bb54ed 100644
--- a/var/spack/repos/builtin/packages/openssl/package.py
+++ b/var/spack/repos/builtin/packages/openssl/package.py
@@ -381,6 +381,10 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package
depends_on("ca-certificates-mozilla", type="build", when="certs=mozilla")
depends_on("nasm", when="platform=windows")
+ depends_on("gmake", type="build", when="platform=linux")
+ depends_on("gmake", type="build", when="platform=cray")
+ depends_on("gmake", type="build", when="platform=darwin")
+
patch(
"https://github.com/openssl/openssl/commit/f9e578e720bb35228948564192adbe3bc503d5fb.patch?full_index=1",
sha256="3fdcf2d1e47c34f3a012f23306322c5a35cad55b180c9b6fb34537b55884645c",
diff --git a/var/spack/repos/builtin/packages/openvkl/package.py b/var/spack/repos/builtin/packages/openvkl/package.py
index 32bbdcafe26c3b..bc9a32f5ce29fd 100644
--- a/var/spack/repos/builtin/packages/openvkl/package.py
+++ b/var/spack/repos/builtin/packages/openvkl/package.py
@@ -16,6 +16,7 @@ class Openvkl(CMakePackage):
# maintainers("github_user1", "github_user2")
+ version("2.0.0", sha256="469c3fba254c4fcdd84f8a9763d2e1aaa496dc123b5a9d467cc0a561e284c4e6")
version("1.3.2", sha256="7704736566bf17497a3e51c067bd575316895fda96eccc682dae4aac7fb07b28")
version("1.3.1", sha256="c9cefb6c313f2b4c0331e9629931759a6bc204ec00deed6ec0becad1670a1933")
version("1.3.0", sha256="c6d4d40e6d232839c278b53dee1e7bd3bd239c3ccac33f49b465fc65a0692be9")
@@ -36,6 +37,7 @@ class Openvkl(CMakePackage):
depends_on("rkcommon@1.8.0:", when="@1.1:")
depends_on("rkcommon@:1.10.0", when="@:1.3.1")
depends_on("rkcommon@1.11.0:", when="@1.3.2:")
+ depends_on("rkcommon@:1.11.0", when="@:1.3.2")
depends_on("tbb")
def cmake_args(self):
diff --git a/var/spack/repos/builtin/packages/ospray/package.py b/var/spack/repos/builtin/packages/ospray/package.py
index 85a79894bbf246..fe81c528aa142e 100644
--- a/var/spack/repos/builtin/packages/ospray/package.py
+++ b/var/spack/repos/builtin/packages/ospray/package.py
@@ -16,6 +16,7 @@ class Ospray(CMakePackage):
# maintainers("aumuell")
+ version("3.0.0", sha256="d8d8e632d77171c810c0f38f8d5c8387470ca19b75f5b80ad4d3d12007280288")
version("2.12.0", sha256="268b16952b2dd44da2a1e40d2065c960bc2442dd09b63ace8b65d3408f596301")
version("2.11.0", sha256="55974e650d9b78989ee55adb81cffd8c6e39ce5d3cf0a3b3198c522bf36f6e81")
version("2.10.0", sha256="bd478284f48d2cb775fc41a2855a9d9f5ea16c861abda0f8dc94e02ea7189cb8")
@@ -38,26 +39,31 @@ class Ospray(CMakePackage):
depends_on("rkcommon@1.9", when="@2.9.0")
depends_on("rkcommon@1.10:", when="@2.10.0:")
depends_on("rkcommon@1.11:", when="@2.11:")
+ depends_on("rkcommon@1.12:", when="@3:")
depends_on("embree@3.12: +ispc")
depends_on("embree@3.13.1:", when="@2.7.0:")
depends_on("embree@:3", when="@:2.10")
depends_on("embree@4:", when="@2.11:")
+ depends_on("embree@4.3:", when="@3:")
with when("+volumes"):
- depends_on("openvkl@0.13.0:")
+ depends_on("openvkl@0.13.0:1", when="@2")
depends_on("openvkl@1.0.1:", when="@2.7.0:")
depends_on("openvkl@1.2.0:", when="@2.9.0:")
depends_on("openvkl@1.3.0:", when="@2.10.0:")
- depends_on("openvkl@1.3.2:", when="@2.11:")
+ depends_on("openvkl@1.3.2:", when="@2.11:2")
+ depends_on("openvkl@2:", when="@3:")
with when("+denoiser"):
depends_on("openimagedenoise@1.2.3:")
depends_on("openimagedenoise@1.3:", when="@2.5:")
depends_on("openimagedenoise@:1", when="@:2.11")
depends_on("openimagedenoise@2:", when="@2.12:")
+ depends_on("openimagedenoise@2.1:", when="@3:")
depends_on("ispc@1.14.1:", type=("build"))
depends_on("ispc@1.16.0:", when="@2.7.0:", type=("build"))
depends_on("ispc@1.18.0:", when="@2.10.0:", type=("build"))
depends_on("ispc@1.19.0:", when="@2.11.0:", type=("build"))
depends_on("ispc@1.20.0:", when="@2.12.0:", type=("build"))
+ depends_on("ispc@1.21.1:", when="@3:", type=("build"))
depends_on("tbb")
depends_on("mpi", when="+mpi")
diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
index 8c9f50a3d1aa2d..6a755fcbf11f89 100644
--- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
+++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
@@ -20,6 +20,7 @@ class OsuMicroBenchmarks(AutotoolsPackage, CudaPackage, ROCmPackage):
maintainers("natshineman", "harisubramoni", "MatthewLieber")
+ version("7.3", sha256="8fa25b8aaa34e4b07ab3a4f30b7690ab46b038b08d204a853a9b6aa7bdb02f2f")
version("7.2", sha256="1a4e1f2aab0e65404b3414e23bd46616184b69b6231ce9313d9c630bd6e633c1")
version("7.1-1", sha256="85f4dd8be1df31255e232852769ae5b82e87a5fb14be2f8eba1ae9de8ffe391a")
version("7.1", sha256="2c4c931ecaf19e8ab72a393ee732e25743208c9a58fa50023e3fac47064292cc")
diff --git a/var/spack/repos/builtin/packages/pacparser/package.py b/var/spack/repos/builtin/packages/pacparser/package.py
index d42d927e370356..905cc9f874fc17 100644
--- a/var/spack/repos/builtin/packages/pacparser/package.py
+++ b/var/spack/repos/builtin/packages/pacparser/package.py
@@ -9,8 +9,6 @@
class Pacparser(MakefilePackage):
"""pacparser is a library to parse proxy auto-config (PAC) files."""
- maintainers("iarspider")
-
homepage = "https://pacparser.github.io/"
url = "https://github.com/manugarg/pacparser/releases/download/v1.4.0/pacparser-v1.4.0.tar.gz"
git = "https://github.com/manugarg/pacparser.git"
diff --git a/var/spack/repos/builtin/packages/parallelio/package.py b/var/spack/repos/builtin/packages/parallelio/package.py
index f3bcbaa99ad1a6..1841fe6bf6ccc8 100644
--- a/var/spack/repos/builtin/packages/parallelio/package.py
+++ b/var/spack/repos/builtin/packages/parallelio/package.py
@@ -55,6 +55,11 @@ class Parallelio(CMakePackage):
depends_on("parallel-netcdf", type="link", when="+pnetcdf")
resource(name="genf90", git="https://github.com/PARALLELIO/genf90.git", tag="genf90_200608")
+ resource(
+ name="CMake_Fortran_utils",
+ git="https://github.com/CESM-Development/CMake_Fortran_utils.git",
+ tag="CMake_Fortran_utils_150308",
+ )
# Allow argument mismatch in gfortran versions > 10 for mpi library compatibility
patch("gfortran.patch", when="@:2.5.8 +fortran %gcc@10:")
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
index 5760a9d68da7de..1f3cd9a76fb19a 100644
--- a/var/spack/repos/builtin/packages/paraview/package.py
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -28,6 +28,9 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
tags = ["e4s"]
version("master", branch="master", submodules=True)
+ version(
+ "5.12.0-RC1", sha256="892eda2ae72831bbadd846be465d496ada35739779229c604cddd56e018a1aea"
+ )
version(
"5.11.2",
sha256="5c5d2f922f30d91feefc43b4a729015dbb1459f54c938896c123d2ac289c7a1e",
@@ -190,7 +193,7 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
depends_on("libxt", when="~osmesa platform={}".format(p))
conflicts("+qt", when="+osmesa")
- depends_on("ospray@2.1:", when="+raytracing")
+ depends_on("ospray@2.1:2", when="+raytracing")
depends_on("openimagedenoise", when="+raytracing")
depends_on("ospray +mpi", when="+raytracing +mpi")
@@ -226,8 +229,10 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
depends_on("protobuf@3.4:3.18", when="@:5.10%xl")
depends_on("protobuf@3.4:3.18", when="@:5.10%xl_r")
# protobuf requires newer abseil-cpp, which in turn requires C++14,
- # but paraview uses C++11 by default
- depends_on("protobuf@3.4:3.21", when="@:5.11")
+ # but paraview uses C++11 by default. Use for 5.11+ until ParaView updates
+ # its C++ standard level.
+ depends_on("protobuf@3.4:3.21", when="@5.11:")
+ depends_on("protobuf@3.4:3.21", when="@master")
depends_on("libxml2")
depends_on("lz4")
depends_on("xz")
@@ -280,7 +285,9 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
# Fix IOADIOS2 module to work with kits
# https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8653
- patch("vtk-adios2-module-no-kit.patch", when="@5.8:")
+ patch("vtk-adios2-module-no-kit.patch", when="@5.8:5.11")
+ # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8653
+ patch("vtk-adios2-module-no-kit-5.12.patch", when="@5.12:")
# Patch for paraview 5.9.0%xl_r
# https://gitlab.kitware.com/vtk/vtk/-/merge_requests/7591
@@ -426,6 +433,10 @@ def nvariant_bool(feature):
self.define_from_variant("VISIT_BUILD_READER_Silo", "visitbridge"),
]
+ if spec.satisfies("@5.12:"):
+ cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_fast_float:BOOL=OFF")
+ cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_token:BOOL=OFF")
+
if spec.satisfies("@5.11:"):
cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_verdict:BOOL=OFF")
diff --git a/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch b/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch
new file mode 100644
index 00000000000000..34a98eac474716
--- /dev/null
+++ b/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch
@@ -0,0 +1,230 @@
+diff --git a/VTK/IO/ADIOS2/CMakeLists.txt b/VTK/IO/ADIOS2/CMakeLists.txt
+index 86c6d49cc4f..07b1d4fe0ef 100644
+--- a/VTK/IO/ADIOS2/CMakeLists.txt
++++ b/VTK/IO/ADIOS2/CMakeLists.txt
+@@ -1,9 +1,9 @@
+ vtk_module_find_package(PRIVATE_IF_SHARED
+ PACKAGE ADIOS2
+ VERSION 2.4)
+-if (VTK_USE_MPI AND NOT ADIOS2_HAVE_MPI)
++if (TARGET VTK::ParallelMPI AND NOT ADIOS2_HAVE_MPI)
+ message(FATAL_ERROR "VTK built with MPI requires ADIOS2 built with MPI")
+-elseif(NOT VTK_USE_MPI AND ADIOS2_HAVE_MPI)
++elseif(NOT TARGET VTK::ParallelMPI AND ADIOS2_HAVE_MPI)
+ message(FATAL_ERROR "VTK built without MPI requires ADIOS2 built without MPI")
+ endif()
+
+@@ -18,38 +18,30 @@ set(classes_core vtkADIOS2CoreImageReader)
+ set(private_classes_core Core/vtkADIOS2CoreArraySelection)
+ set(private_headers_core Core/vtkADIOS2CoreTypeTraits.h)
+ set(private_templates_core)
+-set(vtk_io_adios2_core_enabled TRUE CACHE INTERNAL "" FORCE)
+
+-if (vtk_io_adios2_core_enabled)
+- list(APPEND classes ${classes_core})
+- list(APPEND private_classes ${private_classes_core})
+- list(APPEND private_headers ${private_headers_core})
+- list(APPEND private_templates ${private_templates_core})
+-endif()
++list(APPEND classes ${classes_core})
++list(APPEND private_classes ${private_classes_core})
++list(APPEND private_headers ${private_headers_core})
++list(APPEND private_templates ${private_templates_core})
++
++# Build VTX Schema for Parallel
++if (TARGET VTK::ParallelMPI)
++ set(classes_vtx vtkADIOS2VTXReader)
++ set(private_classes_vtx
++ VTX/VTXSchemaManager
++ VTX/common/VTXDataArray
++ VTX/common/VTXHelper
++ VTX/schema/VTXSchema
++ VTX/schema/vtk/VTXvtkBase
++ VTX/schema/vtk/VTXvtkVTI
++ VTX/schema/vtk/VTXvtkVTU)
++ set(private_headers_vtx VTX/common/VTXTypes.h)
++ set(private_templates_vtx
++ VTX/common/VTXHelper.txx
++ VTX/schema/VTXSchema.txx
++ VTX/schema/vtk/VTXvtkVTI.txx
++ VTX/schema/vtk/VTXvtkVTU.txx)
+
+-set(classes_vtx vtkADIOS2VTXReader)
+-set(private_classes_vtx
+- VTX/VTXSchemaManager
+- VTX/common/VTXDataArray
+- VTX/common/VTXHelper
+- VTX/schema/VTXSchema
+- VTX/schema/vtk/VTXvtkBase
+- VTX/schema/vtk/VTXvtkVTI
+- VTX/schema/vtk/VTXvtkVTU)
+-set(private_headers_vtx VTX/common/VTXTypes.h)
+-set(private_templates_vtx
+- VTX/common/VTXHelper.txx
+- VTX/schema/VTXSchema.txx
+- VTX/schema/vtk/VTXvtkVTI.txx
+- VTX/schema/vtk/VTXvtkVTU.txx)
+-
+-if (VTK_USE_MPI)
+- set(vtk_io_adios2_vtx_enabled TRUE CACHE INTERNAL "" FORCE)
+-else ()
+- set(vtk_io_adios2_vtx_enabled FALSE CACHE INTERNAL "" FORCE)
+-endif()
+-
+-if (vtk_io_adios2_vtx_enabled)
+ list(APPEND classes ${classes_vtx})
+ list(APPEND private_classes ${private_classes_vtx})
+ list(APPEND private_headers ${private_headers_vtx})
+@@ -63,10 +55,6 @@ vtk_module_add_module(VTK::IOADIOS2
+ PRIVATE_TEMPLATES ${private_templates})
+ vtk_module_link(VTK::IOADIOS2 PRIVATE adios2::adios2)
+
+-if (ADIOS2_HAVE_MPI)
+- vtk_module_definitions(VTK::IOADIOS2 PRIVATE IOADIOS2_HAVE_MPI)
+-endif ()
+-
+ if (ADIOS2_VERSION VERSION_GREATER_EQUAL "2.8.0")
+ vtk_module_definitions(VTK::IOADIOS2 PRIVATE IOADIOS2_BP5_RANDOM_ACCESS)
+ endif ()
+diff --git a/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt b/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt
+index 1534a1e7271..29c51970daf 100644
+--- a/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt
++++ b/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt
+@@ -2,40 +2,34 @@ find_package(ADIOS2 2.4 REQUIRED
+ COMPONENTS CXX
+ OPTIONAL_COMPONENTS MPI)
+
+-if (ADIOS2_HAVE_MPI)
+- if (vtk_io_adios2_core_enabled)
+- set(TestADIOS2BPReaderSingleTimeStep_NUMPROCS 2)
++if (TARGET VTK::ParallelMPI)
++ set(TestADIOS2BPReaderSingleTimeStep_NUMPROCS 2)
+ # For now vtkMultiBlockVolumeMapper does not support rendering in parallel
+- set(TestADIOS2BPReaderMultiTimeSteps_NUMPROCS 2)
+- set(TestADIOS2BPReaderMultiTimeSteps2D_NUMPROCS 2)
+- vtk_add_test_mpi(vtkIOADIOS2CxxTests-MPI mpiTests TESTING_DATA
+- TestADIOS2BPReaderMPISingleTimeStep.cxx
+- TestADIOS2BPReaderMPIMultiTimeSteps3D.cxx,NO_VALID
+- TestADIOS2BPReaderMPIMultiTimeSteps2D.cxx)
+- vtk_test_cxx_executable(vtkIOADIOS2CxxTests-MPI mpiTests)
+- endif()
++ set(TestADIOS2BPReaderMultiTimeSteps_NUMPROCS 2)
++ set(TestADIOS2BPReaderMultiTimeSteps2D_NUMPROCS 2)
++ vtk_add_test_mpi(vtkIOADIOS2CxxTests-MPI mpiTests TESTING_DATA
++ TestADIOS2BPReaderMPISingleTimeStep.cxx
++ TestADIOS2BPReaderMPIMultiTimeSteps3D.cxx,NO_VALID
++ TestADIOS2BPReaderMPIMultiTimeSteps2D.cxx)
++ vtk_test_cxx_executable(vtkIOADIOS2CxxTests-MPI mpiTests)
+
+ # VTX tests
+- if (vtk_io_adios2_vtx_enabled)
+- vtk_add_test_cxx(vtkIOADIOS2VTXCxxTests tests TESTING_DATA NO_OUTPUT
+- UnitTestIOADIOS2VTX.cxx,NO_VALID
+- #TestIOADIOS2VTX_VTI3D.cxx,
+- TestIOADIOS2VTX_VTI3DRendering.cxx,NO_VALID
+- #TestIOADIOS2VTX_VTU3D.cxx,NO_VALID
+- TestIOADIOS2VTX_VTU3DRendering.cxx,NO_VALID
+- TestIOADIOS2VTX_VTU2DRendering.cxx,NO_VALID
+- TestIOADIOS2VTX_VTU1DRendering.cxx,NO_VALID)
++ vtk_add_test_cxx(vtkIOADIOS2VTXCxxTests tests TESTING_DATA NO_OUTPUT
++ UnitTestIOADIOS2VTX.cxx,NO_VALID
++ #TestIOADIOS2VTX_VTI3D.cxx,
++ TestIOADIOS2VTX_VTI3DRendering.cxx,NO_VALID
++ #TestIOADIOS2VTX_VTU3D.cxx,NO_VALID
++ TestIOADIOS2VTX_VTU3DRendering.cxx,NO_VALID
++ TestIOADIOS2VTX_VTU2DRendering.cxx,NO_VALID
++ TestIOADIOS2VTX_VTU1DRendering.cxx,NO_VALID)
+
+- vtk_test_cxx_executable(vtkIOADIOS2VTXCxxTests tests)
+- target_link_libraries(vtkIOADIOS2VTXCxxTests PUBLIC adios2::adios2)
+- endif ()
++ vtk_test_cxx_executable(vtkIOADIOS2VTXCxxTests tests)
++ target_link_libraries(vtkIOADIOS2VTXCxxTests PUBLIC adios2::adios2)
+ else ()
+- if (vtk_io_adios2_core_enabled)
+- vtk_add_test_cxx(vtkIOADIOS2CxxTests tests TESTING_DATA
+- TestADIOS2BPReaderSingleTimeStep.cxx
+- TestADIOS2BPReaderMultiTimeSteps3D.cxx
+- TestADIOS2BPReaderMultiTimeSteps2D.cxx)
++ vtk_add_test_cxx(vtkIOADIOS2CxxTests tests TESTING_DATA
++ TestADIOS2BPReaderSingleTimeStep.cxx
++ TestADIOS2BPReaderMultiTimeSteps3D.cxx
++ TestADIOS2BPReaderMultiTimeSteps2D.cxx)
+
+- vtk_test_cxx_executable(vtkIOADIOS2CxxTests tests)
+- endif ()
++ vtk_test_cxx_executable(vtkIOADIOS2CxxTests tests)
+ endif ()
+diff --git a/VTK/IO/ADIOS2/vtk.module b/VTK/IO/ADIOS2/vtk.module
+index 5069bd828b0..fe37260eb6d 100644
+--- a/VTK/IO/ADIOS2/vtk.module
++++ b/VTK/IO/ADIOS2/vtk.module
+@@ -2,8 +2,6 @@ NAME
+ VTK::IOADIOS2
+ LIBRARY_NAME
+ vtkIOADIOS2
+-KIT
+- VTK::IO
+ SPDX_LICENSE_IDENTIFIER
+ LicenseRef-BSD-3-Clause-Sandia-USGov
+ SPDX_COPYRIGHT_TEXT
+diff --git a/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx b/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx
+index 6ba4d25230d..c209fd905d5 100644
+--- a/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx
++++ b/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx
+@@ -28,7 +28,7 @@
+ #include "vtkLongLongArray.h"
+ #include "vtkMultiBlockDataSet.h"
+ #include "vtkMultiPieceDataSet.h"
+-#include "vtkMultiProcessController.h"
++#include "vtkMultiProcessController.h" // For the MPI controller member
+ #include "vtkNew.h"
+ #include "vtkObjectFactory.h"
+ #include "vtkPointData.h"
+@@ -46,7 +46,7 @@
+ #include "vtkUnstructuredGrid.h"
+ #include "vtksys/SystemTools.hxx"
+
+-#ifdef IOADIOS2_HAVE_MPI
++#if VTK_MODULE_ENABLE_VTK_ParallelMPI
+ #include "vtkMPI.h"
+ #include "vtkMPIController.h"
+ #endif
+@@ -126,7 +126,7 @@ vtkNew vtkADIOS2CoreImageReader::vtkADIOS2CoreImageReaderI
+ int myLen = static_cast(ibds->GetNumberOfBlocks());
+ int* allLens{ nullptr };
+ int procId{ 0 }, numProcess{ 0 };
+-#ifdef IOADIOS2_HAVE_MPI
++#if VTK_MODULE_ENABLE_VTK_ParallelMPI
+ auto ctrl = vtkMultiProcessController::GetGlobalController();
+ if (ctrl)
+ {
+@@ -286,7 +286,7 @@ const vtkADIOS2CoreImageReader::StringToParams& vtkADIOS2CoreImageReader::GetAva
+ //------------------------------------------------------------------------------
+ void vtkADIOS2CoreImageReader::SetController(vtkMultiProcessController* controller)
+ {
+-#ifdef IOADIOS2_HAVE_MPI
++#if VTK_MODULE_ENABLE_VTK_ParallelMPI
+ vtkMPIController* mpiController = vtkMPIController::SafeDownCast(controller);
+ if (controller && !mpiController)
+ {
+@@ -337,7 +337,7 @@ bool vtkADIOS2CoreImageReader::OpenAndReadMetaData()
+ // Initialize the ADIOS2 data structures
+ if (!this->Impl->Adios)
+ {
+-#ifdef IOADIOS2_HAVE_MPI
++#if VTK_MODULE_ENABLE_VTK_ParallelMPI
+ // Make sure the ADIOS subsystem is initialized before processing any
+ // sort of request.
+ if (!this->Controller)
+@@ -910,7 +910,7 @@ void vtkADIOS2CoreImageReader::CalculateWorkDistribution(const std::string& varN
+ auto var = this->Impl->AdiosIO.InquireVariable(varName);
+ size_t blockNum = this->Impl->BpReader.BlocksInfo(var, this->Impl->RequestStep).size();
+
+-#ifdef IOADIOS2_HAVE_MPI
++#if VTK_MODULE_ENABLE_VTK_ParallelMPI
+ size_t rank = static_cast(this->Controller->GetLocalProcessId());
+ size_t procs = static_cast(this->Controller->GetNumberOfProcesses());
+ #else
+--
+GitLab
diff --git a/var/spack/repos/builtin/packages/pastix/package.py b/var/spack/repos/builtin/packages/pastix/package.py
index ecd303784fdc0f..f405217508235e 100644
--- a/var/spack/repos/builtin/packages/pastix/package.py
+++ b/var/spack/repos/builtin/packages/pastix/package.py
@@ -17,6 +17,7 @@ class Pastix(CMakePackage, CudaPackage):
maintainers("fpruvost", "mfaverge", "ramet")
version("master", branch="master", submodules=True)
+ version("6.3.1", sha256="290464d73b7d43356e4735a29932bf6f23a88e94ec7139ba7744c21e42c52681")
version("6.3.0", sha256="a6bfec32a3279d7b24c5fc05885c6632d177e467f1584707c6fd7c42a8703c3e")
version("6.2.2", sha256="cce9a1fe4678b5733c9f1a5a52f77b040eadc3e254418c6fb03d8ab37dede508")
version("6.2.1", sha256="b680cbfc265df8cba18d3a7093fcc02e260198c4a2d6a86d1e684bb291e309dd")
diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py
index 2630085d4eeaf7..21fc0d62aad485 100644
--- a/var/spack/repos/builtin/packages/patchelf/package.py
+++ b/var/spack/repos/builtin/packages/patchelf/package.py
@@ -19,7 +19,14 @@ class Patchelf(AutotoolsPackage):
maintainers("haampie")
version("0.18.0", sha256="64de10e4c6b8b8379db7e87f58030f336ea747c0515f381132e810dbf84a86e7")
- version("0.17.2", sha256="20427b718dd130e4b66d95072c2a2bd5e17232e20dad58c1bea9da81fae330e0")
+ # patchelf 0.18 breaks libraries:
+ # https://github.com/spack/spack/issues/39252
+ # https://github.com/spack/spack/pull/40938
+ version(
+ "0.17.2",
+ sha256="20427b718dd130e4b66d95072c2a2bd5e17232e20dad58c1bea9da81fae330e0",
+ preferred=True,
+ )
version("0.16.1", sha256="1a562ed28b16f8a00456b5f9ee573bb1af7c39c1beea01d94fc0c7b3256b0406")
version("0.15.0", sha256="53a8d58ed4e060412b8fdcb6489562b3c62be6f65cee5af30eba60f4423bfa0f")
version("0.14.5", sha256="113ada3f1ace08f0a7224aa8500f1fa6b08320d8f7df05ff58585286ec5faa6f")
diff --git a/var/spack/repos/builtin/packages/pcl/package.py b/var/spack/repos/builtin/packages/pcl/package.py
index f57dfed3871772..e0dd4967aa44cc 100644
--- a/var/spack/repos/builtin/packages/pcl/package.py
+++ b/var/spack/repos/builtin/packages/pcl/package.py
@@ -14,12 +14,29 @@ class Pcl(CMakePackage):
homepage = "https://pointclouds.org/"
url = "https://github.com/PointCloudLibrary/pcl/releases/download/pcl-1.11.1/source.tar.gz"
+ version("1.13.1", sha256="be4d499c066203a3c296e2f7e823d6209be5983415f2279310ed1c9abb361d30")
+ version("1.13.0", sha256="bd110789f6a7416ed1c58da302afbdb80f8d297a9e23cc02fd78ab78b4762698")
+ version("1.12.1", sha256="a9573efad5e024c02f2cc9180bb8f82605c3772c62463efbe25c5d6e634b91dc")
+ version("1.12.0", sha256="606a2d5c7af304791731d6b8ea79365bc8f2cd75908006484d71ecee01d9b51c")
version("1.11.1", sha256="19d1a0bee2bc153de47c05da54fc6feb23393f306ab2dea2e25419654000336e")
depends_on("cmake@3.5:", type="build")
+ depends_on("cmake@3.10:", when="@1.12.1:", type="build")
depends_on("eigen@3.1:")
+ depends_on("eigen@3.3:", when="@1.13:")
depends_on("flann@1.7:")
- depends_on("boost@1.55:+filesystem+date_time+iostreams+system")
+ depends_on("flann@1.9.1:", when="@1.12:")
+ depends_on("boost@1.55:")
+ depends_on("boost@1.65:", when="@1.12:")
+ depends_on("boost+filesystem+iostreams+system")
+ depends_on("boost+date_time", when="@:1.13.0")
+
+ # fix build with clang: #30653
+ with when("@:1.12"):
+ patch(
+ "https://github.com/PointCloudLibrary/pcl/commit/dff16af269fbd2c15772d53064882b2bf8c2ffe9.patch?full_index=1",
+ sha256="17a7a7aec8e63701294612cbb25d46ac1ce58f643dbc68e1517329ae0b68956d",
+ )
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
diff --git a/var/spack/repos/builtin/packages/pegtl/package.py b/var/spack/repos/builtin/packages/pegtl/package.py
index 8384ed91281e66..72a4bd67ada63f 100644
--- a/var/spack/repos/builtin/packages/pegtl/package.py
+++ b/var/spack/repos/builtin/packages/pegtl/package.py
@@ -19,6 +19,7 @@ class Pegtl(CMakePackage):
git = "https://github.com/taocpp/PEGTL.git"
version("master", branch="master")
+ version("3.2.7", sha256="444c3c33686c6b2d8d45ad03af5041b7bc910ef44ac10216237d8e3e8d6e7025")
version("3.2.0", sha256="91aa6529ef9e6b57368e7b5b1f04a3bd26a39419d30e35a3c5c66ef073926b56")
version("2.8.3", sha256="370afd0fbe6d73c448a33c10fbe4a7254f92077f5a217317d0a32a9231293015")
version("2.1.4", sha256="d990dccc07b4d9ba548326d11c5c5e34fa88b34fe113cb5377da03dda29f23f2")
diff --git a/var/spack/repos/builtin/packages/perl-class-singleton/package.py b/var/spack/repos/builtin/packages/perl-class-singleton/package.py
new file mode 100644
index 00000000000000..fa44321b24ee80
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-class-singleton/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlClassSingleton(PerlPackage):
+ """Class::Singleton - Implementation of a "Singleton" class"""
+
+ homepage = "https://metacpan.org/pod/Class::Singleton"
+ url = "https://cpan.metacpan.org/authors/id/S/SH/SHAY/Class-Singleton-1.6.tar.gz"
+
+ version("1.6", sha256="27ba13f0d9512929166bbd8c9ef95d90d630fc80f0c9a1b7458891055e9282a4")
diff --git a/var/spack/repos/builtin/packages/perl-datetime-locale/package.py b/var/spack/repos/builtin/packages/perl-datetime-locale/package.py
new file mode 100644
index 00000000000000..6e341423f06238
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-datetime-locale/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlDatetimeLocale(PerlPackage):
+ """DateTime::Locale - Localization support for DateTime.pm"""
+
+ homepage = "https://metacpan.org/pod/DateTime::Locale"
+ url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-Locale-1.40.tar.gz"
+
+ version("1.40", sha256="7490b4194b5d23a4e144976dedb3bdbcc6d3364b5d139cc922a86d41fdb87afb")
+
+ depends_on("perl-file-sharedir-install", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py b/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py
new file mode 100644
index 00000000000000..b6c9eba506d845
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlDatetimeTimezone(PerlPackage):
+ """DateTime::TimeZone - Time zone object base class and factory"""
+
+ homepage = "https://metacpan.org/pod/DateTime::TimeZone"
+ url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-TimeZone-2.60.tar.gz"
+
+ version("2.60", sha256="f0460d379323905b579bed44e141237a337dc25dd26b6ab0c60ac2b80629323d")
diff --git a/var/spack/repos/builtin/packages/perl-datetime/package.py b/var/spack/repos/builtin/packages/perl-datetime/package.py
new file mode 100644
index 00000000000000..3bb9f31f819821
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-datetime/package.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlDatetime(PerlPackage):
+ """DateTime - A date and time object for Perl"""
+
+ homepage = "https://metacpan.org/pod/DateTime"
+ url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-1.63.tar.gz"
+
+ version("1.63", sha256="1b11e49ec6e184ae2a10eccd05eda9534f32458fc644c12ab710c29a3a816f6f")
+
+ depends_on("perl-namespace-autoclean", type=("run"))
diff --git a/var/spack/repos/builtin/packages/perl-devel-cover/package.py b/var/spack/repos/builtin/packages/perl-devel-cover/package.py
new file mode 100644
index 00000000000000..dfadcfb6713ba0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-devel-cover/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlDevelCover(PerlPackage):
+ """Devel::Cover - Perl extension for code coverage metrics"""
+
+ homepage = "https://metacpan.org/pod/Devel::Cover"
+ url = "https://cpan.metacpan.org/authors/id/P/PJ/PJCJ/Devel-Cover-1.40.tar.gz"
+
+ version("1.40", sha256="26e2f431fbcf7bff3851f352f83b84067c09ff206f40ab975cad8d2bafe711a8")
diff --git a/var/spack/repos/builtin/packages/perl-file-sharedir/package.py b/var/spack/repos/builtin/packages/perl-file-sharedir/package.py
index 65211470e6cca5..e5b9631918c502 100644
--- a/var/spack/repos/builtin/packages/perl-file-sharedir/package.py
+++ b/var/spack/repos/builtin/packages/perl-file-sharedir/package.py
@@ -7,9 +7,9 @@
class PerlFileSharedir(PerlPackage):
- """Locate per-dist and per-module shared files.""" # AUTO-CPAN2Spack
+ """File::ShareDir - Locate per-dist and per-module shared files"""
- homepage = "https://metacpan.org/release/File-ShareDir" # AUTO-CPAN2Spack
+ homepage = "https://metacpan.org/pod/File::ShareDir"
url = "https://cpan.metacpan.org/authors/id/R/RE/REHSACK/File-ShareDir-1.118.tar.gz"
maintainers("chissg", "gartung", "marcmengel", "vitodb") # AUTO-CPAN2Spack
diff --git a/var/spack/repos/builtin/packages/perl-file-spec/package.py b/var/spack/repos/builtin/packages/perl-file-spec/package.py
new file mode 100644
index 00000000000000..3d4d767b0b7c2b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-file-spec/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlFileSpec(PerlPackage):
+ """File::Spec - Perl extension for portably performing operations on file names"""
+
+ homepage = "https://metacpan.org/pod/File::Spec"
+ url = "https://cpan.metacpan.org/authors/id/K/KW/KWILLIAMS/File-Spec-0.90.tar.gz"
+
+ version("0.90", sha256="695a34604e1b6a98327fe2b374504329735b07c2c45db9f55df1636e4c29bf79")
diff --git a/var/spack/repos/builtin/packages/perl-memory-process/package.py b/var/spack/repos/builtin/packages/perl-memory-process/package.py
new file mode 100644
index 00000000000000..3cc302b624fe17
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-memory-process/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlMemoryProcess(PerlPackage):
+ """Memory::Process - Perl class to determine actual memory usage"""
+
+ homepage = "https://metacpan.org/pod/Memory::Process"
+ url = "https://cpan.metacpan.org/authors/id/S/SK/SKIM/Memory-Process-0.06.tar.gz"
+
+ version("0.06", sha256="35814488ffd29c97621625ea3b3d700afbfa60ed055bd759d4e58d9c8fd44e4e")
diff --git a/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py b/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py
index b5be6bd9a4e722..ec7b25395e84fe 100644
--- a/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py
+++ b/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py
@@ -7,9 +7,9 @@
class PerlNamespaceAutoclean(PerlPackage):
- """Keep imports out of your namespace.""" # AUTO-CPAN2Spack
+ """Namespace::Autoclean - Keep imports out of your namespace"""
- homepage = "https://github.com/moose/namespace-autoclean" # AUTO-CPAN2Spack
+ homepage = "https://metacpan.org/pod/namespace::autoclean"
url = "https://cpan.metacpan.org/authors/id/E/ET/ETHER/namespace-autoclean-0.29.tar.gz"
maintainers("chissg", "gartung", "marcmengel", "vitodb") # AUTO-CPAN2Spack
diff --git a/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py b/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py
new file mode 100644
index 00000000000000..6f408c960a8df9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py
@@ -0,0 +1,16 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlParamsValidationcompiler(PerlPackage):
+ """Params::ValidationCompiler - Build an optimized subroutine parameter validator once,
+ use it forever"""
+
+ homepage = "https://metacpan.org/pod/Params::ValidationCompiler"
+ url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/Params-ValidationCompiler-0.31.tar.gz"
+
+ version("0.31", sha256="7b6497173f1b6adb29f5d51d8cf9ec36d2f1219412b4b2410e9d77a901e84a6d")
diff --git a/var/spack/repos/builtin/packages/perl-specio/package.py b/var/spack/repos/builtin/packages/perl-specio/package.py
new file mode 100644
index 00000000000000..05954d586dee77
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-specio/package.py
@@ -0,0 +1,15 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PerlSpecio(PerlPackage):
+ """Type constraints and coercions for Perl."""
+
+ homepage = "https://metacpan.org/dist/Specio"
+ url = "http://search.cpan.org/CPAN/authors/id/D/DR/DROLSKY/Specio-0.48.tar.gz"
+
+ version("0.48", sha256="0c85793580f1274ef08173079131d101f77b22accea7afa8255202f0811682b2")
diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py
index 01df5328561bbd..33f87fde6efb60 100644
--- a/var/spack/repos/builtin/packages/perl/package.py
+++ b/var/spack/repos/builtin/packages/perl/package.py
@@ -119,6 +119,7 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
extendable = True
if sys.platform != "win32":
+ depends_on("gmake", type="build")
depends_on("gdbm@:1.23")
# Bind us below gdbm-1.20 due to API change: https://github.com/Perl/perl5/issues/18915
depends_on("gdbm@:1.19", when="@:5.35")
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py
index 2f258edc17a183..5093961f7deffc 100644
--- a/var/spack/repos/builtin/packages/petsc/package.py
+++ b/var/spack/repos/builtin/packages/petsc/package.py
@@ -22,6 +22,7 @@ class Petsc(Package, CudaPackage, ROCmPackage):
version("main", branch="main")
+ version("3.20.1", sha256="3d54f13000c9c8ceb13ca4f24f93d838319019d29e6de5244551a3ec22704f32")
version("3.20.0", sha256="c152ccb12cb2353369d27a65470d4044a0c67e0b69814368249976f5bb232bd4")
version("3.19.6", sha256="6045e379464e91bb2ef776f22a08a1bc1ff5796ffd6825f15270159cbb2464ae")
version("3.19.5", sha256="511aa78cad36db2dfd298acf35e9f7afd2ecc1f089da5b0b5682507a31a5d6b2")
diff --git a/var/spack/repos/builtin/packages/pflotran/package.py b/var/spack/repos/builtin/packages/pflotran/package.py
index 0045f2758837dc..37fecbe31c3efd 100644
--- a/var/spack/repos/builtin/packages/pflotran/package.py
+++ b/var/spack/repos/builtin/packages/pflotran/package.py
@@ -18,6 +18,7 @@ class Pflotran(AutotoolsPackage):
maintainers("ghammond86", "balay")
version("develop")
+ version("5.0.0", commit="f0fe931c72c03580e489724afeb8c5451406b942") # tag v5.0.0
version("4.0.1", commit="fd351a49b687e27f46eae92e9259156eea74897d") # tag v4.0.1
version("3.0.2", commit="9e07f416a66b0ad304c720b61aa41cba9a0929d5") # tag v3.0.2
version("xsdk-0.6.0", commit="46e14355c1827c057f2e1b3e3ae934119ab023b2")
@@ -30,6 +31,7 @@ class Pflotran(AutotoolsPackage):
depends_on("mpi")
depends_on("hdf5@1.8.12:+mpi+fortran+hl")
depends_on("petsc@main:+hdf5+metis", when="@develop")
+ depends_on("petsc@3.20:+hdf5+metis", when="@5.0.0")
depends_on("petsc@3.18:+hdf5+metis", when="@4.0.1")
depends_on("petsc@3.16:+hdf5+metis", when="@3.0.2")
depends_on("petsc@3.14:+hdf5+metis", when="@xsdk-0.6.0")
@@ -55,3 +57,9 @@ def flag_handler(self, name, flags):
if "%gcc@10:" in self.spec and name == "fflags":
flags.append("-fallow-argument-mismatch")
return flags, None, None
+
+ @when("@5.0.0")
+ def patch(self):
+ filter_file(
+ "use iso_[cC]_binding", "use, intrinsic :: iso_c_binding", "src/pflotran/hdf5_aux.F90"
+ )
diff --git a/var/spack/repos/builtin/packages/pfunit/package.py b/var/spack/repos/builtin/packages/pfunit/package.py
index 0fb3af60eb33e6..b257f0e5fc38fc 100644
--- a/var/spack/repos/builtin/packages/pfunit/package.py
+++ b/var/spack/repos/builtin/packages/pfunit/package.py
@@ -19,6 +19,7 @@ class Pfunit(CMakePackage):
maintainers("mathomp4", "tclune")
+ version("4.7.4", sha256="ac850e33ea99c283f503f75293bf238b4b601885d7adba333066e6185dad5c04")
version("4.7.3", sha256="247239298b55e847417b7830183d7fc62cca93dc92c8ec7c0067784b7ce34544")
version("4.7.2", sha256="3142a1e56b7d127fdc9589cf6deff8505174129834a6a268d0ce7e296f51ab02")
version("4.7.1", sha256="64de3eb9f364b57ef6df81ba33400dfd4dcebca6eb5d0e9b7955ed8156e29165")
diff --git a/var/spack/repos/builtin/packages/phist/package.py b/var/spack/repos/builtin/packages/phist/package.py
index a67deddebd3557..338680e772cb69 100644
--- a/var/spack/repos/builtin/packages/phist/package.py
+++ b/var/spack/repos/builtin/packages/phist/package.py
@@ -235,6 +235,13 @@ def patch(self):
test.filter("1 2 3 12", "1 2 3")
test.filter("12/", "6/")
test.filter("TEST_DRIVERS_NUM_THREADS 6", "TEST_DRIVERS_NUM_THREADS 3")
+ # Avoid finding external modules like:
+ # /opt/rocm/llvm/include/iso_fortran_env.mod
+ filter_file(
+ "use iso_fortran_env",
+ "use, intrinsic :: iso_fortran_env",
+ "drivers/matfuncs/matpde3d.F90",
+ )
def setup_build_environment(self, env):
env.set("SPACK_SBANG", sbang.sbang_install_path())
diff --git a/var/spack/repos/builtin/packages/photos-f/package.py b/var/spack/repos/builtin/packages/photos-f/package.py
index 5c6cac9bf568d8..536407ff8ac49a 100644
--- a/var/spack/repos/builtin/packages/photos-f/package.py
+++ b/var/spack/repos/builtin/packages/photos-f/package.py
@@ -18,8 +18,6 @@ class PhotosF(MakefilePackage):
"http://cern.ch/service-spi/external/MCGenerators/distribution/photos/photos-215.5-src.tgz"
)
- maintainers("iarspider")
-
version("215.5", sha256="3e2b3f60ffe2d3a6a95cf2f156aa24b93e1fa3c439a85fa0ae780ca2f6e0dbb5")
patch("photos-215.5-update-configure.patch", level=2)
diff --git a/var/spack/repos/builtin/packages/pika-algorithms/package.py b/var/spack/repos/builtin/packages/pika-algorithms/package.py
index 3387dfdb2736cb..48ca6fe2f0129b 100644
--- a/var/spack/repos/builtin/packages/pika-algorithms/package.py
+++ b/var/spack/repos/builtin/packages/pika-algorithms/package.py
@@ -15,6 +15,8 @@ class PikaAlgorithms(CMakePackage):
git = "https://github.com/pika-org/pika-algorithms.git"
maintainers("msimberg", "albestro", "teonnik", "aurianer")
+ license("BSL-1.0")
+
version("0.1.4", sha256="67ea5e8545b234f82dcc75612a774f2e3df8425a283f2034c2d1e2e5ac74f945")
version("0.1.3", sha256="53b79fcc0e5decc0a4d70abf0897a4f66141b85eea6d65013f51eec02ad123b7")
version("0.1.2", sha256="286cf5c4db06717fa66c681cec8c99207154dd07e72d72f2b5b4a3cb9ff698bf")
diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py
index 50ff40f4112d90..1dcd4d2613a5a2 100644
--- a/var/spack/repos/builtin/packages/pika/package.py
+++ b/var/spack/repos/builtin/packages/pika/package.py
@@ -17,6 +17,9 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/pika-org/pika.git"
maintainers("msimberg", "albestro", "teonnik", "aurianer")
+ license("BSL-1.0")
+
+ version("0.20.0", sha256="f338cceea66a0e3954806b2aca08f6560bba524ecea222f04bc18b483851c877")
version("0.19.1", sha256="674675abf0dd4c6f5a0b2fa3db944b277ed65c62f654029d938a8cab608a9c1d")
version("0.19.0", sha256="f45cc16e4e50cbb183ed743bdc8b775d49776ee33c13ea39a650f4230a5744cb")
version("0.18.0", sha256="f34890e0594eeca6ac57f2b988d0807b502782817e53a7f7043c3f921b08c99f")
@@ -42,7 +45,6 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage):
generator("ninja")
- map_cxxstd = lambda cxxstd: "2a" if cxxstd == "20" else cxxstd
cxxstds = ("17", "20", "23")
variant(
"cxxstd",
@@ -91,6 +93,9 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage):
conflicts("%clang@:8", when="@0.2:")
conflicts("+stdexec", when="cxxstd=17")
conflicts("cxxstd=23", when="^cmake@:3.20.2")
+ # CUDA version <= 11 does not support C++20 and newer
+ for cxxstd in filter(lambda x: x != "17", cxxstds):
+ conflicts(f"cxxstd={cxxstd}", when="^cuda@:11")
# Other dependencies
depends_on("boost@1.71:")
@@ -139,7 +144,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage):
)
for cxxstd in cxxstds:
- depends_on("boost cxxstd={0}".format(map_cxxstd(cxxstd)), when="cxxstd={0}".format(cxxstd))
+ depends_on("boost cxxstd={0}".format(cxxstd), when="cxxstd={0}".format(cxxstd))
depends_on("fmt cxxstd={0}".format(cxxstd), when="@0.11: cxxstd={0}".format(cxxstd))
# COROUTINES
diff --git a/var/spack/repos/builtin/packages/plasma/package.py b/var/spack/repos/builtin/packages/plasma/package.py
index 6b92413fd7f598..5cccfe3ff4e6b5 100644
--- a/var/spack/repos/builtin/packages/plasma/package.py
+++ b/var/spack/repos/builtin/packages/plasma/package.py
@@ -19,11 +19,13 @@ class Plasma(CMakePackage):
homepage = "https://github.com/icl-utk-edu/plasma/"
url = "https://github.com/icl-utk-edu/plasma/releases/download/21.8.29/plasma-21.8.29.tar.gz"
git = "https://github.com/icl-utk-edu/plasma"
+
maintainers("luszczek")
tags = ["e4s"]
version("develop", git=git)
+ version("23.8.2", sha256="2db34de0575f3e3d16531bdcf1caddef146f68e71335977a3e8ec193003ab943")
version("22.9.29", sha256="78827898b7e3830eee2e388823b9180858279f77c5eda5aa1be173765c53ade5")
version("21.8.29", sha256="e0bb4d9143c8540f9f46cbccac9ed0cbea12500a864e6954fce2fe94ea057a10")
version("20.9.20", sha256="2144a77b739f8dd2f0dbe5b64d94cde0e916f55c4eb170facd168c0db7fc7970")
diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py
index f2bbe7e74c213d..b7eaa980a3ed50 100644
--- a/var/spack/repos/builtin/packages/podio/package.py
+++ b/var/spack/repos/builtin/packages/podio/package.py
@@ -15,14 +15,31 @@ class Podio(CMakePackage):
url = "https://github.com/AIDASoft/podio/archive/v00-09-02.tar.gz"
git = "https://github.com/AIDASoft/podio.git"
- maintainers("vvolkl", "drbenmorgan", "jmcarcell")
+ maintainers("vvolkl", "drbenmorgan", "jmcarcell", "tmadlener")
tags = ["hep", "key4hep"]
version("master", branch="master")
- version("0.16.6", sha256="859f7cd16bd2b833bee9c1f33eb4cdbc2a0c2b1a48a853f67c30e8a0301d16df")
- version("0.16.5", sha256="42135e4d0e11be6f0d88748799fa2ec985514d4b4c979a10a56a00a378f65ee0")
- version("0.16.3", sha256="d8208f98496af68ca8d02d302f428aab510e50d07575b90c3477fff7e499335b")
+ version("0.17.3", sha256="079517eba9c43d01255ef8acd88468c3ead7bb9d8fed11792e121bb481d54dee")
+ version("0.17.2", sha256="5b519335c4e1708f71ed85b3cac8ca81e544cc4572a5c37019ce9fc414c5e74d")
+ version("0.17.1", sha256="97d6c5f81d50ee42bf7c01f041af2fd333c806f1bbf0a4828ca961a24cea6bb2")
+ version("0.17", sha256="0c19f69970a891459cab227ab009514f1c1ce102b70e8c4b7d204eb6a0c643c1")
+ version("0.16.7", sha256="8af7c947e2637f508b7af053412bacd9218d41a455d69addd7492f05b7a4338d")
+ version(
+ "0.16.6",
+ sha256="859f7cd16bd2b833bee9c1f33eb4cdbc2a0c2b1a48a853f67c30e8a0301d16df",
+ deprecated=True,
+ )
+ version(
+ "0.16.5",
+ sha256="42135e4d0e11be6f0d88748799fa2ec985514d4b4c979a10a56a00a378f65ee0",
+ deprecated=True,
+ )
+ version(
+ "0.16.3",
+ sha256="d8208f98496af68ca8d02d302f428aab510e50d07575b90c3477fff7e499335b",
+ deprecated=True,
+ )
version(
"0.16.2",
sha256="faf7167290faf322f23c734adff19904b10793b5ab14e1dfe90ce257c225114b",
@@ -112,6 +129,7 @@ class Podio(CMakePackage):
description="Use the specified C++ standard when building.",
)
variant("sio", default=False, description="Build the SIO I/O backend")
+ variant("rntuple", default=False, description="Build the RNTuple backend")
# cpack config throws an error on some systems
patch("cpack.patch", when="@:0.10.0")
@@ -119,9 +137,12 @@ class Podio(CMakePackage):
patch("python-tests.patch", when="@:0.14.0")
depends_on("root@6.08.06: cxxstd=17", when="cxxstd=17")
- depends_on("root@6.25.02: cxxstd=20", when="cxxstd=20")
+ depends_on("root@6.28.04:", when="+rntuple")
+ depends_on("root@6.28:", when="@0.17:")
+ for cxxstd in ("17", "20"):
+ depends_on("root cxxstd={}".format(cxxstd), when="cxxstd={}".format(cxxstd))
- depends_on("cmake@3.8:", type="build")
+ depends_on("cmake@3.12:", type="build")
depends_on("python", type=("build", "run"))
depends_on("py-pyyaml", type=("build", "run"))
depends_on("py-jinja2@2.10.1:", type=("build", "run"), when="@0.12.0:")
@@ -131,10 +152,12 @@ class Podio(CMakePackage):
depends_on("py-tabulate", type=("run", "test"), when="@0.16.6:")
conflicts("+sio", when="@:0.12", msg="sio support requires at least podio@0.13")
+ conflicts("+rntuple", when="@:0.16", msg="rntuple support requires at least podio@0.17")
def cmake_args(self):
args = [
self.define_from_variant("ENABLE_SIO", "sio"),
+ self.define_from_variant("ENABLE_RNTUPLE", "rntuple"),
self.define("CMAKE_CXX_STANDARD", self.spec.variants["cxxstd"].value),
self.define("BUILD_TESTING", self.run_tests),
]
@@ -156,6 +179,12 @@ def setup_dependent_build_environment(self, env, dependent_spec):
env.prepend_path("PYTHONPATH", self.prefix.python)
env.prepend_path("LD_LIBRARY_PATH", self.spec["podio"].libs.directories[0])
env.prepend_path("ROOT_INCLUDE_PATH", self.prefix.include)
+ if self.spec.satisfies("+sio @0.17:"):
+ # sio needs to be on LD_LIBRARY_PATH for ROOT to be able to
+ # dynamically load the python libraries also in dependent build
+ # environments since the import structure has changed with
+ # podio@0.17
+ env.prepend_path("LD_LIBRARY_PATH", self.spec["sio"].libs.directories[0])
def url_for_version(self, version):
"""Translate version numbers to ilcsoft conventions.
diff --git a/var/spack/repos/builtin/packages/pulseaudio/package.py b/var/spack/repos/builtin/packages/pulseaudio/package.py
index f96d2af990e6ce..bfc93dab7d4d0b 100644
--- a/var/spack/repos/builtin/packages/pulseaudio/package.py
+++ b/var/spack/repos/builtin/packages/pulseaudio/package.py
@@ -51,6 +51,7 @@ class Pulseaudio(AutotoolsPackage):
depends_on("openssl", when="+openssl")
depends_on("perl-xml-parser", type="build")
depends_on("speexdsp@1.2:")
+ depends_on("m4", type="build")
def configure_args(self):
args = [
diff --git a/var/spack/repos/builtin/packages/py-abipy/package.py b/var/spack/repos/builtin/packages/py-abipy/package.py
index 3e868f56075119..dfaed29c7d4f61 100644
--- a/var/spack/repos/builtin/packages/py-abipy/package.py
+++ b/var/spack/repos/builtin/packages/py-abipy/package.py
@@ -17,7 +17,7 @@ class PyAbipy(PythonPackage):
version("0.2.0", sha256="c72b796ba0f9ea4299eac3085bede092d2652e9e5e8074d3badd19ef7b600792")
variant("gui", default=False, description="Build the GUI")
- variant("ipython", default=False, when="0.2.0", description="Build IPython support")
+ variant("ipython", default=False, when="@0.2.0", description="Build IPython support")
depends_on("py-setuptools", type="build")
# in newer pip versions --install-option does not exist
diff --git a/var/spack/repos/builtin/packages/py-archspec/package.py b/var/spack/repos/builtin/packages/py-archspec/package.py
index 047beda9afabb6..564f798ea953d5 100644
--- a/var/spack/repos/builtin/packages/py-archspec/package.py
+++ b/var/spack/repos/builtin/packages/py-archspec/package.py
@@ -15,8 +15,9 @@ class PyArchspec(PythonPackage):
maintainers("alalazo")
+ version("0.2.2", sha256="d922c9fd80a5234d8cef883fbe0e146b381c449062c0405f91714ebad1edc035")
version("0.2.1", sha256="0974a8a95831d2d43cce906c5b79a35d5fd2bf9be478b0e3b7d83ccc51ac815e")
version("0.2.0", sha256="6aaba5ebdb5c3633c400d8c221a6a18716da0c64b367a8509f4217b22e91a5f5")
depends_on("py-poetry-core@1.0.0:", type="build")
- depends_on("py-click@8", type=("build", "run"))
+ depends_on("py-click@8", type=("build", "run"), when="@:0.2.0")
diff --git a/var/spack/repos/builtin/packages/py-async-lru/package.py b/var/spack/repos/builtin/packages/py-async-lru/package.py
index 021112b4f9f4b6..ec2033768f1b4e 100644
--- a/var/spack/repos/builtin/packages/py-async-lru/package.py
+++ b/var/spack/repos/builtin/packages/py-async-lru/package.py
@@ -12,8 +12,6 @@ class PyAsyncLru(PythonPackage):
homepage = "https://github.com/wikibusiness/async_lru"
pypi = "async-lru/async-lru-1.0.2.tar.gz"
- maintainers("iarspider")
-
version("1.0.3", sha256="c2cb9b2915eb14e6cf3e717154b40f715bf90e596d73623677affd0d1fbcd32a")
version("1.0.2", sha256="baa898027619f5cc31b7966f96f00e4fc0df43ba206a8940a5d1af5336a477cb")
diff --git a/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py b/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py
index a46d057753a4df..5b2b830c4645ea 100644
--- a/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py
+++ b/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py
@@ -12,8 +12,6 @@ class PyBackportsEntryPointsSelectable(PythonPackage):
homepage = "https://github.com/jaraco/backports.entry_points_selectable"
pypi = "backports.entry_points_selectable/backports.entry_points_selectable-1.1.0.tar.gz"
- maintainers("iarspider")
-
version("1.1.1", sha256="914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386")
version("1.1.0", sha256="988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a")
diff --git a/var/spack/repos/builtin/packages/py-black/package.py b/var/spack/repos/builtin/packages/py-black/package.py
index bb6539d7150aa4..a2cba61bc88951 100644
--- a/var/spack/repos/builtin/packages/py-black/package.py
+++ b/var/spack/repos/builtin/packages/py-black/package.py
@@ -17,6 +17,9 @@ class PyBlack(PythonPackage):
maintainers("adamjstewart")
+ version("23.11.0", sha256="4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05")
+ version("23.10.1", sha256="1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258")
+ version("23.10.0", sha256="31b9f87b277a68d0e99d2905edae08807c007973eaa609da5f0c62def6b7c0bd")
version("23.9.1", sha256="24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d")
version("23.9.0", sha256="3511c8a7e22ce653f89ae90dfddaf94f3bb7e2587a245246572d3b9c92adf066")
version("23.7.0", sha256="022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb")
@@ -37,23 +40,25 @@ class PyBlack(PythonPackage):
depends_on("py-hatchling@1.8:", when="@22.10:", type="build")
depends_on("py-hatch-vcs", when="@22.10:", type="build")
depends_on("py-hatch-fancy-pypi-readme", when="@22.10:", type="build")
- depends_on("python@3.8:", when="@23.7:", type=("build", "run"))
- # Needed to ensure that Spack can bootstrap black with Python 3.6
- depends_on("python@3.7:", when="@22.10:", type=("build", "run"))
- depends_on("py-click@8:", type=("build", "run"))
- depends_on("py-mypy-extensions@0.4.3:", type=("build", "run"))
- depends_on("py-packaging@22:", when="@23.1:", type=("build", "run"))
- depends_on("py-pathspec@0.9:", type=("build", "run"))
- depends_on("py-platformdirs@2:", type=("build", "run"))
- depends_on("py-tomli@1.1:", when="@22.8: ^python@:3.10", type=("build", "run"))
- depends_on("py-tomli@1.1:", when="@21.7:22.6", type=("build", "run"))
- depends_on("py-typing-extensions@3.10:", when="^python@:3.9", type=("build", "run"))
-
- depends_on("py-colorama@0.4.3:", when="+colorama", type=("build", "run"))
- depends_on("py-uvloop@0.15.2:", when="+uvloop", type=("build", "run"))
- depends_on("py-aiohttp@3.7.4:", when="+d", type=("build", "run"))
- depends_on("py-ipython@7.8:", when="+jupyter", type=("build", "run"))
- depends_on("py-tokenize-rt@3.2:", when="+jupyter", type=("build", "run"))
+
+ with default_args(type=("build", "run")):
+ depends_on("python@3.8:", when="@23.7:")
+ depends_on("python@3.7:", when="@22.10:")
+ depends_on("py-click@8:")
+ depends_on("py-mypy-extensions@0.4.3:")
+ depends_on("py-packaging@22:", when="@23.1:")
+ depends_on("py-pathspec@0.9:")
+ depends_on("py-platformdirs@2:")
+ depends_on("py-tomli@1.1:", when="@22.8: ^python@:3.10")
+ depends_on("py-tomli@1.1:", when="@21.7:22.6")
+ depends_on("py-typing-extensions@4.0.1:", when="@23.9: ^python@:3.10")
+ depends_on("py-typing-extensions@3.10:", when="@:23.7 ^python@:3.9")
+
+ depends_on("py-colorama@0.4.3:", when="+colorama")
+ depends_on("py-uvloop@0.15.2:", when="+uvloop")
+ depends_on("py-aiohttp@3.7.4:", when="+d")
+ depends_on("py-ipython@7.8:", when="+jupyter")
+ depends_on("py-tokenize-rt@3.2:", when="+jupyter")
# Historical dependencies
depends_on("py-setuptools@45:", when="@:22.8", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-bluepyefe/package.py b/var/spack/repos/builtin/packages/py-bluepyefe/package.py
new file mode 100644
index 00000000000000..8a15e4edf9e2c5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-bluepyefe/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.package import *
+
+
+class PyBluepyefe(PythonPackage):
+ """Blue Brain Python E-feature extraction"""
+
+ homepage = "https://github.com/BlueBrain/BluePyEfe"
+ pypi = "bluepyefe/bluepyefe-2.2.18.tar.gz"
+ git = "https://github.com/BlueBrain/BluePyEfe.git"
+
+ version("2.2.18", sha256="bfb50c6482433ec2ffb4b65b072d2778bd89ae50d92dd6830969222aabb30275")
+
+ depends_on("py-setuptools", type="build")
+
+ depends_on("py-numpy@:1.23", type=("build", "run"))
+ depends_on("py-neo", type=("build", "run"))
+ depends_on("py-matplotlib", type=("build", "run"))
+ depends_on("py-efel", type=("build", "run"))
+ depends_on("py-scipy", type=("build", "run"))
+ depends_on("py-h5py", type=("build", "run"))
+ depends_on("py-igor", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-bluepyemodel/package.py b/var/spack/repos/builtin/packages/py-bluepyemodel/package.py
new file mode 100644
index 00000000000000..f865b9791b622b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-bluepyemodel/package.py
@@ -0,0 +1,36 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyBluepyemodel(PythonPackage):
+ """Python library to optimize and evaluate electrical models."""
+
+ homepage = "https://github.com/BlueBrain/BluePyEModel"
+ pypi = "bluepyemodel/bluepyemodel-0.0.46.tar.gz"
+
+ version("0.0.46", sha256="ad4c125e491f3337fcc341a4f389b8a616d883ce50fd77d9fb0ea6e13be5da61")
+
+ depends_on("py-setuptools", type="build")
+ depends_on("py-setuptools-scm", type="build")
+
+ depends_on("py-numpy", type=("build", "run"))
+ depends_on("py-scipy", type=("build", "run"))
+ depends_on("py-pandas", type=("build", "run"))
+ depends_on("py-ipyparallel@6.3:", type=("build", "run"))
+ depends_on("py-tqdm", type=("build", "run"))
+ depends_on("py-pyyaml", type=("build", "run"))
+ depends_on("py-gitpython", type=("build", "run"))
+ depends_on("py-bluepyopt@1.12.12:", type=("build", "run"))
+ depends_on("py-bluepyefe@2.2.0:", type=("build", "run"))
+ depends_on("py-neurom@3.0:3", type=("build", "run"))
+ depends_on("py-efel@3.1:", type=("build", "run"))
+ depends_on("py-configparser", type=("build", "run"))
+ depends_on("py-morph-tool@2.8:", type=("build", "run"))
+ depends_on("py-fasteners@0.16:", type=("build", "run"))
+ depends_on("neuron+python@8.0:", type=("build", "run"))
+ depends_on("py-jinja2@3.0.3", when="@0.0.11:", type=("build", "run"))
+ depends_on("py-currentscape@0.0.11:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-bluepyopt/package.py b/var/spack/repos/builtin/packages/py-bluepyopt/package.py
new file mode 100644
index 00000000000000..ccc39f913558fe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-bluepyopt/package.py
@@ -0,0 +1,37 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.package import *
+
+
+class PyBluepyopt(PythonPackage):
+ """Bluebrain Python Optimisation Library"""
+
+ homepage = "https://github.com/BlueBrain/BluePyOpt"
+ pypi = "bluepyopt/bluepyopt-1.9.27.tar.gz"
+
+ # NOTE : while adding new release check pmi_rank.patch compatibility
+ version("1.14.4", sha256="7567fd736053250ca06030f67ad93c607b100c2b98df8dc588c26b64cb3e171c")
+
+ # patch required to avoid hpe-mpi linked mechanism library
+ patch("pmi_rank.patch")
+
+ variant("scoop", default=False, description="Use BluePyOpt together with py-scoop")
+
+ depends_on("py-setuptools", type="build")
+ depends_on("py-numpy@1.6:", type=("build", "run"))
+ depends_on("py-pandas@0.18:", type=("build", "run"))
+ depends_on("py-deap@1.3.3:", type=("build", "run"))
+ depends_on("py-efel@2.13:", type=("build", "run"))
+ depends_on("py-ipyparallel", type=("build", "run"))
+ depends_on("py-pickleshare@0.7.3:", type=("build", "run"))
+ depends_on("py-jinja2@2.8:", type=("build", "run"))
+ depends_on("py-future", type=("build", "run"))
+ depends_on("py-pebble@4.6:", type=("build", "run"))
+ depends_on("py-scoop@0.7:", type=("build", "run"), when="+scoop")
+ depends_on("neuron@7.4:", type=("build", "run"))
+
+ def setup_run_environment(self, env):
+ env.unset("PMI_RANK")
+ env.set("NEURON_INIT_MPI", "0")
diff --git a/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch b/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch
new file mode 100644
index 00000000000000..21a73849b28683
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch
@@ -0,0 +1,17 @@
+diff --git a/bluepyopt/ephys/simulators.py b/bluepyopt/ephys/simulators.py
+index e71ad8b..3c93237 100644
+--- a/bluepyopt/ephys/simulators.py
++++ b/bluepyopt/ephys/simulators.py
+@@ -89,6 +89,12 @@ class NrnSimulator(object):
+ NrnSimulator._nrn_disable_banner()
+ self.banner_disabled = True
+
++ # certain mpi libraries (hpe-mpt) use PMI_RANK env variable to initialize
++ # MPI before calling MPI_Init (which is undesirable). Unset this variable
++ # if exist to avoid issue with loading neuron and mechanism library.
++ if 'PMI_RANK' in os.environ:
++ os.environ.pop("PMI_RANK")
++
+ import neuron # NOQA
+
+ return neuron
diff --git a/var/spack/repos/builtin/packages/py-bokeh/package.py b/var/spack/repos/builtin/packages/py-bokeh/package.py
index 9c6601dffe311e..dcc13130ad8a87 100644
--- a/var/spack/repos/builtin/packages/py-bokeh/package.py
+++ b/var/spack/repos/builtin/packages/py-bokeh/package.py
@@ -9,9 +9,10 @@
class PyBokeh(PythonPackage):
"""Statistical and novel interactive HTML plots for Python"""
- homepage = "https://github.com/bokeh/bokeh"
+ homepage = "https://bokeh.org/"
pypi = "bokeh/bokeh-0.12.2.tar.gz"
+ version("3.3.1", sha256="2a7b3702d7e9f03ef4cd801b02b7380196c70cff2773859bcb84fa565218955c")
version("2.4.3", sha256="ef33801161af379665ab7a34684f2209861e3aefd5c803a21fbbb99d94874b03")
version("2.4.1", sha256="d0410717d743a0ac251e62480e2ea860a7341bdcd1dbe01499a904f233c90512")
version("2.4.0", sha256="6fa00ed8baab5cca33f4175792c309fa2536eaae7e90abee884501ba8c90fddb")
@@ -20,11 +21,16 @@ class PyBokeh(PythonPackage):
version("0.12.2", sha256="0a840f6267b6d342e1bd720deee30b693989538c49644142521d247c0f2e6939")
depends_on("py-setuptools", type="build", when="@1.3.4:")
+ depends_on("py-setuptools@64:", type="build", when="@3:")
+ depends_on("py-setuptools-git-versioning", type="build", when="@3:")
+ depends_on("py-colorama", type="build", when="@3:")
depends_on("python@2.6:", type=("build", "run"), when="@0.12.2")
depends_on("python@2.7:", type=("build", "run"), when="@1.3.4:")
depends_on("python@3.6:", type=("build", "run"), when="@2.3.3:")
depends_on("python@3.7:", type=("build", "run"), when="@2.4.0:")
+ depends_on("python@3.8:", type=("build", "run"), when="@3.0.0:")
+ depends_on("python@3.9:", type=("build", "run"), when="@3.2.0:")
depends_on("py-requests@1.2.3:", type=("build", "run"), when="@0.12.2")
depends_on("py-six@1.5.2:", type=("build", "run"), when="@:1.3.4")
@@ -33,11 +39,16 @@ class PyBokeh(PythonPackage):
depends_on("py-jinja2@2.7:", type=("build", "run"))
depends_on("py-jinja2@2.9:", type=("build", "run"), when="@2.3.3:")
+ depends_on("py-contourpy@1:", type=("build", "run"), when="@3:")
+
depends_on("py-numpy@1.7.1:", type=("build", "run"))
depends_on("py-numpy@1.11.3:", type=("build", "run"), when="@2.3.3:")
+ depends_on("py-numpy@1.16:", type=("build", "run"), when="@3.1:")
depends_on("py-packaging@16.8:", type=("build", "run"), when="@1.3.4:")
+ depends_on("py-pandas@1.2:", type=("build", "run"), when="@3:")
+
depends_on("pil@4.0:", type=("build", "run"), when="@1.3.4:")
depends_on("pil@7.1.0:", type=("build", "run"), when="@2.3.3:")
@@ -46,5 +57,7 @@ class PyBokeh(PythonPackage):
depends_on("py-tornado@4.3:", type=("build", "run"))
depends_on("py-tornado@5.1:", type=("build", "run"), when="@2.3.3:")
- depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:")
- depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:")
+ depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:3.0.0")
+ depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:3.0.0")
+
+ depends_on("py-xyzservices@2021.09.1:", type=("build", "run"), when="@3:")
diff --git a/var/spack/repos/builtin/packages/py-cleo/package.py b/var/spack/repos/builtin/packages/py-cleo/package.py
index 1852bd7b3eb76e..b5d60f1adc25e3 100644
--- a/var/spack/repos/builtin/packages/py-cleo/package.py
+++ b/var/spack/repos/builtin/packages/py-cleo/package.py
@@ -12,17 +12,26 @@ class PyCleo(PythonPackage):
homepage = "https://github.com/sdispater/cleo"
pypi = "cleo/cleo-0.8.1.tar.gz"
- version("1.0.0a5", sha256="097c9d0e0332fd53cc89fc11eb0a6ba0309e6a3933c08f7b38558555486925d3")
+ maintainers("LydDeb")
+
+ version("2.1.0", sha256="0b2c880b5d13660a7ea651001fb4acb527696c01f15c9ee650f377aa543fd523")
+ version("2.0.1", sha256="eb4b2e1f3063c11085cebe489a6e9124163c226575a3c3be69b2e51af4a15ec5")
+ version("2.0.0", sha256="fbc5cb141cbc31ea8ffd3d5cd67d3b183fa38aa5098fd37e39e9a953a232fda9")
+ version("1.0.0", sha256="bb5e4f70db83a597575ec86a1ed8fc56bd80934cfea3db97a23ea50c03b78382")
version(
- "0.8.1",
- sha256="3d0e22d30117851b45970b6c14aca4ab0b18b1b53c8af57bed13208147e4069f",
- preferred=True,
+ "1.0.0a5",
+ sha256="097c9d0e0332fd53cc89fc11eb0a6ba0309e6a3933c08f7b38558555486925d3",
+ deprecated=True,
)
+ version("0.8.1", sha256="3d0e22d30117851b45970b6c14aca4ab0b18b1b53c8af57bed13208147e4069f")
depends_on("python@2.7,3.4:3", type=("build", "run"))
depends_on("python@3.7:3", when="@1:", type=("build", "run"))
depends_on("py-poetry-core@1:", type="build")
- depends_on("py-poetry-core@1", when="@1:", type="build")
- depends_on("py-clikit@0.6.0:0.6", when="@0.8.1", type=("build", "run"))
- depends_on("py-pylev@1.3:1", when="@1:", type=("build", "run"))
- depends_on("py-crashtest@0.3.1:0.3", when="@1:", type=("build", "run"))
+ depends_on("py-poetry-core@1.1:1", when="@1:2.0.0", type="build")
+ depends_on("py-poetry-core@1.1.0:", when="@2.0.1:", type="build")
+ depends_on("py-clikit@0.6", when="@0.8.1", type=("build", "run"))
+ depends_on("py-pylev@1.3:1", when="@1.0.0a5", type=("build", "run"))
+ depends_on("py-crashtest@0.4.1:0.4", when="@1:", type=("build", "run"))
+ depends_on("py-rapidfuzz@2.2:2", when="@1:2.0", type=("build", "run"))
+ depends_on("py-rapidfuzz@3", when="@2.1:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-comm/package.py b/var/spack/repos/builtin/packages/py-comm/package.py
index fd195b1f4f7644..5e82ade2c04fd2 100644
--- a/var/spack/repos/builtin/packages/py-comm/package.py
+++ b/var/spack/repos/builtin/packages/py-comm/package.py
@@ -12,7 +12,10 @@ class PyComm(PythonPackage):
homepage = "https://github.com/ipython/comm"
pypi = "comm/comm-0.1.3.tar.gz"
+ version("0.1.4", sha256="354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15")
version("0.1.3", sha256="a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e")
depends_on("py-hatchling@1.10:", type="build")
- depends_on("py-traitlets@5.3:", type=("build", "run"))
+
+ depends_on("py-traitlets@4:", when="@0.1.4:", type=("build", "run"))
+ depends_on("py-traitlets@5.3:", when="@0.1.3", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-cppy/package.py b/var/spack/repos/builtin/packages/py-cppy/package.py
index b759799e1b65fc..c697695cce8859 100644
--- a/var/spack/repos/builtin/packages/py-cppy/package.py
+++ b/var/spack/repos/builtin/packages/py-cppy/package.py
@@ -12,8 +12,6 @@ class PyCppy(PythonPackage):
homepage = "https://github.com/nucleic/cppy"
pypi = "cppy/cppy-1.1.0.tar.gz"
- maintainers("iarspider")
-
version("1.2.1", sha256="83b43bf17b1085ac15c5debdb42154f138b928234b21447358981f69d0d6fe1b")
version("1.1.0", sha256="4eda6f1952054a270f32dc11df7c5e24b259a09fddf7bfaa5f33df9fb4a29642")
diff --git a/var/spack/repos/builtin/packages/py-crashtest/package.py b/var/spack/repos/builtin/packages/py-crashtest/package.py
index f16c993168ec74..d2dade000db470 100644
--- a/var/spack/repos/builtin/packages/py-crashtest/package.py
+++ b/var/spack/repos/builtin/packages/py-crashtest/package.py
@@ -13,9 +13,11 @@ class PyCrashtest(PythonPackage):
homepage = "https://github.com/sdispater/crashtest"
pypi = "crashtest/crashtest-0.3.1.tar.gz"
+ version("0.4.1", sha256="80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce")
version("0.4.0", sha256="d629b00f1d4e79c316909f4eb763bbcb29b510d65fbde1365a1ceb93ab7fa4c8")
version("0.3.1", sha256="42ca7b6ce88b6c7433e2ce47ea884e91ec93104a4b754998be498a8e6c3d37dd")
depends_on("python@3.6:3", type=("build", "run"))
depends_on("python@3.7:3", when="@0.4.0:", type=("build", "run"))
depends_on("py-poetry-core@1:", type="build")
+ depends_on("py-poetry-core@1.1.0:", when="@0.4.1:", type="build")
diff --git a/var/spack/repos/builtin/packages/py-currentscape/package.py b/var/spack/repos/builtin/packages/py-currentscape/package.py
new file mode 100644
index 00000000000000..eb6d75be89c8ab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-currentscape/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyCurrentscape(PythonPackage):
+ """Module to easily plot the currents in electrical neuron models."""
+
+ homepage = "https://github.com/BlueBrain/Currentscape"
+ git = "https://github.com/BlueBrain/Currentscape.git"
+ pypi = "currentscape/currentscape-1.0.12.tar.gz"
+
+ version("1.0.12", sha256="d83c5a58074e4d612553472a487e5d1d2854dc4d5c161817c6bafdf4a5988011")
+
+ depends_on("py-setuptools", type=("build", "run"))
+ depends_on("py-setuptools-scm", type=("build",))
+ depends_on("python@3.8:", type=("build", "run"))
+ depends_on("py-numpy", type=("build", "run"))
+ depends_on("py-matplotlib", type=("build", "run"))
+ depends_on("py-palettable", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py
index ba371b1b1649e6..d0426c40392d97 100644
--- a/var/spack/repos/builtin/packages/py-cython/package.py
+++ b/var/spack/repos/builtin/packages/py-cython/package.py
@@ -13,6 +13,7 @@ class PyCython(PythonPackage):
pypi = "cython/Cython-0.29.21.tar.gz"
tags = ["build-tools"]
+ version("3.0.4", sha256="2e379b491ee985d31e5faaf050f79f4a8f59f482835906efe4477b33b4fbe9ff")
version("3.0.0", sha256="350b18f9673e63101dbbfcf774ee2f57c20ac4636d255741d76ca79016b1bd82")
version(
"3.0.0a9",
@@ -45,6 +46,9 @@ class PyCython(PythonPackage):
version("0.23.5", sha256="0ae5a5451a190e03ee36922c4189ca2c88d1df40a89b4f224bc842d388a0d1b6")
version("0.23.4", sha256="fec42fecee35d6cc02887f1eef4e4952c97402ed2800bfe41bbd9ed1a0730d8e")
+ # https://github.com/cython/cython/issues/5751 (distutils not yet dropped)
+ depends_on("python@:3.11", type=("build", "link", "run"))
+
# https://github.com/cython/cython/commit/1cd24026e9cf6d63d539b359f8ba5155fd48ae21
# collections.Iterable was removed in Python 3.10
depends_on("python@:3.9", when="@:0.29.14", type=("build", "link", "run"))
diff --git a/var/spack/repos/builtin/packages/py-dictdiffer/package.py b/var/spack/repos/builtin/packages/py-dictdiffer/package.py
index 06c719cc6f2e71..acacfb00115bfa 100644
--- a/var/spack/repos/builtin/packages/py-dictdiffer/package.py
+++ b/var/spack/repos/builtin/packages/py-dictdiffer/package.py
@@ -12,9 +12,11 @@ class PyDictdiffer(PythonPackage):
homepage = "https://github.com/inveniosoftware/dictdiffer"
pypi = "dictdiffer/dictdiffer-0.8.1.tar.gz"
+ version("0.9.0", sha256="17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578")
version("0.8.1", sha256="1adec0d67cdf6166bda96ae2934ddb5e54433998ceab63c984574d187cc563d2")
depends_on("python@2.7:2.8,3.4:", type=("build", "run"))
- depends_on("py-setuptools", type=("build", "run"))
+ depends_on("py-setuptools", type="build", when="@0.9:")
+ depends_on("py-setuptools", type=("build", "run"), when="@:0.8")
depends_on("py-setuptools-scm@3.1.0:", type="build")
depends_on("py-pytest-runner@2.7:", type="build")
diff --git a/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py b/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py
index ee18886ed1b15f..aa99a09d8c1267 100644
--- a/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py
+++ b/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py
@@ -15,10 +15,17 @@ class PyDlioProfilerPy(PythonPackage):
version("develop", branch="dev")
version("master", branch="master")
+ version("0.0.2", tag="v0.0.2", commit="b72144abf1499e03d1db87ef51e780633e9e9533")
version("0.0.1", tag="v0.0.1", commit="28affe716211315dd6936ddc8e25ce6c43cdf491")
- depends_on("cpp-logger@0.0.1")
- depends_on("brahma@0.0.1")
- depends_on("gotcha@develop")
+
+ depends_on("cpp-logger@0.0.1", when="@:0.0.1")
+ depends_on("cpp-logger@0.0.2", when="@0.0.2:")
+ depends_on("brahma@0.0.1", when="@:0.0.1")
+ depends_on("brahma@0.0.2", when="@0.0.2:")
+ depends_on("gotcha@1.0.4", when="@:0.0.1")
+ depends_on("gotcha@1.0.5", when="@0.0.2:")
+ depends_on("gotcha@1.0.5", when="@0.0.2:")
+ depends_on("yaml-cpp@0.6.3", when="@0.0.2:")
depends_on("py-setuptools@42:", type="build")
depends_on("py-pybind11", type=("build", "run"))
depends_on("py-ninja", type="build")
diff --git a/var/spack/repos/builtin/packages/py-efel/package.py b/var/spack/repos/builtin/packages/py-efel/package.py
new file mode 100644
index 00000000000000..a33749b9af75eb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-efel/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.package import *
+
+
+class PyEfel(PythonPackage):
+ """The Electrophys Feature Extract Library (eFEL) allows
+ neuroscientists to automatically extract features from time series data
+ recorded from neurons (both in vitro and in silico).
+ Examples are the action potential width and amplitude in
+ voltage traces recorded during whole-cell patch clamp experiments.
+ The user of the library provides a set of traces and selects the
+ features to be calculated. The library will then extract the requested
+ features and return the values to the user."""
+
+ homepage = "https://github.com/BlueBrain/eFEL"
+ pypi = "efel/efel-3.0.80.tar.gz"
+
+ version("5.2.0", sha256="ed2c5efe22a4c703a4d9e47775b939009e1456713ac896898ebabf177c60b1dc")
+
+ depends_on("py-setuptools", type="build")
+ depends_on("py-numpy@1.6:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-elephant/package.py b/var/spack/repos/builtin/packages/py-elephant/package.py
index 41f94db3a5eeac..63c2fea875ed6a 100644
--- a/var/spack/repos/builtin/packages/py-elephant/package.py
+++ b/var/spack/repos/builtin/packages/py-elephant/package.py
@@ -16,6 +16,10 @@ class PyElephant(PythonPackage):
# list of GitHub accounts to notify when the package is updated.
maintainers("Moritz-Alexander-Kern")
+ version("master", branch="master")
+ version("0.14.0", sha256="02ce3b2a8d08dc19828f95384551339ea0946bc405c1db9aace54135417c2b0f")
+ version("0.13.0", sha256="2c6463cf9ace41631f2af196c5b80b468bf1c4b264d3a6b1ea0fb587d9e7dd67")
+ version("0.12.0", sha256="81f8d668f92d8688344bb7a9c5abaa8438d824560c935a411e6e36ddf7dc7c72")
version("0.11.2", sha256="f8759fff0bbb136ae4ffc8d1eacadeea8ba56610d705c3bf207de87ada3ba240")
version("0.11.1", sha256="d604a202583440fdf9d95d42cef50a410bd74fcaaa1a925b139435f27ab012ef")
version("0.11.0", sha256="7b547964dbd196361edc922db2c5a7c0c886ef1effcca6c6dc7adb06f966a3be")
@@ -28,40 +32,44 @@ class PyElephant(PythonPackage):
version("0.3.0", sha256="747251ccfb5820bdead6391411b5faf205b4ddf3ababaefe865f50b16540cfef")
variant("docs", default=False, description="Install documentation dependencies")
- variant("pandas", default=False, description="Build with pandas", when="@0.3.0:0.4.1")
variant(
"extras", default=False, description="Build with extras for GPFA, ASSET", when="@0.6.4:"
)
depends_on("py-setuptools", type="build")
- depends_on("python@3.7:", type=("build", "run"), when="@0.11.0:")
+
+ depends_on("python@3.8:", type=("build", "run"), when="@0.12.0:")
+
+ depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:")
+ depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0")
+ depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0")
depends_on("py-neo@0.3.4:", type=("build", "run"), when="@0.3.0:0.4.1")
+ depends_on("py-numpy@1.19.5:", type=("build", "run"), when="@0.12.0:")
+ depends_on("py-numpy@1.18.1:1.23.5", type=("build", "run"), when="@0.6.4:0.11.2")
depends_on("py-numpy@1.8.2:", type=("build", "run"), when="@0.3.0:0.4.1")
- depends_on("py-quantities@0.10.1:", type=("build", "run"), when="@0.3.0:0.4.1")
+ depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.14.0:")
+ depends_on("py-quantities@0.12.1:0.13.0,0.14.1:", type=("build", "run"), when="@0.6.4:0.13.0")
+ depends_on("py-quantities@0.10.1:0.13.0,0.14.1:", type=("build", "run"), when="@0.3.0:0.4.1")
+ depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:")
depends_on("py-scipy@0.14.0:", type=("build", "run"), when="@0.3.0:0.4.1")
- depends_on("py-pandas@0.14.1:", type=("build", "run"), when="+pandas")
- depends_on("py-numpydoc@0.5:", type=("build", "run"), when="@0.3.0:0.8.0+docs")
+ depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:")
+ depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:")
+
+ depends_on("py-pandas@0.18.0:", type=("build", "run"), when="+extras")
+ depends_on("py-scikit-learn@0.23.2:", type=("build", "run"), when="+extras")
+ depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras")
+ depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras")
+
depends_on("py-numpydoc@1.1.0:", type=("build", "run"), when="@0.9.0:+docs")
- depends_on("py-sphinx@1.2.2:", type=("build", "run"), when="@0.3.0:0.6.0+docs")
- depends_on("py-sphinx@2.4.3:", type=("build", "run"), when="@0.7.0:0.8.0+docs")
- depends_on("py-sphinx@3.3.0:", type=("build", "run"), when="@0.9.0:+docs")
+ depends_on("py-numpydoc@0.5:", type=("build", "run"), when="@0.3.0:0.8.0+docs")
depends_on("py-jupyter@1.0.0:", type=("build", "run"), when="@0.7.0:+docs")
- depends_on("py-nbsphinx@0.5.0:", type=("build", "run"), when="@0.7.0:0.8.0+docs")
+ depends_on("py-sphinx@3.3.0:", type=("build", "run"), when="@0.9.0:+docs")
+ depends_on("py-sphinx@2.4.3:", type=("build", "run"), when="@0.7.0:0.8.0+docs")
+ depends_on("py-sphinx@1.2.2:", type=("build", "run"), when="@0.3.0:0.6.0+docs")
depends_on("py-nbsphinx@0.8.0:", type=("build", "run"), when="@0.9.0:+docs")
- depends_on("py-sphinxcontrib-bibtex@1.0.0", type=("build", "run"), when="@0.7.0:+docs")
- depends_on("py-sphinx-tabs@1.1.13:", type=("build", "run"), when="@0.7.0:0.8.0+docs")
+ depends_on("py-nbsphinx@0.5.0:", type=("build", "run"), when="@0.7.0:0.8.0+docs")
+ depends_on("py-sphinxcontrib-bibtex@1.0.1:", type=("build", "run"), when="@0.7.0:+docs")
depends_on("py-sphinx-tabs@1.3.0:", type=("build", "run"), when="@0.9.0:+docs")
- depends_on("py-matplotlib@3.1.0:", type=("build", "run"), when="@0.8.0+docs")
+ depends_on("py-sphinx-tabs@1.1.13:", type=("build", "run"), when="@0.7.0:0.8.0+docs")
depends_on("py-matplotlib@3.3.2:", type=("build", "run"), when="@0.9.0:+docs")
- depends_on("py-pandas@0.18.0:", type=("build", "run"), when="+extras")
- depends_on("py-scikit-learn@0.23.2:", type=("build", "run"), when="+extras")
- depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras")
- depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras")
- depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:")
- depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0")
- depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0")
- depends_on("py-numpy@1.18.1:", type=("build", "run"), when="@0.6.4:")
- depends_on("py-quantities@0.12.1:", type=("build", "run"), when="@0.6.4:")
- depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:")
- depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:")
- depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:")
+ depends_on("py-matplotlib@3.1.0:", type=("build", "run"), when="@0.8.0+docs")
diff --git a/var/spack/repos/builtin/packages/py-fenics-basix/package.py b/var/spack/repos/builtin/packages/py-fenics-basix/package.py
index 6d3ffa6cae3754..d3ef7e9db6d305 100644
--- a/var/spack/repos/builtin/packages/py-fenics-basix/package.py
+++ b/var/spack/repos/builtin/packages/py-fenics-basix/package.py
@@ -27,14 +27,16 @@ class PyFenicsBasix(PythonPackage):
depends_on("fenics-basix@0.4.2", type=("build", "run"), when="@0.4.2")
# See python/CMakeLists.txt
- depends_on("cmake@3.16:", type="build")
+ depends_on("cmake@3.16:", when="@:0.7", type="build")
+ depends_on("cmake@3.19:", when="@0.8:", type="build")
# See python/pyproject.toml
- depends_on("python@3.8:", when="@0.7.0:", type=("build", "run"))
- depends_on("py-setuptools@42:", type="build")
+ depends_on("python@3.8:", when="@0.7:", type=("build", "run"))
depends_on("py-numpy@1.21:", type=("build", "run"))
depends_on("py-pybind11@2.9.1:", when="@:0.7", type="build")
- depends_on("py-nanobind@1.5.1:", when="@0.8:", type="build")
+ depends_on("py-setuptools@42:", when="@:0.7", type="build")
+ depends_on("py-nanobind@1.6.0:", when="@0.8:", type="build")
+ depends_on("py-scikit-build-core+pyproject@0.5.0:", when="@0.8:", type="build")
depends_on("xtensor@0.23.10:", type="build", when="@:0.4")
diff --git a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py
index 2cd0584a662527..f2cec1e21e9b0d 100644
--- a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py
+++ b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py
@@ -24,7 +24,8 @@ class PyFenicsFfcx(PythonPackage):
depends_on("python@3.8:", when="@0.7:", type=("build", "run"))
depends_on("py-setuptools@62:", when="@0.7:", type="build")
- depends_on("py-setuptools@58:", when="@0.4.2:0.6", type="build")
+ # Runtime dependency on pkg_resources from setuptools at 0.6.0
+ depends_on("py-setuptools@58:", when="@0.4.2:0.6", type=("build", "run"))
# CFFI is required at runtime for JIT support
depends_on("py-cffi", type=("build", "run"))
@@ -35,6 +36,7 @@ class PyFenicsFfcx(PythonPackage):
depends_on("py-fenics-ufl@main", type=("build", "run"), when="@main")
depends_on("py-fenics-ufl@2023.3.0:", type=("build", "run"), when="@0.8")
depends_on("py-fenics-ufl@2023.2.0", type=("build", "run"), when="@0.7")
+ depends_on("py-fenics-ufl@2023.1", type=("build", "run"), when="@0.6")
depends_on("py-fenics-ufl@2022.2.0", type=("build", "run"), when="@0.5.0:0.5")
depends_on("py-fenics-ufl@2022.1.0", type=("build", "run"), when="@0.4.2")
diff --git a/var/spack/repos/builtin/packages/py-fsspec/package.py b/var/spack/repos/builtin/packages/py-fsspec/package.py
index e9a2d3f41192f9..6413d8413242e0 100644
--- a/var/spack/repos/builtin/packages/py-fsspec/package.py
+++ b/var/spack/repos/builtin/packages/py-fsspec/package.py
@@ -12,6 +12,7 @@ class PyFsspec(PythonPackage):
homepage = "https://github.com/intake/filesystem_spec"
pypi = "fsspec/fsspec-0.4.4.tar.gz"
+ version("2023.10.0", sha256="330c66757591df346ad3091a53bd907e15348c2ba17d63fd54f5c39c4457d2a5")
version("2023.1.0", sha256="fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411")
version("2022.11.0", sha256="259d5fd5c8e756ff2ea72f42e7613c32667dc2049a4ac3d84364a7ca034acb8b")
version("2021.7.0", sha256="792ebd3b54de0b30f1ce73f0ba0a8bcc864724f2d9f248cb8d0ece47db0cbde8")
diff --git a/var/spack/repos/builtin/packages/py-generateds/package.py b/var/spack/repos/builtin/packages/py-generateds/package.py
new file mode 100644
index 00000000000000..bcf82787cf4dd3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-generateds/package.py
@@ -0,0 +1,27 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyGenerateds(PythonPackage):
+ """Generate Python data structures and XML parser from Xschema."""
+
+ homepage = "http://www.davekuhlman.org/generateDS.html"
+ pypi = "generateDS/generateDS-2.41.4.tar.gz"
+
+ maintainers("LydDeb")
+
+ version("2.43.2", sha256="e86f033f4d93414dd5b04cab9544a68b8f46d559073d85cd0990266b7b9ec09e")
+ version("2.43.1", sha256="2d3d71b42a09ba153bc51d2204324d04e384d0f15e41bdba881ee2daff9bbd68")
+ version("2.42.2", sha256="1d322aa7e074c262062b068660dd0c53bbdb0bb2b30152bb9e0074bd29fd365a")
+ version("2.42.1", sha256="87e4654449d34150802ca0cfb2330761382510d1385880f4d607cd34466abc2d")
+ version("2.41.5", sha256="8800c09454bb22f8f80f2ee138072d4e58bd5b6c14dbdf0a2a7ca13f06ba72e4")
+ version("2.41.4", sha256="804592eef573fa514741528a0bf9998f0c57ee29960c87f54608011f1fc722ea")
+
+ depends_on("py-setuptools", type="build")
+ depends_on("py-six", type=("build", "run"))
+ depends_on("py-lxml", type=("build", "run"))
+ depends_on("py-requests@2.21:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-geomdl/package.py b/var/spack/repos/builtin/packages/py-geomdl/package.py
new file mode 100644
index 00000000000000..6a2e9f27603796
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-geomdl/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyGeomdl(PythonPackage):
+ """Object-oriented pure Python B-Spline and NURBS library."""
+
+ homepage = "https://pypi.org/project/geomdl"
+ pypi = "geomdl/geomdl-5.3.1.tar.gz"
+
+ version("5.3.1", sha256="e81a31b4d5f111267b16045ba1d9539235a98b2cff5e4bad18f7ddcd4cb804c8")
+
+ depends_on("py-setuptools@40.6.3:", type="build")
+
+ # For compiling geomdl.core module
+ depends_on("py-cython@:2", type="build")
+
+ variant("viz", default=False, description="Add viz dependencies")
+
+ depends_on("py-numpy@1.15.4:", type="run", when="+viz")
+ depends_on("py-matplotlib@2.2.3:", type="run", when="+viz")
+ depends_on("py-plotly", type="run", when="+viz")
diff --git a/var/spack/repos/builtin/packages/py-gitpython/package.py b/var/spack/repos/builtin/packages/py-gitpython/package.py
index 69a0f5ec393acc..5cc534437a4937 100644
--- a/var/spack/repos/builtin/packages/py-gitpython/package.py
+++ b/var/spack/repos/builtin/packages/py-gitpython/package.py
@@ -12,6 +12,8 @@ class PyGitpython(PythonPackage):
homepage = "https://gitpython.readthedocs.org"
pypi = "GitPython/GitPython-3.1.12.tar.gz"
+ version("3.1.40", sha256="22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4")
+ version("3.1.34", sha256="85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd")
version("3.1.27", sha256="1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704")
version("3.1.24", sha256="df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5")
version("3.1.23", sha256="aaae7a3bfdf0a6db30dc1f3aeae47b71cd326d86b936fe2e158aa925fdf1471c")
diff --git a/var/spack/repos/builtin/packages/py-gpaw/package.py b/var/spack/repos/builtin/packages/py-gpaw/package.py
index 0f5072e927c534..f6759fb279ea53 100644
--- a/var/spack/repos/builtin/packages/py-gpaw/package.py
+++ b/var/spack/repos/builtin/packages/py-gpaw/package.py
@@ -35,7 +35,7 @@ class PyGpaw(PythonPackage):
depends_on("py-ase@3.19.0:", type=("build", "run"), when="@20.1.0")
depends_on("py-ase@3.20.1:", type=("build", "run"), when="@20.10.0")
depends_on("py-ase@3.21.0:", type=("build", "run"), when="@21.1.0")
- depends_on("py-numpy +blas +lapack", type=("build", "run"))
+ depends_on("py-numpy", type=("build", "run"))
depends_on("py-scipy", type=("build", "run"))
depends_on("libxc@3:4.3.4")
depends_on("blas")
diff --git a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py
index e2cc8d79db6289..e1fd541b47b356 100644
--- a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py
+++ b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py
@@ -41,8 +41,10 @@ def setup_build_environment(self, env):
for dep in self.spec.dependencies(deptype="link"):
query = self.spec[dep.name]
- env.prepend_path("LIBRARY_PATH", query.libs.directories[0])
- env.prepend_path("CPATH", query.headers.directories[0])
+ for p in query.libs.directories:
+ env.prepend_path("LIBRARY_PATH", p)
+ for p in query.headers.directories:
+ env.prepend_path("CPATH", p)
def patch(self):
if self.spec.satisfies("%fj"):
diff --git a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py
index e85a283dded496..fe325898a6d4de 100644
--- a/var/spack/repos/builtin/packages/py-grpcio/package.py
+++ b/var/spack/repos/builtin/packages/py-grpcio/package.py
@@ -57,9 +57,10 @@ def setup_build_environment(self, env):
for dep in self.spec.dependencies(deptype="link"):
query = self.spec[dep.name]
- if query.libs.directories:
- env.prepend_path("LIBRARY_PATH", query.libs.directories[0])
- env.prepend_path("CPATH", query.headers.directories[0])
+ for p in query.libs.directories:
+ env.prepend_path("LIBRARY_PATH", p)
+ for p in query.headers.directories:
+ env.prepend_path("CPATH", p)
def patch(self):
filter_file("-std=gnu99", "", "setup.py")
diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py
index 262ce6445ac74a..60de24072a8291 100644
--- a/var/spack/repos/builtin/packages/py-h5py/package.py
+++ b/var/spack/repos/builtin/packages/py-h5py/package.py
@@ -38,39 +38,39 @@ class PyH5py(PythonPackage):
variant("mpi", default=True, description="Build with MPI support")
# Python versions
- depends_on("python@:3.9", type=("build", "run"), when="@:2.8")
- depends_on("python@3.6:", type=("build", "run"), when="@3:3.1")
depends_on("python@3.7:", type=("build", "run"), when="@3.2:")
+ depends_on("python@3.6:", type=("build", "run"), when="@3:3.1")
+ depends_on("python@:3.9", type=("build", "run"), when="@:2.8")
# Build dependencies
- depends_on("py-cython@0.23:0", type="build", when="@:2")
- depends_on("py-cython@0.29:0", type=("build"), when="@3:")
- depends_on("py-cython@0.29.14:0", type=("build"), when="@3:3.7 ^python@3.8.0:3.8")
depends_on("py-cython@0.29.15:0", type=("build"), when="@3:3.7 ^python@3.9.0:")
+ depends_on("py-cython@0.29.14:0", type=("build"), when="@3:3.7 ^python@3.8.0:3.8")
+ depends_on("py-cython@0.29:0", type=("build"), when="@3:")
+ depends_on("py-cython@0.23:0", type="build", when="@:2")
depends_on("py-pkgconfig", type="build")
- depends_on("py-setuptools", type="build")
depends_on("py-setuptools@61:", type="build", when="@3.8.0:")
+ depends_on("py-setuptools", type="build")
depends_on("py-wheel", type="build", when="@3:")
# Build and runtime dependencies
depends_on("py-cached-property@1.5:", type=("build", "run"), when="@:3.6 ^python@:3.7")
- depends_on("py-numpy@1.7:", type=("build", "run"), when="@:2")
- depends_on("py-numpy@1.14.5:", type=("build", "run"), when="@3:")
- depends_on("py-numpy@1.17.5:", type=("build", "run"), when="@3:3.5 ^python@3.8.0:3.8")
depends_on("py-numpy@1.19.3:", type=("build", "run"), when="@3:3.5 ^python@3.9.0:")
+ depends_on("py-numpy@1.17.5:", type=("build", "run"), when="@3:3.5 ^python@3.8.0:3.8")
+ depends_on("py-numpy@1.14.5:", type=("build", "run"), when="@3:")
+ depends_on("py-numpy@1.7:", type=("build", "run"), when="@:2")
depends_on("py-six", type=("build", "run"), when="@:2")
# Link dependencies (py-h5py v2 cannot build against HDF5 1.12 regardless
# of API setting)
- depends_on("hdf5@1.8.4:1.11 +hl", when="@:2")
- depends_on("hdf5@1.8.4:1.12 +hl", when="@3:3.7")
depends_on("hdf5@1.8.4:1.14 +hl", when="@3.8:")
+ depends_on("hdf5@1.8.4:1.12 +hl", when="@3:3.7")
+ depends_on("hdf5@1.8.4:1.11 +hl", when="@:2")
# MPI dependencies
depends_on("hdf5+mpi", when="+mpi")
depends_on("mpi", when="+mpi")
- depends_on("py-mpi4py", when="@:2 +mpi", type=("build", "run"))
depends_on("py-mpi4py@3.0.2:", when="@3: +mpi", type=("build", "run"))
+ depends_on("py-mpi4py", when="@:2 +mpi", type=("build", "run"))
def flag_handler(self, name, flags):
if name == "cflags":
diff --git a/var/spack/repos/builtin/packages/py-heat/package.py b/var/spack/repos/builtin/packages/py-heat/package.py
new file mode 100644
index 00000000000000..5d122fe5167ba2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-heat/package.py
@@ -0,0 +1,47 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyHeat(PythonPackage):
+    """Heat is flexible and seamless open-source software for high performance data analytics
+ and machine learning. It provides highly optimized algorithms and data structures for tensor
+ computations using CPUs, GPUs and distributed cluster systems on top of MPI."""
+
+ homepage = "https://github.com/helmholtz-analytics/heat/"
+ pypi = "heat/heat-1.3.0.tar.gz"
+
+ maintainers("mrfh92", "ClaudiaComito", "JuanPedroGHM")
+
+ version("1.3.0", sha256="fa247539a559881ffe574a70227d3c72551e7c4a9fb29b0945578d6a840d1c87")
+
+ variant("docutils", default=False, description="Use the py-docutils package")
+ variant("hdf5", default=False, description="Use the py-h5py package needed for HDF5 support")
+ variant(
+ "netcdf", default=False, description="Use the py-netcdf4 package needed for NetCDF support"
+ )
+ variant("dev", default=False, description="Use the py-pre-commit package")
+ variant(
+ "examples",
+ default=False,
+ description="Use py-scikit-learn and py-matplotlib for the example tests",
+ )
+
+ depends_on("python@3.8:", type=("build", "run"))
+ depends_on("py-numpy@1.20:", type=("build", "run"))
+ depends_on("py-torch@1.8:2.0.1", type=("build", "run"))
+ depends_on("py-scipy@0.14:", type=("build", "run"))
+ depends_on("pil@6:", type=("build", "run"))
+ depends_on("py-torchvision@0.8:", type=("build", "run"))
+ depends_on("py-mpi4py@3:", type=("build", "run"))
+ depends_on("py-setuptools", type="build")
+
+ depends_on("py-docutils@0.16:", when="+docutils", type=("build", "link", "run"))
+ depends_on("py-h5py@2.8.0:", when="+hdf5", type=("build", "link", "run"))
+ depends_on("py-netcdf4@1.5.6:", when="+netcdf", type=("build", "link", "run"))
+ depends_on("py-pre-commit@1.18.3:", when="+dev", type=("build", "link", "run"))
+ depends_on("py-scikit-learn@0.24.0:", when="+examples", type=("build", "link", "run"))
+ depends_on("py-matplotlib@3.1.0:", when="+examples", type=("build", "link", "run"))
diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py
index 0e0bc5fd7f6068..5e221c0296824f 100644
--- a/var/spack/repos/builtin/packages/py-horovod/package.py
+++ b/var/spack/repos/builtin/packages/py-horovod/package.py
@@ -225,7 +225,7 @@ class PyHorovod(PythonPackage, CudaPackage):
conflicts(
"controllers=gloo", when="@:0.20.0 platform=darwin", msg="Gloo cannot be compiled on MacOS"
)
- # FIXME
+ # https://github.com/horovod/horovod/issues/3996
conflicts("^py-torch@2.1:")
# https://github.com/horovod/horovod/pull/1835
diff --git a/var/spack/repos/builtin/packages/py-huggingface-hub/package.py b/var/spack/repos/builtin/packages/py-huggingface-hub/package.py
index 6865a0d40e08ce..d63a2de77f99e7 100644
--- a/var/spack/repos/builtin/packages/py-huggingface-hub/package.py
+++ b/var/spack/repos/builtin/packages/py-huggingface-hub/package.py
@@ -14,6 +14,7 @@ class PyHuggingfaceHub(PythonPackage):
homepage = "https://github.com/huggingface/huggingface_hub"
pypi = "huggingface_hub/huggingface_hub-0.0.10.tar.gz"
+ version("0.19.4", sha256="176a4fc355a851c17550e7619488f383189727eab209534d7cef2114dae77b22")
version("0.14.1", sha256="9ab899af8e10922eac65e290d60ab956882ab0bf643e3d990b1394b6b47b7fbc")
version("0.10.1", sha256="5c188d5b16bec4b78449f8681f9975ff9d321c16046cc29bcf0d7e464ff29276")
version("0.0.10", sha256="556765e4c7edd2d2c4c733809bae1069dca20e10ff043870ec40d53e498efae2")
@@ -28,14 +29,14 @@ class PyHuggingfaceHub(PythonPackage):
depends_on("py-setuptools", type="build")
depends_on("py-filelock", type=("build", "run"))
+ depends_on("py-fsspec@2023.5:", when="@0.18:", type=("build", "run"))
depends_on("py-fsspec", when="@0.14:", type=("build", "run"))
depends_on("py-requests", type=("build", "run"))
- depends_on("py-tqdm@4.42.1:", type=("build", "run"))
+ depends_on("py-tqdm@4.42.1:", when="@0.12:", type=("build", "run"))
depends_on("py-tqdm", type=("build", "run"))
depends_on("py-pyyaml@5.1:", when="@0.10:", type=("build", "run"))
depends_on("py-typing-extensions@3.7.4.3:", when="@0.10:", type=("build", "run"))
depends_on("py-typing-extensions", when="@0.0.10:", type=("build", "run"))
- depends_on("py-importlib-metadata", when="^python@:3.7", type=("build", "run"))
depends_on("py-packaging@20.9:", when="@0.10:", type=("build", "run"))
depends_on("py-inquirerpy@0.3.4", when="@0.14:+cli", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-imagecodecs/package.py b/var/spack/repos/builtin/packages/py-imagecodecs/package.py
index 03195578b6087f..e156b28e77b055 100644
--- a/var/spack/repos/builtin/packages/py-imagecodecs/package.py
+++ b/var/spack/repos/builtin/packages/py-imagecodecs/package.py
@@ -66,11 +66,12 @@ def patch(self):
)
# 239
filter_file(
- "append\('/usr/include/jxrlib'\)", # noqa: W605
+ "append('/usr/include/jxrlib')",
"extend(('{0}/libjxr/image', '{0}/libjxr/common', '{0}/libjxr/glue'))".format( # noqa: E501
spec["jxrlib-debian"].prefix.include
),
"setup.py",
+ string=True,
)
# 367
diff --git a/var/spack/repos/builtin/packages/py-installer/package.py b/var/spack/repos/builtin/packages/py-installer/package.py
index 30a4a62072ab03..1bedecf074160a 100644
--- a/var/spack/repos/builtin/packages/py-installer/package.py
+++ b/var/spack/repos/builtin/packages/py-installer/package.py
@@ -40,6 +40,5 @@ def install(self, spec, prefix):
def setup_dependent_package(self, module, dependent_spec):
installer = dependent_spec["python"].command
- installer.add_default_arg("-m")
- installer.add_default_arg("installer")
+ installer.add_default_arg("-m", "installer")
setattr(module, "installer", installer)
diff --git a/var/spack/repos/builtin/packages/py-isort/package.py b/var/spack/repos/builtin/packages/py-isort/package.py
index aca4dd29047753..5a4ea271f6a834 100644
--- a/var/spack/repos/builtin/packages/py-isort/package.py
+++ b/var/spack/repos/builtin/packages/py-isort/package.py
@@ -13,16 +13,33 @@ class PyIsort(PythonPackage):
pypi = "isort/isort-4.2.15.tar.gz"
version("5.12.0", sha256="8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504")
+ version("5.11.5", sha256="6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db")
version("5.10.1", sha256="e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951")
version("5.9.3", sha256="9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899")
version("5.9.1", sha256="83510593e07e433b77bd5bff0f6f607dbafa06d1a89022616f02d8b699cfcd56")
- version("4.3.20", sha256="c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a")
- version("4.2.15", sha256="79f46172d3a4e2e53e7016e663cc7a8b538bec525c36675fcfd2767df30b3983")
+ version(
+ "4.3.20",
+ sha256="c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a",
+ deprecated=True,
+ )
+ version(
+ "4.2.15",
+ sha256="79f46172d3a4e2e53e7016e663cc7a8b538bec525c36675fcfd2767df30b3983",
+ deprecated=True,
+ )
variant("colors", default=False, description="Install colorama for --color support")
- depends_on("python@3.8:", when="@5.12:", type=("build", "run"))
- depends_on("python@3.6.1:3", when="@5:5.10", type=("build", "run"))
+ with default_args(type=("build", "run")):
+ depends_on("python@3.8:", when="@5.12:")
+ depends_on("python@3.7:", when="@5.11")
+ # This needs to be @3.6 since for bootstrapping the current Spack interpreter is
+ # identified by major.minor (and the new versioning identifies it as @=3.6)
+ depends_on("python@3.6:3", when="@5.10")
+ depends_on("python@3.6:3", when="@5.9")
+
+ conflicts("python@3.6.0", when="@5:")
+
depends_on("py-setuptools", when="@:4", type=("build", "run"))
depends_on("py-poetry-core@1:", when="@5:", type="build")
depends_on("py-colorama@0.4.3:", when="+colors @5.12:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-jarowinkler/package.py b/var/spack/repos/builtin/packages/py-jarowinkler/package.py
new file mode 100644
index 00000000000000..9256776613a20c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jarowinkler/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyJarowinkler(PythonPackage):
+    """Library for fast approximate string matching using Jaro and Jaro-Winkler similarity."""
+
+ homepage = "https://github.com/maxbachmann/JaroWinkler"
+ pypi = "jarowinkler/jarowinkler-1.2.3.tar.gz"
+
+ maintainers("LydDeb")
+
+ version("1.2.3", sha256="af28ea284cfbd1b21b29ff94b759f20e94e4f7c06f424b0b4702e701c2a21668")
+
+ depends_on("py-setuptools@42:", type="build")
+ depends_on("py-scikit-build@0.15.0", type="build")
+ depends_on("py-rapidfuzz-capi@1.0.5", type="build")
diff --git a/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py b/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py
index 0d2274f997fb33..261e43dbd65543 100644
--- a/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py
+++ b/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py
@@ -15,9 +15,11 @@ class PyJsonpathNg(PythonPackage):
homepage = "https://github.com/h2non/jsonpath-ng"
pypi = "jsonpath-ng/jsonpath-ng-1.5.2.tar.gz"
+ version("1.6.0", sha256="5483f8e9d74c39c9abfab554c070ae783c1c8cbadf5df60d561bc705ac68a07e")
+ version("1.5.3", sha256="a273b182a82c1256daab86a313b937059261b5c5f8c4fa3fc38b882b344dd567")
version("1.5.2", sha256="144d91379be14d9019f51973bd647719c877bfc07dc6f3f5068895765950c69d")
depends_on("py-setuptools", type="build")
depends_on("py-ply", type=("build", "run"))
- depends_on("py-decorator", type=("build", "run"))
- depends_on("py-six", type=("build", "run"))
+ depends_on("py-decorator", type=("build", "run"), when="@:1.5")
+ depends_on("py-six", type=("build", "run"), when="@:1.5")
diff --git a/var/spack/repos/builtin/packages/py-kiwisolver/package.py b/var/spack/repos/builtin/packages/py-kiwisolver/package.py
index 803646240a34cb..08ad89b0e4d407 100644
--- a/var/spack/repos/builtin/packages/py-kiwisolver/package.py
+++ b/var/spack/repos/builtin/packages/py-kiwisolver/package.py
@@ -12,6 +12,7 @@ class PyKiwisolver(PythonPackage):
homepage = "https://github.com/nucleic/kiwi"
pypi = "kiwisolver/kiwisolver-1.1.0.tar.gz"
+ version("1.4.5", sha256="e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec")
version("1.4.4", sha256="d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955")
version("1.3.2", sha256="fc4453705b81d03568d5b808ad8f09c77c47534f6ac2e72e733f9ca4714aa75c")
version("1.3.1", sha256="950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248")
diff --git a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py
index 9c732796cf30a6..257b0acd7f0953 100644
--- a/var/spack/repos/builtin/packages/py-kombu/package.py
+++ b/var/spack/repos/builtin/packages/py-kombu/package.py
@@ -24,12 +24,15 @@ class PyKombu(PythonPackage):
variant("redis", default=False, description="Use redis transport")
depends_on("py-setuptools", type="build")
+ # "pytz>dev" in tests_require: setuptools parser changed in v60 and errors.
+ depends_on("py-setuptools@:59", when="@4.6:5.2", type="build")
+
depends_on("py-amqp@2.5.2:2.5", when="@:4.6.6", type=("build", "run"))
depends_on("py-amqp@2.6.0:2.6", when="@4.6.7:4", type=("build", "run"))
depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run"))
depends_on("py-amqp@5.0.9:5.0", when="@5.2.3", type=("build", "run"))
depends_on("py-vine", when="@5.1.0:", type=("build", "run"))
- depends_on("py-importlib-metadata@0.18:", type=("build", "run"), when="python@:3.7")
- depends_on("py-cached-property", type=("build", "run"), when="python@:3.7")
+ depends_on("py-importlib-metadata@0.18:", type=("build", "run"), when="^python@:3.7")
+ depends_on("py-cached-property", type=("build", "run"), when="^python@:3.7")
depends_on("py-redis@3.4.1:3,4.0.2:", when="+redis", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-langsmith/package.py b/var/spack/repos/builtin/packages/py-langsmith/package.py
index a152c9903d01ce..e5c8363487b6e3 100644
--- a/var/spack/repos/builtin/packages/py-langsmith/package.py
+++ b/var/spack/repos/builtin/packages/py-langsmith/package.py
@@ -11,6 +11,7 @@ class PyLangsmith(PythonPackage):
pypi = "langsmith/langsmith-0.0.10.tar.gz"
+ version("0.0.11", sha256="7c1be28257d6c7279c85f81e6d8359d1006af3b1238fc198d13ca75c8fe421c8")
version("0.0.10", sha256="11e5db0d8e29ee5583cabd872eeece8ce50738737b1f52f316ac984f4a1a58c5")
version("0.0.7", sha256="2f18e51cfd4e42f2b3cf00fa87e9d03012eb7269cdafd8e7c0cf7aa828dcc03e")
diff --git a/var/spack/repos/builtin/packages/py-libensemble/package.py b/var/spack/repos/builtin/packages/py-libensemble/package.py
index d47a3d68fb932e..57dd42fe76b7c0 100644
--- a/var/spack/repos/builtin/packages/py-libensemble/package.py
+++ b/var/spack/repos/builtin/packages/py-libensemble/package.py
@@ -12,13 +12,14 @@ class PyLibensemble(PythonPackage):
"""Library for managing ensemble-like collections of computations."""
homepage = "https://libensemble.readthedocs.io"
- pypi = "libensemble/libensemble-1.0.0.tar.gz"
+ pypi = "libensemble/libensemble-1.1.0.tar.gz"
git = "https://github.com/Libensemble/libensemble.git"
maintainers("shuds13", "jlnav")
tags = ["e4s"]
version("develop", branch="develop")
+ version("1.1.0", sha256="3e3ddc4233272d3651e9d62c7bf420018930a4b9b135ef9ede01d5356235c1c6")
version("1.0.0", sha256="b164e044f16f15b68fd565684ad8ce876c93aaeb84e5078f4ea2a29684b110ca")
version("0.10.2", sha256="ef8dfe5d233dcae2636a3d6aa38f3c2ad0f42c65bd38f664e99b3e63b9f86622")
version("0.10.1", sha256="56ae42ec9a28d3df8f46bdf7d016db9526200e9df2a28d849902e3c44fe5c1ba")
diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py
index f5131ec0715265..032aa9c817aca7 100644
--- a/var/spack/repos/builtin/packages/py-lightning/package.py
+++ b/var/spack/repos/builtin/packages/py-lightning/package.py
@@ -15,6 +15,8 @@ class PyLightning(PythonPackage):
maintainers("adamjstewart")
+ version("2.1.2", sha256="3b2599a8a719916cb03526e6570356809729680c6cda09391232e2aba0a4ed4b")
+ version("2.1.1", sha256="865491940d20a9754eac7494aa18cab893e0c2b31e83743349eeeaf31dfb52db")
version("2.1.0", sha256="1f78f5995ae7dcffa1edf34320db136902b73a0d1b304404c48ec8be165b3a93")
version("2.0.9", sha256="2395ece6e29e12064718ff16b8edec5685df7f7095d4fee78edb0a654f5cd7eb")
version("2.0.8", sha256="db914e211b5c3b079a821be6e4344e72d0a729163676a65c4e00aae98390ae7b")
@@ -94,3 +96,6 @@ class PyLightning(PythonPackage):
depends_on("py-websocket-client@:2", type=("build", "run"))
depends_on("py-websockets@:12", when="@2.0.5:", type=("build", "run"))
depends_on("py-websockets@:11", when="@:2.0.4", type=("build", "run"))
+
+ # https://github.com/Lightning-AI/lightning/issues/18858
+ conflicts("^py-torch~distributed", when="@2.1.0")
diff --git a/var/spack/repos/builtin/packages/py-llvmlite/package.py b/var/spack/repos/builtin/packages/py-llvmlite/package.py
index a6f7cd8a4db2bd..61c7d7304bf6a5 100644
--- a/var/spack/repos/builtin/packages/py-llvmlite/package.py
+++ b/var/spack/repos/builtin/packages/py-llvmlite/package.py
@@ -13,6 +13,9 @@ class PyLlvmlite(PythonPackage):
pypi = "llvmlite/llvmlite-0.23.0.tar.gz"
git = "https://github.com/numba/llvmlite.git"
+ version("0.41.1", sha256="f19f767a018e6ec89608e1f6b13348fa2fcde657151137cb64e56d48598a92db")
+ version("0.41.0", sha256="7d41db345d76d2dfa31871178ce0d8e9fd8aa015aa1b7d4dab84b5cb393901e0")
+ version("0.40.1", sha256="5cdb0d45df602099d833d50bd9e81353a5e036242d3c003c5b294fc61d1986b4")
version("0.40.0", sha256="c910b8fbfd67b8e9d0b10ebc012b23cd67cbecef1b96f00d391ddd298d71671c")
version("0.39.1", sha256="b43abd7c82e805261c425d50335be9a6c4f84264e34d6d6e475207300005d572")
version("0.39.0", sha256="01098be54f1aa25e391cebba8ea71cd1533f8cd1f50e34c7dd7540c2560a93af")
diff --git a/var/spack/repos/builtin/packages/py-macs3/package.py b/var/spack/repos/builtin/packages/py-macs3/package.py
index be94b9c290cd7c..1a218e44a6ea98 100644
--- a/var/spack/repos/builtin/packages/py-macs3/package.py
+++ b/var/spack/repos/builtin/packages/py-macs3/package.py
@@ -24,3 +24,5 @@ class PyMacs3(PythonPackage):
depends_on("py-numpy@1.19:", type=("build", "run"))
depends_on("py-cykhash@2", type=("build", "run"))
depends_on("py-hmmlearn@0.3:", type=("build", "run"))
+
+ depends_on("zlib-api")
diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py
index 2560155ac8f684..3ca76ae4d3fe89 100644
--- a/var/spack/repos/builtin/packages/py-matplotlib/package.py
+++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py
@@ -24,7 +24,10 @@ class PyMatplotlib(PythonPackage):
"mpl_toolkits.mplot3d.tests",
]
+ version("3.8.2", sha256="01a978b871b881ee76017152f1f1a0cbf6bd5f7b8ff8c96df0df1bd57d8755a1")
+ version("3.8.1", sha256="044df81c1f6f3a8e52d70c4cfcb44e77ea9632a10929932870dfaa90de94365d")
version("3.8.0", sha256="df8505e1c19d5c2c26aff3497a7cbd3ccfc2e97043d1e4db3e76afa399164b69")
+ version("3.7.4", sha256="7cd4fef8187d1dd0d9dcfdbaa06ac326d396fb8c71c647129f0bf56835d77026")
version("3.7.3", sha256="f09b3dd6bdeb588de91f853bbb2d6f0ff8ab693485b0c49035eaa510cb4f142e")
version("3.7.2", sha256="a8cdb91dddb04436bd2f098b8fdf4b81352e68cf4d2c6756fcc414791076569b")
version("3.7.1", sha256="7b73305f25eab4541bd7ee0b96d87e53ae9c9f1823be5659b806cd85786fe882")
@@ -136,7 +139,8 @@ class PyMatplotlib(PythonPackage):
depends_on("py-contourpy@1.0.1:", when="@3.6:", type=("build", "run"))
depends_on("py-cycler@0.10:", type=("build", "run"))
depends_on("py-fonttools@4.22:", when="@3.5:", type=("build", "run"))
- depends_on("py-kiwisolver@1.0.1:", type=("build", "run"), when="@2.2.0:")
+ depends_on("py-kiwisolver@1.3.1:", when="@3.8.1:", type=("build", "run"))
+ depends_on("py-kiwisolver@1.0.1:", when="@2.2:", type=("build", "run"))
depends_on("py-numpy@1.21:1", when="@3.8:", type=("build", "link", "run"))
depends_on("py-numpy@1.20:", when="@3.7:", type=("build", "link", "run"))
depends_on("py-numpy@1.19:", when="@3.6:", type=("build", "link", "run"))
@@ -146,8 +150,9 @@ class PyMatplotlib(PythonPackage):
depends_on("py-numpy@1.11:", type=("build", "run"))
depends_on("py-packaging@20:", when="@3.6:", type=("build", "run"))
depends_on("py-packaging", when="@3.5:", type=("build", "run"))
+ depends_on("pil@8:", when="@3.8.1:", type=("build", "run"))
depends_on("pil@6.2:", when="@3.3:", type=("build", "run"))
- depends_on("py-pyparsing@2.3.1:3.0", when="@3.7.2:", type=("build", "run"))
+ depends_on("py-pyparsing@2.3.1:3.0", when="@3.7.2", type=("build", "run"))
depends_on("py-pyparsing@2.3.1:", when="@3.7:", type=("build", "run"))
depends_on("py-pyparsing@2.2.1:", when="@3.4:", type=("build", "run"))
depends_on("py-pyparsing@2.0.3,2.0.5:2.1.1,2.1.3:2.1.5,2.1.7:", type=("build", "run"))
@@ -221,7 +226,9 @@ class PyMatplotlib(PythonPackage):
# Dependencies for building matplotlib
# Setup dependencies
depends_on("py-certifi@2020.6.20:", when="@3.3.1:", type="build")
+ depends_on("py-numpy@1.25:", when="@3.8:", type="build")
depends_on("py-pybind11@2.6:", when="@3.7:", type="build")
+ depends_on("py-setuptools@64:", when="@3.8.1:", type="build")
depends_on("py-setuptools@42:", when="@3.8:", type="build")
depends_on("py-setuptools@42:", when="@3.7.2:3.7", type=("build", "run"))
depends_on("py-setuptools", when="@:3.7.1", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-moarchiving/package.py b/var/spack/repos/builtin/packages/py-moarchiving/package.py
new file mode 100644
index 00000000000000..a43a9c9efad260
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-moarchiving/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyMoarchiving(PythonPackage):
+ """
+ Biobjective Archive class with hypervolume indicator and uncrowded
+ hypervolume improvement computation.
+ """
+
+ homepage = "https://github.com/CMA-ES/moarchiving"
+ pypi = "moarchiving/moarchiving-0.6.0.tar.gz"
+
+ maintainers("LydDeb")
+
+ version("0.6.0", sha256="705ded992d399bc1ac703e68391bded6f64e1bde81b2bb25061eaa6208b5b29a")
+
+ variant("arbitrary_precision", default=False, description="Build with Fraction support")
+
+ depends_on("py-setuptools", type="build")
+ depends_on("py-fraction", when="+arbitrary_precision", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-morph-tool/package.py b/var/spack/repos/builtin/packages/py-morph-tool/package.py
new file mode 100644
index 00000000000000..7927b468c07edf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-morph-tool/package.py
@@ -0,0 +1,39 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyMorphTool(PythonPackage):
+ """Python morphology manipulation toolkit"""
+
+ homepage = "https://github.com/BlueBrain/morph-tool"
+ git = "https://github.com/BlueBrain/morph-tool.git"
+ pypi = "morph-tool/morph-tool-2.9.1.tar.gz"
+
+ version("master", branch="master")
+ version("2.9.1", sha256="305e9456c8047726588b23dfa070eb95ccbe5573e9fea3e0a83dc93eacdf61dc")
+ version("2.9.0", sha256="c60d4010e17ddcc3f53c864c374fffee05713c8f8fd2ba4eed7706041ce1fa47")
+
+ variant("nrn", default=False, description="Enable additional neuron support")
+ variant("plot", default=False, description="Enable additional plotly support")
+ variant("parallel", default=False, description="Enable additional parallel support")
+
+ depends_on("py-setuptools", type="build")
+ depends_on("py-setuptools-scm", type="build")
+
+ depends_on("py-click@6.7:", type=("build", "run"))
+ depends_on("py-deprecation@2.1.0:", type=("build", "run"))
+ depends_on("py-more-itertools@8.6.0:", type=("build", "run"))
+ depends_on("py-morphio@3", type=("build", "run"))
+ depends_on("py-neurom@3", type=("build", "run"))
+ depends_on("py-numpy@1.14:", type=("build", "run"))
+ depends_on("py-pandas@1.0.3:", type=("build", "run"))
+ depends_on("py-xmltodict@0.12.0:", type=("build", "run"))
+
+ depends_on("py-plotly@4.1.0:", type=("build", "run"), when="+plot")
+ depends_on("py-dask+bag@2.19.0:", type=("build", "run"), when="+parallel")
+ depends_on("neuron+python@7.8:", type=("build", "run"), when="+nrn")
+ depends_on("py-bluepyopt@1.9.37:", type=("build", "run"), when="+nrn")
diff --git a/var/spack/repos/builtin/packages/py-morphio/package.py b/var/spack/repos/builtin/packages/py-morphio/package.py
new file mode 100644
index 00000000000000..a5a9fee7deaf3c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-morphio/package.py
@@ -0,0 +1,30 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import sys
+
+from spack.package import *
+
+
+class PyMorphio(PythonPackage):
+ """Python library for reading / writing morphology files"""
+
+ homepage = "https://github.com/BlueBrain/MorphIO"
+ git = "https://github.com/BlueBrain/MorphIO.git"
+ pypi = "morphio/MorphIO-3.3.2.tar.gz"
+
+ version("master", branch="master", submodules=True)
+
+ version("3.3.6", sha256="0f2e55470d92a3d89f2141ae905ee104fd16257b93dafb90682d90171de2f4e6")
+
+ depends_on("py-setuptools@24.2:", type="build")
+ depends_on("py-setuptools-scm", type="build")
+
+ depends_on("ninja", type="build")
+ depends_on("cmake@3.2:", type="build")
+ depends_on("py-numpy@1.14.1:", type=("build", "run"))
+ depends_on("py-h5py@3", when="platform=windows", type=("build", "run"))
+ if sys.platform != "win32":
+ depends_on("hdf5")
diff --git a/var/spack/repos/builtin/packages/py-mypy/package.py b/var/spack/repos/builtin/packages/py-mypy/package.py
index 68896c792adb98..9f7e22c2bcc634 100644
--- a/var/spack/repos/builtin/packages/py-mypy/package.py
+++ b/var/spack/repos/builtin/packages/py-mypy/package.py
@@ -15,6 +15,13 @@ class PyMypy(PythonPackage):
maintainers("adamjstewart")
+ version("1.7.0", sha256="1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc")
+ version("1.6.1", sha256="4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1")
+ version("1.6.0", sha256="4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f")
+ version("1.5.1", sha256="b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92")
+ version("1.5.0", sha256="f3460f34b3839b9bc84ee3ed65076eb827cd99ed13ed08d723f9083cada4a212")
+ version("1.4.1", sha256="9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b")
+ version("1.4.0", sha256="de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042")
version("1.3.0", sha256="e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11")
version("1.2.0", sha256="f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1")
version("1.1.1", sha256="ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f")
@@ -43,31 +50,34 @@ class PyMypy(PythonPackage):
version("0.670", sha256="e80fd6af34614a0e898a57f14296d0dacb584648f0339c2e000ddbf0f4cc2f8d")
# pyproject.toml
- depends_on("py-setuptools@40.6.2:", when="@0.790:", type=("build", "run"))
- depends_on("py-setuptools", type=("build", "run"))
+ depends_on("py-setuptools@40.6.2:", when="@0.790:", type="build")
+ depends_on("py-setuptools", type="build")
depends_on("py-wheel@0.30:", when="@0.790:", type="build")
+ depends_on("py-types-psutil", when="@0.981:", type="build")
+ depends_on("py-types-setuptools", when="@0.981:", type="build")
+
+ # setup.py
+ depends_on("python@3.8:", when="@1.5:", type=("build", "run"))
+ depends_on("python@3.7:", when="@0.981:", type=("build", "run"))
+ depends_on("py-typing-extensions@4.1:", when="@1.5:", type=("build", "run"))
depends_on("py-typing-extensions@3.10:", when="@0.930:", type=("build", "run"))
depends_on("py-typing-extensions@3.7.4:", when="@0.700:", type=("build", "run"))
depends_on("py-mypy-extensions@1:", when="@1.1:", type=("build", "run"))
depends_on("py-mypy-extensions@0.4.3:", when="@0.930:1.0", type=("build", "run"))
depends_on("py-mypy-extensions@0.4.3:0.4", when="@0.700:0.929", type=("build", "run"))
depends_on("py-mypy-extensions@0.4.0:0.4", when="@:0.699", type=("build", "run"))
- depends_on("py-typed-ast@1.4.0:1", when="@0.920: ^python@:3.7", type=("build", "run"))
- depends_on("py-typed-ast@1.4.0:1.4", when="@0.900:0.910 ^python@:3.7", type=("build", "run"))
- depends_on("py-typed-ast@1.4.0:1.4", when="@0.700:0.899", type=("build", "run"))
- depends_on("py-typed-ast@1.3.1:1.3", when="@:0.699", type=("build", "run"))
depends_on("py-tomli@1.1:", when="@0.950: ^python@:3.10", type=("build", "run"))
depends_on("py-tomli@1.1:", when="@0.930:0.949", type=("build", "run"))
depends_on("py-tomli@1.1:2", when="@0.920:0.929", type=("build", "run"))
- depends_on("py-types-psutil", when="@0.981:", type="build")
- depends_on("py-types-setuptools", when="@0.981:", type="build")
- depends_on("py-types-typed-ast@1.5.8:1.5", when="@0.981:", type="build")
-
- # setup.py
- depends_on("python@3.7:", when="@0.981:", type=("build", "run"))
# Historical dependencies
+ depends_on("py-types-typed-ast@1.5.8.5:1.5", when="@1.2:1.4", type="build")
+ depends_on("py-types-typed-ast@1.5.8:1.5", when="@0.981:1.1", type="build")
depends_on("py-toml", when="@0.900:0.910", type=("build", "run"))
+ depends_on("py-typed-ast@1.4.0:1", when="@0.920:1.4 ^python@:3.7", type=("build", "run"))
+ depends_on("py-typed-ast@1.4.0:1.4", when="@0.900:0.910 ^python@:3.7", type=("build", "run"))
+ depends_on("py-typed-ast@1.4.0:1.4", when="@0.700:0.899", type=("build", "run"))
+ depends_on("py-typed-ast@1.3.1:1.3", when="@:0.699", type=("build", "run"))
# https://github.com/python/mypy/issues/13627
conflicts("^python@3.10.7:", when="@:0.971")
diff --git a/var/spack/repos/builtin/packages/py-nanobind/package.py b/var/spack/repos/builtin/packages/py-nanobind/package.py
index 19c3d915f98843..95a38f5b763c0a 100644
--- a/var/spack/repos/builtin/packages/py-nanobind/package.py
+++ b/var/spack/repos/builtin/packages/py-nanobind/package.py
@@ -23,6 +23,12 @@ class PyNanobind(PythonPackage):
maintainers("chrisrichardson", "garth-wells", "ma595")
version("master", branch="master", submodules=True)
+ version(
+ "1.8.0", tag="v1.8.0", commit="1a309ba444a47e081dc6213d72345a2fbbd20795", submodules=True
+ )
+ version(
+ "1.7.0", tag="v1.7.0", commit="555ec7595c89c60ce7cf53e803bc226dc4899abb", submodules=True
+ )
version(
"1.6.2", tag="v1.6.2", commit="cc5ac7e61def198db2a8b65c6d630343987a9f1d", submodules=True
)
diff --git a/var/spack/repos/builtin/packages/py-neo/package.py b/var/spack/repos/builtin/packages/py-neo/package.py
index 8803b8eebb0ff0..801207a47978d4 100644
--- a/var/spack/repos/builtin/packages/py-neo/package.py
+++ b/var/spack/repos/builtin/packages/py-neo/package.py
@@ -14,6 +14,9 @@ class PyNeo(PythonPackage):
homepage = "https://neuralensemble.org/neo"
pypi = "neo/neo-0.4.1.tar.gz"
+ version("0.12.0", sha256="3b6ca4fc05dfdb4e953e253e70994bfbbc8fe2e90958fbda7fa5860caf3fa63a")
+ version("0.11.1", sha256="f4a206044b332ad00b10072b0dc7a70b359fa365ec786f92ab757ef4ae588474")
+ version("0.11.0", sha256="cdf8e1324a3fbbd1efd5618dcd37cfc497b1997923bd710b598472c1d846674a")
version("0.10.2", sha256="2d4218b0826daeea880e155227060029ec38a00238ceb5f097138d9467c6399b")
version("0.10.0", sha256="e591a53e18cfa4478603a0e133f3fa0e07bc016b2a279d21d72cf8196eca8353")
version("0.9.0", sha256="6e31c88d7c52174fa2512df589b2b5003e9471fde27fca9f315f4770ba3bd3cb")
@@ -22,12 +25,20 @@ class PyNeo(PythonPackage):
version("0.4.1", sha256="a5a4f3aa31654d52789f679717c9fb622ad4f59b56d227dca490357b9de0a1ce")
version("0.3.3", sha256="6b80eb5bdc9eb4eca829f7464f861c5f1a3a6289559de037930d529bb3dddefb")
+ depends_on("python@3.8:", type=("build", "run"), when="@0.12.0:")
+
+ # py-setuptools@:61 doesn't support PEP 621
+ depends_on("py-setuptools@62:", type="build", when="@0.12:")
depends_on("py-setuptools", type="build")
- depends_on("py-numpy@1.3.0:", type=("build", "run"), when="@0.3.3:0.4.1")
- depends_on("py-numpy@1.7.1:", type=("build", "run"), when="@0.5.2:0.8.0")
+ depends_on("py-packaging", type=("build", "run"))
+
+ depends_on("py-numpy@1.19.5:", type=("build", "run"), when="@0.12.0:")
+ depends_on("py-numpy@1.18.5:", type=("build", "run"), when="@0.11.0:0.11.1")
+ depends_on("py-numpy@1.16.1:", type=("build", "run"), when="@0.10.0:0.10.2")
depends_on("py-numpy@1.13.0:", type=("build", "run"), when="@0.9.0")
- depends_on("py-numpy@1.16.1:", type=("build", "run"), when="@0.10.0:")
+ depends_on("py-numpy@1.7.1:", type=("build", "run"), when="@0.5.2:0.8.0")
- depends_on("py-quantities@0.9.0:", type=("build", "run"), when="@0.3.3:0.8.0")
- depends_on("py-quantities@0.12.1:", type=("build", "run"), when="@0.9.0:")
+ depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.12.0:")
+ depends_on("py-quantities@0.12.1:", type=("build", "run"), when="@0.9.0:0.11.1")
+ depends_on("py-quantities@0.9.0:", type=("build", "run"), when="@0.5.2:0.8.0")
diff --git a/var/spack/repos/builtin/packages/py-neurom/package.py b/var/spack/repos/builtin/packages/py-neurom/package.py
new file mode 100644
index 00000000000000..19bad5fc7b94c7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-neurom/package.py
@@ -0,0 +1,35 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyNeurom(PythonPackage):
+    """Python library for neuron morphology analysis"""
+
+ homepage = "https://github.com/BlueBrain/NeuroM"
+ git = "https://github.com/BlueBrain/NeuroM.git"
+ pypi = "neurom/neurom-2.2.1.tar.gz"
+
+ version("master", branch="master")
+ version("3.2.4", sha256="a584e0979b54deee906dd716ea90de20773e20b527d83960d0fe655b0905eb4a")
+
+ variant("plotly", default=False, description="Enable plotly support")
+
+ depends_on("py-setuptools@42:", type=("build", "run"))
+ depends_on("py-setuptools-scm", type="build")
+ depends_on("python@3.8:", type=("build", "run"))
+
+ depends_on("py-click@7.0:", type=("build", "run"))
+ depends_on("py-matplotlib@3.2.1:", type=("build", "run"))
+ depends_on("py-morphio@3.3.6:", type=("build", "run"))
+ depends_on("py-numpy@1.8.0:", type=("build", "run"))
+ depends_on("py-pandas@1.0.5:", type=("build", "run"))
+ depends_on("py-pyyaml@3.10:", type=("build", "run"))
+ depends_on("py-scipy@1.2.0:", type=("build", "run"))
+ depends_on("py-tqdm@4.8.4:", type=("build", "run"))
+
+ depends_on("py-plotly@3.6.0:", type=("build", "run"), when="+plotly")
+ depends_on("py-psutil@5.5.1:", type=("build", "run"), when="+plotly")
diff --git a/var/spack/repos/builtin/packages/py-numcodecs/package.py b/var/spack/repos/builtin/packages/py-numcodecs/package.py
index badf48b465bf8f..6d466c19175c48 100644
--- a/var/spack/repos/builtin/packages/py-numcodecs/package.py
+++ b/var/spack/repos/builtin/packages/py-numcodecs/package.py
@@ -49,10 +49,11 @@ def setup_build_environment(self, env):
# This package likes to compile natively by checking cpu features and then setting flags
# -msse2 and -mavx2, which we want to avoid in Spack. This could go away if the package
# supports external libraries.
- if "avx2" not in self.spec.target.features:
- env.set("DISABLE_NUMCODECS_AVX2", "1")
- if "sse2" not in self.spec.target.features:
- env.set("DISABLE_NUMCODECS_SSE2", "1")
+ if self.spec.satisfies("target=x86_64:"):
+ if "avx2" not in self.spec.target.features:
+ env.set("DISABLE_NUMCODECS_AVX2", "1")
+ if "sse2" not in self.spec.target.features:
+ env.set("DISABLE_NUMCODECS_SSE2", "1")
def flag_handler(self, name, flags):
if name == "cflags":
diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py
index e5ffea879c4cd7..05f5ceec494098 100644
--- a/var/spack/repos/builtin/packages/py-numpy/package.py
+++ b/var/spack/repos/builtin/packages/py-numpy/package.py
@@ -5,16 +5,13 @@
import platform
import subprocess
+from typing import Tuple
from spack.package import *
class PyNumpy(PythonPackage):
- """NumPy is the fundamental package for scientific computing with Python.
- It contains among other things: a powerful N-dimensional array object,
- sophisticated (broadcasting) functions, tools for integrating C/C++ and
- Fortran code, and useful linear algebra, Fourier transform, and random
- number capabilities"""
+ """Fundamental package for array computing in Python."""
homepage = "https://numpy.org/"
pypi = "numpy/numpy-1.23.0.tar.gz"
@@ -23,6 +20,9 @@ class PyNumpy(PythonPackage):
maintainers("adamjstewart", "rgommers")
version("main", branch="main")
+ version("1.26.2", sha256="f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea")
+ version("1.26.1", sha256="c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe")
+ version("1.26.0", sha256="f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf")
version("1.25.2", sha256="fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760")
version("1.25.1", sha256="9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf")
version("1.25.0", sha256="f1accae9a28dc3cda46a91de86acf69de0d1b5f4edd44a9b0c3ceb8036dfff19")
@@ -87,11 +87,8 @@ class PyNumpy(PythonPackage):
version("1.14.6", sha256="1250edf6f6c43e1d7823f0967416bc18258bb271dc536298eb0ea00a9e45b80a")
version("1.14.5", sha256="a4a433b3a264dbc9aa9c7c241e87c0358a503ea6394f8737df1683c7c9a102ac")
- variant("blas", default=True, description="Build with BLAS support")
- variant("lapack", default=True, description="Build with LAPACK support")
-
- # Based on wheel availability on PyPI
- depends_on("python@3.9:3.11", when="@1.25:", type=("build", "link", "run"))
+ depends_on("python@3.9:3.12", when="@1.26:", type=("build", "link", "run"))
+ depends_on("python@3.9:3.11", when="@1.25", type=("build", "link", "run"))
depends_on("python@3.8:3.11", when="@1.23.2:1.24", type=("build", "link", "run"))
depends_on("python@3.8:3.10", when="@1.22:1.23.1", type=("build", "link", "run"))
depends_on("python@:3.10", when="@1.21.2:1.21", type=("build", "link", "run"))
@@ -99,19 +96,30 @@ class PyNumpy(PythonPackage):
depends_on("python@:3.8", when="@1.17.3:1.19.2", type=("build", "link", "run"))
depends_on("python@:3.7", when="@1.14.5:1.17.2", type=("build", "link", "run"))
+ depends_on("py-cython@0.29.34:3", when="@1.26:", type="build")
+ depends_on("py-cython@0.29.34:2", when="@1.25", type="build")
+ depends_on("py-cython@0.29.30:2", when="@1.22.4:1.24", type="build")
+ depends_on("py-cython@0.29.24:2", when="@1.21.2:1.22.3", type="build")
+ depends_on("py-cython@0.29.21:2", when="@1.19.1:1.21.1", type="build")
+ depends_on("py-cython@0.29.14:2", when="@1.18.1:1.19.0", type="build")
+ depends_on("py-cython@0.29.13:2", when="@1.18.0", type="build")
+ depends_on("py-pyproject-metadata@0.7.1:", when="@1.26:", type="build")
+ depends_on("py-tomli@1:", when="@1.26: ^python@:3.10", type="build")
+ depends_on("py-setuptools@60:", when="@1.26: ^python@3.12:", type="build")
# https://github.com/spack/spack/pull/32078
- depends_on("py-setuptools@:63", type=("build", "run"))
+ depends_on("py-setuptools@:63", when="@:1.25", type=("build", "run"))
depends_on("py-setuptools@:59", when="@:1.22.1", type=("build", "run"))
- # Check pyproject.toml for updates to the required cython version
- depends_on("py-cython@0.29.34:2", when="@1.25:", type="build")
- depends_on("py-cython@0.29.13:2", when="@1.18.0:", type="build")
- depends_on("py-cython@0.29.14:2", when="@1.18.1:", type="build")
- depends_on("py-cython@0.29.21:2", when="@1.19.1:", type="build")
- depends_on("py-cython@0.29.24:2", when="@1.21.2:", type="build")
- depends_on("py-cython@0.29.30:2", when="@1.22.4:", type="build")
- depends_on("blas", when="+blas")
- depends_on("lapack", when="+lapack")
+ depends_on("py-colorama", when="@1.26: platform=windows", type="build")
+
+ # Required to use --config-settings
+ depends_on("py-pip@23.1:", when="@1.26:", type="build")
+ # meson is vendored, ninja and pkgconfig are not
+ depends_on("ninja@1.8.2:", when="@1.26:", type="build")
+ depends_on("pkgconfig", when="@1.26:", type="build")
+ depends_on("blas")
+ depends_on("lapack")
+ # test_requirements.txt
depends_on("py-nose@1.0.0:", when="@:1.14", type="test")
depends_on("py-pytest", when="@1.15:", type="test")
depends_on("py-hypothesis", when="@1.19:", type="test")
@@ -145,13 +153,21 @@ class PyNumpy(PythonPackage):
when="@1.22.0:1.22.3",
)
- # version 1.21.0 runs into an infinit loop during printing
+ # meson.build
+ # https://docs.scipy.org/doc/scipy/dev/toolchain.html#compilers
+ conflicts("%gcc@:8.3", when="@1.26:", msg="NumPy requires GCC >= 8.4")
+ conflicts("%gcc@:4.7", msg="NumPy requires GCC >= 4.8")
+ conflicts(
+ "%msvc@:19.19",
+ when="@1.26:",
+ msg="NumPy requires at least vc142 (default with Visual Studio 2019) "
+ "when building with MSVC",
+ )
+
+ # version 1.21.0 runs into an infinite loop during printing
# (e.g. print(numpy.ones(1000)) when compiled with gcc 11
conflicts("%gcc@11:", when="@1.21.0")
- # GCC 4.8 is the minimum version that works
- conflicts("%gcc@:4.7", msg="GCC 4.8+ required")
-
# NVHPC support added in https://github.com/numpy/numpy/pull/17344
conflicts("%nvhpc", when="@:1.19")
@@ -159,6 +175,10 @@ class PyNumpy(PythonPackage):
conflicts("%intel", when="@1.23.0:1.23.3")
conflicts("%oneapi", when="@1.23.0:1.23.3")
+ @property
+ def archive_files(self):
+ return [join_path(self.stage.source_path, "build", "meson-logs", "meson-log.txt")]
+
def url_for_version(self, version):
url = "https://files.pythonhosted.org/packages/source/n/numpy/numpy-{}.{}"
if version >= Version("1.23"):
@@ -193,16 +213,68 @@ def flag_handler(self, name, flags):
return (flags, None, None)
- @run_before("install")
- def set_blas_lapack(self):
- # https://numpy.org/devdocs/user/building.html
- # https://github.com/numpy/numpy/blob/master/site.cfg.example
+ def blas_lapack_pkg_config(self) -> Tuple[str, str]:
+ """Convert library names to pkg-config names.
- # Skip if no BLAS/LAPACK requested
+ Returns:
+ The names of the blas and lapack libs that pkg-config should search for.
+ """
spec = self.spec
- if "+blas" not in spec and "+lapack" not in spec:
- return
+ blas = spec["blas"].libs.names[0]
+ lapack = spec["lapack"].libs.names[0]
+
+ if spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
+ blas = "mkl-dynamic-lp64-seq"
+ if spec["lapack"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
+ lapack = "mkl-dynamic-lp64-seq"
+
+ if spec["blas"].name in ["blis", "amdblis"]:
+ blas = "blis"
+
+ if spec["blas"].name == "cray-libsci":
+ blas = "libsci"
+
+ if spec["lapack"].name == "cray-libsci":
+ lapack = "libsci"
+
+ if "armpl" in blas:
+ if "_mp" in blas:
+ blas = "armpl-dynamic-lp64-omp"
+ else:
+ blas = "armpl-dynamic-lp64-seq"
+
+ if "armpl" in lapack:
+ if "_mp" in lapack:
+ lapack = "armpl-dynamic-lp64-omp"
+ else:
+ lapack = "armpl-dynamic-lp64-seq"
+
+ return blas, lapack
+
+ @when("@1.26:")
+ def config_settings(self, spec, prefix):
+ blas, lapack = self.blas_lapack_pkg_config()
+ return {
+ "builddir": "build",
+ "compile-args": f"-j{make_jobs}",
+ "setup-args": {
+ # https://scipy.github.io/devdocs/building/blas_lapack.html
+ "-Dblas": blas,
+ "-Dlapack": lapack,
+ # https://numpy.org/doc/stable/reference/simd/build-options.html
+ # TODO: get this working in CI
+ # "-Dcpu-baseline": "native",
+ # "-Dcpu-dispatch": "none",
+ },
+ }
+
+ def blas_lapack_site_cfg(self) -> None:
+ """Write a site.cfg file to configure BLAS/LAPACK."""
+ spec = self.spec
+
+ # https://numpy.org/doc/1.25/user/building.html
+ # https://github.com/numpy/numpy/blob/v1.25.2/site.cfg.example
def write_library_dirs(f, dirs):
f.write("library_dirs = {0}\n".format(dirs))
if not (
@@ -211,17 +283,11 @@ def write_library_dirs(f, dirs):
):
f.write("rpath = {0}\n".format(dirs))
- blas_libs = LibraryList([])
- blas_headers = HeaderList([])
- if "+blas" in spec:
- blas_libs = spec["blas"].libs
- blas_headers = spec["blas"].headers
+ blas_libs = spec["blas"].libs
+ blas_headers = spec["blas"].headers
- lapack_libs = LibraryList([])
- lapack_headers = HeaderList([])
- if "+lapack" in spec:
- lapack_libs = spec["lapack"].libs
- lapack_headers = spec["lapack"].headers
+ lapack_libs = spec["lapack"].libs
+ lapack_headers = spec["lapack"].headers
lapackblas_libs = lapack_libs + blas_libs
lapackblas_headers = lapack_headers + blas_headers
@@ -334,15 +400,25 @@ def write_library_dirs(f, dirs):
write_library_dirs(f, lapack_lib_dirs)
f.write("include_dirs = {0}\n".format(lapack_header_dirs))
+ @when("@:1.25")
+ @run_before("install")
+ def set_blas_lapack(self):
+ self.blas_lapack_site_cfg()
+
+ @when("@1.26:")
+ def setup_build_environment(self, env):
+ # https://github.com/scipy/scipy/issues/19357
+ if self.spec.satisfies("%apple-clang@15:"):
+ env.append_flags("LDFLAGS", "-Wl,-ld_classic")
+
+ @when("@:1.25")
def setup_build_environment(self, env):
# Tell numpy which BLAS/LAPACK libraries we want to use.
- # https://github.com/numpy/numpy/pull/13132
- # https://numpy.org/devdocs/user/building.html#accelerated-blas-lapack-libraries
spec = self.spec
- # https://numpy.org/devdocs/user/building.html#blas
- if "blas" not in spec:
- blas = ""
- elif (
+ # https://github.com/numpy/numpy/pull/13132
+ # https://numpy.org/doc/1.25/user/building.html#accelerated-blas-lapack-libraries
+ # https://numpy.org/doc/1.25/user/building.html#blas
+ if (
spec["blas"].name == "intel-mkl"
or spec["blas"].name == "intel-parallel-studio"
or spec["blas"].name == "intel-oneapi-mkl"
@@ -361,10 +437,8 @@ def setup_build_environment(self, env):
env.set("NPY_BLAS_ORDER", blas)
- # https://numpy.org/devdocs/user/building.html#lapack
- if "lapack" not in spec:
- lapack = ""
- elif (
+ # https://numpy.org/doc/1.25/user/building.html#lapack
+ if (
spec["lapack"].name == "intel-mkl"
or spec["lapack"].name == "intel-parallel-studio"
or spec["lapack"].name == "intel-oneapi-mkl"
diff --git a/var/spack/repos/builtin/packages/py-nvidia-dali/package.py b/var/spack/repos/builtin/packages/py-nvidia-dali/package.py
index 2b1af9e19a3691..93804505fb5e5c 100644
--- a/var/spack/repos/builtin/packages/py-nvidia-dali/package.py
+++ b/var/spack/repos/builtin/packages/py-nvidia-dali/package.py
@@ -170,20 +170,20 @@ class PyNvidiaDali(PythonPackage):
)
cuda120_versions = (
- "1.27.0-cuda120",
- "1.26.0-cuda120",
- "1.25.0-cuda120",
- "1.24.0-cuda120",
- "1.23.0-cuda120",
- "1.22.0-cuda120",
+ "@1.27.0-cuda120",
+ "@1.26.0-cuda120",
+ "@1.25.0-cuda120",
+ "@1.24.0-cuda120",
+ "@1.23.0-cuda120",
+ "@1.22.0-cuda120",
)
cuda110_versions = (
- "1.27.0-cuda110",
- "1.26.0-cuda110",
- "1.25.0-cuda110",
- "1.24.0-cuda110",
- "1.23.0-cuda110",
- "1.22.0-cuda110",
+ "@1.27.0-cuda110",
+ "@1.26.0-cuda110",
+ "@1.25.0-cuda110",
+ "@1.24.0-cuda110",
+ "@1.23.0-cuda110",
+ "@1.22.0-cuda110",
)
for v in cuda120_versions:
diff --git a/var/spack/repos/builtin/packages/py-onnxruntime/package.py b/var/spack/repos/builtin/packages/py-onnxruntime/package.py
index 205785a4db66f9..30a466dcc180ab 100644
--- a/var/spack/repos/builtin/packages/py-onnxruntime/package.py
+++ b/var/spack/repos/builtin/packages/py-onnxruntime/package.py
@@ -63,6 +63,12 @@ class PyOnnxruntime(CMakePackage, PythonExtension):
patch("libiconv-1.10.patch", level=0, when="@1.10.0 ^libiconv")
# https://github.com/microsoft/onnxruntime/commit/de4089f8cbe0baffe56a363cc3a41595cc8f0809.patch
patch("gcc11.patch", level=1, when="@1.7.2")
+ # https://github.com/microsoft/onnxruntime/pull/16257
+ patch(
+ "https://github.com/microsoft/onnxruntime/commit/a3a443c80431c390cbf8855e9c7b2a95d413cd54.patch?full_index=1",
+ sha256="537c43b061d31bf97d2778d723a41fbd390160f9ebc304f06726e3bfd8dc4583",
+ when="@1.10:1.15",
+ )
dynamic_cpu_arch_values = ("NOAVX", "AVX", "AVX2", "AVX512")
diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py
index f3d531f3bc382f..9d91ef08ace82c 100644
--- a/var/spack/repos/builtin/packages/py-pandas/package.py
+++ b/var/spack/repos/builtin/packages/py-pandas/package.py
@@ -17,8 +17,8 @@ class PyPandas(PythonPackage):
maintainers("adamjstewart")
- variant("excel", when="@1.4:", default=False, description="Build with support for Excel")
-
+ version("2.1.3", sha256="22929f84bca106921917eb73c1521317ddd0a4c71b395bcf767a106e3494209f")
+ version("2.1.2", sha256="52897edc2774d2779fbeb6880d2cfb305daa0b1a29c16b91f531a18918a6e0f3")
version("2.1.1", sha256="fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b")
version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918")
version("2.0.3", sha256="c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c")
@@ -66,6 +66,8 @@ class PyPandas(PythonPackage):
version("0.24.1", sha256="435821cb2501eabbcee7e83614bd710940dc0cf28b5afbc4bdb816c31cec71af")
version("0.23.4", sha256="5b24ca47acf69222e82530e89111dd9d14f9b970ab2cd3a1c2c78f0c4fbba4f4")
+ variant("excel", when="@1.4:", default=False, description="Build with support for Excel")
+
# Required dependencies
# https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#python-version-support
depends_on("python@3.9:3.12", when="@2.1.1:", type=("build", "run"))
@@ -91,6 +93,7 @@ class PyPandas(PythonPackage):
depends_on("py-versioneer+toml", when="@2:", type="build")
# https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#dependencies
+ depends_on("py-numpy@1.22.4:2", when="@2.1.2:", type=("build", "run"))
depends_on("py-numpy@1.22.4:", when="@2.1:", type=("build", "run"))
depends_on("py-numpy@1.20.3:", when="@1.5:", type=("build", "run"))
depends_on("py-numpy@1.18.5:", when="@1.4:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-pdbfixer/package.py b/var/spack/repos/builtin/packages/py-pdbfixer/package.py
index 2da9f24d1ac086..2dbd4aa3eec7bf 100644
--- a/var/spack/repos/builtin/packages/py-pdbfixer/package.py
+++ b/var/spack/repos/builtin/packages/py-pdbfixer/package.py
@@ -18,6 +18,6 @@ class PyPdbfixer(PythonPackage):
version("1.7", sha256="a0bef3c52a7bbe69a6aea5333f51f3e7d158339be5829aed19b0344bd66d4eea")
depends_on("py-setuptools", type="build")
- depends_on("openmm@7.1:7.5", type=("build", "run"), when="1.7")
+ depends_on("openmm@7.1:7.5", type=("build", "run"), when="@1.7")
depends_on("openmm@7.6:", type=("build", "run"), when="@1.8:")
depends_on("py-numpy", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-petsc4py/package.py b/var/spack/repos/builtin/packages/py-petsc4py/package.py
index 0181571f1b05fc..06c551b9f4a1a1 100644
--- a/var/spack/repos/builtin/packages/py-petsc4py/package.py
+++ b/var/spack/repos/builtin/packages/py-petsc4py/package.py
@@ -18,6 +18,7 @@ class PyPetsc4py(PythonPackage):
maintainers("balay")
version("main", branch="main")
+ version("3.20.1", sha256="dcc9092040d13130496f1961b79c36468f383b6ede398080e004f1966c06ad38")
version("3.20.0", sha256="c2461eef3977ae5c214ad252520adbb92ec3a31d00e79391dd92535077bbf03e")
version("3.19.6", sha256="bd7891b651eb83504c744e70706818cf63ecbabee3206c1fed7c3013873802b9")
version("3.19.5", sha256="e059fdb8b23936c3182c9226924029dbdc8f1f72a623be0fe8c2caf8646c7a45")
diff --git a/var/spack/repos/builtin/packages/py-pint/package.py b/var/spack/repos/builtin/packages/py-pint/package.py
index 85bb7a0f054acd..83cb92af0d6d67 100644
--- a/var/spack/repos/builtin/packages/py-pint/package.py
+++ b/var/spack/repos/builtin/packages/py-pint/package.py
@@ -18,6 +18,8 @@ class PyPint(PythonPackage):
# any import tests for this package.
import_modules = [] # type: List[str]
+ version("0.22", sha256="2d139f6abbcf3016cad7d3cec05707fe908ac4f99cf59aedfd6ee667b7a64433")
+ version("0.21.1", sha256="5d5b6b518d0c5a7ab03a776175db500f1ed1523ee75fb7fafe38af8149431c8d")
version("0.20.1", sha256="387cf04078dc7dfe4a708033baad54ab61d82ab06c4ee3d4922b1e45d5626067")
version("0.18", sha256="8c4bce884c269051feb7abc69dbfd18403c0c764abc83da132e8a7222f8ba801")
version("0.17", sha256="f4d0caa713239e6847a7c6eefe2427358566451fe56497d533f21fb590a3f313")
@@ -27,11 +29,14 @@ class PyPint(PythonPackage):
version("0.9", sha256="32d8a9a9d63f4f81194c0014b3b742679dce81a26d45127d9810a68a561fe4e2")
version("0.8.1", sha256="afcf31443a478c32bbac4b00337ee9026a13d0e2ac83d30c79151462513bb0d4")
- depends_on("python@3.8:", type=("build", "run"), when="@0.19:")
- depends_on("py-setuptools@41:", when="@0.16:", type="build")
+ depends_on("python@3.9:", when="@0.22:", type=("build", "run"))
+ depends_on("python@3.8:", when="@0.19:0.21", type=("build", "run"))
+ depends_on("py-typing-extensions", when="@0.22:", type=("build", "run"))
+ depends_on("py-setuptools@61:", when="@0.21:", type="build")
+ depends_on("py-setuptools@41:", when="@0.16:0.20", type="build")
depends_on("py-setuptools@41:", when="@0.11:0.15", type=("build", "run"))
depends_on("py-setuptools", when="@:0.10", type=("build", "run"))
depends_on("py-setuptools-scm@3.4.3:+toml", when="@0.11:", type="build")
depends_on("py-setuptools-scm", when="@0.10", type="build")
- depends_on("py-packaging", type=("build", "run"), when="@0.13:18")
- depends_on("py-importlib-metadata", type=("build", "run"), when="@0.13:18 ^python@:3.7")
+ depends_on("py-packaging", when="@0.13:18", type=("build", "run"))
+ depends_on("py-importlib-metadata", when="@0.13:18 ^python@:3.7", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py
index 52d290d0b549c8..d92a53671ed8b8 100644
--- a/var/spack/repos/builtin/packages/py-pip/package.py
+++ b/var/spack/repos/builtin/packages/py-pip/package.py
@@ -15,6 +15,8 @@ class PyPip(Package, PythonExtension):
url = "https://files.pythonhosted.org/packages/py3/p/pip/pip-20.2-py3-none-any.whl"
list_url = "https://pypi.org/simple/pip/"
+ tags = ["build-tools"]
+
maintainers("adamjstewart", "pradyunsg")
version(
@@ -108,6 +110,5 @@ def install(self, spec, prefix):
def setup_dependent_package(self, module, dependent_spec):
pip = dependent_spec["python"].command
- pip.add_default_arg("-m")
- pip.add_default_arg("pip")
+ pip.add_default_arg("-m", "pip")
setattr(module, "pip", pip)
diff --git a/var/spack/repos/builtin/packages/py-pyfr/package.py b/var/spack/repos/builtin/packages/py-pyfr/package.py
index 7cbfe6ab71f67d..9f81ef7597fa51 100644
--- a/var/spack/repos/builtin/packages/py-pyfr/package.py
+++ b/var/spack/repos/builtin/packages/py-pyfr/package.py
@@ -41,7 +41,7 @@ class PyPyfr(PythonPackage, CudaPackage, ROCmPackage):
depends_on("py-h5py@2.10:", type=("build", "run"))
depends_on("py-mako@1.0.0:", type=("build", "run"))
depends_on("py-mpi4py@3.1.0:", type=("build", "run"))
- depends_on("py-numpy@1.20:+blas", type=("build", "run"))
+ depends_on("py-numpy@1.20:", type=("build", "run"))
depends_on("py-platformdirs@2.2.0:", type=("build", "run"))
depends_on("py-pytools@2016.2.1:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-pygithub/package.py b/var/spack/repos/builtin/packages/py-pygithub/package.py
index 24885aa333d76b..867403bc855724 100644
--- a/var/spack/repos/builtin/packages/py-pygithub/package.py
+++ b/var/spack/repos/builtin/packages/py-pygithub/package.py
@@ -7,16 +7,25 @@
class PyPygithub(PythonPackage):
- """Use the full Github API v3"""
+ """Typed interactions with the GitHub API v3"""
homepage = "https://pygithub.readthedocs.io/"
pypi = "PyGithub/PyGithub-1.54.1.tar.gz"
+ version("2.1.1", sha256="ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c")
+ version("1.59.1", sha256="c44e3a121c15bf9d3a5cc98d94c9a047a5132a9b01d22264627f58ade9ddc217")
version("1.55", sha256="1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283")
depends_on("python@3.6:", type=("build", "run"))
+ depends_on("python@3.7:", type=("build", "run"), when="@1.57:")
+
depends_on("py-setuptools", type="build")
- depends_on("py-deprecated", type=("build", "run"))
- depends_on("py-pyjwt@2:", type=("build", "run"))
+ depends_on("py-setuptools-scm", type="build", when="@1.58.1:")
depends_on("py-pynacl@1.4.0:", type=("build", "run"))
+ depends_on("py-python-dateutil", type=("build", "run"), when="@2.1.0:")
depends_on("py-requests@2.14.0:", type=("build", "run"))
+ depends_on("py-pyjwt@2.4.0:", type=("build", "run"))
+ depends_on("py-pyjwt@2.4.0: +crypto", type=("build", "run"), when="@1.58.1:")
+ depends_on("py-typing-extensions@4:", type=("build", "run"), when="@2.1.0:")
+ depends_on("py-urllib3@1.26.0:", type=("build", "run"), when="@2.1.0:")
+ depends_on("py-deprecated", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-pynucleus/package.py b/var/spack/repos/builtin/packages/py-pynucleus/package.py
index c4f2f82b7a24ef..4194bb2c609e77 100644
--- a/var/spack/repos/builtin/packages/py-pynucleus/package.py
+++ b/var/spack/repos/builtin/packages/py-pynucleus/package.py
@@ -19,6 +19,9 @@ class PyPynucleus(PythonPackage):
for ref in refs:
version(ref, branch=ref)
+ variant("examples", default=True, description="Install examples")
+ variant("tests", default=True, description="Install tests")
+
depends_on("python@3.10:", type=("build", "run"))
depends_on("py-mpi4py@2.0.0:", type=("build", "link", "run"))
depends_on("py-cython@0.29.32:", type=("build", "run"))
@@ -30,14 +33,14 @@ class PyPynucleus(PythonPackage):
depends_on("py-h5py", type=("build", "run"))
depends_on("py-tabulate", type=("build", "run"))
depends_on("py-pyyaml", type=("build", "run"))
- depends_on("py-matplotlib+latex", type=("build", "run"))
+ depends_on("py-matplotlib", type=("build", "run"))
depends_on("py-scikit-sparse", type=("build", "run"))
depends_on("py-modepy", type=("build", "run"))
depends_on("py-meshpy", type=("build", "run"))
depends_on("py-pytools", type=("build", "run"))
depends_on("py-psutil", type="run")
-
- variant("examples", default=True, description="Install examples")
+ depends_on("py-pytest", when="+tests", type="run")
+ depends_on("py-pytest-html", when="+tests", type="run")
import_modules = [
"PyNucleus",
@@ -64,5 +67,9 @@ def install_python(self):
def install_additional_files(self):
spec = self.spec
prefix = self.prefix
- if "+examples" in spec:
+ if "+examples" in spec or "+tests" in spec:
install_tree("drivers", prefix.drivers)
+ if "+examples" in spec:
+ install_tree("examples", prefix.examples)
+ if "+tests" in spec:
+ install_tree("tests", prefix.tests)
diff --git a/var/spack/repos/builtin/packages/py-pyside2/package.py b/var/spack/repos/builtin/packages/py-pyside2/package.py
index e6ee1f8cc89531..b13f0c1aac4220 100644
--- a/var/spack/repos/builtin/packages/py-pyside2/package.py
+++ b/var/spack/repos/builtin/packages/py-pyside2/package.py
@@ -54,7 +54,7 @@ class PyPyside2(PythonPackage):
depends_on("cmake@3.1:", type="build")
# libclang versioning from sources/shiboken2/doc/gettingstarted.rst
depends_on("llvm@6", type="build", when="@5.12:5.13")
- depends_on("llvm@10", type="build", when="@5.15")
+ depends_on("llvm@10:", type="build", when="@5.15:")
depends_on("py-setuptools", type="build")
depends_on("py-packaging", type="build")
depends_on("py-wheel", type="build")
@@ -69,6 +69,23 @@ class PyPyside2(PythonPackage):
depends_on("libxslt@1.1.19:", when="+doc", type="build")
depends_on("py-sphinx", when="+doc", type="build")
+ def patch(self):
+ filter_file(
+ "=${shiboken_include_dirs}",
+ ":".join(
+ [
+ "=${shiboken_include_dirs}",
+ self.spec["qt"]["glx"]["libglx"].prefix.include,
+ self.spec["qt"]["libxcb"].prefix.include,
+ ]
+ ),
+ "sources/pyside2/cmake/Macros/PySideModules.cmake",
+ string=True,
+ )
+
+ def setup_build_environment(self, env):
+ env.set("LLVM_INSTALL_DIR", self.spec["llvm"].prefix)
+
def install_options(self, spec, prefix):
args = [
"--parallel={0}".format(make_jobs),
diff --git a/var/spack/repos/builtin/packages/py-pyzmq/package.py b/var/spack/repos/builtin/packages/py-pyzmq/package.py
index 4850dddc08ec8e..bf60b4be57d91b 100644
--- a/var/spack/repos/builtin/packages/py-pyzmq/package.py
+++ b/var/spack/repos/builtin/packages/py-pyzmq/package.py
@@ -45,6 +45,9 @@ class PyPyzmq(PythonPackage):
# pyproject.toml
depends_on("py-setuptools", type="build")
+ # https://github.com/zeromq/pyzmq/issues/1278
+ # https://github.com/zeromq/pyzmq/pull/1317
+ depends_on("py-setuptools@:59", when="@17:18.0", type="build")
depends_on("py-packaging", type="build")
# setup.py
diff --git a/var/spack/repos/builtin/packages/py-quantities/package.py b/var/spack/repos/builtin/packages/py-quantities/package.py
index 024901ab674b08..0d9b38f69e418e 100644
--- a/var/spack/repos/builtin/packages/py-quantities/package.py
+++ b/var/spack/repos/builtin/packages/py-quantities/package.py
@@ -13,18 +13,19 @@ class PyQuantities(PythonPackage):
pypi = "quantities/quantities-0.12.1.tar.gz"
maintainers("apdavison")
+ version("0.14.1", sha256="efeafffc0c0364f891a9327239cd12496bccb55cd037a6d1bf44de706f722877")
version("0.13.0", sha256="0fde20115410de21cefa786f3aeae69c1b51bb19ee492190324c1da705e61a81")
version("0.12.5", sha256="67546963cb2a519b1a4aa43d132ef754360268e5d551b43dd1716903d99812f0")
version("0.12.4", sha256="a33d636d1870c9e1127631185d89b0105a49f827d6aacd44ad9d8f151f331d8b")
version("0.12.3", sha256="582f3c7aeba897846761e966615e01202a5e5d06add304492931b05085d19883")
- depends_on("python@2.7.0:2.7,3.4:3.7", type=("build", "run"), when="@0.12.3")
- depends_on("python@2.7.0:2.7,3.4:3.8", type=("build", "run"), when="@0.12.4:0.12.5")
- depends_on("python@3.7:3.10", type=("build", "run"), when="@0.13:")
+ depends_on("python@3.8:", type=("build", "run"), when="@0.14:")
- # pip silently replaces distutils with setuptools
+ depends_on("py-setuptools@61:", type="build", when="@0.14:")
depends_on("py-setuptools", type="build")
+ depends_on("py-setuptools-scm+toml", type="build", when="@0.14:")
- depends_on("py-numpy@1.8.2:1.16", type=("build", "run"), when="@0.12.3")
+ depends_on("py-numpy@1.19:", type=("build", "run"), when="@0.14:")
+ depends_on("py-numpy@1.16:", type=("build", "run"), when="@0.13")
depends_on("py-numpy@1.8.2:1.17", type=("build", "run"), when="@0.12.4:0.12")
- depends_on("py-numpy@1.16:", type=("build", "run"), when="@0.13.0:")
+ depends_on("py-numpy@1.8.2:1.16", type=("build", "run"), when="@0.12.3")
diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py
new file mode 100644
index 00000000000000..af0ebea5b21c49
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyRapidfuzzCapi(PythonPackage):
+ """
+ C-API of RapidFuzz, which can be used to extend RapidFuzz from separate packages.
+ """
+
+ homepage = "https://github.com/maxbachmann/rapidfuzz_capi"
+ pypi = "rapidfuzz_capi/rapidfuzz_capi-1.0.5.tar.gz"
+
+ maintainers("LydDeb")
+
+ version("1.0.5", sha256="b3af179874b28364ba1b7850e37d0d353de9cf5b844e3569c023b74da3a9c68e")
+
+ depends_on("py-setuptools", type="build")
diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py
index 5bfd1563b5bb78..a1213a1b379306 100644
--- a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py
+++ b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py
@@ -13,12 +13,16 @@ class PyRapidfuzz(PythonPackage):
pypi = "rapidfuzz/rapidfuzz-1.8.2.tar.gz"
version("3.3.1", sha256="6783b3852f15ed7567688e2e358757a7b4f38683a915ba5edc6c64f1a3f0b450")
+ version("2.2.0", sha256="acb8839aac452ec61a419fdc8799e8a6e6cd21bed53d04678cdda6fba1247e2f")
version("1.8.2", sha256="d6efbb2b6b18b3a67d7bdfbcd9bb72732f55736852bbef823bdf210f9e0c6c90")
depends_on("python", type=("build", "link", "run"))
- depends_on("py-setuptools@42:", when="@3:", type="build")
+ depends_on("py-setuptools@42:", when="@2:", type="build")
depends_on("py-setuptools", type="build")
depends_on("py-scikit-build@0.17", when="@3:", type="build")
+ depends_on("py-scikit-build@0.13:", when="@2.2:", type="build")
+ depends_on("py-rapidfuzz-capi@1.0.5", when="@2", type="build")
+ depends_on("py-jarowinkler@1.2.0:1", when="@2", type=("build", "run"))
# CMakeLists.txt
depends_on("cmake@3.12:", type="build")
diff --git a/var/spack/repos/builtin/packages/py-scikit-build-core/package.py b/var/spack/repos/builtin/packages/py-scikit-build-core/package.py
index 1733dc770631e5..ed75519d482b81 100644
--- a/var/spack/repos/builtin/packages/py-scikit-build-core/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-build-core/package.py
@@ -17,6 +17,8 @@ class PyScikitBuildCore(PythonPackage):
maintainers("wdconinc")
+ version("0.6.1", sha256="392254a4ca7235c27a4be98cc24cd708f563171961ce37cff66120ebfda20b7a")
+ version("0.6.0", sha256="1bea5ed83610b367f3446badd996f2356690548188d6d38e5b93152df311a7ae")
version("0.2.0", sha256="d2a76d9447a412038dc5e25dd259b03c25278661a0c7c3da766bb971c1a9acd2")
variant("pyproject", default=False, description="Enable pyproject.toml support")
@@ -29,6 +31,7 @@ class PyScikitBuildCore(PythonPackage):
# Dependencies
depends_on("py-exceptiongroup", when="^python@:3.10", type=("build", "run"))
+ depends_on("py-importlib-metadata", when="@0.3.0: ^python@:3.7")
depends_on("py-importlib-resources@1.3:", when="^python@:3.8", type=("build", "run"))
depends_on("py-packaging@20.9:", type=("build", "run"))
depends_on("py-tomli@1.1:", when="^python@:3.10", type=("build", "run"))
@@ -49,6 +52,7 @@ class PyScikitBuildCore(PythonPackage):
depends_on("py-pytest@7:", type="test")
depends_on("py-pytest-subprocess@1.5:", type="test")
depends_on("py-setuptools", type="test")
+ depends_on("py-virtualenv", when="@0.6:", type="test")
depends_on("py-wheel", type="test")
@run_after("install")
diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
index 389bc6d48bbb9d..05f6d09b53952b 100644
--- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
@@ -17,6 +17,7 @@ class PyScikitLearn(PythonPackage):
maintainers("adamjstewart")
version("master", branch="master")
+ version("1.3.2", sha256="a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05")
version("1.3.1", sha256="1a231cced3ee3fa04756b4a7ab532dc9417acd581a330adff5f2c01ac2831fcf")
version("1.3.0", sha256="8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a")
version("1.2.2", sha256="8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7")
@@ -51,7 +52,8 @@ class PyScikitLearn(PythonPackage):
variant("openmp", default=True, description="Build with OpenMP support")
# Based on PyPI wheel availability
- depends_on("python@3.8:3.11", when="@1.1.3:", type=("build", "run"))
+ depends_on("python@3.8:3.12", when="@1.3.1:", type=("build", "run"))
+ depends_on("python@3.8:3.11", when="@1.1.3:1.3.0", type=("build", "run"))
depends_on("python@3.8:3.10", when="@1.1.0:1.1.2", type=("build", "run"))
depends_on("python@:3.10", when="@1.0.2", type=("build", "run"))
depends_on("python@:3.9", when="@0.24:1.0.1", type=("build", "run"))
@@ -61,6 +63,10 @@ class PyScikitLearn(PythonPackage):
# pyproject.toml
depends_on("py-setuptools", type="build")
depends_on("py-setuptools@:59", when="@:1.2.1", type="build")
+ depends_on("py-cython@0.29.33:2", when="@1.3:", type="build")
+ depends_on("py-cython@0.29.24:2", when="@1.0.2:", type="build")
+ depends_on("py-cython@0.28.5:2", when="@0.21:", type="build")
+ depends_on("py-cython@0.23:2", type="build")
# sklearn/_min_dependencies.py
depends_on("py-numpy@1.17.3:", when="@1.1:", type=("build", "run"))
@@ -80,10 +86,6 @@ class PyScikitLearn(PythonPackage):
depends_on("py-joblib@1:", when="@1.1:", type=("build", "run"))
depends_on("py-joblib@0.11:", type=("build", "run"))
depends_on("py-threadpoolctl@2.0.0:", when="@0.23:", type=("build", "run"))
- depends_on("py-cython@0.29.33:", when="@1.3:", type="build")
- depends_on("py-cython@0.29.24:", when="@1.0.2:", type="build")
- depends_on("py-cython@0.28.5:", when="@0.21:", type="build")
- depends_on("py-cython@0.23:", type="build")
depends_on("llvm-openmp", when="@0.21: %apple-clang +openmp")
# Test dependencies
diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py
index 4a07657d80d7bb..5d053cbb91d1c6 100644
--- a/var/spack/repos/builtin/packages/py-scipy/package.py
+++ b/var/spack/repos/builtin/packages/py-scipy/package.py
@@ -3,16 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import glob
-import os
-
from spack.package import *
class PyScipy(PythonPackage):
- """SciPy (pronounced "Sigh Pie") is a Scientific Library for Python.
- It provides many user-friendly and efficient numerical routines such
- as routines for numerical integration and optimization."""
+ """Fundamental algorithms for scientific computing in Python."""
homepage = "https://www.scipy.org/"
pypi = "scipy/scipy-1.10.1.tar.gz"
@@ -20,7 +15,10 @@ class PyScipy(PythonPackage):
maintainers("adamjstewart", "rgommers")
- version("master", branch="master")
+ version("main", branch="main")
+ version("master", branch="master", deprecated=True)
+ version("1.11.4", sha256="90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa")
+ version("1.11.3", sha256="bba4d955f54edd61899776bad459bf7326e14b9fa1c552181f0479cc60a568cd")
version("1.11.2", sha256="b29318a5e39bd200ca4381d80b065cdf3076c7d7281c5e36569e99273867f61d")
version("1.11.1", sha256="fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289")
version("1.11.0", sha256="f9b0248cb9d08eead44cde47cbf6339f1e9aa0dfde28f5fb27950743e317bd5d")
@@ -65,22 +63,18 @@ class PyScipy(PythonPackage):
depends_on("python@:3.8", when="@1.3.2:1.5.3", type=("build", "link", "run"))
depends_on("python@:3.7", when="@1.1:1.3.1", type=("build", "link", "run"))
- # TODO: remove once pip build supports BLAS/LAPACK specification
- # https://github.com/mesonbuild/meson-python/pull/167
- depends_on("py-build", when="@1.9:", type="build")
-
- depends_on("py-meson-python@0.12.1:0.13", when="@1.11:", type="build")
- depends_on("py-meson-python@0.11:0.12", when="@1.10.1:1.10", type="build")
- depends_on("py-meson-python@0.11", when="@1.10.0", type="build")
- depends_on("py-meson-python@0.9:", when="@1.9.2:1.9", type="build")
- depends_on("py-meson-python@0.8.1:", when="@1.9.1", type="build")
- depends_on("py-meson-python@0.7", when="@1.9.0", type="build")
- depends_on("meson@0.62.2", when="@1.9.0:1.9.1", type="build")
+ depends_on("py-meson-python@0.12.1:", when="@1.11:", type="build")
+ depends_on("py-meson-python@0.11:", when="@1.10:", type="build")
+ depends_on("py-meson-python@0.9:", when="@1.9.2:", type="build")
+ depends_on("py-meson-python@0.8.1:", when="@1.9.1:", type="build")
+ depends_on("py-meson-python@0.7:", when="@1.9:", type="build")
+ depends_on("meson", when="@1.9.0:1.9.1", type="build")
depends_on("py-cython@0.29.35:2", when="@1.11:", type="build")
depends_on("py-cython@0.29.32:2", when="@1.9.2:", type="build")
depends_on("py-cython@0.29.21:2", when="@1.9:", type="build")
depends_on("py-cython@0.29.18:2", when="@1.7:", type="build")
- depends_on("py-pybind11@2.10.4:2.10", when="@1.11:", type=("build", "link"))
+ depends_on("py-pybind11@2.10.4:2.11.0", when="@1.11.3:", type=("build", "link"))
+ depends_on("py-pybind11@2.10.4:2.10", when="@1.11.0:1.11.2", type=("build", "link"))
depends_on("py-pybind11@2.10.1", when="@1.10", type=("build", "link"))
depends_on("py-pybind11@2.4.3:2.10", when="@1.9.1:1.9", type=("build", "link"))
depends_on("py-pybind11@2.4.3:2.9", when="@1.9.0", type=("build", "link"))
@@ -90,14 +84,11 @@ class PyScipy(PythonPackage):
depends_on("py-pybind11@2.4.3:", when="@1.5:1.6.1", type=("build", "link"))
depends_on("py-pybind11@2.4.0:", when="@1.4.1:1.4", type=("build", "link"))
depends_on("py-pybind11@2.2.4:", when="@1.4.0", type=("build", "link"))
- depends_on("py-pythran@0.12:0.13", when="@1.11:", type="build")
- depends_on("py-pythran@0.12", when="@1.10", type="build")
- depends_on("py-pythran@0.9.12:0.12", when="@1.9.2:1.9", type="build")
- depends_on("py-pythran@0.9.12:0.11", when="@1.9.0:1.9.1", type="build")
- depends_on("py-pythran@0.10", when="@1.8", type="build")
- depends_on("py-pythran@0.9.12:0.9", when="@1.7.2:1.7", type="build")
- depends_on("py-pythran@0.9.11", when="@1.7.0:1.7.1", type="build")
- depends_on("py-wheel@:0.40", when="@1.11:", type="build")
+ depends_on("py-pythran@0.12:", when="@1.10:", type="build")
+ depends_on("py-pythran@0.10:", when="@1.8", type="build")
+ depends_on("py-pythran@0.9.12:", when="@1.7.2:", type="build")
+ depends_on("py-pythran@0.9.11:", when="@1.7:", type="build")
+ depends_on("py-wheel@:0.40", when="@1.11.0:1.11.2", type="build")
depends_on("py-wheel@:0.38", when="@1.10", type="build")
depends_on("py-wheel@:0.37", when="@:1.9", type="build")
depends_on("pkgconfig", when="@1.9:", type="build")
@@ -105,43 +96,53 @@ class PyScipy(PythonPackage):
depends_on("py-setuptools@:59", when="@1.8", type="build")
depends_on("py-setuptools@:57", when="@1.7", type="build")
depends_on("py-setuptools@:51.0.0", when="@1.6", type="build")
- depends_on("py-numpy@1.21.6:1.27+blas+lapack", when="@1.11:", type=("build", "link", "run"))
- depends_on("py-numpy@1.19.5:1.26+blas+lapack", when="@1.10", type=("build", "link", "run"))
- depends_on("py-numpy@1.18.5:1.25+blas+lapack", when="@1.9", type=("build", "link", "run"))
- depends_on("py-numpy@1.17.3:1.24+blas+lapack", when="@1.8", type=("build", "link", "run"))
- depends_on(
- "py-numpy@1.16.5:1.22+blas+lapack", when="@1.6.2:1.7", type=("build", "link", "run")
- )
- depends_on("py-numpy@1.16.5:+blas+lapack", when="@1.6:1.6.1", type=("build", "link", "run"))
- depends_on("py-numpy@1.14.5:+blas+lapack", when="@1.5.0:1.5", type=("build", "link", "run"))
- depends_on("py-numpy@1.13.3:+blas+lapack", when="@1.3:1.4", type=("build", "link", "run"))
- depends_on("py-numpy@1.8.2:+blas+lapack", when="@:1.2", type=("build", "link", "run"))
+ depends_on("py-numpy@1.21.6:1.27", when="@1.11:", type=("build", "link", "run"))
+ depends_on("py-numpy@1.19.5:1.26", when="@1.10", type=("build", "link", "run"))
+ depends_on("py-numpy@1.18.5:1.25", when="@1.9", type=("build", "link", "run"))
+ depends_on("py-numpy@1.17.3:1.24", when="@1.8", type=("build", "link", "run"))
+ depends_on("py-numpy@1.16.5:1.22", when="@1.6:1.7", type=("build", "link", "run"))
+ depends_on("py-numpy@1.14.5:1.21", when="@1.5", type=("build", "link", "run"))
+ depends_on("py-numpy@1.13.3:1.21", when="@1.3:1.4", type=("build", "link", "run"))
+ depends_on("py-numpy@1.8.2:1.20", when="@:1.2", type=("build", "link", "run"))
depends_on("py-pytest", type="test")
- # NOTE: scipy should use the same BLAS/LAPACK as numpy.
- # For scipy 1.8 and older, this is achieved by calling the set_blas_lapack()
- # and setup_build_environment() from numpy in the scipy spec.
- depends_on("blas")
- depends_on("lapack")
+ # Required to use --config-settings
+ depends_on("py-pip@23.1:", when="@1.9:", type="build")
# https://docs.scipy.org/doc/scipy/dev/toolchain.html#other-libraries
depends_on("lapack@3.7.1:", when="@1.9:")
depends_on("lapack@3.4.1:", when="@1.2:")
+ depends_on("lapack")
+ depends_on("blas")
+ # meson.build
# https://docs.scipy.org/doc/scipy/dev/toolchain.html#compilers
- conflicts("%gcc@:7", when="@1.10:")
- conflicts("%gcc@:4.7", when="@:1.9")
- conflicts("%apple-clang@:9", when="@1.10:")
- conflicts("%msvc@:19.19", when="@1.10:")
+ conflicts("%gcc@:7", when="@1.10:", msg="SciPy requires GCC >= 8.0")
+ conflicts("%gcc@:4.7", when="@:1.9", msg="SciPy requires GCC >= 4.8")
+ conflicts(
+ "%msvc@:19.19",
+ when="@1.10:",
+ msg="SciPy requires at least vc142 (default with Visual Studio 2019) "
+ "when building with MSVC",
+ )
- # https://github.com/scipy/scipy/pull/11324
- conflicts("@1.4.0:1.4.1", when="target=ppc64le:")
+ # https://github.com/scipy/scipy/issues/19352
+ conflicts("^py-cython@3.0.3")
# https://github.com/mesonbuild/meson/pull/10909#issuecomment-1282241479
# Intel OneAPI ifx claims to support -fvisibility, but this does not work.
# Meson adds this flag for all Python extensions which include Fortran code.
conflicts("%oneapi@:2023.0", when="@1.9:")
+ # error: expected unqualified-id (exact compiler versions unknown)
+ conflicts("%apple-clang@15:", when="@:1.9")
+
+ # https://docs.scipy.org/doc//scipy-1.10.1/release.1.7.3.html
+ conflicts("platform=darwin target=aarch64:", when="@:1.7.2")
+
+ # https://github.com/scipy/scipy/pull/11324
+ conflicts("@1.4.0:1.4.1", when="target=ppc64le:")
+
# https://github.com/scipy/scipy/issues/12860
patch(
"https://git.sagemath.org/sage.git/plain/build/pkgs/scipy/patches/extern_decls.patch?id=711fe05025795e44b84233e065d240859ccae5bd",
@@ -155,12 +156,6 @@ class PyScipy(PythonPackage):
def archive_files(self):
return [join_path(self.stage.source_path, "build", "meson-logs", "meson-log.txt")]
- @run_before("install")
- def set_blas_lapack(self):
- # Pick up BLAS/LAPACK from numpy
- if self.spec.satisfies("@:1.8"):
- self.spec["py-numpy"].package.set_blas_lapack()
-
@run_before("install")
def set_fortran_compiler(self):
if self.compiler.f77 is None or self.compiler.fc is None:
@@ -200,53 +195,27 @@ def setup_build_environment(self, env):
if self.spec.satisfies("@:1.8"):
self.spec["py-numpy"].package.setup_build_environment(env)
- # TODO: remove once pip build supports BLAS/LAPACK specification
- # https://github.com/mesonbuild/meson-python/pull/167
+ # https://github.com/scipy/scipy/issues/19357
+ if self.spec.satisfies("%apple-clang@15:"):
+ env.append_flags("LDFLAGS", "-Wl,-ld_classic")
+
@when("@1.9:")
- def install(self, spec, prefix):
- blas = spec["blas"].libs.names[0]
- lapack = spec["lapack"].libs.names[0]
- if spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
- blas = "mkl-dynamic-lp64-seq"
- if spec["lapack"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
- lapack = "mkl-dynamic-lp64-seq"
- if spec["blas"].name in ["blis", "amdblis"]:
- blas = "blis"
- if "armpl" in blas:
- if "_mp" in blas:
- blas = "armpl-dynamic-lp64-omp"
- else:
- blas = "armpl-dynamic-lp64-seq"
- if "armpl" in lapack:
- if "_mp" in lapack:
- lapack = "armpl-dynamic-lp64-omp"
- else:
- lapack = "armpl-dynamic-lp64-seq"
-
- args = [
- "setup",
- "build",
- "-Dblas=" + blas,
- "-Dlapack=" + lapack,
- "--prefix=" + join_path(os.getcwd(), "build-install"),
- "-Ddebug=false",
- "-Doptimization=2",
- ]
- meson = which("meson")
- meson(*args)
- args = [
- "-m",
- "build",
- "--wheel",
- "-Cbuilddir=build",
- "--no-isolation",
- "--skip-dependency-check",
- "-Ccompile-args=-j%s" % make_jobs,
- ".",
- ]
- python(*args)
- args = std_pip_args + ["--prefix=" + prefix, glob.glob(join_path("dist", "scipy*.whl"))[0]]
- pip(*args)
+ def config_settings(self, spec, prefix):
+ blas, lapack = self.spec["py-numpy"].package.blas_lapack_pkg_config()
+ return {
+ "builddir": "build",
+ "compile-args": f"-j{make_jobs}",
+ "setup-args": {
+ # https://scipy.github.io/devdocs/building/blas_lapack.html
+ "-Dblas": blas,
+ "-Dlapack": lapack,
+ },
+ }
+
+ @when("@:1.8")
+ @run_before("install")
+ def set_blas_lapack(self):
+ self.spec["py-numpy"].package.blas_lapack_site_cfg()
@run_after("install")
@on_package_attributes(run_tests=True)
diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py
index 03487bfaa07aca..36cc6de4ee38b7 100644
--- a/var/spack/repos/builtin/packages/py-setuptools/package.py
+++ b/var/spack/repos/builtin/packages/py-setuptools/package.py
@@ -6,60 +6,191 @@
from spack.package import *
-class PySetuptools(PythonPackage):
+class PySetuptools(Package, PythonExtension):
"""A Python utility that aids in the process of downloading, building,
upgrading, installing, and uninstalling Python packages."""
homepage = "https://github.com/pypa/setuptools"
- pypi = "setuptools/setuptools-62.3.2.tar.gz"
+ url = "https://files.pythonhosted.org/packages/py3/s/setuptools/setuptools-62.3.2-py3-none-any.whl"
+ list_url = "https://pypi.org/simple/setuptools/"
tags = ["build-tools"]
- version("68.0.0", sha256="baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235")
- version("67.6.0", sha256="2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077")
- version("65.5.0", sha256="512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17")
- version("65.0.0", sha256="d73f8cd714a1a6691f5eb5abeeacbf313242b7aa2f5eba93776542c1aad90c6f")
- version("64.0.0", sha256="9b5d2cb8df48f005825654e0cb17217418317e4d996c035f0bca7cbaeb8acf51")
- version("63.4.3", sha256="521c833d1e5e1ef0869940e7f486a83de7773b9f029010ad0c2fe35453a9dad9")
- version("63.0.0", sha256="7388e17e72f5c0c7279f59da950a7925910e35bc1a84e19d3affbb40da248d1d")
- version("62.6.0", sha256="990a4f7861b31532871ab72331e755b5f14efbe52d336ea7f6118144dd478741")
- version("62.4.0", sha256="bf8a748ac98b09d32c9a64a995a6b25921c96cc5743c1efa82763ba80ff54e91")
- version("62.3.2", sha256="a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7")
- version("59.4.0", sha256="b4c634615a0cf5b02cf83c7bedffc8da0ca439f00e79452699454da6fbd4153d")
- version("58.2.0", sha256="2c55bdb85d5bb460bd2e3b12052b677879cffcf46c0c688f2e5bf51d36001145")
- version("57.4.0", sha256="6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465")
- version("57.1.0", sha256="cfca9c97e7eebbc8abe18d5e5e962a08dcad55bb63afddd82d681de4d22a597b")
- version("51.0.0", sha256="029c49fd713e9230f6a41c0298e6e1f5839f2cde7104c0ad5e053a37777e7688")
- version("50.3.2", sha256="ed0519d27a243843b05d82a5e9d01b0b083d9934eaa3d02779a23da18077bd3c")
- version("50.1.0", sha256="4a7708dafd2d360ce5e2ac7577374da9fb65fc867bc4cdaf461f9f834dfa6ac3")
- version("49.6.0", sha256="46bd862894ed22c2edff033c758c2dc026324788d758e96788e8f7c11f4e9707")
- version("49.2.0", sha256="afe9e81fee0270d3f60d52608549cc8ec4c46dada8c95640c1a00160f577acf2")
- version("46.1.3", sha256="795e0475ba6cd7fa082b1ee6e90d552209995627a2a227a47c6ea93282f4bfb1")
- version("44.1.1", sha256="c67aa55db532a0dadc4d2e20ba9961cbd3ccc84d544e9029699822542b5a476b")
- version("44.1.0", sha256="794a96b0c1dc6f182c36b72ab70d7e90f1d59f7a132e6919bb37b4fd4d424aca")
- version("43.0.0", sha256="db45ebb4a4b3b95ff0aca3ce5fe1e820ce17be393caf8902c78aa36240e8c378")
- version("41.4.0", sha256="7eae782ccf36b790c21bde7d86a4f303a441cd77036b25c559a602cf5186ce4d")
- version("41.3.0", sha256="9f5c54b529b2156c6f288e837e625581bb31ff94d4cfd116b8f271c589749556")
- version("41.0.1", sha256="a222d126f5471598053c9a77f4b5d4f26eaa1f150ad6e01dcf1a42e185d05613")
- version("41.0.0", sha256="79d30254b6fe7a8e672e43cd85f13a9f3f2a50080bc81d851143e2219ef0dcb1")
- version("40.8.0", sha256="6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d")
- version("40.4.3", sha256="acbc5740dd63f243f46c2b4b8e2c7fd92259c2ddb55a4115b16418a2ed371b15")
- version("40.2.0", sha256="47881d54ede4da9c15273bac65f9340f8929d4f0213193fa7894be384f2dcfa6")
- version("39.2.0", sha256="f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2")
- version("39.0.1", sha256="bec7badf0f60e7fc8153fac47836edc41b74e5d541d7692e614e635720d6a7c7")
- version("25.2.0", sha256="b2757ddac2c41173140b111e246d200768f6dd314110e1e40661d0ecf9b4d6a6")
- version("20.7.0", sha256="505cdf282c5f6e3a056e79f0244b8945f3632257bba8469386c6b9b396400233")
- version("20.6.7", sha256="d20152ee6337323d3b6d95cd733fb719d6b4f3fbc40f61f7a48e5a1bb96478b2")
-
- def url_for_version(self, version):
- url = self.url.rsplit("/", 1)[0]
- if version.satisfies(ver("32.1.2:51.0.0")):
- url += "/setuptools-{}.zip"
- else:
- url += "/setuptools-{}.tar.gz"
- return url.format(version)
-
- patch("rpath-compiler-flag.patch", when="@48:58.2")
+ version(
+ "68.0.0",
+ sha256="11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f",
+ expand=False,
+ )
+ version(
+ "67.6.0",
+ sha256="b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2",
+ expand=False,
+ )
+ version(
+ "65.5.0",
+ sha256="f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356",
+ expand=False,
+ )
+ version(
+ "65.0.0",
+ sha256="fe9a97f68b064a6ddd4bacfb0b4b93a4c65a556d97ce906255540439d0c35cef",
+ expand=False,
+ )
+ version(
+ "64.0.0",
+ sha256="63f463b90ff5e0a1422010100268fd688e15c44ae0798659013c8412963e15e4",
+ expand=False,
+ )
+ version(
+ "63.4.3",
+ sha256="7f61f7e82647f77d4118eeaf43d64cbcd4d87e38af9611694d4866eb070cd10d",
+ expand=False,
+ )
+ version(
+ "63.0.0",
+ sha256="045aec56a3eee5c82373a70e02db8b6da9a10f7faf61ff89a14ab66c738ed370",
+ expand=False,
+ )
+ version(
+ "62.6.0",
+ sha256="c1848f654aea2e3526d17fc3ce6aeaa5e7e24e66e645b5be2171f3f6b4e5a178",
+ expand=False,
+ )
+ version(
+ "62.4.0",
+ sha256="5a844ad6e190dccc67d6d7411d119c5152ce01f7c76be4d8a1eaa314501bba77",
+ expand=False,
+ )
+ version(
+ "62.3.2",
+ sha256="68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36",
+ expand=False,
+ )
+ version(
+ "59.4.0",
+ sha256="feb5ff19b354cde9efd2344ef6d5e79880ce4be643037641b49508bbb850d060",
+ expand=False,
+ )
+ version(
+ "58.2.0",
+ sha256="2551203ae6955b9876741a26ab3e767bb3242dafe86a32a749ea0d78b6792f11",
+ expand=False,
+ )
+ version(
+ "57.4.0",
+ sha256="a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6",
+ expand=False,
+ )
+ version(
+ "57.1.0",
+ sha256="ddae4c1b9220daf1e32ba9d4e3714df6019c5b583755559be84ff8199f7e1fe3",
+ expand=False,
+ )
+ version(
+ "51.0.0",
+ sha256="8c177936215945c9a37ef809ada0fab365191952f7a123618432bbfac353c529",
+ expand=False,
+ )
+ version(
+ "50.3.2",
+ sha256="2c242a0856fbad7efbe560df4a7add9324f340cf48df43651e9604924466794a",
+ expand=False,
+ )
+ version(
+ "50.1.0",
+ sha256="4537c77e6e7dc170081f8547564551d4ff4e4999717434e1257600bbd3a23296",
+ expand=False,
+ )
+ version(
+ "49.6.0",
+ sha256="4dd5bb0a0a0cff77b46ca5dd3a84857ee48c83e8223886b556613c724994073f",
+ expand=False,
+ )
+ version(
+ "49.2.0",
+ sha256="272c7f48f5cddc5af5901f4265274c421c7eede5c8bc454ac2903d3f8fc365e9",
+ expand=False,
+ )
+ version(
+ "46.1.3",
+ sha256="4fe404eec2738c20ab5841fa2d791902d2a645f32318a7850ef26f8d7215a8ee",
+ expand=False,
+ )
+ version(
+ "44.1.1",
+ sha256="27a714c09253134e60a6fa68130f78c7037e5562c4f21f8f318f2ae900d152d5",
+ expand=False,
+ )
+ version(
+ "44.1.0",
+ sha256="992728077ca19db6598072414fb83e0a284aca1253aaf2e24bb1e55ee6db1a30",
+ expand=False,
+ )
+ version(
+ "43.0.0",
+ sha256="a67faa51519ef28cd8261aff0e221b6e4c370f8fb8bada8aa3e7ad8945199963",
+ expand=False,
+ )
+ version(
+ "41.4.0",
+ sha256="8d01f7ee4191d9fdcd9cc5796f75199deccb25b154eba82d44d6a042cf873670",
+ expand=False,
+ )
+ version(
+ "41.3.0",
+ sha256="e9832acd9be6f3174f4c34b40e7d913a146727920cbef6465c1c1bd2d21a4ec4",
+ expand=False,
+ )
+ version(
+ "41.0.1",
+ sha256="c7769ce668c7a333d84e17fe8b524b1c45e7ee9f7908ad0a73e1eda7e6a5aebf",
+ expand=False,
+ )
+ version(
+ "41.0.0",
+ sha256="e67486071cd5cdeba783bd0b64f5f30784ff855b35071c8670551fd7fc52d4a1",
+ expand=False,
+ )
+ version(
+ "40.8.0",
+ sha256="e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab",
+ expand=False,
+ )
+ version(
+ "40.4.3",
+ sha256="ce4137d58b444bac11a31d4e0c1805c69d89e8ed4e91fde1999674ecc2f6f9ff",
+ expand=False,
+ )
+ version(
+ "40.2.0",
+ sha256="ea3796a48a207b46ea36a9d26de4d0cc87c953a683a7b314ea65d666930ea8e6",
+ expand=False,
+ )
+ version(
+ "39.2.0",
+ sha256="8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926",
+ expand=False,
+ )
+ version(
+ "39.0.1",
+ sha256="8010754433e3211b9cdbbf784b50f30e80bf40fc6b05eb5f865fab83300599b8",
+ expand=False,
+ )
+ version(
+ "25.2.0",
+ sha256="2845247c359bb91097ccf8f6be8a69edfa44847f3d2d5def39aa43c3d7f615ca",
+ expand=False,
+ )
+ version(
+ "20.7.0",
+ sha256="8917a52aa3a389893221b173a89dae0471022d32bff3ebc31a1072988aa8039d",
+ expand=False,
+ )
+ version(
+ "20.6.7",
+ sha256="9982ee4d279a2541dc1a7efee994ff9c535cfc05315e121e09df7f93da48c442",
+ expand=False,
+ )
extends("python")
@@ -69,10 +200,6 @@ def url_for_version(self, version):
depends_on("python@2.7:2.8,3.5:", when="@44", type=("build", "run"))
depends_on("python@2.7:2.8,3.4:", when="@:43", type=("build", "run"))
- # Newer pip requires setuptools to be installed, before building
- # setuptools. This issue was fixed or worked around in setuptools 54+
- depends_on("py-pip@:18", when="@:53", type="build")
-
# Uses HTMLParser.unescape
depends_on("python@:3.8", when="@:41.0", type=("build", "run"))
@@ -81,3 +208,25 @@ def url_for_version(self, version):
# https://github.com/pypa/setuptools/issues/3661
depends_on("python@:3.11", when="@:67", type=("build", "run"))
+
+ depends_on("py-pip", type="build")
+
+ def url_for_version(self, version):
+ url = "https://files.pythonhosted.org/packages/{0}/s/setuptools/setuptools-{1}-{0}-none-any.whl"
+
+ if version >= Version("45.1.0"):
+ python_tag = "py3"
+ else:
+ python_tag = "py2.py3"
+ return url.format(python_tag, version)
+
+ def install(self, spec, prefix):
+ # When setuptools changes its entry point we might get weird
+ # incompatibilities if building from sources in a non-isolated environment.
+ #
+ # https://github.com/pypa/setuptools/issues/980#issuecomment-1154471423
+ #
+ # We work around this issue by installing setuptools from wheels
+ whl = self.stage.archive_file
+ args = ["-m", "pip"] + std_pip_args + ["--prefix=" + prefix, whl]
+ python(*args)
diff --git a/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch b/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch
deleted file mode 100644
index 6b37d623234a53..00000000000000
--- a/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py
---- a/setuptools/_distutils/unixccompiler.py
-+++ b/setuptools/_distutils/unixccompiler.py
-@@ -257,7 +257,7 @@ class UnixCCompiler(CCompiler):
- # No idea how --enable-new-dtags would be passed on to
- # ld if this system was using GNU ld. Don't know if a
- # system like this even exists.
-- return "-R" + dir
-+ return "-Wl,-rpath," + dir
-
- def library_option(self, lib):
- return "-l" + lib
-
diff --git a/var/spack/repos/builtin/packages/py-slepc4py/package.py b/var/spack/repos/builtin/packages/py-slepc4py/package.py
index 9bf3413fceba4e..cf9637161cc99d 100644
--- a/var/spack/repos/builtin/packages/py-slepc4py/package.py
+++ b/var/spack/repos/builtin/packages/py-slepc4py/package.py
@@ -16,6 +16,7 @@ class PySlepc4py(PythonPackage):
maintainers("joseeroman", "balay")
version("main", branch="main")
+ version("3.20.1", sha256="7e6d156f7b0891bfa0616b38a502460c62797f16ca146b321e16cce4cf139d07")
version("3.20.0", sha256="56cbea1f56746136e5a934bf4a481e566f35e475cb950c0a5bce7d5c3cc7690a")
version("3.19.2", sha256="da8b6a7aaaf5e4497b896b2e478c42dd9de4fb31da93eb294181bea3bb60c767")
version("3.19.1", sha256="68303f4acef8efc0542ab288a19159d0e6cdf313726f573e0bea2edb3d2c9595")
diff --git a/var/spack/repos/builtin/packages/py-spython/package.py b/var/spack/repos/builtin/packages/py-spython/package.py
index d3c49ac9adb32f..41c5375563edf8 100644
--- a/var/spack/repos/builtin/packages/py-spython/package.py
+++ b/var/spack/repos/builtin/packages/py-spython/package.py
@@ -13,6 +13,7 @@ class PySpython(PythonPackage):
homepage = "https://github.com/singularityhub/singularity-cli"
pypi = "spython/spython-0.2.14.tar.gz"
+ version("0.3.1", sha256="143557849d636697ddd80e0ba95920efe4668351f5becce6bdc73a7651aa128d")
version("0.2.14", sha256="49e22fbbdebe456b27ca17d30061489db8e0f95e62be3623267a23b85e3ce0f0")
variant(
@@ -27,5 +28,4 @@ class PySpython(PythonPackage):
depends_on("singularity@3.5.2:", when="runtime=singularity", type="run")
depends_on("py-setuptools", type="build")
-
- depends_on("py-semver@2.8.1:", type=("build", "run"))
+ depends_on("py-semver@2.8.1:", when="@:0.2", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py b/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py
index cfdd03d289b55d..08e2eb92b3dea9 100644
--- a/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py
+++ b/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py
@@ -12,8 +12,11 @@ class PySqlalchemyUtils(PythonPackage):
homepage = "https://github.com/kvesteri/sqlalchemy-utils"
pypi = "sqlalchemy-utils/SQLAlchemy-Utils-0.36.8.tar.gz"
+ version("0.41.1", sha256="a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74")
version("0.36.8", sha256="fb66e9956e41340011b70b80f898fde6064ec1817af77199ee21ace71d7d6ab0")
depends_on("py-setuptools", type="build")
- depends_on("py-six", type=("build", "run"))
- depends_on("py-sqlalchemy@1.0:", type=("build", "run"))
+ depends_on("py-six", type=("build", "run"), when="@0.36.8")
+ depends_on("py-sqlalchemy@1.0:", type=("build", "run"), when="@0.36.8")
+ depends_on("py-sqlalchemy@1.3:", type=("build", "run"), when="@0.41.1")
+ depends_on("py-importlib-metadata", type=("build", "run"), when="@0.41.1 ^python@:3.7")
diff --git a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py
index de5d823e6d3755..14bd1d37eee090 100644
--- a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py
+++ b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py
@@ -14,6 +14,7 @@ class PySqlalchemy(PythonPackage):
git = "https://github.com/sqlalchemy/sqlalchemy.git"
version("2.0.19", sha256="77a14fa20264af73ddcdb1e2b9c5a829b8cc6b8304d0f093271980e36c200a3f")
+ version("1.4.49", sha256="06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9")
version("1.4.45", sha256="fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795")
version("1.4.44", sha256="2dda5f96719ae89b3ec0f1b79698d86eb9aecb1d54e990abb3fdd92c04b46a90")
version("1.4.25", sha256="1adf3d25e2e33afbcd48cfad8076f9378793be43e7fec3e4334306cac6bec138")
diff --git a/var/spack/repos/builtin/packages/py-stashcp/package.py b/var/spack/repos/builtin/packages/py-stashcp/package.py
new file mode 100644
index 00000000000000..7120260cb18fac
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-stashcp/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyStashcp(PythonPackage):
+ """Stashcp uses geolocated nearby caches in order to copy from the OSG
+ Connect's stash storage service to a job's workspace on a cluster."""
+
+ homepage = "https://github.com/opensciencegrid/StashCache"
+ pypi = "stashcp/stashcp-6.1.0.tar.gz"
+
+ maintainers("wdconinc")
+
+ version("6.1.0", sha256="40484b40aeb853eb6a5f5472daf533a176d61fa6ab839cd265ea0baa3fe63068")
+
+ depends_on("py-setuptools", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py
index 1ad767902d9175..8189fa0c49cff8 100644
--- a/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py
+++ b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py
@@ -29,5 +29,5 @@ class PyTensorflowDatasets(PythonPackage):
depends_on("py-tensorflow-metadata", type=("build", "run"))
depends_on("py-termcolor", type=("build", "run"))
depends_on("py-tqdm", type=("build", "run"))
- depends_on("py-typing-extensions", type=("build", "run"), when="python@:3.7")
- depends_on("py-importlib-resources", type=("build", "run"), when="python@:3.8")
+ depends_on("py-typing-extensions", type=("build", "run"), when="^python@:3.7")
+ depends_on("py-importlib-resources", type=("build", "run"), when="^python@:3.8")
diff --git a/var/spack/repos/builtin/packages/py-tokenizers/package.py b/var/spack/repos/builtin/packages/py-tokenizers/package.py
index 5555fcdb087e4c..117239fe9afd0c 100644
--- a/var/spack/repos/builtin/packages/py-tokenizers/package.py
+++ b/var/spack/repos/builtin/packages/py-tokenizers/package.py
@@ -13,23 +13,35 @@ class PyTokenizers(PythonPackage):
homepage = "https://github.com/huggingface/tokenizers"
pypi = "tokenizers/tokenizers-0.6.0.tar.gz"
+ version("0.15.0", sha256="10c7e6e7b4cabd757da59e93f5f8d1126291d16f8b54f28510825ef56a3e5d0e")
version("0.13.3", sha256="2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e")
version("0.13.1", sha256="3333d1cee5c8f47c96362ea0abc1f81c77c9b92c6c3d11cbf1d01985f0d5cf1d")
version("0.10.3", sha256="1a5d3b596c6d3a237e1ad7f46c472d467b0246be7fd1a364f12576eb8db8f7e6")
- version("0.6.0", sha256="1da11fbfb4f73be695bed0d655576097d09a137a16dceab2f66399716afaffac")
- version("0.5.2", sha256="b5a235f9c71d04d4925df6c4fa13b13f1d03f9b7ac302b89f8120790c4f742bc")
-
- depends_on("py-setuptools", type="build")
- depends_on("py-setuptools-rust", type="build")
-
- # A nightly or dev version of rust is required to build older versions.
- # https://github.com/huggingface/tokenizers/issues/176
- # https://github.com/PyO3/pyo3/issues/5
- depends_on("rust@nightly", when="@:0.10", type="build")
+ version(
+ "0.6.0",
+ sha256="1da11fbfb4f73be695bed0d655576097d09a137a16dceab2f66399716afaffac",
+ deprecated=True,
+ )
+ version(
+ "0.5.2",
+ sha256="b5a235f9c71d04d4925df6c4fa13b13f1d03f9b7ac302b89f8120790c4f742bc",
+ deprecated=True,
+ )
# TODO: This package currently requires internet access to install.
+ depends_on("py-maturin@1", when="@0.14:", type="build")
+ depends_on("py-huggingface-hub@0.16.4:0", when="@0.15:", type=("build", "run"))
# cargo resolves dependencies, which includes openssl-sys somewhere, which needs
# system pkgconfig and openssl.
depends_on("pkgconfig", type="build")
depends_on("openssl")
+
+ # Historical dependencies
+ depends_on("py-setuptools", when="@:0.13", type="build")
+ depends_on("py-setuptools-rust", when="@:0.13", type="build")
+
+ # A nightly or dev version of rust is required to build older versions.
+ # https://github.com/huggingface/tokenizers/issues/176
+ # https://github.com/PyO3/pyo3/issues/5
+ depends_on("rust@nightly", when="@:0.8", type="build")
diff --git a/var/spack/repos/builtin/packages/py-tomopy/package.py b/var/spack/repos/builtin/packages/py-tomopy/package.py
index b99e60ef91cb3e..59a1c0f1b32d49 100644
--- a/var/spack/repos/builtin/packages/py-tomopy/package.py
+++ b/var/spack/repos/builtin/packages/py-tomopy/package.py
@@ -34,7 +34,7 @@ class PyTomopy(PythonPackage):
# Note: The module name of py-scikit-build is skbuild:
depends_on("py-scikit-build", type=("build"))
depends_on("py-scikit-image@0.17:", type=("build", "run"))
- depends_on("py-numpy+blas", type=("build", "run"))
+ depends_on("py-numpy", type=("build", "run"))
depends_on("py-pyfftw", type=("build", "run"), when="@1.0:1.9")
depends_on("py-scipy", type=("build", "run"))
depends_on("py-setuptools", type="build")
diff --git a/var/spack/repos/builtin/packages/py-torch-cluster/package.py b/var/spack/repos/builtin/packages/py-torch-cluster/package.py
index 13f59512b7faa7..69bb2161e0dfe3 100644
--- a/var/spack/repos/builtin/packages/py-torch-cluster/package.py
+++ b/var/spack/repos/builtin/packages/py-torch-cluster/package.py
@@ -7,24 +7,29 @@
class PyTorchCluster(PythonPackage):
- """This package consists of a small extension library of
- highly optimized graph cluster algorithms for the use in
- PyTorch."""
+ """This package consists of a small extension library of highly optimized graph cluster
+ algorithms for the use in PyTorch.
+ """
homepage = "https://github.com/rusty1s/pytorch_cluster"
url = "https://github.com/rusty1s/pytorch_cluster/archive/1.5.7.tar.gz"
+ version("1.6.3", sha256="0e2b08095e03cf87ce9b23b7a7352236a25d3ed92d92351dc020fd927ea8dbfe")
version("1.5.8", sha256="95c6e81e9c4a6235e1b2152ab917021d2060ad995199f6bd7fb39986d37310f0")
version("1.5.7", sha256="71701d2f7f3e458ebe5904c982951349fdb60e6f1654e19c7e102a226e2de72e")
variant("cuda", default=False, description="Enables CUDA support")
- depends_on("python@3.6:", type=("build", "run"))
depends_on("py-setuptools", type="build")
- depends_on("py-pytest-runner", type="build")
depends_on("py-scipy", type=("build", "run"))
- depends_on("py-torch+cuda", when="+cuda")
- depends_on("py-torch~cuda", when="~cuda")
+ depends_on("py-torch+cuda", when="+cuda", type=("build", "link", "run"))
+ depends_on("py-torch~cuda", when="~cuda", type=("build", "link", "run"))
+
+ # https://github.com/rusty1s/pytorch_cluster/issues/120
+ depends_on("py-torch~openmp", when="@:1.5 %apple-clang", type=("build", "link", "run"))
+
+ # Historical dependencies
+ depends_on("py-pytest-runner", when="@:1.5", type="build")
def setup_build_environment(self, env):
if "+cuda" in self.spec:
diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py
index b27e296e12c181..e73113d510579b 100644
--- a/var/spack/repos/builtin/packages/py-torch/package.py
+++ b/var/spack/repos/builtin/packages/py-torch/package.py
@@ -25,6 +25,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
version("main", branch="main")
version("master", branch="main", deprecated=True)
+ version("2.1.1", tag="v2.1.1", commit="4c55dc50355d5e923642c59ad2a23d6ad54711e7")
version("2.1.0", tag="v2.1.0", commit="7bcf7da3a268b435777fe87c7794c382f444e86d")
version("2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5")
version("2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e")
@@ -103,7 +104,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
"breakpad",
default=True,
description="Enable breakpad crash dump library",
- when="@1.9:1.11",
+ when="@1.10:1.11",
)
conflicts("+cuda+rocm")
@@ -114,9 +115,6 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
# https://github.com/pytorch/pytorch/issues/77811
conflicts("+qnnpack", when="platform=darwin target=aarch64:")
- # https://github.com/pytorch/pytorch/issues/80805
- conflicts("+openmp", when="platform=darwin target=aarch64:")
-
# https://github.com/pytorch/pytorch/issues/97397
conflicts(
"~tensorpipe",
@@ -194,12 +192,9 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
# depends_on("cpuinfo@2022-08-19", when="@1.13:2.0")
# depends_on("cpuinfo@2020-12-17", when="@1.8:1.12")
# depends_on("cpuinfo@2020-06-11", when="@1.6:1.7")
- # https://github.com/shibatch/sleef/issues/427
- # depends_on("sleef@3.5.1_2020-12-22", when="@1.8:")
- # https://github.com/pytorch/pytorch/issues/60334
- # depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7")
- # https://github.com/Maratyszcza/FP16/issues/18
- # depends_on("fp16@2020-05-14", when="@1.6:")
+ depends_on("sleef@3.5.1_2020-12-22", when="@1.8:")
+ depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7")
+ depends_on("fp16@2020-05-14", when="@1.6:")
depends_on("pthreadpool@2021-04-13", when="@1.9:")
depends_on("pthreadpool@2020-10-05", when="@1.8")
depends_on("pthreadpool@2020-06-15", when="@1.6:1.7")
@@ -247,14 +242,14 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
# depends_on("xnnpack@2021-02-22", when="@1.8:1.9+xnnpack")
# depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack")
depends_on("mpi", when="+mpi")
- # https://github.com/pytorch/pytorch/issues/60270
- # depends_on("gloo@2023-05-19", when="@2.1:+gloo")
- # depends_on("gloo@2023-01-17", when="@2.0+gloo")
- # depends_on("gloo@2022-05-18", when="@1.13:1+gloo")
- # depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo")
- # depends_on("gloo@2021-05-04", when="@1.9+gloo")
- # depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo")
- # depends_on("gloo@2020-03-17", when="@1.6+gloo")
+ depends_on("gloo@2023-05-19", when="@2.1:+gloo")
+ depends_on("gloo@2023-01-17", when="@2.0+gloo")
+ depends_on("gloo@2022-05-18", when="@1.13:1+gloo")
+ depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo")
+ depends_on("gloo@2021-05-04", when="@1.9+gloo")
+ depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo")
+ depends_on("gloo@2020-03-17", when="@1.6+gloo")
+ depends_on("gloo+cuda", when="@1.6:+gloo+cuda")
# https://github.com/pytorch/pytorch/issues/60331
# depends_on("onnx@1.14.1", when="@2.1:+onnx_ml")
# depends_on("onnx@1.13.1", when="@2.0+onnx_ml")
@@ -271,6 +266,13 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
depends_on("py-six", type="test")
depends_on("py-psutil", type="test")
+ # https://github.com/pytorch/pytorch/issues/90448
+ patch(
+ "https://github.com/pytorch/pytorch/pull/97270.patch?full_index=1",
+ sha256="beb3fb57746cf8443f5caa6e08b2f8f4d4822c1e11e0c912134bd166c6a0ade7",
+ when="@1.10:2.0",
+ )
+
# Fix BLAS being overridden by MKL
# https://github.com/pytorch/pytorch/issues/60328
patch(
@@ -286,6 +288,14 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
when="@1.1:1.8.1",
)
+ # https://github.com/pytorch/pytorch/issues/70297
+ patch(
+ "https://github.com/google/breakpad/commit/605c51ed96ad44b34c457bbca320e74e194c317e.patch?full_index=1",
+ sha256="694d83db3a2147d543357f22ba5c8d5683d0ed43e693d42bca8f24ec50080f98",
+ when="+breakpad",
+ working_dir="third_party/breakpad",
+ )
+
# Fixes CMake configuration error when XNNPACK is disabled
# https://github.com/pytorch/pytorch/pull/35607
# https://github.com/pytorch/pytorch/pull/37865
@@ -294,11 +304,6 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
# Fixes build error when ROCm is enabled for pytorch-1.5 release
patch("rocm.patch", when="@1.5+rocm")
- # Fixes fatal error: sleef.h: No such file or directory
- # https://github.com/pytorch/pytorch/pull/35359
- # https://github.com/pytorch/pytorch/issues/26555
- # patch("sleef.patch", when="@:1.5")
-
# Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3
# https://github.com/pytorch/pytorch/pull/37086
patch(
@@ -489,9 +494,8 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False):
enable_or_disable("cuda")
if "+cuda" in self.spec:
- # cmake/public/cuda.cmake
- # cmake/Modules_CUDA_fix/upstream/FindCUDA.cmake
- env.unset("CUDA_ROOT")
+ env.set("CUDA_HOME", self.spec["cuda"].prefix) # Linux/macOS
+ env.set("CUDA_PATH", self.spec["cuda"].prefix) # Windows
torch_cuda_arch = ";".join(
"{0:.1f}".format(float(i) / 10.0) for i in self.spec.variants["cuda_arch"].value
)
@@ -619,17 +623,13 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False):
env.set("pybind11_INCLUDE_DIR", self.spec["py-pybind11"].prefix.include)
if self.spec.satisfies("@1.10:"):
env.set("USE_SYSTEM_PYBIND11", "ON")
- # https://github.com/pytorch/pytorch/issues/60334
- # if self.spec.satisfies("@1.8:"):
- # env.set("USE_SYSTEM_SLEEF", "ON")
if self.spec.satisfies("@1.6:"):
# env.set("USE_SYSTEM_LIBS", "ON")
# https://github.com/pytorch/pytorch/issues/60329
# env.set("USE_SYSTEM_CPUINFO", "ON")
- # https://github.com/pytorch/pytorch/issues/60270
- # env.set("USE_SYSTEM_GLOO", "ON")
- # https://github.com/Maratyszcza/FP16/issues/18
- # env.set("USE_SYSTEM_FP16", "ON")
+ env.set("USE_SYSTEM_SLEEF", "ON")
+ env.set("USE_SYSTEM_GLOO", "ON")
+ env.set("USE_SYSTEM_FP16", "ON")
env.set("USE_SYSTEM_PTHREADPOOL", "ON")
env.set("USE_SYSTEM_PSIMD", "ON")
env.set("USE_SYSTEM_FXDIV", "ON")
diff --git a/var/spack/repos/builtin/packages/py-torch/sleef.patch b/var/spack/repos/builtin/packages/py-torch/sleef.patch
deleted file mode 100644
index 67f0234162d1a1..00000000000000
--- a/var/spack/repos/builtin/packages/py-torch/sleef.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/caffe2/CMakeLists.txt b/caffe2/CMakeLists.txt
-index 8025a7de3c..2e5cdbb5c9 100644
---- a/caffe2/CMakeLists.txt
-+++ b/caffe2/CMakeLists.txt
-@@ -1232,6 +1232,7 @@ if (BUILD_TEST)
- add_executable(${test_name} "${test_src}")
- target_link_libraries(${test_name} ${Caffe2_MAIN_LIBS} gtest_main)
- target_include_directories(${test_name} PRIVATE $)
-+ target_include_directories(${test_name} PRIVATE $)
- target_include_directories(${test_name} PRIVATE ${Caffe2_CPU_INCLUDE})
- add_test(NAME ${test_name} COMMAND $)
- if (INSTALL_TEST)
diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py b/var/spack/repos/builtin/packages/py-torchaudio/package.py
index d07ce1de2182c7..94bfd97e3e487c 100644
--- a/var/spack/repos/builtin/packages/py-torchaudio/package.py
+++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py
@@ -15,6 +15,7 @@ class PyTorchaudio(PythonPackage):
submodules = True
version("main", branch="main")
+ version("2.1.1", tag="v2.1.1", commit="db624844f5c95bb7618fe5a5f532bf9b68efeb45")
version("2.1.0", tag="v2.1.0", commit="6ea1133706801ec6e81bb29142da2e21a8583a0a")
version("2.0.2", tag="v2.0.2", commit="31de77dad5c89274451b3f5c4bcb630be12787c4")
version("2.0.1", tag="v2.0.1", commit="3b40834aca41957002dfe074175e900cf8906237")
@@ -56,6 +57,7 @@ class PyTorchaudio(PythonPackage):
depends_on("sox")
depends_on("py-torch@main", when="@main", type=("build", "link", "run"))
+ depends_on("py-torch@2.1.1", when="@2.1.1", type=("build", "link", "run"))
depends_on("py-torch@2.1.0", when="@2.1.0", type=("build", "link", "run"))
depends_on("py-torch@2.0.1", when="@2.0.2", type=("build", "link", "run"))
depends_on("py-torch@2.0.0", when="@2.0.1", type=("build", "link", "run"))
diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py b/var/spack/repos/builtin/packages/py-torchdata/package.py
index fd9367f31d0179..5c1eedcfb64c78 100644
--- a/var/spack/repos/builtin/packages/py-torchdata/package.py
+++ b/var/spack/repos/builtin/packages/py-torchdata/package.py
@@ -16,6 +16,7 @@ class PyTorchdata(PythonPackage):
maintainers("adamjstewart")
version("main", branch="main")
+ version("0.7.1", sha256="1b6589336776ccba19fd3bf435588416105d372f6b85d58a9f2b008286f483bf")
version("0.7.0", sha256="0b444719c3abc67201ed0fea92ea9c4100e7f36551ba0d19a09446cc11154eb3")
version("0.6.1", sha256="c596db251c5e6550db3f00e4308ee7112585cca4d6a1c82a433478fd86693257")
version("0.6.0", sha256="048dea12ee96c0ea1525097959fee811d7b38c2ed05f44a90f35f8961895fb5b")
@@ -38,6 +39,7 @@ class PyTorchdata(PythonPackage):
# https://github.com/pytorch/data#version-compatibility
depends_on("py-torch@main", when="@main", type=("build", "run"))
+ depends_on("py-torch@2.1.1", when="@0.7.1", type=("build", "run"))
depends_on("py-torch@2.1.0", when="@0.7.0", type=("build", "run"))
depends_on("py-torch@2.0.1", when="@0.6.1", type=("build", "run"))
depends_on("py-torch@2.0.0", when="@0.6.0", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-torchgeo/package.py b/var/spack/repos/builtin/packages/py-torchgeo/package.py
index f5ef2ddc4bc783..b9069126fe0f53 100644
--- a/var/spack/repos/builtin/packages/py-torchgeo/package.py
+++ b/var/spack/repos/builtin/packages/py-torchgeo/package.py
@@ -16,6 +16,7 @@ class PyTorchgeo(PythonPackage):
maintainers("adamjstewart", "calebrob6")
version("main", branch="main")
+ version("0.5.1", sha256="5f86a34d18fe36eeb9146b057b21e5356252ef8ab6a9db33feebb120a01feff8")
version("0.5.0", sha256="2bc2f9c4a19a569790cb3396499fdec17496632b0e52b86be390a2cc7a1a7033")
version("0.4.1", sha256="a3692436bf63df8d2f9b76d16eea5ee309dd1bd74e0fde6e64456abfdb2a5b58")
version("0.4.0", sha256="a0812487205aa2db7bc92119d896ae4bf4f1014e6fdc0ce0f75bcb24fada6613")
diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py
index 180b555069bc5e..6457a832a7866a 100644
--- a/var/spack/repos/builtin/packages/py-torchtext/package.py
+++ b/var/spack/repos/builtin/packages/py-torchtext/package.py
@@ -17,6 +17,7 @@ class PyTorchtext(PythonPackage):
maintainers("adamjstewart")
version("main", branch="main")
+ version("0.16.1", tag="v0.16.1", commit="66671007c84e07386da3c04e5ca403b8a417c8e5")
version("0.16.0", tag="v0.16.0", commit="4e255c95c76b1ccde4f6650391c0bc30650d6dbe")
version("0.15.2", tag="v0.15.2", commit="4571036cf66c539e50625218aeb99a288d79f3e1")
version("0.15.1", tag="v0.15.1", commit="c696895e524c61fd2b8b26916dd006411c5f3ba5")
@@ -58,6 +59,7 @@ class PyTorchtext(PythonPackage):
# https://github.com/pytorch/text#installation
depends_on("py-torch@main", when="@main", type=("build", "link", "run"))
+ depends_on("py-torch@2.1.1", when="@0.16.1", type=("build", "link", "run"))
depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run"))
depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run"))
depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run"))
diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py
index 5aef4c6aef8a29..2ed70ca4ee9984 100644
--- a/var/spack/repos/builtin/packages/py-torchvision/package.py
+++ b/var/spack/repos/builtin/packages/py-torchvision/package.py
@@ -17,6 +17,7 @@ class PyTorchvision(PythonPackage):
maintainers("adamjstewart")
version("main", branch="main")
+ version("0.16.1", sha256="d31fe52e4540750c8d372b0f38f1bfa81d8261193f2c2c06577332831d203c50")
version("0.16.0", sha256="79b30b082237e3ead21e74587cedf4a4d832f977cf7dfeccfb65f67988b12ceb")
version("0.15.2", sha256="1efcb80e0a6e42c54f07ee16167839b4d302aeeecc12839cc47c74b06a2c20d4")
version("0.15.1", sha256="689d23d4ebb0c7e54e8651c89b17155b64341c14ae4444a04ca7dc6f2b6a0a43")
@@ -62,6 +63,7 @@ class PyTorchvision(PythonPackage):
# https://github.com/pytorch/vision#installation
depends_on("py-torch@main", when="@main", type=("build", "link", "run"))
+ depends_on("py-torch@2.1.1", when="@0.16.1", type=("build", "link", "run"))
depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run"))
depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run"))
depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run"))
diff --git a/var/spack/repos/builtin/packages/py-transformers/package.py b/var/spack/repos/builtin/packages/py-transformers/package.py
index 5380f44a78684d..7f4e8306c6db8f 100644
--- a/var/spack/repos/builtin/packages/py-transformers/package.py
+++ b/var/spack/repos/builtin/packages/py-transformers/package.py
@@ -16,17 +16,15 @@ class PyTransformers(PythonPackage):
maintainers("adamjstewart")
+ version("4.35.2", sha256="2d125e197d77b0cdb6c9201df9fa7e2101493272e448b9fba9341c695bee2f52")
version("4.31.0", sha256="4302fba920a1c24d3a429a29efff6a63eac03f3f3cf55b55927fc795d01cb273")
version("4.24.0", sha256="486f353a8e594002e48be0e2aba723d96eda839e63bfe274702a4b5eda85559b")
version("4.6.1", sha256="83dbff763b7e7dc57cbef1a6b849655d4fcab6bffdd955c5e8bea12a4f76dc10")
version("2.8.0", sha256="b9f29cdfd39c28f29e0806c321270dea337d6174a7aa60daf9625bf83dbb12ee")
- depends_on("python@3.8:", when="@4.31:", type=("build", "run"))
- depends_on("python@3.7:", when="@4.24:", type=("build", "run"))
- depends_on("python@3.6:", type=("build", "run"))
depends_on("py-setuptools", type="build")
- depends_on("py-importlib-metadata", when="@4.6: ^python@:3.7", type=("build", "run"))
depends_on("py-filelock", type=("build", "run"))
+ depends_on("py-huggingface-hub@0.16.4:0", when="@4.34:", type=("build", "run"))
depends_on("py-huggingface-hub@0.14.1:0", when="@4.26:", type=("build", "run"))
depends_on("py-huggingface-hub@0.10:0", when="@4.24:", type=("build", "run"))
depends_on("py-huggingface-hub@0.0.8", when="@4.6.1", type=("build", "run"))
@@ -38,7 +36,8 @@ class PyTransformers(PythonPackage):
depends_on("py-regex@:2019.12.16,2019.12.18:", type=("build", "run"))
depends_on("py-requests", type=("build", "run"))
depends_on("py-safetensors@0.3.1:", when="@4.31:", type=("build", "run"))
- depends_on("py-tokenizers@0.11.1:0.11.2,0.11.4:0.13", when="@4.24:", type=("build", "run"))
+ depends_on("py-tokenizers@0.14:0.18", when="@4.35:", type=("build", "run"))
+ depends_on("py-tokenizers@0.11.1:0.11.2,0.11.4:0.13", when="@4.24:4.33", type=("build", "run"))
depends_on("py-tokenizers@0.10.1:0.10", when="@4.6.1", type=("build", "run"))
depends_on("py-tokenizers@0.5.2", when="@2.8.0", type=("build", "run"))
depends_on("py-tqdm@4.27:", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-vermin/package.py b/var/spack/repos/builtin/packages/py-vermin/package.py
index 5fabf334b6b7da..643fa2895fcb7c 100644
--- a/var/spack/repos/builtin/packages/py-vermin/package.py
+++ b/var/spack/repos/builtin/packages/py-vermin/package.py
@@ -11,10 +11,11 @@ class PyVermin(PythonPackage):
"""Concurrently detect the minimum Python versions needed to run code."""
homepage = "https://github.com/netromdk/vermin"
- url = "https://github.com/netromdk/vermin/archive/v1.5.2.tar.gz"
+ url = "https://github.com/netromdk/vermin/archive/v1.6.0.tar.gz"
maintainers("netromdk")
+ version("1.6.0", sha256="31200b1e674e064c7473484372db2743f5abbf1409d994880486bca5bcf05bec")
version("1.5.2", sha256="e4b6ca6f3e71b0d83a179dc4a4ba50682f60474cf8c948ba9f82e330f219ff4a")
version("1.5.1", sha256="2d1c7601d054da9fa5c5eb6c817c714235f9d484b74011f7f86c98f0a25e93ea")
version("1.5.0", sha256="77207385c9cea1f02053a8f2e7f2e8c945394cf37c44c70ce217cada077a2d17")
diff --git a/var/spack/repos/builtin/packages/py-xyzservices/package.py b/var/spack/repos/builtin/packages/py-xyzservices/package.py
new file mode 100644
index 00000000000000..c9760370bb9fc0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-xyzservices/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyXyzservices(PythonPackage):
+ """xyzservices is a lightweight library providing a repository of
+ available XYZ services offering raster basemap tiles."""
+
+ homepage = "https://github.com/geopandas/xyzservices"
+ pypi = "xyzservices/xyzservices-2023.10.1.tar.gz"
+
+ license("BSD-3-Clause")
+
+ version("2023.10.1", sha256="091229269043bc8258042edbedad4fcb44684b0473ede027b5672ad40dc9fa02")
+
+ depends_on("python@3.8:", type=("build", "run"))
+
+ depends_on("py-setuptools", type="build")
+ depends_on("py-setuptools-scm", type="build")
diff --git a/var/spack/repos/builtin/packages/pythia8/package.py b/var/spack/repos/builtin/packages/pythia8/package.py
index b65b7ac4d8e3f8..45a695a052600e 100644
--- a/var/spack/repos/builtin/packages/pythia8/package.py
+++ b/var/spack/repos/builtin/packages/pythia8/package.py
@@ -131,16 +131,16 @@ def configure_args(self):
args.append("--with-boost=" + self.spec["boost"].prefix)
if "+madgraph5amc" in self.spec:
- args += "--with-mg5mes=" + self.spec["madgraph5amc"].prefix
+ args.append("--with-mg5mes=" + self.spec["madgraph5amc"].prefix)
else:
- args += "--without-mg5mes"
+ args.append("--without-mg5mes")
args += self.with_or_without("hepmc3", activation_value="prefix")
if "+fastjet" in self.spec:
- args += "--with-fastjet3=" + self.spec["fastjet"].prefix
+ args.append("--with-fastjet3=" + self.spec["fastjet"].prefix)
else:
- args += "--without-fastjet3"
+ args.append("--without-fastjet3")
args += self.with_or_without("evtgen", activation_value="prefix")
args += self.with_or_without("root", activation_value="prefix")
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index c1fa5554205d4c..a4f3ca74d1db04 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -237,6 +237,7 @@ class Python(Package):
variant("crypt", default=True, description="Build crypt module", when="@:3.12 platform=cray")
if sys.platform != "win32":
+ depends_on("gmake", type="build")
depends_on("pkgconfig@0.9.0:", type="build")
depends_on("gettext +libxml2", when="+libxml2")
depends_on("iconv", when="~libxml2")
diff --git a/var/spack/repos/builtin/packages/q-e-sirius/package.py b/var/spack/repos/builtin/packages/q-e-sirius/package.py
index ec78ba5702b1a1..1605e4e37d5703 100644
--- a/var/spack/repos/builtin/packages/q-e-sirius/package.py
+++ b/var/spack/repos/builtin/packages/q-e-sirius/package.py
@@ -93,7 +93,7 @@ def cmake_args(self):
# Work around spack issue #19970 where spack sets
# rpaths for MKL just during make, but cmake removes
# them during make install.
- if "^mkl" in self.spec:
+ if self.spec["lapack"].name in INTEL_MATH_LIBRARIES:
args.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON")
spec = self.spec
args.append(self.define("BLAS_LIBRARIES", spec["blas"].libs.joined(";")))
diff --git a/var/spack/repos/builtin/packages/qgis/package.py b/var/spack/repos/builtin/packages/qgis/package.py
index 45bb05639910f2..8daf503c98b4c7 100644
--- a/var/spack/repos/builtin/packages/qgis/package.py
+++ b/var/spack/repos/builtin/packages/qgis/package.py
@@ -17,12 +17,14 @@ class Qgis(CMakePackage):
maintainers("adamjstewart", "Sinan81")
+ version("3.34.0", sha256="348a2df4c4520813a319b7f72546b3823e044cacd28646ba189b56a49c7d1b5f")
# Prefer latest long term release
version(
- "3.28.11",
- sha256="c5eb703893c7f98de051c45d677c4a34b40f986db51782a4930ddefad4e193b4",
+ "3.28.12",
+ sha256="d6d0ea39ed3433d553f8b83324dc14cfa90f8caaf766fa484791df9169800f25",
preferred=True,
)
+ version("3.28.11", sha256="c5eb703893c7f98de051c45d677c4a34b40f986db51782a4930ddefad4e193b4")
version("3.28.10", sha256="cff867e97909bbc2facce6343770dcb1b61fc6e4855f57783e30bf63d51c5218")
version("3.28.3", sha256="a09124f46465a520f6d735306ba3954c339b84aa396d6f52b476b82edcc4fe0e")
version("3.22.16", sha256="dbd1f8a639291bb2492eea61e4ef96079d7b27d3dfa538dab8cd98f31429254a")
@@ -63,6 +65,7 @@ class Qgis(CMakePackage):
"custom_widgets", default=False, description="Build QGIS custom widgets for Qt Designer"
)
variant("desktop", default=True, description="Build QGIS desktop")
+ # variant("draco", default=True, description="Build with DRACO support") #TODO
variant("georeferencer", default=True, description="Build GeoReferencer plugin")
variant("globe", default=False, description="Build Globe plugin")
variant("grass7", default=False, description="Build with GRASS providers and plugin")
@@ -77,6 +80,7 @@ class Qgis(CMakePackage):
)
variant("oauth2_plugin", default=True, description="Build OAuth2 authentication method plugin")
variant("oracle", default=False, description="Build with Oracle support")
+ # variant("pdal", default=False, description="Build with PDAL support") #TODO
variant("postgresql", default=True, description="Build with PostreSQL support")
variant(
"py_compile",
@@ -244,6 +248,7 @@ def cmake_args(self):
"TRUE" if "+custom_widgets" in spec else "FALSE"
),
"-DWITH_DESKTOP={0}".format("TRUE" if "+desktop" in spec else "FALSE"),
+ "-DWITH_DRACO={0}".format("TRUE" if "+draco" in spec else "FALSE"),
"-DWITH_GEOREFERENCER={0}".format("TRUE" if "+georeferencer" in spec else "FALSE"),
"-DWITH_GLOBE={0}".format("TRUE" if "+globe" in spec else "FALSE"),
"-DWITH_GUI={0}".format("TRUE" if "+gui" in spec else "FALSE"),
@@ -251,6 +256,7 @@ def cmake_args(self):
self.define_from_variant("WITH_INTERNAL_O2", "internal_o2"),
"-DWITH_OAUTH2_PLUGIN={0}".format("TRUE" if "+oauth2_plugin" in spec else "FALSE"),
"-DWITH_ORACLE={0}".format("TRUE" if "+oracle" in spec else "FALSE"),
+ "-DWITH_PDAL={0}".format("TRUE" if "+pdal" in spec else "FALSE"),
"-DWITH_POSTGRESQL={0}".format("TRUE" if "+postgresql" in spec else "FALSE"),
"-DWITH_PY_COMPILE={0}".format("TRUE" if "+py_compile" in spec else "FALSE"),
"-DWITH_QSCIAPI={0}".format("TRUE" if "+qsciapi" in spec else "FALSE"),
diff --git a/var/spack/repos/builtin/packages/qmcpack/package.py b/var/spack/repos/builtin/packages/qmcpack/package.py
index 65a02c1cf9dc8f..99612cedf9650b 100644
--- a/var/spack/repos/builtin/packages/qmcpack/package.py
+++ b/var/spack/repos/builtin/packages/qmcpack/package.py
@@ -376,7 +376,7 @@ def cmake_args(self):
# Next two environment variables were introduced in QMCPACK 3.5.0
# Prior to v3.5.0, these lines should be benign but CMake
# may issue a warning.
- if "^mkl" in spec:
+ if spec["lapack"].name in INTEL_MATH_LIBRARIES:
args.append("-DENABLE_MKL=1")
args.append("-DMKL_ROOT=%s" % env["MKLROOT"])
else:
diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py
index f3fcfc0eed7193..d0bda5fd9cd90a 100644
--- a/var/spack/repos/builtin/packages/qt-base/package.py
+++ b/var/spack/repos/builtin/packages/qt-base/package.py
@@ -33,8 +33,6 @@ def get_list_url(qualname):
maintainers("wdconinc", "sethrj")
- provides("qmake")
-
# Default dependencies for all qt-* components
generator("ninja")
depends_on("cmake@3.16:", type="build")
@@ -91,6 +89,11 @@ class QtBase(QtPackage):
url = QtPackage.get_url(__qualname__)
list_url = QtPackage.get_list_url(__qualname__)
+ provides("qmake")
+
+ version("6.6.1", sha256="eb091c56e8c572d35d3da36f94f9e228892d43aecb559fa4728a19f0e44914c4")
+ version("6.6.0", sha256="882f39ea3a40a0894cd64e515ce51711a4fab79b8c47bc0fe0279e99493a62cf")
+ version("6.5.3", sha256="174021c4a630df2e7e912c2e523844ad3cb5f90967614628fd8aa15ddbab8bc5")
version("6.5.2", sha256="221cafd400c0a992a42746b43ea879d23869232e56d9afe72cb191363267c674")
version("6.5.1", sha256="fdde60cdc5c899ab7165f1c3f7b93bc727c2484c348f367d155604f5d901bfb6")
version("6.5.0", sha256="7b0de20e177335927c55c58a3e1a7e269e32b044936e97e9a82564f0f3e69f99")
diff --git a/var/spack/repos/builtin/packages/qt-declarative/package.py b/var/spack/repos/builtin/packages/qt-declarative/package.py
index 390053188dcafa..805e7910714d01 100644
--- a/var/spack/repos/builtin/packages/qt-declarative/package.py
+++ b/var/spack/repos/builtin/packages/qt-declarative/package.py
@@ -14,6 +14,9 @@ class QtDeclarative(QtPackage):
url = QtPackage.get_url(__qualname__)
list_url = QtPackage.get_list_url(__qualname__)
+ version("6.6.1", sha256="b1f5a75c2ea967d21b2c45f56ba1de66e2bf14a581b2f0d8e776441f1bebd0e7")
+ version("6.6.0", sha256="2e52ef00736a9954426adf454cfb365fabdffb5703c814c188bc866cbf9f4dad")
+ version("6.5.3", sha256="563924e58ac517492acb1952af0fb950cd54045ef6d61b98de06fac728239811")
version("6.5.2", sha256="8b9eed849c90fb301d5399c545c2c926c18dc889d724df2b284253152a2ee139")
version("6.5.1", sha256="b6f81ee73e8dbc30601c022b30ceb592fd2f8a5a79e7bc48fcd7feef80e3cc7a")
version("6.5.0", sha256="38281cdfc60b8820ac2943eebabe968138f90629edc8c6c5e88a72a7ec05e303")
diff --git a/var/spack/repos/builtin/packages/qt-quick3d/package.py b/var/spack/repos/builtin/packages/qt-quick3d/package.py
index 15453659090e38..c5437ab3102050 100644
--- a/var/spack/repos/builtin/packages/qt-quick3d/package.py
+++ b/var/spack/repos/builtin/packages/qt-quick3d/package.py
@@ -14,6 +14,9 @@ class QtQuick3d(QtPackage):
url = QtPackage.get_url(__qualname__)
list_url = QtPackage.get_list_url(__qualname__)
+ version("6.6.1", sha256="57abc6e178d2b28cfac544c71cb20f362409267be5422ca3fbaa46a1bbfd5515")
+ version("6.6.0", sha256="2cda12649cfb6c23261c48e626714ca7eb01fa4b20e0bed02031f9c488c820ad")
+ version("6.5.3", sha256="5df7494824c44fc73c03348b218166db5c4d8d42bd7d221f15e58c962cf657e5")
version("6.5.2", sha256="7b40e578fc1ee2a5f5c413873fdb0552bb97829b70296ba3c6844da062608a7e")
version("6.5.1", sha256="2b4f65f6c616302b38656f287e9acdf5a9f0e220ef79eaa2e80946780898fa51")
version("6.5.0", sha256="eaf41f06450b2be50f16b39ec06c06d10dd337b7516aba1d95695b326fd9ef40")
diff --git a/var/spack/repos/builtin/packages/qt-quicktimeline/package.py b/var/spack/repos/builtin/packages/qt-quicktimeline/package.py
index 611057a0efc4be..7a8ef0ee7a7f97 100644
--- a/var/spack/repos/builtin/packages/qt-quicktimeline/package.py
+++ b/var/spack/repos/builtin/packages/qt-quicktimeline/package.py
@@ -14,6 +14,9 @@ class QtQuicktimeline(QtPackage):
url = QtPackage.get_url(__qualname__)
list_url = QtPackage.get_list_url(__qualname__)
+ version("6.6.1", sha256="fe77555566bd6bb0ef0cb67b6ad09e225399fba3d2ec388de84e8a6200c0e2fc")
+ version("6.6.0", sha256="06b94443da3f81153f04dca0cce781481462310d51f97d5550f81322a7a88cd0")
+ version("6.5.3", sha256="fddd90cdb15af093673c6da924e18e22ebd364b9ab215356e1b40db28ac66640")
version("6.5.2", sha256="96389af740fde3b2a655bf994001b94fd6e151ef84958ff9982e2ae799f1c3a2")
version("6.5.1", sha256="d7d845f877f9b990e63ab14c9152f18e290611e760719a9c22f7740b91bd2ed1")
version("6.5.0", sha256="ff862aad1aa4327c39c071ad1ca6eea6c64d4937521f9ed5d022a70cb3df92a7")
diff --git a/var/spack/repos/builtin/packages/qt-shadertools/package.py b/var/spack/repos/builtin/packages/qt-shadertools/package.py
index 866e0cb18b9d90..b865bf4b9f259f 100644
--- a/var/spack/repos/builtin/packages/qt-shadertools/package.py
+++ b/var/spack/repos/builtin/packages/qt-shadertools/package.py
@@ -16,6 +16,9 @@ class QtShadertools(QtPackage):
url = QtPackage.get_url(__qualname__)
list_url = QtPackage.get_list_url(__qualname__)
+ version("6.6.1", sha256="1206110464f8829e34ca7277bdcd2044e96a98078c9ab9f8b96ed526a4d81526")
+ version("6.6.0", sha256="8b34908f8bbc7fb00a00babede91dbbeec9826f5138d390041f239d483e1162a")
+ version("6.5.3", sha256="e6c627763db8c60799218947443efb90fb3511342f2212f5e99cd98f6942ed08")
version("6.5.2", sha256="2b14cf982753f19cf48a4780bc7d96d8fc0ad3ed1049ae5d3292fc5fc1fd6aef")
version("6.5.1", sha256="642bf97498d54b4471bf4cc227709c6b676dbd520765f82b0749a2b4ef833d25")
version("6.5.0", sha256="ef2c71fac111a837914b7dc2b46c26579ea50b05fbd60022d430da88bdb211cb")
diff --git a/var/spack/repos/builtin/packages/qt-svg/package.py b/var/spack/repos/builtin/packages/qt-svg/package.py
new file mode 100644
index 00000000000000..eb8b8036a37836
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qt-svg/package.py
@@ -0,0 +1,46 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+from spack.pkg.builtin.qt_base import QtBase, QtPackage
+
+
+class QtSvg(QtPackage):
+ """Scalable Vector Graphics (SVG) is an XML-based language for describing
+ two-dimensional vector graphics. Qt provides classes for rendering and
+ displaying SVG drawings in widgets and on other paint devices."""
+
+ url = QtPackage.get_url(__qualname__)
+ list_url = QtPackage.get_list_url(__qualname__)
+
+ version("6.6.1", sha256="b947acd83ac51116f29c7f7278d9faed19b8c11e021dbf08616e7d6200118db8")
+ version("6.6.0", sha256="4fd6b4d9307c3cd8fd207e60334823fed07a9acb32f7d53cd9c9be9b6a2f8a30")
+ version("6.5.3", sha256="fb8e5574c2480aab78062fad2d0a521633b4591ada600130b918b703c2ddb09a")
+ version("6.5.2", sha256="2d0c8780f164472ad968bb4eff325a86b2826f101efedbeca5662acdc0b294ba")
+ version("6.5.1", sha256="1b262f860c51bc5af5034d88e74bb5584ecdc661f4903c9ba27c8edad14fc403")
+ version("6.5.0", sha256="2f96e22858de18de02b05eb6bcc96fadb6d77f4dadd407e1fa4aebcceb6dd154")
+ version("6.4.3", sha256="3cc7479f7787a19e7af8923547dfc35b7b3fd658e3701577e76b2c1e4c1c0c23")
+ version("6.4.2", sha256="2f5fa08dbe6f3aea0c1c77acb74b6164dc069e15010103377186902b018fb623")
+ version("6.4.1", sha256="be6300292a6f38d85c13bb750890af268bd979fb18ab754f88d5332935215e47")
+ version("6.4.0", sha256="375eb69f320121e42d5dc107f9455008980c149646931b8ace19e6bc235dcd80")
+ version("6.3.2", sha256="781055bca458be46ef69f2fff147a00226e41f3a23d02c91238b0328a7156518")
+
+ variant("widgets", default=False, description="Build SVG widgets.")
+
+ depends_on("qt-base +gui")
+ depends_on("qt-base +widgets", when="+widgets")
+
+ for _v in QtBase.versions:
+ v = str(_v)
+ depends_on("qt-base@" + v, when="@" + v)
+
+ def cmake_args(self):
+ args = super().cmake_args() + []
+ return args
+
+ def setup_run_environment(self, env):
+ # make plugins from the SVG module available to qt-base, e.g. for icon loading
+ env.prepend_path("QT_PLUGIN_PATH", self.prefix.plugins)
diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py
index c678bb465fb199..2d91a1d99febdc 100644
--- a/var/spack/repos/builtin/packages/qt/package.py
+++ b/var/spack/repos/builtin/packages/qt/package.py
@@ -31,6 +31,7 @@ class Qt(Package):
phases = ["configure", "build", "install"]
+ version("5.15.11", sha256="7426b1eaab52ed169ce53804bdd05dfe364f761468f888a0f15a308dc1dc2951")
version("5.15.10", sha256="b545cb83c60934adc9a6bbd27e2af79e5013de77d46f5b9f5bb2a3c762bf55ca")
version("5.15.9", sha256="26d5f36134db03abe4a6db794c7570d729c92a3fc1b0bf9b1c8f86d0573cd02f")
version("5.15.8", sha256="776a9302c336671f9406a53bd30b8e36f825742b2ec44a57c08217bff0fa86b9")
@@ -145,7 +146,7 @@ class Qt(Package):
"https://src.fedoraproject.org/rpms/qt5-qtlocation/raw/b6d99579de9ce5802c592b512a9f644a5e4690b9/f/qtlocation-gcc10.patch",
sha256="78c70fbd0c74031c5f0f1f5990e0b4214fc04c5073c67ce1f23863373932ec86",
working_dir="qtlocation",
- when="@5.15.10 %gcc@10:",
+ when="@5.15.10: %gcc@10:",
)
# https://github.com/microsoft/vcpkg/issues/21055
patch("qt5-macos12.patch", working_dir="qtbase", when="@5.14: %apple-clang@13:")
diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py
index 4d41903cd637d7..40c036320d7a7a 100644
--- a/var/spack/repos/builtin/packages/quantum-espresso/package.py
+++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py
@@ -242,6 +242,11 @@ class QuantumEspresso(CMakePackage, Package):
depends_on("git@2.13:", type="build")
depends_on("m4", type="build")
+ # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa
+ for _intel_pkg in INTEL_MATH_LIBRARIES:
+ requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}")
+ requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}")
+
# CONFLICTS SECTION
# Omitted for now due to concretizer bug
# MKL with 64-bit integers not supported.
@@ -489,7 +494,8 @@ def install(self, pkg, spec, prefix):
# you need to pass it in the FFTW_INCLUDE and FFT_LIBS directory.
# QE supports an internal FFTW2, but only an external FFTW3 interface.
- if "^mkl" in spec:
+ is_using_intel_libraries = spec["lapack"].name in INTEL_MATH_LIBRARIES
+ if is_using_intel_libraries:
# A seperate FFT library is not needed when linking against MKL
options.append("FFTW_INCLUDE={0}".format(join_path(env["MKLROOT"], "include/fftw")))
if "^fftw@3:" in spec:
@@ -531,11 +537,11 @@ def install(self, pkg, spec, prefix):
if spec.satisfies("@:6.4"): # set even if MKL is selected
options.append("BLAS_LIBS={0}".format(lapack_blas.ld_flags))
else: # behavior changed at 6.5 and later
- if not spec.satisfies("^mkl"):
+ if not is_using_intel_libraries:
options.append("BLAS_LIBS={0}".format(lapack_blas.ld_flags))
if "+scalapack" in spec:
- if "^mkl" in spec:
+ if is_using_intel_libraries:
if "^openmpi" in spec:
scalapack_option = "yes"
else: # mpich, intel-mpi
diff --git a/var/spack/repos/builtin/packages/qwt/package.py b/var/spack/repos/builtin/packages/qwt/package.py
index e2d7a8e0ee5656..7bc3d51ece7dce 100644
--- a/var/spack/repos/builtin/packages/qwt/package.py
+++ b/var/spack/repos/builtin/packages/qwt/package.py
@@ -31,7 +31,9 @@ class Qwt(QMakePackage):
depends_on("qt+tools", when="+designer")
depends_on("qt+opengl", when="+opengl")
- depends_on("qt")
+ # Qwt does not support Qt6; this picks the right qmake provider
+ conflicts("^qt-base", msg="Qwt requires Qt5")
+
# the qt@5.14.2 limitation was lifted in qwt@6.1.5
# https://sourceforge.net/p/qwt/code/HEAD/tree/tags/qwt-6.1.6/CHANGES-6.1
depends_on("qt@:5.14.2", when="@:6.1.4")
diff --git a/var/spack/repos/builtin/packages/r-rlang/package.py b/var/spack/repos/builtin/packages/r-rlang/package.py
index 40120150b06b2c..2805d53bba2b10 100644
--- a/var/spack/repos/builtin/packages/r-rlang/package.py
+++ b/var/spack/repos/builtin/packages/r-rlang/package.py
@@ -14,6 +14,8 @@ class RRlang(RPackage):
cran = "rlang"
+ version("1.1.2", sha256="2a0ee1dc6e5c59b283c32db5e74e869922a336197cb406fe92622b6ec66f8092")
+ version("1.1.1", sha256="5e5ec9a7796977216c39d94b1e342e08f0681746657067ba30de11b8fa8ada99")
version("1.1.0", sha256="f89859d91c9edc05fd7ccf21163fe53ad58da907ee273a93d5ab004a8649335b")
version("1.0.6", sha256="e6973d98a0ea301c0da1eeaa435e9e65d1c3f0b95ed68bdc2d6cb0c610166760")
version("1.0.2", sha256="8de87c3e6fb0b3cce2dabc6908186f8e1528cc0c16b54de965fe02d405fdd7cc")
diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py
index dfe397ca2c0d7c..a12d089808e1e2 100644
--- a/var/spack/repos/builtin/packages/r/package.py
+++ b/var/spack/repos/builtin/packages/r/package.py
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
-import re
from spack.package import *
@@ -60,21 +59,20 @@ class R(AutotoolsPackage):
version("3.1.3", sha256="07e98323935baa38079204bfb9414a029704bb9c0ca5ab317020ae521a377312")
version("3.1.2", sha256="bcd150afcae0e02f6efb5f35a6ab72432be82e849ec52ce0bb89d8c342a8fa7a")
- variant(
- "external-lapack", default=False, description="Links to externally installed BLAS/LAPACK"
- )
variant("X", default=False, description="Enable X11 support (TCLTK, PNG, JPEG, TIFF, CAIRO)")
variant("memory_profiling", default=False, description="Enable memory profiling")
variant("rmath", default=False, description="Build standalone Rmath library")
- depends_on("blas", when="+external-lapack")
- depends_on("lapack", when="+external-lapack")
+ depends_on("blas")
+ depends_on("lapack")
+
depends_on("bzip2")
depends_on("curl+libidn2")
# R didn't anticipate the celebratory non-breaking major version bump of curl 8.
depends_on("curl@:7", when="@:4.2")
depends_on("icu4c")
depends_on("java")
+ depends_on("libtirpc")
depends_on("ncurses")
depends_on("pcre", when="@:3.6.3")
depends_on("pcre2", when="@4:")
@@ -84,16 +82,18 @@ class R(AutotoolsPackage):
depends_on("zlib-api")
depends_on("zlib@1.2.5:", when="^zlib")
depends_on("texinfo", type="build")
- depends_on("cairo+X+gobject+pdf", when="+X")
- depends_on("pango+X", when="+X")
- depends_on("harfbuzz+graphite2", when="+X")
- depends_on("jpeg", when="+X")
- depends_on("libpng", when="+X")
- depends_on("libtiff", when="+X")
- depends_on("libx11", when="+X")
- depends_on("libxmu", when="+X")
- depends_on("libxt", when="+X")
- depends_on("tk", when="+X")
+
+ with when("+X"):
+ depends_on("cairo+X+gobject+pdf")
+ depends_on("pango+X")
+ depends_on("harfbuzz+graphite2")
+ depends_on("jpeg")
+ depends_on("libpng")
+ depends_on("libtiff")
+ depends_on("libx11")
+ depends_on("libxmu")
+ depends_on("libxt")
+ depends_on("tk")
patch("zlib.patch", when="@:3.3.2")
@@ -126,32 +126,34 @@ def configure_args(self):
spec = self.spec
prefix = self.prefix
+ extra_rpath = join_path(prefix, "rlib", "R", "lib")
+
+ blas_flags: str = spec["blas"].libs.ld_flags
+ lapack_flags: str = spec["lapack"].libs.ld_flags
+
+ # R uses LAPACK in Fortran, which requires libmkl_gf_* when gfortran is used.
+ # TODO: cleaning this up seems to require both compilers as dependencies and the use of variants.
+ if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc:
+ xlp64 = "ilp64" if spec["lapack"].satisfies("+ilp64") else "lp64"
+ blas_flags = blas_flags.replace(f"mkl_intel_{xlp64}", f"mkl_gf_{xlp64}")
+ lapack_flags = lapack_flags.replace(f"mkl_intel_{xlp64}", f"mkl_gf_{xlp64}")
+
config_args = [
"--with-internal-tzcode",
"--libdir={0}".format(join_path(prefix, "rlib")),
"--enable-R-shlib",
- "--enable-BLAS-shlib",
"--enable-R-framework=no",
"--without-recommended-packages",
- "LDFLAGS=-L{0} -Wl,-rpath,{0}".format(join_path(prefix, "rlib", "R", "lib")),
+ f"LDFLAGS=-Wl,-rpath,{extra_rpath}",
+ f"--with-blas={blas_flags}",
+ f"--with-lapack={lapack_flags}",
+ # cannot disable docs with a normal configure option
+ "ac_cv_path_PDFLATEX=",
+ "ac_cv_path_PDFTEX=",
+ "ac_cv_path_TEX=",
+ "ac_cv_path_TEXI2DVI=",
]
- if "+external-lapack" in spec:
- if "^mkl" in spec and "gfortran" in self.compiler.fc:
- mkl_re = re.compile(r"(mkl_)intel(_i?lp64\b)")
- config_args.extend(
- [
- mkl_re.sub(
- r"\g<1>gf\g<2>", "--with-blas={0}".format(spec["blas"].libs.ld_flags)
- ),
- "--with-lapack",
- ]
- )
- else:
- config_args.extend(
- ["--with-blas={0}".format(spec["blas"].libs.ld_flags), "--with-lapack"]
- )
-
if "+X" in spec:
config_args.append("--with-cairo")
config_args.append("--with-jpeglib")
diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py
index cc1ede76be35f4..30a63bc079eb18 100644
--- a/var/spack/repos/builtin/packages/raja/package.py
+++ b/var/spack/repos/builtin/packages/raja/package.py
@@ -114,6 +114,7 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage):
variant("openmp", default=True, description="Build OpenMP backend")
variant("shared", default=True, description="Build Shared Libs")
+ variant("plugins", default=False, description="Enable runtime plugins")
variant("examples", default=True, description="Build examples.")
variant("exercises", default=True, description="Build exercises.")
# TODO: figure out gtest dependency and then set this default True
@@ -161,6 +162,11 @@ def _get_sys_type(self, spec):
sys_type = env["SYS_TYPE"]
return sys_type
+ @property
+ def libs(self):
+ shared = "+shared" in self.spec
+ return find_libraries("libRAJA", root=self.prefix, shared=shared, recursive=True)
+
@property
def cache_name(self):
hostname = socket.gethostname()
@@ -225,6 +231,7 @@ def initconfig_package_entries(self):
if "camp" in self.spec:
entries.append(cmake_cache_path("camp_DIR", spec["camp"].prefix))
entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec))
+ entries.append(cmake_cache_option("RAJA_ENABLE_RUNTIME_PLUGINS", "+plugins" in spec))
entries.append(
cmake_cache_option("{}ENABLE_EXAMPLES".format(option_prefix), "+examples" in spec)
)
diff --git a/var/spack/repos/builtin/packages/ratel/package.py b/var/spack/repos/builtin/packages/ratel/package.py
index c3e32ab85eb30b..9eeaf2ec8c8a7d 100644
--- a/var/spack/repos/builtin/packages/ratel/package.py
+++ b/var/spack/repos/builtin/packages/ratel/package.py
@@ -15,6 +15,7 @@ class Ratel(MakefilePackage, CudaPackage, ROCmPackage):
maintainers("jedbrown", "jeremylt")
version("develop", branch="main")
+ version("0.3.0", tag="v0.3.0", commit="ca2f3357e10b89fb274626fba104aad30c72774b")
version("0.2.1", tag="v0.2.1", commit="043b61696a2407205fdfd898681467d1a7ff59e0")
version("0.1.2", tag="v0.1.2", commit="94ad630bf897d231af7a94bf08257f6067258aae")
@@ -22,6 +23,8 @@ class Ratel(MakefilePackage, CudaPackage, ROCmPackage):
depends_on("libceed@develop", when="@develop")
depends_on("petsc@main", when="@develop")
# released versions
+ depends_on("libceed@0.12.0:0.12", when="@0.3.0")
+ depends_on("petsc@3.20.0:3.20", when="@0.3.0")
depends_on("libceed@0.11.0:0.11", when="@0.2.1")
depends_on("petsc@3.18.3:3.18", when="@0.2.1")
depends_on("libceed@0.10.1:0.10", when="@0.1.2")
diff --git a/var/spack/repos/builtin/packages/rclone/package.py b/var/spack/repos/builtin/packages/rclone/package.py
index a1dacabc7708e7..06dd69d9175dc7 100644
--- a/var/spack/repos/builtin/packages/rclone/package.py
+++ b/var/spack/repos/builtin/packages/rclone/package.py
@@ -15,6 +15,7 @@ class Rclone(Package):
maintainers("alecbcs")
+ version("1.64.2", sha256="0c74d8fb887691e04e865e3b6bc32e8af47c3e54a9922ffdbed38c8323e281c9")
version("1.63.1", sha256="0d8bf8b7460681f7906096a9d37eedecc5a1d1d3ad17652e68f0c6de104c2412")
version("1.62.2", sha256="340371f94604e6771cc4a2c91e37d1bf00a524deab520340440fb0968e783f63")
version("1.61.1", sha256="34b5f52047741c7bbf54572c02cc9998489c4736a753af3c99255296b1af125d")
diff --git a/var/spack/repos/builtin/packages/rdma-core/package.py b/var/spack/repos/builtin/packages/rdma-core/package.py
index 875471d02cfe4a..f792d5118f1c35 100644
--- a/var/spack/repos/builtin/packages/rdma-core/package.py
+++ b/var/spack/repos/builtin/packages/rdma-core/package.py
@@ -61,9 +61,11 @@ class RdmaCore(CMakePackage):
default=True,
description="Produce static libraries along with usual shared libraries.",
)
+ variant("pyverbs", default=True, description="Build with support for pyverbs")
+ variant("man_pages", default=True, description="Build with support for man pages")
depends_on("pkgconfig", type="build")
- depends_on("py-docutils", type="build")
+ depends_on("py-docutils", when="+man_pages", type="build")
depends_on("libnl")
conflicts("platform=darwin", msg="rdma-core requires FreeBSD or Linux")
conflicts("%intel", msg="rdma-core cannot be built with intel (use gcc instead)")
@@ -90,6 +92,11 @@ def cmake_args(self):
cmake_args.append(self.define_from_variant("ENABLE_STATIC", "static"))
+ if self.spec.satisfies("~pyverbs"):
+ cmake_args.append("-DNO_PYVERBS=1")
+ if self.spec.satisfies("~man_pages"):
+ cmake_args.append("-DNO_MAN_PAGES=1")
+
if self.spec.satisfies("@:39.0"):
cmake_args.extend(
[
diff --git a/var/spack/repos/builtin/packages/re2/package.py b/var/spack/repos/builtin/packages/re2/package.py
index 761005949b60e2..3c62d3da76217c 100644
--- a/var/spack/repos/builtin/packages/re2/package.py
+++ b/var/spack/repos/builtin/packages/re2/package.py
@@ -13,6 +13,9 @@ class Re2(CMakePackage):
homepage = "https://github.com/google/re2"
url = "https://github.com/google/re2/archive/2020-08-01.tar.gz"
+ version(
+ "2023-09-01", sha256="5bb6875ae1cd1e9fedde98018c346db7260655f86fdb8837e3075103acd3649b"
+ )
version(
"2021-06-01", sha256="26155e050b10b5969e986dab35654247a3b1b295e0532880b5a9c13c0a700ceb"
)
@@ -26,6 +29,8 @@ class Re2(CMakePackage):
variant("shared", default=False, description="Build shared instead of static libraries")
variant("pic", default=True, description="Enable position independent code")
+ depends_on("abseil-cpp", when="@2023-09-01:")
+
# shared libs must have position-independent code
conflicts("+shared ~pic")
diff --git a/var/spack/repos/builtin/packages/recola/package.py b/var/spack/repos/builtin/packages/recola/package.py
index 80d11f2433444d..c4cb8d3c5f48b1 100644
--- a/var/spack/repos/builtin/packages/recola/package.py
+++ b/var/spack/repos/builtin/packages/recola/package.py
@@ -15,20 +15,27 @@ class Recola(CMakePackage):
tags = ["hep"]
- homepage = "https://recola.hepforge.org"
- url = "https://recola.hepforge.org/downloads/?f=recola2-2.2.3.tar.gz"
+ homepage = "https://recola.gitlab.io/recola2/"
+ url = "https://gitlab.com/recola/recola2/-/archive/2.2.4/recola2-2.2.4.tar.gz"
maintainers("vvolkl")
variant("python", default=True, description="Build py-recola python bindings.")
- version("2.2.4", sha256="16bdefb633d51842b4d32c39a43118d7052302cd63be456a473557e9b7e0316e")
- version("2.2.3", sha256="db0f5e448ed603ac4073d4bbf36fd74f401a22876ad390c0d02c815a78106c5f")
+ version("2.2.4", sha256="212ae6141bc5de38c50be3e0c6947a3b0752aeb463cf850c22cfed5e61b1a64b")
+ version("2.2.3", sha256="8dc25798960c272434fcde93817ed92aad82b2a7cf07438bb4deb5688d301086")
+ version("2.2.2", sha256="a64cf2b4aa213289dfab6e2255a77264f281cd0ac85f5e9770c82b815272c5c9")
+ version("2.2.0", sha256="a64cf2b4aa213289dfab6e2255a77264f281cd0ac85f5e9770c82b815272c5c9")
version(
"1.4.3",
url="https://recola.hepforge.org/downloads/?f=recola-1.4.3.tar.gz",
sha256="f6a7dce6e1f09821ba919524f786557984f216c001ab63e7793e8aa9a8560ceb",
)
+ version(
+ "1.4.0",
+ url="https://recola.hepforge.org/downloads/?f=recola-1.4.0.tar.gz",
+ sha256="dc7db5ac9456dda2e6c03a63ad642066b0b5e4ceb8cae1f2a13ab33b35caaba8",
+ )
depends_on("collier")
depends_on("recola-sm")
diff --git a/var/spack/repos/builtin/packages/restic/package.py b/var/spack/repos/builtin/packages/restic/package.py
index 493e0c098331f2..569d51dc704c21 100644
--- a/var/spack/repos/builtin/packages/restic/package.py
+++ b/var/spack/repos/builtin/packages/restic/package.py
@@ -14,6 +14,7 @@ class Restic(Package):
maintainers("alecbcs")
+ version("0.16.2", sha256="88165b5b89b6064df37a9964d660f40ac62db51d6536e459db9aaea6f2b2fc11")
version("0.16.0", sha256="b91f5ef6203a5c50a72943c21aaef336e1344f19a3afd35406c00f065db8a8b9")
version("0.15.2", sha256="52aca841486eaf4fe6422b059aa05bbf20db94b957de1d3fca019ed2af8192b7")
version("0.15.1", sha256="fce382fdcdac0158a35daa640766d5e8a6e7b342ae2b0b84f2aacdff13990c52")
diff --git a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py
index 71ff595e64247e..d0d4f1f6e9292d 100644
--- a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py
+++ b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py
@@ -18,6 +18,48 @@ class RiscvGnuToolchain(AutotoolsPackage):
maintainers("wanlinwang")
version("develop", branch="master", submodules=True)
+ version(
+ "2023.10.18",
+ tag="2023.10.18",
+ commit="b86b2b37d0acc607156ff56ff17ee105a9b48897",
+ submodules=True,
+ )
+ version(
+ "2023.10.17",
+ tag="2023.10.17",
+ commit="c11f0748276c58df4f9d9602cdc2de5f17cbae8c",
+ submodules=True,
+ )
+ version(
+ "2023.10.12",
+ tag="2023.10.12",
+ commit="e65e7fc58543c821baf4f1fb6d0ef700177b9d89",
+ submodules=True,
+ )
+ version(
+ "2023.10.06",
+ tag="2023.10.06",
+ commit="6e7190e8c95e09d541e69f6f6e39163f808570d5",
+ submodules=True,
+ )
+ version(
+ "2023.09.27",
+ tag="2023.09.27",
+ commit="5afde2de23c6597aaa5069f36574c61bcb39b007",
+ submodules=True,
+ )
+ version(
+ "2023.09.26",
+ tag="2023.09.26",
+ commit="ffb5968884630c7baebba7b2af493f6b5f74ad80",
+ submodules=True,
+ )
+ version(
+ "2023.09.13",
+ tag="2023.09.13",
+ commit="5437780994b830e9eabf467f85f22ed24b5fade1",
+ submodules=True,
+ )
version(
"2022.08.08",
tag="2022.08.08",
diff --git a/var/spack/repos/builtin/packages/rkcommon/package.py b/var/spack/repos/builtin/packages/rkcommon/package.py
index 4f0a07559ef9c6..8223c6836000a9 100644
--- a/var/spack/repos/builtin/packages/rkcommon/package.py
+++ b/var/spack/repos/builtin/packages/rkcommon/package.py
@@ -16,6 +16,7 @@ class Rkcommon(CMakePackage):
# maintainers("github_user1",o"github_user2")
+ version("1.12.0", sha256="6abb901073811cdbcbe336772e1fcb458d78cab5ad8d5d61de2b57ab83581e80")
version("1.11.0", sha256="9cfeedaccdefbdcf23c465cb1e6c02057100c4a1a573672dc6cfea5348cedfdd")
version("1.10.0", sha256="57a33ce499a7fc5a5aaffa39ec7597115cf69ed4ff773546b5b71ff475ee4730")
version("1.9.0", sha256="b68aa02ef44c9e35c168f826a14802bb5cc6a9d769ba4b64b2c54f347a14aa53")
diff --git a/var/spack/repos/builtin/packages/rocalution/package.py b/var/spack/repos/builtin/packages/rocalution/package.py
index b0ba2021ba6804..4b80d75ee79033 100644
--- a/var/spack/repos/builtin/packages/rocalution/package.py
+++ b/var/spack/repos/builtin/packages/rocalution/package.py
@@ -184,10 +184,6 @@ class Rocalution(CMakePackage):
# Fix build for most Radeon 5000 and Radeon 6000 series GPUs.
patch("0004-fix-navi-1x.patch", when="@5.2.0:5.3")
- def check(self):
- exe = join_path(self.build_directory, "clients", "staging", "rocalution-test")
- self.run_test(exe)
-
def setup_build_environment(self, env):
env.set("CXX", self.spec["hip"].hipcc)
@@ -236,3 +232,9 @@ def cmake_args(self):
args.append("-DCMAKE_INSTALL_LIBDIR=lib")
return args
+
+ @run_after("build")
+ @on_package_attributes(run_tests=True)
+ def check_build(self):
+ exe = Executable(join_path(self.build_directory, "clients", "staging", "rocalution-test"))
+ exe()
diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py
index dfefd8ef75d5c5..adad90b646e628 100644
--- a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py
+++ b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py
@@ -179,14 +179,18 @@ def setup_build_environment(self, build_env):
depends_on("hip-rocclr@" + ver, when="@" + ver)
def patch(self):
- if "@4.5.0:5.1" in self.spec:
+ if self.spec.satisfies("@4.5:5.1"):
filter_file(
"@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True
)
- elif "@5.2.0:" in self.spec:
+ elif self.spec.satisfies("@5.2:5.4"):
filter_file(
"@ROCM_PATH@/bin", self.spec.prefix.bin, "rvs/conf/deviceid.sh.in", string=True
)
+ elif self.spec.satisfies("@5.5:"):
+ filter_file(
+ "@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True
+ )
def cmake_args(self):
args = [
diff --git a/var/spack/repos/builtin/packages/rocsolver/package.py b/var/spack/repos/builtin/packages/rocsolver/package.py
index 3b1cfcb51173dd..babf8b9d524cf6 100644
--- a/var/spack/repos/builtin/packages/rocsolver/package.py
+++ b/var/spack/repos/builtin/packages/rocsolver/package.py
@@ -126,6 +126,7 @@ class Rocsolver(CMakePackage):
depends_on("cmake@3.8:", type="build", when="@4.1.0:")
depends_on("cmake@3.5:", type="build")
depends_on("fmt@7:", type="build", when="@4.5.0:")
+ depends_on("fmt@7:8.0.1", type="test", when="@5.6:")
depends_on("googletest@1.10.0:", type="test")
depends_on("netlib-lapack@3.7.1:", type="test")
@@ -136,10 +137,6 @@ class Rocsolver(CMakePackage):
# Maximize compatibility with other libraries that are using fmt.
patch("fmt-9-compatibility.patch", when="@5.2.0:5.5")
- def check(self):
- exe = join_path(self.build_directory, "clients", "staging", "rocsolver-test")
- self.run_test(exe, options=["--gtest_filter=checkin*-*known_bug*"])
-
depends_on("hip@4.1.0:", when="@4.1.0:")
depends_on("rocm-cmake@master", type="build", when="@master:")
depends_on("rocm-cmake@4.5.0:", type="build", when="@4.5.0:")
@@ -236,3 +233,9 @@ def cmake_args(self):
def setup_build_environment(self, env):
env.set("CXX", self.spec["hip"].hipcc)
+
+ @run_after("build")
+ @on_package_attributes(run_tests=True)
+ def check_build(self):
+ exe = Executable(join_path(self.build_directory, "clients", "staging", "rocsolver-test"))
+ exe("--gtest_filter=checkin*-*known_bug*")
diff --git a/var/spack/repos/builtin/packages/rocsparse/package.py b/var/spack/repos/builtin/packages/rocsparse/package.py
index 4fb8fb1646b4fc..e0ae2806622acc 100644
--- a/var/spack/repos/builtin/packages/rocsparse/package.py
+++ b/var/spack/repos/builtin/packages/rocsparse/package.py
@@ -311,10 +311,13 @@ class Rocsparse(CMakePackage):
destination="mtx",
)
- def check(self):
+ @run_after("build")
+ def check_build(self):
if self.spec.satisfies("+test"):
- exe = join_path(self.build_directory, "clients", "staging", "rocsparse-test")
- self.run_test(exe, options=["--gtest_filter=*quick*:*pre_checkin*-*known_bug*"])
+ exe = Executable(
+ join_path(self.build_directory, "clients", "staging", "rocsparse-test")
+ )
+ exe("--gtest_filter=*quick*:*pre_checkin*-*known_bug*")
def setup_build_environment(self, env):
env.set("CXX", self.spec["hip"].hipcc)
diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py
index 3fee41cbbad957..7c1235d411ebff 100644
--- a/var/spack/repos/builtin/packages/root/package.py
+++ b/var/spack/repos/builtin/packages/root/package.py
@@ -36,10 +36,14 @@ class Root(CMakePackage):
version("develop", branch="master")
# Production version
+ version("6.30.02", sha256="51a09c86ffa94089abac76daa3adc20812efc6c93b427697b843b12d73e145de")
+ version("6.30.00", sha256="0592c066954cfed42312957c9cb251654456064fe2d8dabdcb8826f1c0099d71")
+ version("6.28.10", sha256="fc6a2d6c7cba853b0cfd6bd9514c90e9df50e1743899bc1db0472ee6f4e65a0a")
version("6.28.06", sha256="af3b673b9aca393a5c9ae1bf86eab2672aaf1841b658c5c6e7a30ab93c586533")
version("6.28.04", sha256="70f7f86a0cd5e3f2a0befdc59942dd50140d990ab264e8e56c7f17f6bfe9c965")
version("6.28.02", sha256="6643c07710e68972b00227c68b20b1016fec16f3fba5f44a571fa1ce5bb42faa")
version("6.28.00", sha256="afa1c5c06d0915411cb9492e474ea9ab12b09961a358e7e559013ed63b5d8084")
+ version("6.26.12", sha256="229daa0749e3e31b6e0dedc58b6838dbfc1180b4aba4741883b617b0b4fbc966")
version("6.26.10", sha256="8e56bec397104017aa54f9eb554de7a1a134474fe0b3bb0f43a70fc4fabd625f")
version("6.26.08", sha256="4dda043e7918b40743ad0299ddd8d526b7078f0a3822fd06066df948af47940e")
version("6.26.06", sha256="b1f73c976a580a5c56c8c8a0152582a1dfc560b4dd80e1b7545237b65e6c89cb")
@@ -107,6 +111,8 @@ class Root(CMakePackage):
when="@6.26:6.26.06 +root7 ^nlohmann-json@3.11:",
)
+ patch("webgui.patch", level=0, when="@6.26.00:6.26.10,6.28.00:6.28.08,6.30.00 +webgui")
+
if sys.platform == "darwin":
# Resolve non-standard use of uint, _cf_
# https://sft.its.cern.ch/jira/browse/ROOT-7886.
@@ -162,9 +168,11 @@ class Root(CMakePackage):
)
variant("mysql", default=False, description="Enable support for MySQL databases")
variant("opengl", default=True, description="Enable OpenGL support")
- variant("oracle", default=False, description="Enable support for Oracle databases")
+ variant(
+ "oracle", when="@:6.30", default=False, description="Enable support for Oracle databases"
+ )
variant("postgres", default=False, description="Enable postgres support")
- variant("pythia6", default=False, description="Enable pythia6 support")
+ variant("pythia6", when="@:6.30", default=False, description="Enable pythia6 support")
variant("pythia8", default=False, description="Enable pythia8 support")
variant("python", default=True, description="Enable Python ROOT bindings")
variant("qt4", when="@:6.17", default=False, description="Enable Qt graphics backend")
@@ -295,6 +303,7 @@ class Root(CMakePackage):
depends_on("unuran", when="+unuran")
depends_on("vc@1.0:", when="@6.07.04: +vc")
depends_on("vc@1.3.0:", when="@6.09.02: +vc")
+ depends_on("vc@1.4.4:", when="@6.29.02: +vc")
depends_on("vdt", when="+vdt")
depends_on("veccore", when="+veccore")
depends_on("libxml2", when="+xml")
@@ -326,6 +335,7 @@ class Root(CMakePackage):
conflicts("~http", when="@6.29.00: +webgui", msg="root+webgui requires HTTP")
conflicts("cxxstd=11", when="+root7", msg="root7 requires at least C++14")
conflicts("cxxstd=11", when="@6.25.02:", msg="This version of root requires at least C++14")
+ conflicts("cxxstd=14", when="@6.30.00:", msg="This version of root requires at least C++17")
conflicts(
"cxxstd=20", when="@:6.28.02", msg="C++20 support requires root version at least 6.28.04"
)
diff --git a/var/spack/repos/builtin/packages/root/webgui.patch b/var/spack/repos/builtin/packages/root/webgui.patch
new file mode 100644
index 00000000000000..932bb1023f850a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/root/webgui.patch
@@ -0,0 +1,25 @@
+--- config/rootrc.in_ORIG 2023-11-28 08:32:42.696061390 +0100
++++ config/rootrc.in 2023-11-28 08:32:47.672727920 +0100
+@@ -217,7 +217,7 @@
+ # Type of tree viewer: TTreeViewer or RTreeViewer
+ TreeViewer.Name: TTreeViewer
+ # Type of Browser: TRootBrowser or TRootBrowserLite
+-Browser.Name: @root_browser_class@
++Browser.Name: TRootBrowser
+ # Browser Options (plugins)
+ # F: File browser E: Text Editor H: HTML browser
+ # C: Canvas I: I/O redirection P: Proof G: GL viewer
+
+--- config/rootrc.in_ORIG 2023-11-28 08:18:11.686085190 +0100
++++ config/rootrc.in 2023-11-28 08:18:15.839418409 +0100
+@@ -247,8 +247,8 @@
+ WebGui.HttpPortMax: 9800
+ # Exact IP iddress to bind bind http server (default - empty)
+ WebGui.HttpBind:
+-# Use only loopback address to bind http server (default - no)
+-WebGui.HttpLoopback: no
++# Use only loopback address to bind http server (default - yes)
++WebGui.HttpLoopback: yes
+ # Use https protocol for the http server (default - no)
+ WebGui.UseHttps: no
+ WebGui.ServerCert: rootserver.pem
\ No newline at end of file
diff --git a/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch b/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch
new file mode 100644
index 00000000000000..4325ed07381f54
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch
@@ -0,0 +1,23 @@
+https://bugs.gentoo.org/828082
+--- a/librtmp/rtmp.c
++++ b/librtmp/rtmp.c
+@@ -28,6 +28,7 @@
+ #include
+ #include
+ #include
++#include
+
+ #include "rtmp_sys.h"
+ #include "log.h"
+diff --git a/librtmp/hashswf.c b/librtmp/hashswf.c
+index 32b2eed..e3669e3 100644
+--- a/librtmp/hashswf.c
++++ b/librtmp/hashswf.c
+@@ -25,6 +25,7 @@
+ #include
+ #include
+ #include
++#include
+
+ #include "rtmp_sys.h"
+ #include "log.h"
diff --git a/var/spack/repos/builtin/packages/rtmpdump/package.py b/var/spack/repos/builtin/packages/rtmpdump/package.py
new file mode 100644
index 00000000000000..a868e6e3d0d8c9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rtmpdump/package.py
@@ -0,0 +1,38 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Rtmpdump(MakefilePackage):
+ """rtmpdump is a toolkit for RTMP streams."""
+
+ homepage = "https://rtmpdump.mplayerhq.hu/"
+ git = "https://git.ffmpeg.org/rtmpdump.git"
+
+ maintainers("tobbez")
+
+ license("GPL-2.0-or-later")
+
+ version("2021-02-19", commit="f1b83c10d8beb43fcc70a6e88cf4325499f25857")
+
+ variant("tls", default="openssl", description="TLS backend", values=("gnutls", "openssl"))
+
+ depends_on("openssl@:3", when="tls=openssl")
+ depends_on("gnutls", when="tls=gnutls")
+ depends_on("zlib-api")
+
+ patch("missing-include.patch")
+ patch("rtmpdump-fix-chunk-size.patch")
+ patch("rtmpdump-openssl-1.1-v2.patch")
+ patch("rtmpdump-swf_vertification_type_2.patch")
+ patch("rtmpdump-swf_vertification_type_2_part_2.patch")
+
+ @property
+ def build_targets(self):
+ return [f"CRYPTO={self.spec.variants['tls'].value.upper()}"]
+
+ def install(self, spec, prefix):
+ make("install", f"prefix={prefix}", "sbindir=$(bindir)")
diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch
new file mode 100644
index 00000000000000..1c6cfdc6261075
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch
@@ -0,0 +1,48 @@
+https://git.alpinelinux.org/aports/commit/main/rtmpdump/fix-chunk-size.patch?id=bf39fb1177ee77eee6c214a7393cc0054958ce08
+https://git.alpinelinux.org/aports/commit/main/rtmpdump/fix-chunk-size.patch?id=69bc162319b12e9b6c6d3ea345dbf7c218753594
+diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c
+index a2863b0..ac1b3be 100644
+--- a/librtmp/rtmp.c
++++ b/librtmp/rtmp.c
+@@ -2077,6 +2077,29 @@ RTMP_SendClientBW(RTMP *r)
+ }
+
+ static int
++SendClientChunkSize(RTMP *r, int chunkSize)
++{
++ RTMPPacket packet;
++ char pbuf[256], *pend = pbuf + sizeof(pbuf);
++ int ret;
++
++ packet.m_nChannel = 0x02; /* control channel (invoke) */
++ packet.m_headerType = RTMP_PACKET_SIZE_LARGE;
++ packet.m_packetType = RTMP_PACKET_TYPE_CHUNK_SIZE;
++ packet.m_nTimeStamp = 0;
++ packet.m_nInfoField2 = 0;
++ packet.m_hasAbsTimestamp = 0;
++ packet.m_body = pbuf + RTMP_MAX_HEADER_SIZE;
++
++ packet.m_nBodySize = 4;
++
++ AMF_EncodeInt32(packet.m_body, pend, chunkSize);
++ ret = RTMP_SendPacket(r, &packet, FALSE);
++ r->m_outChunkSize = chunkSize;
++ return ret;
++}
++
++static int
+ SendBytesReceived(RTMP *r)
+ {
+ RTMPPacket packet;
+@@ -3349,6 +3372,11 @@ HandleChangeChunkSize(RTMP *r, const RTMPPacket *packet)
+ r->m_inChunkSize = AMF_DecodeInt32(packet->m_body);
+ RTMP_Log(RTMP_LOGDEBUG, "%s, received: chunk size change to %d", __FUNCTION__,
+ r->m_inChunkSize);
++ if (r->Link.protocol & RTMP_FEATURE_WRITE)
++ {
++ RTMP_Log(RTMP_LOGDEBUG, "%s, updating outChunkSize too", __FUNCTION__);
++ SendClientChunkSize(r, r->m_inChunkSize);
++ }
+ }
+ }
+
diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch
new file mode 100644
index 00000000000000..146243bd111188
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch
@@ -0,0 +1,248 @@
+https://raw.githubusercontent.com/xbmc/inputstream.rtmp/master/depends/common/librtmp/0003-openssl-1.1.patch
+See also https://github.com/xbmc/inputstream.rtmp/pull/46
+--- a/librtmp/dh.h
++++ b/librtmp/dh.h
+@@ -253,20 +253,42 @@
+ if (!dh)
+ goto failed;
+
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ MP_new(dh->g);
+
+ if (!dh->g)
+ goto failed;
++#else
++ BIGNUM *g = NULL;
++ MP_new(g);
++ if (!g)
++ goto failed;
++#endif
+
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ MP_gethex(dh->p, P1024, res); /* prime P1024, see dhgroups.h */
++#else
++ BIGNUM* p = NULL;
++ DH_get0_pqg(dh, (BIGNUM const**)&p, NULL, NULL);
++ MP_gethex(p, P1024, res); /* prime P1024, see dhgroups.h */
++#endif
+ if (!res)
+ {
+ goto failed;
+ }
+
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ MP_set_w(dh->g, 2); /* base 2 */
++#else
++ MP_set_w(g, 2); /* base 2 */
++ DH_set0_pqg(dh, p, NULL, g);
++#endif
+
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ dh->length = nKeyBits;
++#else
++ DH_set_length(dh, nKeyBits);
++#endif
+ return dh;
+
+ failed:
+@@ -293,12 +315,24 @@
+ MP_gethex(q1, Q1024, res);
+ assert(res);
+
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ res = isValidPublicKey(dh->pub_key, dh->p, q1);
++#else
++ BIGNUM const* pub_key = NULL;
++ BIGNUM const* p = NULL;
++ DH_get0_key(dh, &pub_key, NULL);
++ DH_get0_pqg(dh, &p, NULL, NULL);
++ res = isValidPublicKey((BIGNUM*)pub_key, (BIGNUM*)p, q1);
++#endif
+ if (!res)
+ {
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ MP_free(dh->pub_key);
+ MP_free(dh->priv_key);
+ dh->pub_key = dh->priv_key = 0;
++#else
++ DH_free(dh);
++#endif
+ }
+
+ MP_free(q1);
+@@ -314,15 +348,29 @@
+ DHGetPublicKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen)
+ {
+ int len;
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ if (!dh || !dh->pub_key)
++#else
++ BIGNUM const* pub_key = NULL;
++ DH_get0_key(dh, &pub_key, NULL);
++ if (!dh || !pub_key)
++#endif
+ return 0;
+
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ len = MP_bytes(dh->pub_key);
++#else
++ len = MP_bytes(pub_key);
++#endif
+ if (len <= 0 || len > (int) nPubkeyLen)
+ return 0;
+
+ memset(pubkey, 0, nPubkeyLen);
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ MP_setbin(dh->pub_key, pubkey + (nPubkeyLen - len), len);
++#else
++ MP_setbin(pub_key, pubkey + (nPubkeyLen - len), len);
++#endif
+ return 1;
+ }
+
+@@ -364,7 +412,13 @@
+ MP_gethex(q1, Q1024, len);
+ assert(len);
+
++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L
+ if (isValidPublicKey(pubkeyBn, dh->p, q1))
++#else
++ BIGNUM const* p = NULL;
++ DH_get0_pqg(dh, &p, NULL, NULL);
++ if (isValidPublicKey(pubkeyBn, (BIGNUM*)p, q1))
++#endif
+ res = MDH_compute_key(secret, nPubkeyLen, pubkeyBn, dh);
+ else
+ res = -1;
+--- a/librtmp/handshake.h
++++ b/librtmp/handshake.h
+@@ -31,9 +31,9 @@
+ #define SHA256_DIGEST_LENGTH 32
+ #endif
+ #define HMAC_CTX sha2_context
+-#define HMAC_setup(ctx, key, len) sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0)
+-#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(&ctx, buf, len)
+-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig)
++#define HMAC_setup(ctx, key, len) sha2_hmac_starts(ctx, (unsigned char *)key, len, 0)
++#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(ctx, buf, len)
++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(ctx, dig)
+
+ typedef arc4_context * RC4_handle;
+ #define RC4_alloc(h) *h = malloc(sizeof(arc4_context))
+@@ -50,9 +50,9 @@
+ #endif
+ #undef HMAC_CTX
+ #define HMAC_CTX struct hmac_sha256_ctx
+-#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(&ctx, len, key)
+-#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(&ctx, len, buf)
+-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig)
++#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(ctx, len, key)
++#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(ctx, len, buf)
++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(ctx, SHA256_DIGEST_LENGTH, dig)
+ #define HMAC_close(ctx)
+
+ typedef struct arcfour_ctx* RC4_handle;
+@@ -64,14 +64,23 @@
+
+ #else /* USE_OPENSSL */
+ #include
++#include
+ #include
+ #include
+ #if OPENSSL_VERSION_NUMBER < 0x0090800 || !defined(SHA256_DIGEST_LENGTH)
+ #error Your OpenSSL is too old, need 0.9.8 or newer with SHA256
+ #endif
+-#define HMAC_setup(ctx, key, len) HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, key, len, EVP_sha256(), 0)
+-#define HMAC_crunch(ctx, buf, len) HMAC_Update(&ctx, buf, len)
+-#define HMAC_finish(ctx, dig, dlen) HMAC_Final(&ctx, dig, &dlen); HMAC_CTX_cleanup(&ctx)
++#if OPENSSL_VERSION_NUMBER < 0x10100000L
++#define HMAC_setup(ctx, key, len) HMAC_CTX_init(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0)
++#else
++#define HMAC_setup(ctx, key, len) ctx = HMAC_CTX_new(); HMAC_CTX_reset(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0)
++#endif
++#define HMAC_crunch(ctx, buf, len) HMAC_Update(ctx, buf, len)
++#if OPENSSL_VERSION_NUMBER < 0x10100000L
++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, dig, &dlen); HMAC_CTX_cleanup(ctx)
++#else
++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, dig, &dlen); HMAC_CTX_free(ctx)
++#endif
+
+ typedef RC4_KEY * RC4_handle;
+ #define RC4_alloc(h) *h = malloc(sizeof(RC4_KEY))
+@@ -117,7 +126,7 @@
+ {
+ uint8_t digest[SHA256_DIGEST_LENGTH];
+ unsigned int digestLen = 0;
+- HMAC_CTX ctx;
++ HMAC_CTX* ctx = NULL;
+
+ RC4_alloc(rc4keyIn);
+ RC4_alloc(rc4keyOut);
+@@ -266,7 +275,7 @@
+ size_t keylen, uint8_t *digest)
+ {
+ unsigned int digestLen;
+- HMAC_CTX ctx;
++ HMAC_CTX* ctx = NULL;
+
+ HMAC_setup(ctx, key, keylen);
+ HMAC_crunch(ctx, message, messageLen);
+--- a/librtmp/hashswf.c
++++ b/librtmp/hashswf.c
+@@ -37,9 +37,9 @@
+ #define SHA256_DIGEST_LENGTH 32
+ #endif
+ #define HMAC_CTX sha2_context
+-#define HMAC_setup(ctx, key, len) sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0)
+-#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(&ctx, buf, len)
+-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig)
++#define HMAC_setup(ctx, key, len) sha2_hmac_starts(ctx, (unsigned char *)key, len, 0)
++#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(ctx, buf, len)
++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(ctx, dig)
+ #define HMAC_close(ctx)
+ #elif defined(USE_GNUTLS)
+ #include
+@@ -48,19 +48,27 @@
+ #endif
+ #undef HMAC_CTX
+ #define HMAC_CTX struct hmac_sha256_ctx
+-#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(&ctx, len, key)
+-#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(&ctx, len, buf)
+-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig)
++#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(ctx, len, key)
++#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(ctx, len, buf)
++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(ctx, SHA256_DIGEST_LENGTH, dig)
+ #define HMAC_close(ctx)
+ #else /* USE_OPENSSL */
+ #include
+ #include
+ #include
+ #include
+-#define HMAC_setup(ctx, key, len) HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, (unsigned char *)key, len, EVP_sha256(), 0)
+-#define HMAC_crunch(ctx, buf, len) HMAC_Update(&ctx, (unsigned char *)buf, len)
+-#define HMAC_finish(ctx, dig, dlen) HMAC_Final(&ctx, (unsigned char *)dig, &dlen);
+-#define HMAC_close(ctx) HMAC_CTX_cleanup(&ctx)
++#if OPENSSL_VERSION_NUMBER < 0x10100000L
++#define HMAC_setup(ctx, key, len) HMAC_CTX_init(ctx); HMAC_Init_ex(ctx, (unsigned char *)key, len, EVP_sha256(), 0)
++#else
++#define HMAC_setup(ctx, key, len) ctx = HMAC_CTX_new(); HMAC_CTX_reset(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0)
++#endif
++#define HMAC_crunch(ctx, buf, len) HMAC_Update(ctx, (unsigned char *)buf, len)
++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, (unsigned char *)dig, &dlen);
++#if OPENSSL_VERSION_NUMBER < 0x10100000L
++#define HMAC_close(ctx) HMAC_CTX_cleanup(ctx)
++#else
++#define HMAC_close(ctx) HMAC_CTX_reset(ctx); HMAC_CTX_free(ctx)
++#endif
+ #endif
+
+ extern void RTMP_TLS_Init();
+@@ -289,7 +297,7 @@
+ struct info
+ {
+ z_stream *zs;
+- HMAC_CTX ctx;
++ HMAC_CTX *ctx;
+ int first;
+ int zlib;
+ int size;
diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch
new file mode 100644
index 00000000000000..cc7637d84943af
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch
@@ -0,0 +1,14 @@
+https://bugs.gentoo.org/669574
+diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c
+index 5311a8a..79fefae 100644
+--- a/librtmp/rtmp.c
++++ b/librtmp/rtmp.c
+@@ -2854,7 +2854,7 @@ HandleCtrl(RTMP *r, const RTMPPacket *packet)
+ if (nType == 0x1A)
+ {
+ RTMP_Log(RTMP_LOGDEBUG, "%s, SWFVerification ping received: ", __FUNCTION__);
+- if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x01)
++ if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x02)
+ {
+ RTMP_Log(RTMP_LOGERROR,
+ "%s: SWFVerification Type %d request not supported! Patches welcome...",
diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch
new file mode 100644
index 00000000000000..ade0d9baa79a46
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch
@@ -0,0 +1,22 @@
+https://bugs.gentoo.org/669574
+diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c
+index df2cb27..b72dc64 100644
+--- a/librtmp/rtmp.c
++++ b/librtmp/rtmp.c
+@@ -2857,14 +2857,14 @@ HandleCtrl(RTMP *r, const RTMPPacket *packet)
+ if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x01)
+ {
+ RTMP_Log(RTMP_LOGERROR,
+- "%s: SWFVerification Type %d request not supported! Patches welcome...",
++ "%s: SWFVerification Type %d request not supported, attempting to use SWFVerification Type 1! Patches welcome...",
+ __FUNCTION__, packet->m_body[2]);
+ }
+ #ifdef CRYPTO
+ /*RTMP_LogHex(packet.m_body, packet.m_nBodySize); */
+
+ /* respond with HMAC SHA256 of decompressed SWF, key is the 30byte player key, also the last 30 bytes of the server handshake are applied */
+- else if (r->Link.SWFSize)
++ if (r->Link.SWFSize)
+ {
+ RTMP_SendCtrl(r, 0x1B, 0, 0);
+ }
diff --git a/var/spack/repos/builtin/packages/rust-bootstrap/package.py b/var/spack/repos/builtin/packages/rust-bootstrap/package.py
index 84100bf2424a14..a8e9f7baae34a8 100644
--- a/var/spack/repos/builtin/packages/rust-bootstrap/package.py
+++ b/var/spack/repos/builtin/packages/rust-bootstrap/package.py
@@ -21,6 +21,17 @@ class RustBootstrap(Package):
# should update these binary releases as bootstrapping requirements are
# modified by new releases of Rust.
rust_releases = {
+ "1.73.0": {
+ "darwin": {
+ "x86_64": "ece9646bb153d4bc0f7f1443989de0cbcd8989a7d0bf3b7fb9956e1223954f0c",
+ "aarch64": "9c96e4c57328fb438ee2d87aa75970ce89b4426b49780ccb3c16af0d7c617cc6",
+ },
+ "linux": {
+ "x86_64": "aa4cf0b7e66a9f5b7c623d4b340bb1ac2864a5f2c2b981f39f796245dc84f2cb",
+ "aarch64": "e54d7d886ba413ae573151f668e76ea537f9a44406d3d29598269a4a536d12f6",
+ "powerpc64le": "8fa215ee3e274fb64364e7084613bc570369488fa22cf5bc8e0fe6dc810fe2b9",
+ },
+ },
"1.70.0": {
"darwin": {
"x86_64": "e5819fdbfc7f1a4d5d82cb4c3b7662250748450b45a585433bfb75648bc45547",
@@ -73,7 +84,7 @@ class RustBootstrap(Package):
# Determine system os and architecture/target.
os = platform.system().lower()
- target = rust_targets[platform.machine().lower()]
+ target = rust_targets.get(platform.machine().lower(), platform.machine().lower())
# Pre-release versions of the bootstrap compiler.
# Note: These versions are unchecksumed since they will change
diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py
index 66e31f14dcbee2..79f6d9c1ff5a5f 100644
--- a/var/spack/repos/builtin/packages/rust/package.py
+++ b/var/spack/repos/builtin/packages/rust/package.py
@@ -18,6 +18,27 @@ class Rust(Package):
maintainers("alecbcs")
+ # When adding a version of Rust you may need to add an additional version
+ # to rust-bootstrap as the minimum bootstrapping requirements increase.
+ # As a general rule of thumb Rust can be built with either the previous major
+ # version or the current version of the compiler as shown above.
+
+ # Pre-release versions.
+ # Note: If you plan to use these versions remember to install with
+ # `-n` to prevent Spack from failing due to failed checksums.
+ #
+ # $ spack install -n rust@pre-release-version
+ #
+ version("beta")
+ version("master", branch="master", submodules=True)
+ version("nightly")
+
+ # Stable versions.
+ version("1.73.0", sha256="96d62e6d1f2d21df7ac8acb3b9882411f9e7c7036173f7f2ede9e1f1f6b1bb3a")
+ version("1.70.0", sha256="b2bfae000b7a5040e4ec4bbc50a09f21548190cb7570b0ed77358368413bd27c")
+ version("1.65.0", sha256="5828bb67f677eabf8c384020582b0ce7af884e1c84389484f7f8d00dd82c0038")
+ version("1.60.0", sha256="20ca826d1cf674daf8e22c4f8c4b9743af07973211c839b85839742314c838b7")
+
# Core dependencies
depends_on("cmake@3.13.4:", type="build")
depends_on("curl+nghttp2")
@@ -41,26 +62,7 @@ class Rust(Package):
depends_on("rust-bootstrap@1.59:1.60", type="build", when="@1.60")
depends_on("rust-bootstrap@1.64:1.65", type="build", when="@1.65")
depends_on("rust-bootstrap@1.69:1.70", type="build", when="@1.70")
-
- # When adding a version of Rust you may need to add an additional version
- # to rust-bootstrap as the minimum bootstrapping requirements increase.
- # As a general rule of thumb Rust can be built with either the previous major
- # version or the current version of the compiler as shown above.
-
- # Pre-release versions.
- # Note: If you plan to use these versions remember to install with
- # `-n` to prevent Spack from failing due to failed checksums.
- #
- # $ spack install -n rust@pre-release-version
- #
- version("beta")
- version("master", branch="master", submodules=True)
- version("nightly")
-
- # Stable versions.
- version("1.70.0", sha256="b2bfae000b7a5040e4ec4bbc50a09f21548190cb7570b0ed77358368413bd27c")
- version("1.65.0", sha256="5828bb67f677eabf8c384020582b0ce7af884e1c84389484f7f8d00dd82c0038")
- version("1.60.0", sha256="20ca826d1cf674daf8e22c4f8c4b9743af07973211c839b85839742314c838b7")
+ depends_on("rust-bootstrap@1.72:1.73", type="build", when="@1.73")
variant(
"analysis",
@@ -87,14 +89,41 @@ def determine_version(csl, exe):
match = re.match(r"rustc (\S+)", output)
return match.group(1) if match else None
+ def setup_dependent_package(self, module, dependent_spec):
+ module.cargo = Executable(os.path.join(self.spec.prefix.bin, "cargo"))
+
def setup_build_environment(self, env):
# Manually inject the path of ar for build.
ar = which("ar", required=True)
env.set("AR", ar.path)
- # Manually inject the path of openssl's certs for build.
- certs = join_path(self.spec["openssl"].prefix, "etc/openssl/cert.pem")
- env.set("CARGO_HTTP_CAINFO", certs)
+ # Manually inject the path of openssl's certs for build
+ # if certs are present on system via Spack or via external
+ # openssl.
+ def get_test_path(p):
+ certs = join_path(p, "cert.pem")
+ if os.path.exists(certs):
+ return certs
+ return None
+
+ # find certs, don't set if no file is found in case
+ # ca-certificates isn't installed
+ certs = None
+ openssl = self.spec["openssl"]
+ if openssl.external:
+ try:
+ output = which("openssl", required=True)("version", "-d", output=str, error=str)
+ openssl_dir = re.match('OPENSSLDIR: "([^"]+)"', output)
+ if openssl_dir:
+ certs = get_test_path(openssl_dir.group(1))
+ except ProcessError:
+ pass
+
+ if certs is None:
+ certs = get_test_path(join_path(openssl.prefix, "etc/openssl"))
+
+ if certs is not None:
+ env.set("CARGO_HTTP_CAINFO", certs)
# Set CARGO_HOME.
env.set("CARGO_HOME", os.path.join(os.path.dirname(self.stage.path), ".cargo"))
diff --git a/var/spack/repos/builtin/packages/scafacos/package.py b/var/spack/repos/builtin/packages/scafacos/package.py
new file mode 100644
index 00000000000000..84f73ac0e1339c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scafacos/package.py
@@ -0,0 +1,40 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Scafacos(AutotoolsPackage):
+ """ScaFaCoS is a library of scalable fast coulomb solvers."""
+
+ homepage = "http://www.scafacos.de/"
+ url = "https://github.com/scafacos/scafacos/releases/download/v1.0.4/scafacos-1.0.4.tar.gz"
+
+ maintainers("hmenke")
+
+ license("GPL-3.0-or-later OR LGPL-3.0-or-later")
+
+ version("1.0.4", sha256="6634c4202e825e771d1dd75bbe9cac5cee41136c87653fde98fbd634681c1be6")
+ version("1.0.3", sha256="d3579f4cddb10a562722c190c2452ebc455592d44f6dbde8f155849ba6e2b3d0")
+ version("1.0.2", sha256="158078665e48e28fd12b7895063db056cee5d135423fc36802e39c9160102b97")
+ version("1.0.1", sha256="2b125f313795c81b0e87eb920082e91addf94c17444f9486d979e691aaded99b")
+ version("1.0.0", sha256="cc5762edbecfec0323126b6a6a535dcc3e134fcfef4b00f63eb05fae15244a96")
+
+ depends_on("fftw")
+ depends_on("file")
+ depends_on("gmp")
+ depends_on("gsl")
+ depends_on("mpi")
+ depends_on("pfft")
+ depends_on("pnfft")
+
+ def configure_args(self):
+ args = [
+ "--disable-doc",
+ "--enable-fcs-solvers=direct,ewald,fmm,p3m",
+ "FC={0}".format(self.spec["mpi"].mpifc),
+ "F77={0}".format(self.spec["mpi"].mpif77),
+ ]
+ return args
diff --git a/var/spack/repos/builtin/packages/scc/package.py b/var/spack/repos/builtin/packages/scc/package.py
new file mode 100644
index 00000000000000..b0eae1b3c868f4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scc/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class Scc(GoPackage):
+ """
+ Sloc, Cloc and Code: scc is a very fast accurate code counter with
+ complexity calculations and COCOMO estimates written in pure Go.
+ """
+
+ homepage = "https://github.com/boyter/scc"
+ url = "https://github.com/boyter/scc/archive/refs/tags/v3.1.0.tar.gz"
+ git = "https://github.com/boyter/scc.git"
+
+ license("MIT")
+
+ version("3.1.0", sha256="bffea99c7f178bc48bfba3c64397d53a20a751dfc78221d347aabdce3422fd20")
diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py
index ba9ac487e521e1..316173a73e297d 100644
--- a/var/spack/repos/builtin/packages/scorep/package.py
+++ b/var/spack/repos/builtin/packages/scorep/package.py
@@ -16,6 +16,8 @@ class Scorep(AutotoolsPackage):
url = "https://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-7.1/scorep-7.1.tar.gz"
maintainers("wrwilliams")
+ version("8.3", sha256="76c914e6319221c059234597a3bc53da788ed679179ac99c147284dcefb1574a")
+ # version 8.2 was immediately superseded before it hit Spack
version("8.1", sha256="3a40b481fce610871ddf6bdfb88a6d06b9e5eb38c6080faac6d5e44990060a37")
version("8.0", sha256="4c0f34f20999f92ebe6ca1ff706d0846b8ce6cd537ffbedb49dfaef0faa66311")
version("7.1", sha256="98dea497982001fb82da3429ca55669b2917a0858c71abe2cfe7cd113381f1f7")
@@ -93,8 +95,10 @@ def url_for_version(self, version):
# SCOREP 8
depends_on("binutils", type="link", when="@8:")
depends_on("otf2@3:", when="@8:")
- depends_on("cubew@4.8:", when="@8:")
- depends_on("cubelib@4.8:", when="@8:")
+ depends_on("cubew@4.8.2:", when="@8.3:")
+ depends_on("cubelib@4.8.2:", when="@8.3:")
+ depends_on("cubew@4.8:", when="@8:8.2")
+ depends_on("cubelib@4.8:", when="@8:8.2")
# fall through to Score-P 7's OPARI2, no new release
# SCOREP 7
depends_on("otf2@2.3:2.3.99", when="@7.0:7")
diff --git a/var/spack/repos/builtin/packages/seacas/package.py b/var/spack/repos/builtin/packages/seacas/package.py
index 7a7d48c4087cef..44b4b6a6034cce 100644
--- a/var/spack/repos/builtin/packages/seacas/package.py
+++ b/var/spack/repos/builtin/packages/seacas/package.py
@@ -31,6 +31,9 @@ class Seacas(CMakePackage):
# ###################### Versions ##########################
version("master", branch="master")
+ version(
+ "2023-10-24", sha256="f93bf0327329c302ed3feb6adf2e3968f01ec325084a457b2c2dbbf6c4f751a2"
+ )
version(
"2023-05-30", sha256="3dd982841854466820a3902163ad1cf1b3fbab65ed7542456d328f2d1a5373c1"
)
@@ -132,7 +135,8 @@ class Seacas(CMakePackage):
variant("x11", default=True, description="Compile with X11")
# ###################### Dependencies ##########################
- depends_on("cmake@3.17:", type="build")
+ depends_on("cmake@3.22:", when="@2023-10-24:", type="build")
+ depends_on("cmake@3.17:", when="@:2023-05-30", type="build")
depends_on("mpi", when="+mpi")
# Always depends on netcdf-c
@@ -140,9 +144,10 @@ class Seacas(CMakePackage):
depends_on("netcdf-c@4.8.0:~mpi", when="~mpi")
depends_on("hdf5+hl~mpi", when="~mpi")
+ depends_on("fmt@10.1.0", when="@2023-10-24:")
+ depends_on("fmt@9.1.0", when="@2022-10-14:2023-05-30")
depends_on("fmt@8.1.0:9", when="@2022-03-04:2022-05-16")
- depends_on("fmt@9.1.0", when="@2022-10-14")
- depends_on("fmt@9.1.0:", when="@2023-05-30")
+
depends_on("matio", when="+matio")
depends_on("libx11", when="+x11")
diff --git a/var/spack/repos/builtin/packages/selalib/package.py b/var/spack/repos/builtin/packages/selalib/package.py
index 23b56afc217f35..d36a4d20538ca3 100644
--- a/var/spack/repos/builtin/packages/selalib/package.py
+++ b/var/spack/repos/builtin/packages/selalib/package.py
@@ -39,10 +39,12 @@ class Selalib(CMakePackage):
depends_on("fgsl")
depends_on("git", type=("build", "run", "test"))
depends_on("hdf5+fortran+cxx")
+ depends_on("lapack", when="~mpi")
with when("+mpi"):
depends_on("mpi")
depends_on("fftw+mpi")
depends_on("hdf5+mpi")
+ depends_on("scalapack")
depends_on("python@3.0.0:", type=("build"))
# beware: compiling w/ zfp may throw type mismatch errors
depends_on("zfp+fortran", when="+compression")
diff --git a/var/spack/repos/builtin/packages/sherpa/package.py b/var/spack/repos/builtin/packages/sherpa/package.py
index 7ae8efc2e45997..38fa6277f59a09 100644
--- a/var/spack/repos/builtin/packages/sherpa/package.py
+++ b/var/spack/repos/builtin/packages/sherpa/package.py
@@ -217,7 +217,7 @@ def configure_args(self):
args.extend(self.enable_or_disable("pythia"))
hepmc_root = lambda x: self.spec["hepmc"].prefix
args.extend(self.enable_or_disable("hepmc2", activation_value=hepmc_root))
- if self.spec.satisfies("@2.2.13:"):
+ if self.spec.satisfies("@3:"):
args.extend(self.enable_or_disable("hepmc3", activation_value="prefix"))
args.extend(self.enable_or_disable("rivet", activation_value="prefix"))
args.extend(self.enable_or_disable("lhapdf", activation_value="prefix"))
diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py
index 4b4a0194e867ea..2678b0d7c56021 100644
--- a/var/spack/repos/builtin/packages/silo/package.py
+++ b/var/spack/repos/builtin/packages/silo/package.py
@@ -111,7 +111,6 @@ def flag_handler(self, name, flags):
if "+hdf5" in spec:
if spec["hdf5"].satisfies("~shared"):
flags.append("-ldl")
- flags.append(spec["readline"].libs.search_flags)
if "+pic" in spec:
if name == "cflags":
diff --git a/var/spack/repos/builtin/packages/sirius/package.py b/var/spack/repos/builtin/packages/sirius/package.py
index 20f5a4246d420c..fd73c669ec9a3c 100644
--- a/var/spack/repos/builtin/packages/sirius/package.py
+++ b/var/spack/repos/builtin/packages/sirius/package.py
@@ -21,6 +21,7 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage):
version("develop", branch="develop")
version("master", branch="master")
+ version("7.5.0", sha256="c583f88ffc02e9acac24e786bc35c7c32066882d2f70a1e0c14b5780b510365d")
version("7.4.3", sha256="015679a60a39fa750c5d1bd8fb1ce73945524bef561270d8a171ea2fd4687fec")
version("7.4.0", sha256="f9360a695a1e786d8cb9d6702c82dd95144a530c4fa7e8115791c7d1e92b020b")
version("7.3.2", sha256="a256508de6b344345c295ad8642dbb260c4753cd87cc3dd192605c33542955d7")
@@ -79,17 +80,11 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage):
variant("shared", default=True, description="Build shared libraries")
variant("openmp", default=True, description="Build with OpenMP support")
- variant(
- "boost_filesystem",
- default=False,
- description="Use Boost filesystem for self-consistent field method "
- "mini-app. Only required when the compiler does not "
- "support std::experimental::filesystem nor std::filesystem",
- )
variant("fortran", default=False, description="Build Fortran bindings")
variant("python", default=False, description="Build Python bindings")
variant("memory_pool", default=True, description="Build with memory pool")
variant("elpa", default=False, description="Use ELPA")
+ variant("dlaf", default=False, when="@7.5.0:", description="Use DLA-Future")
variant("vdwxc", default=False, description="Enable libvdwxc support")
variant("scalapack", default=False, description="Enable scalapack support")
variant("magma", default=False, description="Enable MAGMA support")
@@ -107,6 +102,7 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage):
variant(
"profiler", default=True, description="Use internal profiler to measure execution time"
)
+ variant("nvtx", default=False, description="Use NVTX profiler")
depends_on("cmake@3.23:", type="build")
depends_on("mpi")
@@ -133,7 +129,6 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage):
extends("python", when="+python")
depends_on("magma", when="+magma")
- depends_on("boost cxxstd=14 +filesystem", when="+boost_filesystem")
depends_on("spfft@0.9.13:", when="@7.0.1:")
depends_on("spfft+single_precision", when="+single_precision ^spfft")
@@ -154,13 +149,18 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage):
depends_on("scalapack", when="+scalapack")
+ with when("+dlaf"):
+ depends_on("dla-future@0.3.0:")
+ depends_on("dla-future +scalapack", when="+scalapack")
+ depends_on("dla-future +cuda", when="+cuda")
+ depends_on("dla-future +rocm", when="+rocm")
+
depends_on("rocblas", when="+rocm")
depends_on("rocsolver", when="@7.5.0: +rocm")
# FindHIP cmake script only works for < 4.1
depends_on("hip@:4.0", when="@:7.2.0 +rocm")
- conflicts("+boost_filesystem", when="~apps")
conflicts("^libxc@5.0.0") # known to produce incorrect results
conflicts("+single_precision", when="@:7.2.4")
conflicts("+scalapack", when="^cray-libsci")
@@ -203,15 +203,17 @@ def cmake_args(self):
self.define_from_variant(cm_label + "USE_VDWXC", "vdwxc"),
self.define_from_variant(cm_label + "USE_MEMORY_POOL", "memory_pool"),
self.define_from_variant(cm_label + "USE_SCALAPACK", "scalapack"),
+ self.define_from_variant(cm_label + "USE_DLAF", "dlaf"),
self.define_from_variant(cm_label + "CREATE_FORTRAN_BINDINGS", "fortran"),
self.define_from_variant(cm_label + "CREATE_PYTHON_MODULE", "python"),
self.define_from_variant(cm_label + "USE_CUDA", "cuda"),
self.define_from_variant(cm_label + "USE_ROCM", "rocm"),
self.define_from_variant(cm_label + "BUILD_APPS", "apps"),
- self.define_from_variant(cm_label + "BUILD_SHARED_LIBS", "shared"),
self.define_from_variant(cm_label + "USE_FP32", "single_precision"),
self.define_from_variant(cm_label + "USE_PROFILER", "profiler"),
+ self.define_from_variant(cm_label + "USE_NVTX", "nvtx"),
self.define_from_variant(cm_label + "USE_WANNIER90", "wannier90"),
+ self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
self.define_from_variant("BUILD_TESTING", "tests"),
]
@@ -254,7 +256,7 @@ def cmake_args(self):
cuda_arch = spec.variants["cuda_arch"].value
if cuda_arch[0] != "none":
# Make SIRIUS handle it
- if "@6:7.4.3" in spec:
+ if "@:7.4.3" in spec:
args.append(self.define("CMAKE_CUDA_ARCH", ";".join(cuda_arch)))
else:
args.append(self.define("CMAKE_CUDA_ARCHITECTURES", ";".join(cuda_arch)))
diff --git a/var/spack/repos/builtin/packages/sleef/package.py b/var/spack/repos/builtin/packages/sleef/package.py
index 663ffff3def032..43c50e2d3c8156 100644
--- a/var/spack/repos/builtin/packages/sleef/package.py
+++ b/var/spack/repos/builtin/packages/sleef/package.py
@@ -14,9 +14,7 @@ class Sleef(CMakePackage):
git = "https://github.com/shibatch/sleef.git"
version("master", branch="master")
- version(
- "3.5.1_2020-12-22", commit="e0a003ee838b75d11763aa9c3ef17bf71a725bff"
- ) # py-torch@1.8:1.9
+ version("3.5.1_2020-12-22", commit="e0a003ee838b75d11763aa9c3ef17bf71a725bff") # py-torch@1.8:
version(
"3.5.1",
sha256="415ee9b1bcc5816989d3d4d92afd0cd3f9ee89cbd5a33eb008e69751e40438ab",
@@ -40,17 +38,24 @@ class Sleef(CMakePackage):
) # py-torch@0.4.1:1.0
version("3.2", sha256="3130c5966e204e6d6a3ace81e543d12b5b21f60897f1c185bfa587c1bd77bee2")
- # Some versions have ICE when building RelWithDebInfo with GCC 7
- # See https://github.com/shibatch/sleef/issues/234
- # See https://github.com/pytorch/pytorch/issues/26892
- # See https://github.com/pytorch/pytorch/pull/26993
+ # https://github.com/shibatch/sleef/issues/474
+ conflicts("%apple-clang@15:")
generator("ninja")
depends_on("cmake@3.4.3:", type="build")
+ # # https://github.com/shibatch/sleef/issues/475
+ # depends_on("fftw-api")
+ # depends_on("mpfr")
+ # depends_on("openssl")
+
+ # # https://github.com/shibatch/sleef/issues/458
+ # conflicts("^mpfr@4.2:")
+
def cmake_args(self):
+ # https://salsa.debian.org/science-team/sleef/-/blob/master/debian/rules
return [
- self.define("DISABLE_FFTW", True),
- self.define("DISABLE_MPFR", True),
- self.define("DISABLE_SSL", True),
+ self.define("BUILD_DFT", False),
+ self.define("SLEEF_TEST_ALL_IUT", True),
+ self.define("BUILD_TESTS", False),
]
diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py
index 979a252dd8ae3a..5a6e3e3b2f45b5 100644
--- a/var/spack/repos/builtin/packages/slepc/package.py
+++ b/var/spack/repos/builtin/packages/slepc/package.py
@@ -22,6 +22,7 @@ class Slepc(Package, CudaPackage, ROCmPackage):
test_requires_compiler = True
version("main", branch="main")
+ version("3.20.1", sha256="5a36b664895881d3858d0644f56bf7bb922bdab70d732fa11cbf6442fec11806")
version("3.20.0", sha256="780c50260a9bc9b72776cb920774800c73832370938f1d48c2ea5c66d31b7380")
version("3.19.2", sha256="ca7ed906795971fbe35f08ee251a26b86a4442a18609b878cba00835c9d62034")
version("3.19.1", sha256="280737e9ef762d7f0079ad3ad29913215c799ebf124651c723c1972f71fbc0db")
diff --git a/var/spack/repos/builtin/packages/slurm-drmaa/package.py b/var/spack/repos/builtin/packages/slurm-drmaa/package.py
index 100b328b9a4dfc..012fc6dcd1b531 100644
--- a/var/spack/repos/builtin/packages/slurm-drmaa/package.py
+++ b/var/spack/repos/builtin/packages/slurm-drmaa/package.py
@@ -10,7 +10,7 @@ class SlurmDrmaa(AutotoolsPackage):
"""
DRMAA for Slurm is an implementation of Open Grid Forum DRMAA 1.0 (Distributed
Resource Management Application API) specification for submission and control of
- jobs to SLURM. Using DRMAA, grid applications builders, portal developers and
+ jobs to Slurm. Using DRMAA, grid applications builders, portal developers and
ISVs can use the same high-level API to link their software with different
cluster/resource management systems.
"""
diff --git a/var/spack/repos/builtin/packages/slurm/package.py b/var/spack/repos/builtin/packages/slurm/package.py
index 61214702b08710..aa4f126018bf39 100644
--- a/var/spack/repos/builtin/packages/slurm/package.py
+++ b/var/spack/repos/builtin/packages/slurm/package.py
@@ -129,6 +129,10 @@ class Slurm(AutotoolsPackage):
description="Set system configuration path (possibly /etc/slurm)",
)
variant("restd", default=False, description="Enable the slurmrestd server")
+ variant("nvml", default=False, description="Enable NVML autodetection")
+ variant("cgroup", default=False, description="Enable cgroup plugin")
+ variant("pam", default=False, description="Enable PAM support")
+ variant("rsmi", default=False, description="Enable ROCm SMI support")
# TODO: add variant for BG/Q and Cray support
@@ -156,6 +160,11 @@ class Slurm(AutotoolsPackage):
depends_on("libyaml", when="+restd")
depends_on("libjwt", when="+restd")
+ depends_on("cuda", when="+nvml")
+ depends_on("dbus", when="+cgroup")
+ depends_on("linux-pam", when="+pam")
+ depends_on("rocm-smi-lib", when="+rsmi")
+
executables = ["^srun$", "^salloc$"]
@classmethod
@@ -213,6 +222,15 @@ def configure_args(self):
else:
args.append("--without-pmix")
+ if spec.satisfies("+nvml"):
+ args.append(f"--with-nvml={spec['cuda'].prefix}")
+
+ if spec.satisfies("+pam"):
+ args.append(f"--with-pam_dir={spec['linux-pam'].prefix}")
+
+ if spec.satisfies("+rsmi"):
+ args.append(f"--with-rsmi={spec['rocm-smi-lib'].prefix}")
+
sysconfdir = spec.variants["sysconfdir"].value
if sysconfdir != "PREFIX/etc":
args.append("--sysconfdir={0}".format(sysconfdir))
diff --git a/var/spack/repos/builtin/packages/smee-client/package.py b/var/spack/repos/builtin/packages/smee-client/package.py
new file mode 100644
index 00000000000000..efb0809dda1de9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/smee-client/package.py
@@ -0,0 +1,42 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+from spack.util.executable import ProcessError
+
+
+class SmeeClient(Package):
+ """
+ Client and CLI for smee.io, a service that delivers webhooks to your
+ local development environment.
+ """
+
+ homepage = "https://smee.io"
+ url = "https://github.com/probot/smee-client/archive/refs/tags/v1.2.5.tar.gz"
+
+ maintainers("alecbcs")
+
+ license("ISC")
+
+ version("1.2.3", sha256="b9afff843fc7a3c2b5d6659acf45357b5db7a739243b99f6d18a9b110981a328")
+
+ depends_on("node-js", type=("build", "link", "run"))
+ depends_on("npm", type="build")
+ depends_on("typescript", type="build")
+
+ phases = ["build", "install"]
+
+ def build(self, spec, prefix):
+ npm = which("npm", required=True)
+
+ # Allow tsc to fail with typing "errors" which don't affect results
+ output = npm("run", "build", output=str, error=str, fail_on_error=False)
+ if npm.returncode not in (0, 2):
+ raise ProcessError(output)
+
+ def install(self, spec, prefix):
+ npm = which("npm", required=True)
+ npm("install", "--global", f"--prefix={prefix}")
diff --git a/var/spack/repos/builtin/packages/spectre/package.py b/var/spack/repos/builtin/packages/spectre/package.py
index 7b8bc9dfebb1be..df0ff30acd88db 100644
--- a/var/spack/repos/builtin/packages/spectre/package.py
+++ b/var/spack/repos/builtin/packages/spectre/package.py
@@ -29,6 +29,9 @@ class Spectre(CMakePackage):
generator("ninja")
version("develop", branch="develop")
+ version(
+ "2023.10.11", sha256="f25d17bc80cc49ebdd81726326701fe9ecd2b6705d86e6e3d48d9e4a458c8aff"
+ )
version(
"2023.09.07", sha256="2375117df09d99a2716d445ff51d151422467bd42cd38b5f1177d2d40cb90916"
)
diff --git a/var/spack/repos/builtin/packages/sperr/package.py b/var/spack/repos/builtin/packages/sperr/package.py
index 131a6a7fdadc77..5def42991f7d5b 100644
--- a/var/spack/repos/builtin/packages/sperr/package.py
+++ b/var/spack/repos/builtin/packages/sperr/package.py
@@ -12,23 +12,32 @@ class Sperr(CMakePackage):
# Package info
homepage = "https://github.com/NCAR/SPERR"
- url = "https://github.com/NCAR/SPERR/archive/refs/tags/v0.6.2.tar.gz"
+ url = "https://github.com/NCAR/SPERR/archive/refs/tags/v0.7.1.tar.gz"
git = "https://github.com/NCAR/SPERR.git"
maintainers("shaomeng", "robertu94")
# Versions
version("main", branch="main")
+ version("0.7.1", sha256="1c3f46200be365427d1f57f5873f1b0b6dbcd297de4603a47a7fa3f41b273d79")
version("0.6.2", sha256="d986997e2d79a1f27146ad02c623359976a1e72a1ab0d957e128d430cda3782d")
version("0.5", sha256="20ad48c0e7599d3e5866e024d0c49648eb817f72ad5459f5468122cf14a97171")
- depends_on("git", type="build")
- depends_on("zstd", type=("build", "link"), when="+zstd")
- depends_on("pkgconfig", type=("build"), when="+zstd")
-
+ # Variants
variant("shared", description="build shared libaries", default=True)
- variant("zstd", description="use zstd for more compression", default=True)
- variant("openmp", description="use openmp in 3D inputs", default=True)
+ variant("openmp", description="use OpenMP in 3D inputs", default=True)
variant("utilities", description="build SPERR CLI utilities", default=True)
+ variant("zstd", description="use ZSTD for more compression", default=True, when="@:0.6.2")
+ variant(
+ "bundle_zstd",
+        description="Use SPERR bundled ZSTD. Keep it off in Spack builds.",
+ default=False,
+ when="@:0.6.2",
+ )
+
+    # Dependencies
+ depends_on("git", type="build")
+ depends_on("pkgconfig", type=("build"), when="+zstd")
+ depends_on("zstd", type=("build", "link"), when="@:0.6.2+zstd")
def cmake_args(self):
# ensure the compiler supports OpenMP if it is used
@@ -37,11 +46,11 @@ def cmake_args(self):
args = [
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
- self.define_from_variant("USE_ZSTD", "zstd"),
self.define_from_variant("USE_OMP", "openmp"),
self.define_from_variant("BUILD_CLI_UTILITIES", "utilities"),
+ self.define_from_variant("USE_ZSTD", "zstd"),
+ self.define_from_variant("USE_BUNDLED_ZSTD", "bundle_zstd"),
"-DSPERR_PREFER_RPATH=OFF",
- "-DUSE_BUNDLED_ZSTD=OFF",
"-DBUILD_UNIT_TESTS=OFF",
]
return args
diff --git a/var/spack/repos/builtin/packages/spglib/package.py b/var/spack/repos/builtin/packages/spglib/package.py
index 2715f3f3c0295b..1d00091c13158c 100644
--- a/var/spack/repos/builtin/packages/spglib/package.py
+++ b/var/spack/repos/builtin/packages/spglib/package.py
@@ -48,10 +48,15 @@ class Spglib(CMakePackage):
version("1.10.0", sha256="117fff308731784bea2ddaf3d076f0ecbf3981b31ea1c1bfd5ce4f057a5325b1")
variant("openmp", default=True, description="Build with OpenMP support", when="@1.16.2:")
+ variant("fortran", default=True, description="Build Fortran interface", when="@1.16.4:")
@property
def libs(self):
return find_libraries("libsymspg", root=self.prefix, shared=True, recursive=True)
def cmake_args(self):
- return [self.define_from_variant("USE_OMP", "openmp")]
+ pfx = "SPGLIB_" if self.spec.satisfies("@2.1.0:") else ""
+ return [
+ self.define_from_variant(pfx + "USE_OMP", "openmp"),
+ self.define_from_variant(pfx + "WITH_Fortran", "fortran"),
+ ]
diff --git a/var/spack/repos/builtin/packages/squashfuse/package.py b/var/spack/repos/builtin/packages/squashfuse/package.py
index 939b738c7d99a4..85b7c03c8a8a42 100644
--- a/var/spack/repos/builtin/packages/squashfuse/package.py
+++ b/var/spack/repos/builtin/packages/squashfuse/package.py
@@ -16,6 +16,7 @@ class Squashfuse(AutotoolsPackage):
maintainers("haampie")
version("master", branch="master")
+ version("0.5.0", sha256="d7602c7a3b1d0512764547d27cb8cc99d1b21181e1c9819e76461ee96c2ab4d9")
version("0.1.104", sha256="aa52460559e0d0b1753f6b1af5c68cfb777ca5a13913285e93f4f9b7aa894b3a")
version("0.1.103", sha256="42d4dfd17ed186745117cfd427023eb81effff3832bab09067823492b6b982e7")
diff --git a/var/spack/repos/builtin/packages/sst-core/package.py b/var/spack/repos/builtin/packages/sst-core/package.py
index 860d30d0b38289..891fbf5d2d5c97 100644
--- a/var/spack/repos/builtin/packages/sst-core/package.py
+++ b/var/spack/repos/builtin/packages/sst-core/package.py
@@ -14,10 +14,11 @@ class SstCore(AutotoolsPackage):
homepage = "https://github.com/sstsimulator"
git = "https://github.com/sstsimulator/sst-core.git"
- url = "https://github.com/sstsimulator/sst-core/releases/download/v13.0.0_Final/sstcore-13.0.0.tar.gz"
+ url = "https://github.com/sstsimulator/sst-core/releases/download/v13.1.0_Final/sstcore-13.1.0.tar.gz"
maintainers("berquist", "naromero77")
+ version("13.1.0", sha256="0a44c62ee0b18a20a3cb089f4e0d43e293dc5adc6c3fa7639d40986cf5b9854c")
version("13.0.0", sha256="c9d868dcdd75d59bef7c73146709a3b2a52a78f0df5ec2c3dc9f21434c51d935")
version("12.1.0", sha256="f7530226643439678e2f4183ec4dbadf7750411bdaa44d9443887f81feb97574")
version("12.0.1", sha256="8662a778354e587e55b909725943dd5bb01d55121b1abc1a384a4eea161e9f5a")
@@ -61,7 +62,7 @@ class SstCore(AutotoolsPackage):
variant("preview", default=False, description="Preview build with deprecated features removed")
variant("profile", default=False, description="Enable performance profiling of core features")
- depends_on("python", type=("build", "run", "link"))
+ depends_on("python@:3.11", type=("build", "run", "link"))
depends_on("mpi", when="+pdes_mpi")
depends_on("zoltan", when="+zoltan")
depends_on("hdf5", when="+hdf5")
diff --git a/var/spack/repos/builtin/packages/sst-elements/package.py b/var/spack/repos/builtin/packages/sst-elements/package.py
index 49677daf049f7b..f5f9e7621d0f7a 100644
--- a/var/spack/repos/builtin/packages/sst-elements/package.py
+++ b/var/spack/repos/builtin/packages/sst-elements/package.py
@@ -14,12 +14,14 @@ class SstElements(AutotoolsPackage):
homepage = "https://github.com/sstsimulator"
git = "https://github.com/sstsimulator/sst-elements.git"
- url = "https://github.com/sstsimulator/sst-elements/releases/download/v13.0.0_Final/sstelements-13.0.0.tar.gz"
+ url = "https://github.com/sstsimulator/sst-elements/releases/download/v13.1.0_Final/sstelements-13.1.0.tar.gz"
maintainers("berquist", "naromero77")
+ version("13.1.0", sha256="ebda6ee5af858192dff8a7faf3125010001d5c439beec22afe5b9828a74adf1a")
version("13.0.0", sha256="1f6f6b403a8c1b22a27cdf2943c9e505825ee14866891e7bc944d4471b7b0321")
version("12.1.0", sha256="77948cf8e1f8bf8d238d475cea111c9a72b307cbf403cb429ef0426d0cf708a4")
+ version("12.0.1", sha256="fe6bd9e2c14ffca77cfb31ee39410d0df3a353524b6a5a35270104dd25836e48")
version("12.0.0", sha256="d3caacf8ba621a644151e1670dfc0fd8e91b45a583699998f94312897b0eca26")
version("11.1.0", sha256="2dd20ecf2e0896b59eb9d65d31ef928daa0188239016216f4ad11b7e6447ca0b")
version("11.0.0", sha256="bf265cb25afc041b74422cc5cddc8e3ae1e7c3efa3e37e699dac4e3f7629be6e")
@@ -40,7 +42,6 @@ class SstElements(AutotoolsPackage):
# Contact SST developers (https://github.com/sstsimulator)
# if your use case requires support for:
# - balar
- # - OTF2
# - stake (riscv simulator)
variant("pin", default=False, description="Enable the Ariel CPU model")
@@ -56,7 +57,7 @@ class SstElements(AutotoolsPackage):
variant("otf", default=False, description="Build with OTF")
variant("otf2", default=False, description="Build with OTF2")
- depends_on("python", type=("build", "run"))
+ depends_on("python@:3.11", type=("build", "run"))
depends_on("sst-core")
depends_on("sst-core@develop", when="@develop")
depends_on("sst-core@master", when="@master")
@@ -85,7 +86,6 @@ class SstElements(AutotoolsPackage):
conflicts("+dumpi", msg="Dumpi not currently supported, contact SST Developers for help")
conflicts("+otf", msg="OTF not currently supported, contact SST Developers for help")
- conflicts("+otf2", msg="OTF2 not currently supported, contact SST Developers for help")
conflicts(
"~dramsim2",
when="+hybridsim",
diff --git a/var/spack/repos/builtin/packages/sst-macro/package.py b/var/spack/repos/builtin/packages/sst-macro/package.py
index df129cfb4f7549..3f7c09bea7bf74 100644
--- a/var/spack/repos/builtin/packages/sst-macro/package.py
+++ b/var/spack/repos/builtin/packages/sst-macro/package.py
@@ -17,12 +17,14 @@ class SstMacro(AutotoolsPackage):
homepage = "https://github.com/sstsimulator"
git = "https://github.com/sstsimulator/sst-macro.git"
- url = "https://github.com/sstsimulator/sst-macro/releases/download/v13.0.0_Final/sstmacro-13.0.0.tar.gz"
+ url = "https://github.com/sstsimulator/sst-macro/releases/download/v13.1.0_Final/sstmacro-13.1.0.tar.gz"
maintainers("berquist")
+ version("13.1.0", sha256="022e39daae1067b56c0011dbe87e3234fee4587049fd53671e1ed6b23233f70e")
version("13.0.0", sha256="410dad4ac0c7a4c0e16c54da308b6c6b631112af18ae2c37585c8a14472987d4")
version("12.1.0", sha256="ee57e08acfd4b6429a0500d981d468ee6ded2638ec5abec7b47f172388b267f1")
+ version("12.0.1", sha256="1491a149f4554777a6c3aa62730b3cd1a24c43a8d3d7fb61edfb4fe5c773aed8")
version("12.0.0", sha256="259237a47cf341830ce3956cfadfd6e77ff1824da05da4a7b212fc5867ce64b2")
version("11.1.0", sha256="4b1226e75e2e99faa42b218461d85e8e17c1d4f333dd973e72a5dc052328d34c")
version("11.0.0", sha256="30367baed670b5b501320a068671556c9071286a0f0c478f9994a30d8fe5bdea")
@@ -50,10 +52,8 @@ class SstMacro(AutotoolsPackage):
depends_on("otf2", when="+otf2")
depends_on("llvm+clang@5:9", when="+skeletonizer")
depends_on("mpi", when="+pdes_mpi")
- depends_on("sst-core@develop", when="@develop+core")
- depends_on("sst-core@master", when="@master+core")
- depends_on("sst-core@10.1.0", when="@10.1.0+core")
- depends_on("sst-core@10.0.0", when="@10.0.0+core")
+ # Allow mismatch between core dependency version and current macro version.
+ depends_on("sst-core", when="+core")
depends_on("gettext")
variant("pdes_threads", default=True, description="Enable thread-parallel PDES simulation")
diff --git a/var/spack/repos/builtin/packages/starpu/package.py b/var/spack/repos/builtin/packages/starpu/package.py
index 45a2ca2cd2e09f..f448e3ae78a102 100644
--- a/var/spack/repos/builtin/packages/starpu/package.py
+++ b/var/spack/repos/builtin/packages/starpu/package.py
@@ -30,6 +30,7 @@ class Starpu(AutotoolsPackage):
maintainers("nfurmento", "sthibaul")
+ version("1.4.2", sha256="6c1fce80593a96d599881c1e9697a10e2072195b1c4c64a99528192b6715ddd6")
version("1.4.1", sha256="f023aa53da245a0f43944c3a13f63b4bfdf1324f3e66bf5cd367ce51e2044925")
version("1.4.0", sha256="5058127761a0604606a852fd6d20b07040d5fbd9f798c5383e49f336b4eeaca1")
version("1.3.11", sha256="580c6d98d49bacd2c666504c88890335d2689b6547d97f6a088d4ab4812df36e")
diff --git a/var/spack/repos/builtin/packages/stdexec/package.py b/var/spack/repos/builtin/packages/stdexec/package.py
index eeebe847d91bd0..ae6b2bfed39d34 100644
--- a/var/spack/repos/builtin/packages/stdexec/package.py
+++ b/var/spack/repos/builtin/packages/stdexec/package.py
@@ -14,6 +14,8 @@ class Stdexec(CMakePackage):
git = "https://github.com/NVIDIA/stdexec.git"
maintainers("msimberg", "aurianer")
+ license("Apache-2.0")
+
version("23.03", sha256="2c9dfb6e56a190543049d2300ccccd1b626f4bb82af5b607869c626886fadd15")
version("main", branch="main")
diff --git a/var/spack/repos/builtin/packages/strumpack/package.py b/var/spack/repos/builtin/packages/strumpack/package.py
index 15133630a80d59..a82b3784b49a32 100644
--- a/var/spack/repos/builtin/packages/strumpack/package.py
+++ b/var/spack/repos/builtin/packages/strumpack/package.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from platform import machine
+
from spack.package import *
from spack.util.environment import set_env
@@ -29,6 +31,7 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage):
test_requires_compiler = True
version("master", branch="master")
+ version("7.2.0", sha256="6988c00c3213f13e53d75fb474102358f4fecf07a4b4304b7123d86fdc784639")
version("7.1.3", sha256="c951f38ee7af20da3ff46429e38fcebd57fb6f12619b2c56040d6da5096abcb0")
version("7.1.2", sha256="262a0193fa1682d0eaa90363f739e0be7a778d5deeb80e4d4ae12446082a39cc")
version("7.1.1", sha256="56481a22955c2eeb40932777233fc227347743c75683d996cb598617dd2a8635")
@@ -172,7 +175,7 @@ def cmake_args(self):
if "%cce" in spec:
# Assume the proper Cray CCE module (cce) is loaded:
- craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()]
+ craylibs_path = env["CRAYLIBS_" + machine().upper()]
env.setdefault("LDFLAGS", "")
env["LDFLAGS"] += " -Wl,-rpath," + craylibs_path
diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py
index f424a523f115ab..b05d31f360e328 100644
--- a/var/spack/repos/builtin/packages/sundials/package.py
+++ b/var/spack/repos/builtin/packages/sundials/package.py
@@ -22,11 +22,13 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage):
test_requires_compiler = True
maintainers("balos1", "cswoodward", "gardner48")
+ license("BSD-3-Clause")
# ==========================================================================
# Versions
# ==========================================================================
version("develop", branch="develop")
+ version("6.6.2", sha256="08f8223a5561327e44c072e46faa7f665c0c0bc8cd7e45d23f486c3d24c65009")
version("6.6.1", sha256="21f71e4aef95b18f954c8bbdc90b62877443950533d595c68051ab768b76984b")
version("6.6.0", sha256="f90029b8da846c8faff5530fd1fa4847079188d040554f55c1d5d1e04743d29d")
version("6.5.1", sha256="4252303805171e4dbdd19a01e52c1dcfe0dafc599c3cfedb0a5c2ffb045a8a75")
@@ -292,6 +294,12 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage):
# fix issues with exported PETSc target(s) in SUNDIALSConfig.cmake
patch("sundials-v5.8.0.patch", when="@5.8.0")
+ def flag_handler(self, name, flags):
+ if name == "cxxflags":
+ if self.spec.satisfies("+sycl"):
+ flags.append("-fsycl")
+ return (flags, None, None)
+
# ==========================================================================
# SUNDIALS Settings
# ==========================================================================
diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py
index 7af573699337ed..6e46ba14307e4d 100644
--- a/var/spack/repos/builtin/packages/superlu-dist/package.py
+++ b/var/spack/repos/builtin/packages/superlu-dist/package.py
@@ -20,6 +20,8 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage):
version("develop", branch="master")
version("amd", branch="amd")
+ version("8.2.1", sha256="b77d065cafa6bc1a1dcc15bf23fd854f54b05762b165badcffc195835ad2bddf")
+ version("8.2.0", sha256="d53573e5a399b2b4ab1fcc36e8421c1b6fab36345c0af14f8fa20326e3365f1f")
version("8.1.2", sha256="7b16c442bb01ea8b298c0aab9a2584aa4615d09786aac968cb2f3118c058206b")
version("8.1.1", sha256="766d70b84ece79d88249fe10ff51d2a397a29f274d9fd1e4a4ac39179a9ef23f")
version("8.1.0", sha256="9308844b99a7e762d5704934f7e9f79daf158b0bfc582994303c2e0b31518b34")
@@ -53,14 +55,16 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage):
),
)
variant("shared", default=True, description="Build shared libraries")
+ variant("parmetis", default=True, description="Enable ParMETIS library")
depends_on("mpi")
depends_on("blas")
depends_on("lapack")
- depends_on("parmetis +int64", when="+int64")
- depends_on("metis@5: +int64", when="+int64")
- depends_on("parmetis ~int64", when="~int64")
- depends_on("metis@5: ~int64", when="~int64")
+ with when("+parmetis"):
+ depends_on("metis@5: +int64", when="+int64")
+ depends_on("parmetis +int64", when="+int64")
+ depends_on("metis@5: ~int64", when="~int64")
+ depends_on("parmetis ~int64", when="~int64")
depends_on("cmake@3.18.1:", type="build", when="@7.1.0:")
depends_on("hipblas", when="+rocm")
depends_on("rocsolver", when="+rocm")
@@ -93,13 +97,17 @@ def append_from_variant(*args):
append_define("TPL_LAPACK_LIBRARIES", spec["lapack"].libs)
append_define("TPL_ENABLE_LAPACKLIB", True)
append_define("USE_XSDK_DEFAULTS", True)
- append_define(
- "TPL_PARMETIS_LIBRARIES", [spec["parmetis"].libs.ld_flags, spec["metis"].libs.ld_flags]
- )
- append_define(
- "TPL_PARMETIS_INCLUDE_DIRS",
- [spec["parmetis"].prefix.include, spec["metis"].prefix.include],
- )
+
+ append_from_variant("TPL_ENABLE_PARMETISLIB", "parmetis")
+ if "+parmetis" in spec:
+ append_define(
+ "TPL_PARMETIS_LIBRARIES",
+ [spec["parmetis"].libs.ld_flags, spec["metis"].libs.ld_flags],
+ )
+ append_define(
+ "TPL_PARMETIS_INCLUDE_DIRS",
+ [spec["parmetis"].prefix.include, spec["metis"].prefix.include],
+ )
append_define("XSDK_INDEX_SIZE", "64" if "+int64" in spec else "32")
@@ -134,8 +142,6 @@ def flag_handler(self, name, flags):
flags = list(flags)
if name == "cxxflags":
flags.append(self.compiler.cxx11_flag)
- if name == "cflags" and "%pgi" not in self.spec:
- flags.append("-std=c99")
if (
name == "cflags"
and (self.spec.satisfies("%xl") or self.spec.satisfies("%xl_r"))
diff --git a/var/spack/repos/builtin/packages/survey/package.py b/var/spack/repos/builtin/packages/survey/package.py
index 79bac929665e8e..1fc4c550f0d37e 100644
--- a/var/spack/repos/builtin/packages/survey/package.py
+++ b/var/spack/repos/builtin/packages/survey/package.py
@@ -19,7 +19,7 @@ class Survey(CMakePackage):
available for tools inside current MPI implementations including:
MPICH, MVAPICH, MPT, and OpenMPI. It also supports multiple
architectures and has been tested on machines based on Intel,
- AMD, ARM, and IBM P8/9 processors and integrated GPUs.
+ AMD, ARM, and IBM P8/9 processors and integrated NVIDIA GPUs.
Survey is a licensed product with the source not openly available.
To access the survey source and build with spack please contact:
@@ -33,7 +33,8 @@ class Survey(CMakePackage):
maintainers("jgalarowicz")
version("master", branch="master")
- version("1.0.8", branch="1.0.8")
+ version("1.0.9", branch="1.0.9")
+ version("1.0.8", tag="1.0.8")
version("1.0.7", tag="1.0.7")
version("1.0.6", tag="1.0.6")
version("1.0.5", tag="1.0.5")
@@ -45,6 +46,7 @@ class Survey(CMakePackage):
version("1.0.0", branch="1.0.0")
variant("mpi", default=False, description="Enable mpi, build MPI data collector")
+ variant("debug", default=False, description="Build a debug survey version")
variant(
"tls_model",
@@ -61,9 +63,10 @@ class Survey(CMakePackage):
depends_on("libmonitor@2021.11.08+commrank", type=("build", "link", "run"), when="@1.0.3:")
depends_on("papi@5:", type=("build", "link", "run"))
- depends_on("gotcha@master", type=("build", "link", "run"))
- depends_on("llvm-openmp@9.0.0", type=("build", "link", "run"), when="@:1.0.2")
- depends_on("llvm-openmp@12.0.1", type=("build", "link", "run"), when="@1.0.3:")
+ depends_on("gotcha@master", type=("build", "link"), when="@:1.0.7")
+ depends_on("gotcha@1.0.4", type=("build", "link"), when="@1.0.8:")
+ depends_on("llvm-openmp@9.0.0", type=("build", "link"), when="@:1.0.2")
+ depends_on("llvm-openmp@12.0.1", type=("build", "link"), when="@1.0.3:")
# MPI Installation
depends_on("mpi", when="+mpi")
@@ -81,6 +84,10 @@ class Survey(CMakePackage):
depends_on("py-more-itertools", type=("build", "run"), when="@1.0.4:")
depends_on("py-versioneer", type=("build", "run"), when="@1.0.5:")
depends_on("py-filelock", type=("build", "run"), when="@1.0.7:")
+ depends_on("py-zipp", type=("build", "run"), when="@1.0.7:")
+ depends_on("py-humanize", type=("build", "run"), when="@1.0.8:")
+ depends_on("py-importlib-resources", type=("build", "run"), when="@1.0.8:")
+ depends_on("py-gitpython", type=("build", "run"), when="@1.0.9:")
extends("python")
@@ -117,6 +124,11 @@ def cmake_args(self):
mpi_options = self.get_mpi_cmake_options(spec)
cmake_args.extend(mpi_options)
+ if "+debug" in spec:
+ cmake_args.append("-DCMAKE_C_FLAGS=-g -O2")
+ cmake_args.append("-DCMAKE_CXX_FLAGS=-g -O2")
+ cmake_args.append("-DCMAKE_BUILD_TYPE=Custom")
+
return cmake_args
def setup_run_environment(self, env):
diff --git a/var/spack/repos/builtin/packages/sys-sage/package.py b/var/spack/repos/builtin/packages/sys-sage/package.py
index 9670e01aa98b5c..0fb153bf0e5d6f 100644
--- a/var/spack/repos/builtin/packages/sys-sage/package.py
+++ b/var/spack/repos/builtin/packages/sys-sage/package.py
@@ -3,23 +3,76 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
from spack.package import *
class SysSage(CMakePackage):
- """A library for capturing hadrware topology and attributes of compute systems."""
+ """A library for capturing hardware topology and attributes of compute systems."""
- homepage = "https://github.com/stepanvanecek/sys-sage"
- url = "https://github.com/stepanvanecek/sys-sage/archive/refs/tags/v0.1.1-alpha.2.tar.gz"
- git = "https://github.com/stepanvanecek/sys-sage.git"
+ homepage = "https://github.com/caps-tum/sys-sage"
+ url = "https://github.com/caps-tum/sys-sage/archive/refs/tags/v0.4.3.tar.gz"
+ git = "https://github.com/caps-tum/sys-sage.git"
maintainers("stepanvanecek")
+ version("0.4.3", sha256="e24313c4274576c1511a62e1b27c86a78cea7e4c123b8a53303cfc70de978faa")
version("master", branch="master")
- version(
- "0.1.1-alpha.2", sha256="991a77cf37b061a911c8566fd4486f914de4f4c8cdf39112ec8a32903450c178"
+ version("develop", branch="develop")
+
+ conflicts("%gcc@:7", msg="gcc can be used from version 8 and above")
+
+ variant(
+ "nvidia_mig",
+ default=False,
+ description="Build and install functionality regarding NVidia MIG(multi-instance GPU, "
+ "ampere or newer).",
+ )
+ variant(
+ "cpuinfo",
+ default=True,
+ description="Build and install functionality regarding Linux cpuinfo (only x86) -- "
+ "default ON.",
+ )
+ variant(
+ "build_data_sources",
+ default=False,
+ when="platform=linux",
+ description="Build all data sources (programs to collect data about the machine sys-sage "
+ "runs on).",
+ )
+ variant(
+ "ds_hwloc",
+ default=False,
+ description="Builds the hwloc data source for retrieving the CPU topology",
+ )
+ variant(
+ "ds_numa",
+ default=False,
+ when="platform=linux",
+ description="builds the caps-numa-benchmark. If turned on, includes Linux-specific "
+ "libraries.",
)
- depends_on("cmake@3.21:", type="build")
+ depends_on("cmake@3.22:", type="build")
depends_on("libxml2@2.9.13:")
+
+ depends_on("numactl", when="+build_data_sources platform=linux")
+ depends_on("numactl", when="+ds_numa platform=linux")
+ depends_on("hwloc@2.9:", when="+build_data_sources")
+ depends_on("hwloc@2.9:", when="+ds_hwloc")
+ depends_on("cuda", when="+nvidia_mig platform=linux")
+ depends_on("cuda", when="+build_data_sources platform=linux")
+
+ def cmake_args(self):
+ spec = self.spec
+ args = []
+ args.append(self.define_from_variant("NVIDIA_MIG", "nvidia_mig"))
+ if "+cpuinfo" in spec and spec.target == "x86_64" and spec.platform == "linux":
+ args.append(self.define("CPUINFO", True))
+ else:
+ args.append(self.define("CPUINFO", False))
+ if "+ds_hwloc" in spec or "+build_data_sources" in spec:
+ args.append(self.define("DS_HWLOC", True))
+ if "+ds_numa" in spec or "+build_data_sources" in spec:
+ args.append(self.define("DS_NUMA", True))
+ return args
diff --git a/var/spack/repos/builtin/packages/taskflow/package.py b/var/spack/repos/builtin/packages/taskflow/package.py
index df921639b28781..1694dc7c95f8c6 100644
--- a/var/spack/repos/builtin/packages/taskflow/package.py
+++ b/var/spack/repos/builtin/packages/taskflow/package.py
@@ -16,6 +16,7 @@ class Taskflow(CMakePackage):
git = "https://github.com/taskflow/taskflow.git"
version("master", branch="master")
+ version("3.6.0", sha256="5a1cd9cf89f93a97fcace58fd73ed2fc8ee2053bcb43e047acb6bc121c3edf4c")
version("2.7.0", sha256="bc2227dcabec86abeba1fee56bb357d9d3c0ef0184f7c2275d7008e8758dfc3e")
# Compiler must offer C++14 support
diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py
index b61ab5753ca64c..56cf5f1d721a7b 100644
--- a/var/spack/repos/builtin/packages/tau/package.py
+++ b/var/spack/repos/builtin/packages/tau/package.py
@@ -26,6 +26,7 @@ class Tau(Package):
tags = ["e4s"]
version("master", branch="master")
+ version("2.33", sha256="04d9d67adb495bc1ea56561f33c5ce5ba44f51cc7f64996f65bd446fac5483d9")
version("2.32.1", sha256="0eec3de46b0873846dfc639270c5e30a226b463dd6cb41aa12e975b7563f0eeb")
version("2.32", sha256="ee774a06e30ce0ef0f053635a52229152c39aba4f4933bed92da55e5e13466f3")
version("2.31.1", sha256="bf445b9d4fe40a5672a7b175044d2133791c4dfb36a214c1a55a931aebc06b9d")
@@ -85,6 +86,7 @@ class Tau(Package):
variant("io", default=True, description="Activates POSIX I/O support")
variant("adios2", default=False, description="Activates ADIOS2 output support")
variant("sqlite", default=False, description="Activates SQLite3 output support")
+ variant("syscall", default=False, description="Activates syscall wrapper")
variant(
"profileparam",
default=False,
@@ -99,6 +101,7 @@ class Tau(Package):
variant(
"x86_64", default=False, description="Force build for x86 Linux instead of auto-detect"
)
+ variant("dyninst", default=False, description="Activates dyninst support")
depends_on("cmake@3.14:", type="build", when="%clang")
depends_on("zlib-api", type="link")
@@ -128,6 +131,7 @@ class Tau(Package):
depends_on("rocm-smi-lib", when="@2.32.1: +rocm")
depends_on("java", type="run") # for paraprof
depends_on("oneapi-level-zero", when="+level_zero")
+ depends_on("dyninst@12.3.0:", when="+dyninst")
# Elf only required from 2.28.1 on
conflicts("+elf", when="@:2.28.0")
@@ -136,6 +140,7 @@ class Tau(Package):
# ADIOS2, SQLite only available from 2.29.1 on
conflicts("+adios2", when="@:2.29.1")
conflicts("+sqlite", when="@:2.29.1")
+ conflicts("+dyninst", when="@:2.32.1")
patch("unwind.patch", when="@2.29.0")
@@ -243,6 +248,9 @@ def install(self, spec, prefix):
if "+io" in spec:
options.append("-iowrapper")
+ if "+syscall" in spec:
+ options.append("-syscall")
+
if "+binutils" in spec:
options.append("-bfd=%s" % spec["binutils"].prefix)
@@ -337,6 +345,15 @@ def install(self, spec, prefix):
break
options.append("-pythonlib=%s" % lib_path)
+ if "+dyninst" in spec:
+ options.append("-dyninst=%s" % spec["dyninst"].prefix)
+ if "+tbb" not in spec:
+ options.append("-tbb=%s" % spec["intel-tbb"].prefix)
+ if "+boost" not in spec:
+ options.append("-boost=%s" % spec["boost"].prefix)
+ if "+elf" not in spec:
+ options.append("-elf=%s" % spec["elfutils"].prefix)
+
compiler_specific_options = self.set_compiler_options(spec)
options.extend(compiler_specific_options)
configure(*options)
diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py
index c0082dc52cc1f7..dee78161bb1149 100644
--- a/var/spack/repos/builtin/packages/tcl/package.py
+++ b/var/spack/repos/builtin/packages/tcl/package.py
@@ -37,6 +37,8 @@ class Tcl(AutotoolsPackage, SourceforgePackage):
configure_directory = "unix"
+ filter_compiler_wrappers("tclConfig.sh", relative_root="lib")
+
def install(self, spec, prefix):
with working_dir(self.build_directory):
make("install")
diff --git a/var/spack/repos/builtin/packages/tracy-client/package.py b/var/spack/repos/builtin/packages/tracy-client/package.py
index dd219f31ee039a..c0ff6a7b712ed2 100644
--- a/var/spack/repos/builtin/packages/tracy-client/package.py
+++ b/var/spack/repos/builtin/packages/tracy-client/package.py
@@ -14,6 +14,8 @@ class TracyClient(CMakePackage):
url = "https://github.com/wolfpld/tracy/archive/v0.0.0.tar.gz"
maintainers("msimberg")
+ license("BSD-3-Clause")
+
version("master", git="https://github.com/wolfpld/tracy.git", branch="master")
version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600")
version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc")
diff --git a/var/spack/repos/builtin/packages/tracy/package.py b/var/spack/repos/builtin/packages/tracy/package.py
index 111b4a86534600..021e18d00f4f73 100644
--- a/var/spack/repos/builtin/packages/tracy/package.py
+++ b/var/spack/repos/builtin/packages/tracy/package.py
@@ -14,6 +14,8 @@ class Tracy(MakefilePackage):
url = "https://github.com/wolfpld/tracy/archive/v0.0.0.tar.gz"
maintainers("msimberg")
+ license("BSD-3-Clause")
+
version("master", git="https://github.com/wolfpld/tracy.git", branch="master")
version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600")
version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc")
diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py
index 9af8ab14dcdd73..1681ac35d2e9a2 100644
--- a/var/spack/repos/builtin/packages/trilinos/package.py
+++ b/var/spack/repos/builtin/packages/trilinos/package.py
@@ -358,6 +358,11 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage):
conflicts("@:13.0.1 +cuda", when="^cuda@11:")
# Build hangs with CUDA 11.6 (see #28439)
conflicts("+cuda +stokhos", when="^cuda@11.6:")
+ # superlu-dist defines a macro EMPTY which conflicts with a header in cuda
+ # used when building stokhos
+ # Fix: https://github.com/xiaoyeli/superlu_dist/commit/09cb1430f7be288fd4d75b8ed461aa0b7e68fefe
+ # is not tagged yet. See discussion here https://github.com/trilinos/Trilinos/issues/11839
+ conflicts("+cuda +stokhos +superlu-dist")
# Cuda UVM must be enabled prior to 13.2
# See https://github.com/spack/spack/issues/28869
conflicts("~uvm", when="@:13.1 +cuda")
diff --git a/var/spack/repos/builtin/packages/typescript/package.py b/var/spack/repos/builtin/packages/typescript/package.py
new file mode 100644
index 00000000000000..9a0e0a69e4b1a6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/typescript/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class Typescript(Package):
+ """TypeScript is a superset of JavaScript that compiles to clean JavaScript output."""
+
+ homepage = "https://www.typescriptlang.org"
+ url = "https://github.com/microsoft/TypeScript/archive/refs/tags/v5.3.2.tar.gz"
+
+ license("Apache-2.0")
+
+ version("5.3.2", sha256="c5a12507006e7d2b8020dec9589191ce070fd88203f2c80aca00d641cee7866f")
+
+ depends_on("node-js", type=("build", "link", "run"))
+ depends_on("npm", type="build")
+
+ def install(self, spec, prefix):
+ npm = which("npm", required=True)
+ npm("install", "--global", f"--prefix={prefix}")
diff --git a/var/spack/repos/builtin/packages/ufs-utils/package.py b/var/spack/repos/builtin/packages/ufs-utils/package.py
index e551e7fec1fdbb..50380bfe5889b2 100644
--- a/var/spack/repos/builtin/packages/ufs-utils/package.py
+++ b/var/spack/repos/builtin/packages/ufs-utils/package.py
@@ -18,6 +18,12 @@ class UfsUtils(CMakePackage):
maintainers("t-brown", "edwardhartnett", "AlexanderRichert-NOAA", "Hang-Lei-NOAA")
+ version(
+ "1.11.0",
+ tag="ufs_utils_1_11_0",
+ commit="72701ab45165ae67a1c4b4d855e763bf5674dbd2",
+ submodules=True,
+ )
version(
"1.10.0",
tag="ufs_utils_1_10_0",
diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py
index c64bfdf256db78..e97db334dcfaf7 100644
--- a/var/spack/repos/builtin/packages/umpire/package.py
+++ b/var/spack/repos/builtin/packages/umpire/package.py
@@ -208,11 +208,6 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage):
# currently only available for cuda.
conflicts("+shared", when="+cuda")
- # https://github.com/LLNL/Umpire/issues/653
- # This range looks weird, but it ensures the concretizer looks at it as a
- # range, not as a concrete version, so that it also matches 10.3.* versions.
- conflicts("%gcc@10.3.0:10.3", when="+cuda")
-
def _get_sys_type(self, spec):
sys_type = spec.architecture
if "SYS_TYPE" in env:
diff --git a/var/spack/repos/builtin/packages/upp/package.py b/var/spack/repos/builtin/packages/upp/package.py
index 3cef205afdb6ef..8bdb1187921da4 100644
--- a/var/spack/repos/builtin/packages/upp/package.py
+++ b/var/spack/repos/builtin/packages/upp/package.py
@@ -20,10 +20,25 @@ class Upp(CMakePackage):
maintainers("AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA")
version("develop", branch="develop")
- version("11.0.0", tag="upp_v11.0.0", submodules=True)
+ version(
+ "11.0.0",
+ tag="upp_v11.0.0",
+ commit="6b5c589c7650132c6f13a729a2853676a7b93bbb",
+ submodules=True,
+ )
version("10.0.10", sha256="0c96a88d0e79b554d5fcee9401efcf4d6273da01d15e3413845274f73d70b66e")
- version("10.0.9", tag="upp_v10.0.9", submodules=True)
- version("10.0.8", tag="upp_v10.0.8", submodules=True)
+ version(
+ "10.0.9",
+ tag="upp_v10.0.9",
+ commit="a49af0549958def4744cb3903c7315476fe44530",
+ submodules=True,
+ )
+ version(
+ "10.0.8",
+ tag="upp_v10.0.8",
+ commit="ce989911a7a09a2e2a0e61b3acc87588b5b9fc26",
+ submodules=True,
+ )
version("8.2.0", sha256="38de2178dc79420f42aa3fb8b85796fc49d43d66f90e5276e47ab50c282627ac")
variant("openmp", default=True, description="Use OpenMP threading")
diff --git a/var/spack/repos/builtin/packages/ut/package.py b/var/spack/repos/builtin/packages/ut/package.py
index 9c5d9f44603de7..7d7d2b573ab92d 100644
--- a/var/spack/repos/builtin/packages/ut/package.py
+++ b/var/spack/repos/builtin/packages/ut/package.py
@@ -15,6 +15,8 @@ class Ut(CMakePackage):
maintainers("msimberg")
+ license("BSL-1.0")
+
version("master", branch="master")
version("1.1.9", sha256="1a666513157905aa0e53a13fac602b5673dcafb04a869100a85cd3f000c2ed0d")
diff --git a/var/spack/repos/builtin/packages/vapor/package.py b/var/spack/repos/builtin/packages/vapor/package.py
new file mode 100644
index 00000000000000..1dca76673990ca
--- /dev/null
+++ b/var/spack/repos/builtin/packages/vapor/package.py
@@ -0,0 +1,132 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+import re
+
+from spack.package import *
+
+
+class Vapor(CMakePackage):
+ """VAPOR is the Visualization and Analysis Platform for Ocean,
+ Atmosphere, and Solar Researchers. VAPOR provides an interactive 3D
+ visualization environment that can also produce animations and
+ still frame images.
+ """
+
+ homepage = "https://www.vapor.ucar.edu"
+ url = "https://github.com/NCAR/VAPOR/archive/refs/tags/v3.9.0.tar.gz"
+ git = "https://github.com/NCAR/VAPOR.git"
+
+ maintainers("vanderwb")
+
+ version("main", branch="main")
+ version(
+ "3.9.0",
+ sha256="343ababe40b5824ef826f16c935a6dc1fb18e1a4c88ef967c8d64386f28a99a3",
+ preferred=True,
+ )
+
+ variant("doc", default=True, description="Build docs using Doxygen")
+ variant("ospray", default=False, description="Enable OSPRay raytracing")
+
+ depends_on("cmake@3.17:", type="build")
+ depends_on("python+ssl", type="build")
+ depends_on("py-numpy@1.21", type="build")
+ depends_on("py-scipy", type="build")
+ depends_on("py-matplotlib", type="build")
+
+ depends_on("zlib-api")
+ depends_on("gl")
+
+ depends_on("xz")
+ depends_on("openssl")
+ depends_on("expat")
+ depends_on("curl")
+ depends_on("mesa-glu")
+ depends_on("libxtst")
+ depends_on("libxcb")
+ depends_on("xcb-util")
+ depends_on("libxkbcommon")
+ depends_on("libpng")
+ depends_on("assimp")
+ depends_on("netcdf-c~dap~byterange")
+ depends_on("udunits")
+ depends_on("freetype")
+ depends_on("proj@:7")
+ depends_on("libgeotiff")
+ depends_on("glm")
+ depends_on("qt+opengl+dbus@5")
+
+ depends_on("ospray~mpi", when="+ospray")
+ depends_on("doxygen", when="+doc")
+
+ # These images are required but not provided by the source
+ resource(
+ name="map-images",
+ url="https://stratus.ucar.edu/vapor-images/2023-Jun-images.tar.xz",
+ sha256="3f0c6d40446abdb16d5aaaa314349a140e497b3be6f4971394b3e78f22d47c7d",
+ placement="share/extras/images",
+ )
+
+ def cmake_args(self):
+ spec = self.spec
+ pyvers = spec["python"].version.up_to(2)
+ pypath = "{}/python{}".format(spec.prefix.lib, pyvers)
+
+ args = [
+ self.define_from_variant("BUILD_OSP", "ospray"),
+ self.define_from_variant("BUILD_DOC", "doc"),
+ self.define("BUILD_PYTHON", False),
+ self.define("THIRD_PARTY_DIR", spec.prefix),
+ self.define("THIRD_PARTY_LIB_DIR", spec.prefix.lib),
+ self.define("THIRD_PARTY_INC_DIR", spec["python"].prefix.include),
+ self.define("PYTHONVERSION", pyvers),
+ self.define("PYTHONDIR", spec.prefix),
+ self.define("PYTHONPATH", pypath),
+ self.define("NUMPY_INCLUDE_DIR", pypath + "/site-packages/numpy/core/include"),
+ self.define("MAP_IMAGES_PATH", "extras/images"),
+ ]
+
+ return args
+
+ # VAPOR depends on custom version of GeometryEngine that is
+ # packaged with the source code - need to extract and move
+ @run_before("cmake")
+ def extract_gte(self):
+ unzip = which("unzip")
+
+ with working_dir("buildutils"):
+ unzip("GTE.zip")
+ move("GTE", "../include")
+
+ # Build will use these optional site defaults which aren't
+ # generally applicable to other sites
+ @run_before("cmake")
+ def clean_local_refs(self):
+ force_remove("site_files/site.NCAR")
+
+ # Vapor wants all of the Python packages in its build path. This
+ # somewhat objectionable code copies packages to the tree. It also
+ # copies the Python library so that the site-library is found.
+ @run_before("cmake")
+ def copy_python_library(self):
+ spec = self.spec
+ mkdirp(spec.prefix.lib)
+ pp = re.compile("py-[a-z0-9-]*")
+
+ for pydep in ["python"] + pp.findall(str(spec)):
+ install_tree(spec[pydep].prefix.lib, spec.prefix.lib)
+
+ # The documentation will not be built without this target (though
+ # it will try to install!)
+ @property
+ def build_targets(self):
+ targets = []
+
+ if "+doc" in self.spec:
+ targets.append("doc")
+
+ return targets + ["all"]
diff --git a/var/spack/repos/builtin/packages/vc/package.py b/var/spack/repos/builtin/packages/vc/package.py
index f00154e851ed61..73b48537a06526 100644
--- a/var/spack/repos/builtin/packages/vc/package.py
+++ b/var/spack/repos/builtin/packages/vc/package.py
@@ -13,6 +13,7 @@ class Vc(CMakePackage):
git = "https://github.com/VcDevel/Vc.git"
url = "https://github.com/VcDevel/Vc/archive/refs/tags/1.3.3.tar.gz"
+ version("1.4.4", sha256="5933108196be44c41613884cd56305df320263981fe6a49e648aebb3354d57f3")
version("1.4.3", sha256="988ea0053f3fbf17544ca776a2749c097b3139089408b0286fa4e9e8513e037f")
version("1.4.2", sha256="50d3f151e40b0718666935aa71d299d6370fafa67411f0a9e249fbce3e6e3952")
version("1.4.1", sha256="7e8b57ed5ff9eb0835636203898c21302733973ff8eaede5134dd7cb87f915f6")
diff --git a/var/spack/repos/builtin/packages/vcftools/package.py b/var/spack/repos/builtin/packages/vcftools/package.py
index 944760be387763..e592728113f0cb 100644
--- a/var/spack/repos/builtin/packages/vcftools/package.py
+++ b/var/spack/repos/builtin/packages/vcftools/package.py
@@ -16,6 +16,7 @@ class Vcftools(AutotoolsPackage):
homepage = "https://vcftools.github.io/"
url = "https://github.com/vcftools/vcftools/releases/download/v0.1.14/vcftools-0.1.14.tar.gz"
+ version("0.1.16", sha256="dbfc774383c106b85043daa2c42568816aa6a7b4e6abc965eeea6c47dde914e3")
# this is "a pre-release"
# version('0.1.15', sha256='31e47afd5be679d89ece811a227525925b6907cce4af2c86f10f465e080383e3')
version("0.1.14", sha256="76d799dd9afcb12f1ed42a07bc2886cd1a989858a4d047f24d91dcf40f608582")
@@ -26,7 +27,15 @@ class Vcftools(AutotoolsPackage):
# this needs to be in sync with what setup_run_environment adds to
# PERL5LIB below
def configure_args(self):
- return ["--with-pmdir={0}".format(self.prefix.lib)]
+ args = []
+ # between 0.1.16 and 14 the behavior of the configure script
+ # wrt the perl lib dir changed and it became relative to the
+ # install directory, if you specify the whole prefix in
+ # it now you end up with a nasty recreation of the
+ # prefix tree in self.prefix.
+ if self.spec.satisfies("@:0.1.14"):
+ args.append(f"--with-pmdir={self.prefix.lib}")
+ return args
@run_before("install")
def filter_sbang(self):
diff --git a/var/spack/repos/builtin/packages/verilator/package.py b/var/spack/repos/builtin/packages/verilator/package.py
index 90b1f04a2fb524..df49bf3bb6a33d 100644
--- a/var/spack/repos/builtin/packages/verilator/package.py
+++ b/var/spack/repos/builtin/packages/verilator/package.py
@@ -33,38 +33,61 @@ class Verilator(AutotoolsPackage):
designs with thousands of modules."""
homepage = "https://www.veripool.org/projects/verilator"
- url = "https://www.veripool.org/ftp/verilator-3.920.tgz"
+ url = "https://github.com/verilator/verilator/archive/refs/tags/v5.018.tar.gz"
+ git = "https://github.com/verilator/verilator.git"
- version("4.108", sha256="8e8ec1de0bf200b6206035214f9071a5acc64bd2e7134361d564271e48552702")
- version("4.020", sha256="abd79fc2a54cab9da33dfccd669bda3baa71e79060abec17517f0b7374dbc31a")
- version("3.920", sha256="2b5c38aa432d0766a38475219f9548d64d18104ce8bdcb5d29e42f5da06943ff")
- version("3.904", sha256="ea95e08b2d70682ad42e6c2f5ba99f59b2e7b220791214076099cdf6b7a8c1cb")
+ maintainers("davekeeshan")
+ version("master", branch="master")
+
+ version("5.018", sha256="8b544273eedee379e3c1a3bb849e14c754c9b5035d61ad03acdf3963092ba6c0")
+ version("5.016", sha256="66fc36f65033e5ec904481dd3d0df56500e90c0bfca23b2ae21b4a8d39e05ef1")
+ version("5.014", sha256="36e16c8a7c4b376f88d87411cea6ee68710e6d1382a13faf21f35d65b54df4a7")
+ version("5.012", sha256="db19a7d7615b37d9108654e757427e4c3f44e6e973ed40dd5e0e80cc6beb8467")
+ version("5.010", sha256="ca82b57ce2d2b34eed3f04d5daf7eae6ad41276cda88efbb59ebd6467e65d635")
+ version("5.008", sha256="1d19f4cd186eec3dfb363571e3fe2e6d3377386ead6febc6ad45402f0634d2a6")
+ version("5.006", sha256="eb4ca4157ba854bc78c86173c58e8bd13311984e964006803dd45dc289450cfe")
+ version("5.004", sha256="7d193a09eebefdbec8defaabfc125663f10cf6ab0963ccbefdfe704a8a4784d2")
+ version("5.002", sha256="72d68469fc1262e6288d099062b960a2f65e9425bdb546cba141a2507decd951")
+ version("4.228", sha256="be6af6572757013802be5b0ff9c64cbf509e98066737866abaae692fe04edf09")
+ version("4.226", sha256="70bc941d86e4810253d51aa94898b0802d916ab76296a398f8ceb8798122c9be")
+ version("4.224", sha256="010ff2b5c76d4dbc2ed4a3278a5599ba35c8ed4c05690e57296d6b281591367b")
+ version("4.222", sha256="15c60175807c0f3536c3c5b435f131c2b1e8725aefd30645efd946bf401b4c84")
+ version("4.220", sha256="e00e0c31a0c00887bebbaf7a8c771efa09420a4d1fbae54d45843baf50df4426")
+ version("4.218", sha256="ef7b1e6ddb715ddb3cc998fcbefc7150cfa2efc5118cf43ddb594bf41ea41cc7")
+ version("4.216", sha256="64e5093b629a7e96178e3b2494f208955f218dfac6f310a91e4fc07d050c980b")
+ version("4.214", sha256="e14c7f6ffb00a6746ae2a8ea0424e90a1a30067e8ae4c96b8c42689ca1ca0b1f")
+ version("4.212", sha256="7b655859e4e75c9673141aede8f5a20f47e4c380055d1a588d5be60cbbc73619")
+ version("4.210", sha256="3a2e6f27a5d80116a268ba054a3be61aca924bc54c5556ea25e75ee974201abb")
+ version("4.204", sha256="dbad9bd3cac34e63bbd945fff9a59eaabe31dae1e1c93c847d0f894db9919498")
+ version("4.202", sha256="a60c02f299ddb5bb8e963dc7d81983c55c293d97718685c1cd4b66638a33d98e")
+ version("4.200", sha256="2cd0fd48152f152d0487eaac23803d35ff75e924734435b366a523deb1185407")
+ version("4.110", sha256="603c23944577a5d53a2e09191d04d5c61740a77b58f3a590a70e56f4526a5a0b")
+ version("4.108", sha256="ce521dc57754e5a325ff7000c434ce23674c8e1de30e1f2a6506dc3a33bd7c55")
+
+ depends_on("autoconf", type="build")
+ depends_on("automake", type="build")
+ depends_on("libtool", type="build")
+ depends_on("help2man", type="build")
depends_on("bison", type="build")
- depends_on("flex")
+ depends_on("flex", type="build")
+ depends_on("ccache", type=("build", "run"), when="@5.018:")
depends_on("perl", type=("build", "run"))
+ depends_on("bash", type="build")
+
+ # we need to fix the CXX and LINK paths, as they point to the spack
+ # wrapper scripts which aren't usable without spack
+ filter_compiler_wrappers("verilated.mk", relative_root="include")
def setup_run_environment(self, env):
env.prepend_path("VERILATOR_ROOT", self.prefix)
+ def autoreconf(self, spec, prefix):
+ which("bash")("autoconf")
+
# verilator requires access to its shipped scripts (bin) and include
# but the standard make doesn't put it in the correct places
@run_before("install")
def install_include(self):
install_tree("include", prefix.include)
install_tree("bin", prefix.bin)
-
- # we need to fix the CXX and LINK paths, as they point to the spack
- # wrapper scripts which aren't usable without spack
- @run_after("install")
- def patch_cxx(self):
- filter_file(
- r"^CXX\s*=.*",
- "CXX = {0}".format(self.compiler.cxx),
- join_path(self.prefix.include, "verilated.mk"),
- )
- filter_file(
- r"^LINK\s*=.*",
- "LINK = {0}".format(self.compiler.cxx),
- join_path(self.prefix.include, "verilated.mk"),
- )
diff --git a/var/spack/repos/builtin/packages/votca/package.py b/var/spack/repos/builtin/packages/votca/package.py
index db9d260f86530d..9dbd3a65ca506c 100644
--- a/var/spack/repos/builtin/packages/votca/package.py
+++ b/var/spack/repos/builtin/packages/votca/package.py
@@ -20,6 +20,7 @@ class Votca(CMakePackage):
maintainers("junghans")
version("master", branch="master")
+ version("2023", sha256="6150a38c77379d05592a56ae4392a00c4636d02198bb06108a3dc739a45115f8")
version("2022.1", sha256="358119b2645fe60f88ca621aed508c49fb61f88d29d3e3fa24b5b831ed4a66ec")
version("2022", sha256="7991137098ff4511f4ca2c6f1b6c45f53d92d9f84e5c0d0e32fbc31768f73a83")
diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py
index 122e29408eb652..6578a72af94100 100644
--- a/var/spack/repos/builtin/packages/vtk/package.py
+++ b/var/spack/repos/builtin/packages/vtk/package.py
@@ -67,7 +67,8 @@ class Vtk(CMakePackage):
# Patch for paraview 5.10: +hdf5 ^hdf5@1.13.2:
# https://gitlab.kitware.com/vtk/vtk/-/merge_requests/9690
- patch("xdmf2-hdf51.13.2.patch", when="@9:9.2 +xdmf")
+ # patch seems to effectively been added to vtk@9.2.3 (e81a2fe)
+ patch("xdmf2-hdf51.13.2.patch", when="@9:9.2.2 +xdmf")
# We cannot build with both osmesa and qt in spack
conflicts("+osmesa", when="+qt")
@@ -195,8 +196,8 @@ class Vtk(CMakePackage):
)
patch(
- "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.patch",
- sha256="65175731c080961f85d779d613ac1f6bce89783745e54e864edec7637b03b18a",
+ "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.diff",
+ sha256="c446a90459b108082db5b28d9aeda99d030e636325e01929beba062cafb16b76",
when="@9.1",
)
diff --git a/var/spack/repos/builtin/packages/whip/package.py b/var/spack/repos/builtin/packages/whip/package.py
index a269097ad6bc8f..44c6f1ad57391e 100644
--- a/var/spack/repos/builtin/packages/whip/package.py
+++ b/var/spack/repos/builtin/packages/whip/package.py
@@ -15,6 +15,8 @@ class Whip(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/eth-cscs/whip.git"
maintainers("msimberg", "rasolca")
+ license("BSD-3-Clause")
+
version("main", branch="main")
version("0.2.0", sha256="d8fec662526accbd1624922fdf01a077d6f312cf253382660e4a2f65e28e8686")
version("0.1.0", sha256="5d557794f4afc8332fc660948a342f69e22bc9e5d575ffb3e3944cf526db5ec9")
diff --git a/var/spack/repos/builtin/packages/xdmf3/fix_hdf5_hid_t.diff b/var/spack/repos/builtin/packages/xdmf3/fix_hdf5_hid_t.diff
new file mode 100644
index 00000000000000..8323ddda26def7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xdmf3/fix_hdf5_hid_t.diff
@@ -0,0 +1,40 @@
+diff --git a/core/XdmfHDF5Controller.hpp b/core/XdmfHDF5Controller.hpp
+index c5c15d0a..496cc80d 100644
+--- a/core/XdmfHDF5Controller.hpp
++++ b/core/XdmfHDF5Controller.hpp
+@@ -27,13 +27,14 @@
+ // C Compatible Includes
+ #include "XdmfCore.hpp"
+ #include "XdmfHeavyDataController.hpp"
++#include <stdint.h>
+
+ // So that hdf5 does not need to be included in the header files
+ // It would add a dependancy to programs that use Xdmf
+ #ifndef _H5Ipublic_H
+ #ifndef XDMF_HID_T
+ #define XDMF_HID_T
+- typedef int hid_t;
++ typedef int64_t hid_t;
+ #endif
+ #endif
+
+diff --git a/core/XdmfHDF5Writer.hpp b/core/XdmfHDF5Writer.hpp
+index cfbec6f4..f83aa0de 100644
+--- a/core/XdmfHDF5Writer.hpp
++++ b/core/XdmfHDF5Writer.hpp
+@@ -28,13 +28,14 @@
+ #include "XdmfCore.hpp"
+ #include "XdmfHeavyDataWriter.hpp"
+ #include "XdmfHeavyDataController.hpp"
++#include <stdint.h>
+
+ // So that hdf5 does not need to be included in the header files
+ // It would add a dependancy to programs that use Xdmf
+ #ifndef _H5Ipublic_H
+ #ifndef XDMF_HID_T
+ #define XDMF_HID_T
+- typedef int hid_t;
++ typedef int64_t hid_t;
+ #endif
+ #endif
+
diff --git a/var/spack/repos/builtin/packages/xdmf3/package.py b/var/spack/repos/builtin/packages/xdmf3/package.py
index 8a84aa27f10238..ba54eed8413081 100644
--- a/var/spack/repos/builtin/packages/xdmf3/package.py
+++ b/var/spack/repos/builtin/packages/xdmf3/package.py
@@ -30,8 +30,10 @@ class Xdmf3(CMakePackage):
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
depends_on("mpi", when="+mpi")
- depends_on("hdf5+mpi", when="+mpi")
- depends_on("hdf5~mpi", when="~mpi")
+ depends_on("hdf5@1.10:+mpi", when="+mpi")
+ depends_on("hdf5@1.10:~mpi", when="~mpi")
+ # motivated by discussion in https://gitlab.kitware.com/xdmf/xdmf/-/issues/28
+ patch("fix_hdf5_hid_t.diff")
def cmake_args(self):
"""Populate cmake arguments for XDMF."""
@@ -42,7 +44,7 @@ def cmake_args(self):
"-DXDMF_BUILD_UTILS=ON",
"-DXDMF_WRAP_JAVA=OFF",
"-DXDMF_WRAP_PYTHON=OFF",
- "-DXDMF_BUILD_TESTING=ON",
+ "-DXDMF_BUILD_TESTING=OFF",
]
return cmake_args
diff --git a/var/spack/repos/builtin/packages/xrdcl-record/package.py b/var/spack/repos/builtin/packages/xrdcl-record/package.py
index c43f668a612149..d045fcd06c8f8e 100644
--- a/var/spack/repos/builtin/packages/xrdcl-record/package.py
+++ b/var/spack/repos/builtin/packages/xrdcl-record/package.py
@@ -13,8 +13,6 @@ class XrdclRecord(CMakePackage):
homepage = "https://github.com/xrootd/xrdcl-record"
url = "https://github.com/xrootd/xrdcl-record/archive/refs/tags/v5.4.2.tar.gz"
- maintainers("iarspider")
-
version("5.4.2", sha256="fb76284491ff4e723bce4c9e9d87347e98e278e70c597167bc39a162bc876734")
depends_on("xrootd")
diff --git a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py
index 1dd9d79a5c1b79..6b3ec2c126d6a6 100644
--- a/var/spack/repos/builtin/packages/xsdk/package.py
+++ b/var/spack/repos/builtin/packages/xsdk/package.py
@@ -85,9 +85,11 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
maintainers("balay", "luszczek", "balos1", "shuds13", "v-dobrev")
version("develop")
+ version("1.0.0")
version("0.8.0")
version("0.7.0", deprecated=True)
+ variant("sycl", default=False, sticky=True, description="Enable sycl variant of xsdk packages")
variant("trilinos", default=True, sticky=True, description="Enable trilinos package build")
variant("datatransferkit", default=True, description="Enable datatransferkit package build")
variant("omega-h", default=True, description="Enable omega-h package build")
@@ -107,8 +109,14 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
variant("exago", default=True, description="Enable exago build")
variant("hiop", default=True, description="Enable hiop build")
variant("raja", default=(sys.platform != "darwin"), description="Enable raja for hiop, exago")
+ variant("pflotran", default=True, description="Enable pflotran package build")
- xsdk_depends_on("hypre@develop+superlu-dist+shared", when="@develop", cuda_var="cuda")
+ xsdk_depends_on(
+ "hypre@develop+superlu-dist+shared", when="@develop", cuda_var="cuda", rocm_var="rocm"
+ )
+ xsdk_depends_on(
+ "hypre@2.30.0+superlu-dist+shared", when="@1.0.0", cuda_var="cuda", rocm_var="rocm"
+ )
xsdk_depends_on("hypre@2.26.0+superlu-dist+shared", when="@0.8.0", cuda_var="cuda")
xsdk_depends_on("hypre@2.23.0+superlu-dist+shared", when="@0.7.0", cuda_var="cuda")
@@ -118,6 +126,12 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
cuda_var="cuda",
rocm_var="rocm",
)
+ xsdk_depends_on(
+ "mfem@4.6.0+shared+mpi+superlu-dist+petsc+sundials+examples+miniapps",
+ when="@1.0.0",
+ cuda_var="cuda",
+ rocm_var="rocm",
+ )
xsdk_depends_on(
"mfem@4.5.0+shared+mpi+superlu-dist+petsc+sundials+examples+miniapps",
when="@0.8.0",
@@ -131,16 +145,26 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
rocm_var="rocm",
)
- xsdk_depends_on("superlu-dist@develop", when="@develop")
+ xsdk_depends_on("superlu-dist@develop", when="@develop", cuda_var="cuda", rocm_var="rocm")
+ xsdk_depends_on("superlu-dist@8.2.1", when="@1.0.0", cuda_var="cuda", rocm_var="rocm")
xsdk_depends_on("superlu-dist@8.1.2", when="@0.8.0")
xsdk_depends_on("superlu-dist@7.1.1", when="@0.7.0")
+
+ xsdk_depends_on("trilinos +superlu-dist", when="@1.0.0: +trilinos ~cuda ~rocm")
xsdk_depends_on(
- "trilinos@develop+hypre+superlu-dist+hdf5~mumps+boost"
+ "trilinos@develop+hypre+hdf5~mumps+boost"
+ "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2"
+ "~exodus~dtk+intrepid2+shards+stratimikos gotype=int"
+ " cxxstd=14",
when="@develop +trilinos",
)
+ xsdk_depends_on(
+ "trilinos@14.4.0+hypre+hdf5~mumps+boost"
+ + "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2"
+ + "~exodus~dtk+intrepid2+shards+stratimikos gotype=int"
+ + " cxxstd=17",
+ when="@1.0.0 +trilinos",
+ )
xsdk_depends_on(
"trilinos@13.4.1+hypre+superlu-dist+hdf5~mumps+boost"
+ "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2"
@@ -157,17 +181,25 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
)
xsdk_depends_on("datatransferkit@master", when="@develop +trilinos +datatransferkit")
+ xsdk_depends_on("datatransferkit@3.1.1", when="@1.0.0 +trilinos +datatransferkit")
dtk7ver = "3.1-rc2" if sys.platform == "darwin" else "3.1-rc3"
xsdk_depends_on("datatransferkit@" + dtk7ver, when="@0.8.0 +trilinos +datatransferkit")
xsdk_depends_on("datatransferkit@" + dtk7ver, when="@0.7.0 +trilinos +datatransferkit")
xsdk_depends_on("petsc +batch", when="@0.7.0: ^cray-mpich")
+ xsdk_depends_on("petsc +sycl +kokkos", when="@1.0.0: +sycl")
xsdk_depends_on(
"petsc@main+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64",
when="@develop",
cuda_var="cuda",
rocm_var="rocm",
)
+ xsdk_depends_on(
+ "petsc@3.20.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64",
+ when="@1.0.0",
+ cuda_var="cuda",
+ rocm_var="rocm",
+ )
xsdk_depends_on(
"petsc@3.18.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64",
when="@0.8.0",
@@ -184,9 +216,14 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
xsdk_depends_on("dealii ~trilinos", when="~trilinos +dealii")
xsdk_depends_on(
"dealii@master~assimp~python~doc~gmsh+petsc+slepc+mpi~int64"
- + "~netcdf+metis+sundials~ginkgo~symengine~nanoflann~simplex~arborx~cgal",
+ + "~netcdf+metis+sundials~ginkgo~symengine~nanoflann~simplex~arborx~cgal~oce",
when="@develop +dealii",
)
+ xsdk_depends_on(
+ "dealii@9.5.1~assimp~python~doc~gmsh+petsc+slepc+mpi~int64"
+ + "~netcdf+metis+sundials~ginkgo~symengine~simplex~arborx~cgal~oce",
+ when="@1.0.0 +dealii",
+ )
xsdk_depends_on(
"dealii@9.4.0~assimp~python~doc~gmsh+petsc+slepc+mpi~int64"
+ "~netcdf+metis+sundials~ginkgo~symengine~simplex~arborx~cgal",
@@ -198,22 +235,31 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
when="@0.7.0 +dealii",
)
- xsdk_depends_on("pflotran@develop", when="@develop")
- xsdk_depends_on("pflotran@4.0.1", when="@0.8.0")
- xsdk_depends_on("pflotran@3.0.2", when="@0.7.0")
+ xsdk_depends_on("pflotran@develop", when="@develop +pflotran")
+ xsdk_depends_on("pflotran@5.0.0", when="@1.0.0 +pflotran")
+ xsdk_depends_on("pflotran@4.0.1", when="@0.8.0 +pflotran")
+ xsdk_depends_on("pflotran@3.0.2", when="@0.7.0 +pflotran")
- xsdk_depends_on("alquimia@develop", when="@develop +alquimia")
+ xsdk_depends_on("alquimia@master", when="@develop +alquimia")
+ xsdk_depends_on("alquimia@1.1.0", when="@1.0.0 +alquimia")
xsdk_depends_on("alquimia@1.0.10", when="@0.8.0 +alquimia")
xsdk_depends_on("alquimia@1.0.9", when="@0.7.0 +alquimia")
xsdk_depends_on("sundials +trilinos", when="+trilinos @0.7.0:")
xsdk_depends_on("sundials +ginkgo", when="+ginkgo @0.8.0:")
+ xsdk_depends_on("sundials +sycl cxxstd=17", when="@1.0.0: +sycl")
xsdk_depends_on(
"sundials@develop~int64+hypre+petsc+superlu-dist",
when="@develop",
cuda_var=["cuda", "?magma"],
rocm_var=["rocm", "?magma"],
)
+ xsdk_depends_on(
+ "sundials@6.6.2~int64+hypre+petsc+superlu-dist",
+ when="@1.0.0",
+ cuda_var=["cuda", "?magma"],
+ rocm_var=["rocm", "?magma"],
+ )
xsdk_depends_on(
"sundials@6.4.1~int64+hypre+petsc+superlu-dist",
when="@0.8.0",
@@ -228,13 +274,16 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
)
xsdk_depends_on("plasma@develop:", when="@develop %gcc@6.0:")
+ xsdk_depends_on("plasma@23.8.2:", when="@1.0.0 %gcc@6.0:")
xsdk_depends_on("plasma@22.9.29:", when="@0.8.0 %gcc@6.0:")
xsdk_depends_on("plasma@21.8.29:", when="@0.7.0 %gcc@6.0:")
xsdk_depends_on("magma@master", when="@develop", cuda_var="?cuda", rocm_var="?rocm")
+ xsdk_depends_on("magma@2.7.1", when="@1.0.0", cuda_var="?cuda", rocm_var="?rocm")
xsdk_depends_on("magma@2.7.0", when="@0.8.0", cuda_var="?cuda", rocm_var="?rocm")
xsdk_depends_on("magma@2.6.1", when="@0.7.0", cuda_var="?cuda", rocm_var="?rocm")
+ xsdk_depends_on("amrex +sycl", when="@1.0.0: +sycl")
xsdk_depends_on(
"amrex@develop+sundials", when="@develop %intel", cuda_var="cuda", rocm_var="rocm"
)
@@ -244,6 +293,9 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
xsdk_depends_on(
"amrex@develop+sundials", when="@develop %cce", cuda_var="cuda", rocm_var="rocm"
)
+ xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %intel", cuda_var="cuda", rocm_var="rocm")
+ xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %gcc", cuda_var="cuda", rocm_var="rocm")
+ xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %cce", cuda_var="cuda", rocm_var="rocm")
xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %intel", cuda_var="cuda", rocm_var="rocm")
xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %gcc", cuda_var="cuda", rocm_var="rocm")
xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %cce", cuda_var="cuda", rocm_var="rocm")
@@ -252,32 +304,39 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
xsdk_depends_on("amrex@21.10+sundials", when="@0.7.0 %cce", cuda_var="cuda", rocm_var="rocm")
xsdk_depends_on("slepc@main", when="@develop")
+ xsdk_depends_on("slepc@3.20.0", when="@1.0.0", cuda_var="cuda", rocm_var="rocm")
xsdk_depends_on("slepc@3.18.1", when="@0.8.0", cuda_var="cuda", rocm_var="rocm")
xsdk_depends_on("slepc@3.16.0", when="@0.7.0")
xsdk_depends_on("omega-h +trilinos", when="+trilinos +omega-h")
xsdk_depends_on("omega-h ~trilinos", when="~trilinos +omega-h")
xsdk_depends_on("omega-h@main", when="@develop +omega-h")
+ xsdk_depends_on("omega-h@scorec.10.6.0", when="@1.0.0 +omega-h")
xsdk_depends_on("omega-h@9.34.13", when="@0.8.0 +omega-h")
xsdk_depends_on("omega-h@9.34.1", when="@0.7.0 +omega-h")
xsdk_depends_on("strumpack ~cuda", when="~cuda @0.7.0: +strumpack")
xsdk_depends_on("strumpack ~slate~openmp", when="~slate @0.8.0: +strumpack")
xsdk_depends_on("strumpack@master", when="@develop +strumpack", cuda_var=["cuda"])
+ xsdk_depends_on("strumpack@7.2.0", when="@1.0.0 +strumpack", cuda_var=["cuda"])
xsdk_depends_on("strumpack@7.0.1", when="@0.8.0 +strumpack", cuda_var=["cuda"])
xsdk_depends_on("strumpack@6.1.0~slate~openmp", when="@0.7.0 +strumpack")
xsdk_depends_on("pumi@master+shared", when="@develop")
+ xsdk_depends_on("pumi@2.2.8+shared", when="@1.0.0")
xsdk_depends_on("pumi@2.2.7+shared", when="@0.8.0")
xsdk_depends_on("pumi@2.2.6", when="@0.7.0")
tasmanian_openmp = "~openmp" if sys.platform == "darwin" else "+openmp"
xsdk_depends_on(
- "tasmanian@develop+xsdkflags+blas" + tasmanian_openmp,
+ "tasmanian@develop+blas" + tasmanian_openmp,
when="@develop",
cuda_var=["cuda", "?magma"],
rocm_var=["rocm", "?magma"],
)
+ xsdk_depends_on(
+ "tasmanian@8.0+mpi+blas" + tasmanian_openmp, when="@1.0.0", cuda_var=["cuda", "?magma"]
+ )
xsdk_depends_on(
"tasmanian@7.9+xsdkflags+mpi+blas" + tasmanian_openmp,
when="@0.8.0",
@@ -290,6 +349,8 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
)
xsdk_depends_on("arborx@master", when="@develop +arborx")
+ xsdk_depends_on("arborx+sycl", when="@1.0.0: +arborx +sycl")
+ xsdk_depends_on("arborx@1.4.1", when="@1.0.0 +arborx")
xsdk_depends_on("arborx@1.2", when="@0.8.0 +arborx")
xsdk_depends_on("arborx@1.1", when="@0.7.0 +arborx")
@@ -302,12 +363,17 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
xsdk_depends_on("phist kernel_lib=tpetra", when="+trilinos +phist")
xsdk_depends_on("phist kernel_lib=petsc", when="~trilinos +phist")
xsdk_depends_on("phist@develop ~fortran ~scamac ~openmp ~host ~int64", when="@develop +phist")
+ xsdk_depends_on("phist@1.12.0 ~fortran ~scamac ~openmp ~host ~int64", when="@1.0.0 +phist")
xsdk_depends_on("phist@1.11.2 ~fortran ~scamac ~openmp ~host ~int64", when="@0.8.0 +phist")
xsdk_depends_on("phist@1.9.5 ~fortran ~scamac ~openmp ~host ~int64", when="@0.7.0 +phist")
+ xsdk_depends_on("ginkgo+sycl", when="@1.0.0: +ginkgo +sycl")
xsdk_depends_on(
"ginkgo@develop +mpi ~openmp", when="@develop +ginkgo", cuda_var="cuda", rocm_var="rocm"
)
+ xsdk_depends_on(
+ "ginkgo@1.7.0 +mpi ~openmp", when="@1.0.0 +ginkgo", cuda_var="cuda", rocm_var="rocm"
+ )
xsdk_depends_on(
"ginkgo@1.5.0 +mpi ~openmp", when="@0.8.0 +ginkgo", cuda_var="cuda", rocm_var="rocm"
)
@@ -317,6 +383,8 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
xsdk_depends_on("py-libensemble@develop+petsc4py", when="@develop +libensemble")
xsdk_depends_on("py-petsc4py@main", when="@develop +libensemble")
+ xsdk_depends_on("py-libensemble@1.0.0+petsc4py", when="@1.0.0 +libensemble")
+ xsdk_depends_on("py-petsc4py@3.20.1", when="@1.0.0 +libensemble")
xsdk_depends_on("py-libensemble@0.9.3+petsc4py", when="@0.8.0 +libensemble")
xsdk_depends_on("py-petsc4py@3.18.1", when="@0.8.0 +libensemble")
xsdk_depends_on("py-libensemble@0.8.0+petsc4py", when="@0.7.0 +libensemble")
@@ -324,11 +392,13 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
xsdk_depends_on("precice ~petsc", when="+precice ^cray-mpich")
xsdk_depends_on("precice@develop", when="@develop +precice")
+ xsdk_depends_on("precice@2.5.0", when="@1.0.0 +precice")
xsdk_depends_on("precice@2.5.0", when="@0.8.0 +precice")
xsdk_depends_on("precice@2.3.0", when="@0.7.0 +precice")
bfpk_openmp = "~openmp" if sys.platform == "darwin" else "+openmp"
xsdk_depends_on("butterflypack@master", when="@develop +butterflypack")
+ xsdk_depends_on("butterflypack@2.4.0" + bfpk_openmp, when="@1.0.0 +butterflypack")
xsdk_depends_on("butterflypack@2.2.2" + bfpk_openmp, when="@0.8.0 +butterflypack")
xsdk_depends_on("butterflypack@2.0.0", when="@0.7.0 +butterflypack")
@@ -338,6 +408,12 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
cuda_var=["cuda", "?magma"],
rocm_var=["rocm", "?magma"],
)
+ xsdk_depends_on(
+ "heffte@2.4.0+fftw",
+ when="@1.0.0 +heffte",
+ cuda_var=["cuda", "?magma"],
+ rocm_var=["rocm", "?magma"],
+ )
xsdk_depends_on(
"heffte@2.3.0+fftw",
when="@0.8.0 +heffte",
@@ -352,15 +428,20 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage):
)
xsdk_depends_on("slate@master", when="@develop +slate", cuda_var="cuda")
+ xsdk_depends_on("slate@2023.08.25", when="@1.0.0 +slate", cuda_var="cuda")
xsdk_depends_on("slate@2022.07.00", when="@0.8.0 +slate", cuda_var="cuda")
xsdk_depends_on("slate@2021.05.02", when="@0.7.0 +slate %gcc@6.0:", cuda_var="cuda")
xsdk_depends_on("exago@develop~ipopt~hiop~python", when="@develop +exago ~raja")
xsdk_depends_on("exago@develop~ipopt+hiop+raja", when="@develop +exago +raja", cuda_var="cuda")
+ xsdk_depends_on("exago@1.6.0~ipopt~hiop~python", when="@1.0.0 +exago ~raja")
+ xsdk_depends_on("exago@1.6.0~ipopt+hiop+raja", when="@1.0.0 +exago +raja", cuda_var="cuda")
xsdk_depends_on("exago@1.5.0~ipopt~hiop~python", when="@0.8.0 +exago ~raja")
xsdk_depends_on("exago@1.5.0~ipopt+hiop+raja", when="@0.8.0 +exago +raja", cuda_var="cuda")
xsdk_depends_on("hiop@develop", when="@develop +hiop ~raja")
xsdk_depends_on("hiop@develop+raja", when="@develop +hiop +raja", cuda_var="cuda")
+ xsdk_depends_on("hiop@1.0.0", when="@1.0.0 +hiop ~raja")
+ xsdk_depends_on("hiop@1.0.0+raja", when="@1.0.0 +hiop +raja", cuda_var="cuda")
xsdk_depends_on("hiop@0.7.1", when="@0.8.0 +hiop ~raja")
xsdk_depends_on("hiop@0.7.1+raja", when="@0.8.0 +hiop +raja", cuda_var="cuda")
diff --git a/var/spack/repos/builtin/packages/zlib-ng/package.py b/var/spack/repos/builtin/packages/zlib-ng/package.py
index d069545dc1ec78..8444736856a3c2 100644
--- a/var/spack/repos/builtin/packages/zlib-ng/package.py
+++ b/var/spack/repos/builtin/packages/zlib-ng/package.py
@@ -16,8 +16,17 @@ class ZlibNg(AutotoolsPackage, CMakePackage):
maintainers("haampie")
- version("2.1.3", sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a")
- version("2.1.2", sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33")
+ version("2.1.4", sha256="a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a")
+ version(
+ "2.1.3",
+ sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a",
+ deprecated=True,
+ )
+ version(
+ "2.1.2",
+ sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33",
+ deprecated=True,
+ )
version("2.0.7", sha256="6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200")
version("2.0.0", sha256="86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8")
diff --git a/var/spack/repos/edges.test/packages/blas-only-client/package.py b/var/spack/repos/edges.test/packages/blas-only-client/package.py
new file mode 100644
index 00000000000000..9e9652a752f44a
--- /dev/null
+++ b/var/spack/repos/edges.test/packages/blas-only-client/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class BlasOnlyClient(Package):
+    """This package depends only on the 'blas' virtual, but should also be able to use a
+    provider that provides e.g. 'blas' together with 'lapack'.
+ """
+
+ homepage = "http://www.openblas.net"
+ url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+
+ version("0.2.16", md5="b1190f3d3471685f17cfd1ec1d252ac9")
+
+ depends_on("blas")
diff --git a/var/spack/repos/edges.test/packages/conditional-edge/package.py b/var/spack/repos/edges.test/packages/conditional-edge/package.py
new file mode 100644
index 00000000000000..964596fcc14a7e
--- /dev/null
+++ b/var/spack/repos/edges.test/packages/conditional-edge/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.package import *
+
+
+class ConditionalEdge(Package):
+ """This package has a variant that triggers a condition only if a required dependency is
+ providing a virtual.
+ """
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/a-1.0.tar.gz"
+
+ version("2.0", md5="abcdef0123456789abcdef0123456789")
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+ variant("foo", default=False, description="Just a regular foo")
+
+ # zlib is a real package, providing zlib-api
+ depends_on("zlib")
+ depends_on("zlib-api", when="+foo")
+ depends_on("zlib@1.0", when="^[virtuals=zlib-api] zlib")
diff --git a/var/spack/repos/edges.test/packages/openblas/package.py b/var/spack/repos/edges.test/packages/openblas/package.py
new file mode 100644
index 00000000000000..d162e069b0bae0
--- /dev/null
+++ b/var/spack/repos/edges.test/packages/openblas/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Openblas(Package):
+    """This package provides two virtuals together, so if it is chosen as the provider for
+    one of them, it must also be used for the other when that one is needed.
+ """
+
+ homepage = "http://www.openblas.net"
+ url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+
+ version("0.2.16", md5="b1190f3d3471685f17cfd1ec1d252ac9")
+ version("0.2.15", md5="b1190f3d3471685f17cfd1ec1d252ac9")
+ version("0.2.14", md5="b1190f3d3471685f17cfd1ec1d252ac9")
+ version("0.2.13", md5="b1190f3d3471685f17cfd1ec1d252ac9")
+
+ provides("blas", "lapack")
diff --git a/var/spack/repos/edges.test/packages/zlib/package.py b/var/spack/repos/edges.test/packages/zlib/package.py
new file mode 100644
index 00000000000000..66dfc4f58bb94b
--- /dev/null
+++ b/var/spack/repos/edges.test/packages/zlib/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.package import *
+
+
+class Zlib(Package):
+    """Test package that mimics the real zlib and provides the 'zlib-api' virtual,
+    used to exercise conditional dependency edges on virtuals.
+ """
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/a-1.0.tar.gz"
+
+ version("2.0", md5="abcdef0123456789abcdef0123456789")
+ version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+ provides("zlib-api")
diff --git a/var/spack/repos/edges.test/repo.yaml b/var/spack/repos/edges.test/repo.yaml
new file mode 100644
index 00000000000000..86df79affe294a
--- /dev/null
+++ b/var/spack/repos/edges.test/repo.yaml
@@ -0,0 +1,2 @@
+repo:
+ namespace: edges.test
diff --git a/var/spack/repos/tutorial/packages/libpspio/package.py b/var/spack/repos/tutorial/packages/libpspio/package.py
new file mode 100644
index 00000000000000..9cd4e7fc4df251
--- /dev/null
+++ b/var/spack/repos/tutorial/packages/libpspio/package.py
@@ -0,0 +1,37 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Libpspio(AutotoolsPackage):
+ """Library to perform I/O operations on pseudopotential data files."""
+
+ homepage = "https://gitlab.com/ElectronicStructureLibrary/libpspio"
+ url = "https://gitlab.com/ElectronicStructureLibrary/libpspio/-/archive/0.3.0/libpspio-0.3.0.tar.gz"
+
+ maintainers("hmenke")
+
+ license("MPL-2.0")
+
+ version("0.3.0", sha256="4dc092457e481e5cd703eeecd87e6f17749941fe274043550c8a2557a649afc5")
+
+ variant("fortran", default=False, description="Enable Fortran bindings")
+
+ depends_on("autoconf", type="build")
+ depends_on("automake", type="build")
+ depends_on("libtool", type="build")
+ depends_on("m4", type="build")
+ depends_on("pkgconfig", type="build")
+
+ depends_on("check")
+ depends_on("gsl")
+
+ def autoreconf(self, spec, prefix):
+ Executable("./autogen.sh")()
+
+ def configure_args(self):
+ args = self.enable_or_disable("fortran")
+ return args
]