diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml
index ca9c02e62d475b..3d8e1f10ae9057 100644
--- a/.github/workflows/audit.yaml
+++ b/.github/workflows/audit.yaml
@@ -34,6 +34,7 @@ jobs:
         run: |
           . share/spack/setup-env.sh
           coverage run $(which spack) audit packages
+          coverage run $(which spack) audit externals
           coverage combine
           coverage xml
       - name: Package audits (without coverage)
@@ -41,6 +42,7 @@ jobs:
         run: |
           . share/spack/setup-env.sh
           $(which spack) audit packages
+          $(which spack) audit externals
       - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
         if: ${{ inputs.with_coverage == 'true' }}
         with:
diff --git a/lib/spack/docs/.gitignore b/lib/spack/docs/.gitignore
index d481aa0923b9db..b349291a8a598b 100644
--- a/lib/spack/docs/.gitignore
+++ b/lib/spack/docs/.gitignore
@@ -1,4 +1,3 @@
-package_list.html
 command_index.rst
 spack*.rst
 llnl*.rst
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index af6d2dab91a8c9..d1f048ac055acc 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -45,7 +45,8 @@ Listing available packages
 
 To install software with Spack, you need to know what software is
 available. You can see a list of available package names at the
-:ref:`package-list` webpage, or using the ``spack list`` command.
+`packages.spack.io <https://packages.spack.io>`_ website, or
+using the ``spack list`` command.
 
 .. _cmd-spack-list:
 
@@ -60,7 +61,7 @@ can install:
    :ellipsis: 10
 
 There are thousands of them, so we've truncated the output above, but you
-can find a :ref:`full list here <package-list>`.
+can find a `full list here <https://packages.spack.io>`_.
 Packages are listed by name in alphabetical order.
 A pattern to match with no wildcards, ``*`` or ``?``,
 will be treated as though it started and ended with
diff --git a/lib/spack/docs/build_systems/inteloneapipackage.rst b/lib/spack/docs/build_systems/inteloneapipackage.rst
index bd2e370de09330..e9fd26690f9ddb 100644
--- a/lib/spack/docs/build_systems/inteloneapipackage.rst
+++ b/lib/spack/docs/build_systems/inteloneapipackage.rst
@@ -25,8 +25,8 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. See the :ref:`package-list` for the full list of available
-oneAPI packages or use::
+packages. Search for ``oneAPI`` at `<https://packages.spack.io>`_ for the full
+list of available oneAPI packages, or use::
 
    spack list -d oneAPI
 
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 63ad656cf310e4..f1bde9c9fbdfd0 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -48,9 +48,6 @@
 os.environ["COLIFY_SIZE"] = "25x120"
 os.environ["COLUMNS"] = "120"
 
-# Generate full package list if needed
-subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])
-
 # Generate a command index if an update is needed
 subprocess.call(
     [
diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst
index 8933f590d7ca06..ec9234aa461ab5 100644
--- a/lib/spack/docs/contribution_guide.rst
+++ b/lib/spack/docs/contribution_guide.rst
@@ -310,53 +310,11 @@ Once all of the dependencies are installed, you can try building the documentati
 
    $ make clean
    $ make
 
-If you see any warning or error messages, you will have to correct those before
-your PR is accepted.
-
-If you are editing the documentation, you should obviously be running the
-documentation tests. But even if you are simply adding a new package, your
-changes could cause the documentation tests to fail:
-
-.. code-block:: console
-
-   package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.
-
-At first, this error message will mean nothing to you, since you didn't edit
-that file. Until you look at line 8745 of the file in question:
-
-.. code-block:: rst
-
-   Description:
-      NetCDF is a set of software libraries and self-describing, machine-
-   independent data formats that support the creation, access, and sharing
-   of array-oriented scientific data.
-
-Our documentation includes :ref:`a list of all Spack packages <package-list>`.
-If you add a new package, its docstring is added to this page. The problem in
-this case was that the docstring looked like:
-
-.. code-block:: python
-
-   class Netcdf(Package):
-       """
-       NetCDF is a set of software libraries and self-describing,
-       machine-independent data formats that support the creation,
-       access, and sharing of array-oriented scientific data.
-       """
-
-Docstrings cannot start with a newline character, or else Sphinx will complain.
-Instead, they should look like:
-
-.. code-block:: python
-
-   class Netcdf(Package):
-       """NetCDF is a set of software libraries and self-describing,
-       machine-independent data formats that support the creation,
-       access, and sharing of array-oriented scientific data."""
-
-Documentation changes can result in much more obfuscated warning messages.
-If you don't understand what they mean, feel free to ask when you submit
-your PR.
+If you see any warning or error messages, you will have to correct those before your PR
+is accepted. If you are editing the documentation, you should be running the
+documentation tests to make sure there are no errors. Documentation changes can result
+in some obfuscated warning messages. If you don't understand what they mean, feel free
+to ask when you submit your PR.
 
 --------
 Coverage
diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst
index be4a42045d1b71..0dd27a2444516a 100644
--- a/lib/spack/docs/index.rst
+++ b/lib/spack/docs/index.rst
@@ -54,9 +54,16 @@ or refer to the full manual below.
    features
    getting_started
    basic_usage
-   Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
    replace_conda_homebrew
 
+.. toctree::
+   :maxdepth: 2
+   :caption: Links
+
+   Tutorial (spack-tutorial.rtfd.io) <https://spack-tutorial.readthedocs.io>
+   Packages (packages.spack.io) <https://packages.spack.io>
+   Binaries (binaries.spack.io) <https://binaries.spack.io>
+
 .. toctree::
    :maxdepth: 2
    :caption: Reference
@@ -72,7 +79,6 @@ or refer to the full manual below.
    repositories
    binary_caches
    command_index
-   package_list
    chain
    extensions
    pipelines
diff --git a/lib/spack/docs/package_list.rst b/lib/spack/docs/package_list.rst
deleted file mode 100644
index dfff0704608fb2..00000000000000
--- a/lib/spack/docs/package_list.rst
+++ /dev/null
@@ -1,17 +0,0 @@
-.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-   Spack Project Developers. See the top-level COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _package-list:
-
-============
-Package List
-============
-
-This is a list of things you can install using Spack. It is
-automatically generated based on the packages in this Spack
-version.
-
-.. raw:: html
-   :file: package_list.html
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index f433996ec7d5e1..d25009532ad7a1 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -6196,7 +6196,100 @@ follows:
             "foo-package@{0}".format(version_str)
         )
 
-.. _package-lifecycle:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Add detection tests to packages
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To ensure that software is detected correctly for multiple configurations
+and on different systems, users can write a ``detection_test.yaml`` file and
+put it in the package directory alongside the ``package.py`` file.
+This YAML file contains enough information for Spack to mock an environment
+and check whether the detection logic yields the expected results.
+
+As a general rule, attributes at the top level of ``detection_test.yaml``
+represent search mechanisms, and each maps to a list of tests that should confirm
+the validity of the package's detection logic.
+
+The detection tests can be run with the following command:
+
+.. code-block:: console
+
+   $ spack audit externals
+
+Errors that have been detected are reported to the screen.
+
+""""""""""""""""""""""""""
+Tests for PATH inspections
+""""""""""""""""""""""""""
+
+Detection tests relying on ``PATH`` inspections are listed under
+the ``paths`` attribute:
+
+.. code-block:: yaml
+
+   paths:
+   - layout:
+     - executables:
+       - "bin/clang-3.9"
+       - "bin/clang++-3.9"
+       script: |
+         echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
+         echo "Target: x86_64-pc-linux-gnu"
+         echo "Thread model: posix"
+         echo "InstalledDir: /usr/bin"
+     results:
+     - spec: 'llvm@3.9.1 +clang~lld~lldb'
+
+Each test is performed by first creating a temporary directory structure as
+specified in the corresponding ``layout``, then running
+package detection and checking that the outcome matches the expected
+``results``. The exact details on how to specify both the ``layout`` and the
+``results`` are reported in the table below:
+
+.. list-table:: Test based on PATH inspections
+   :header-rows: 1
+
+   * - Option Name
+     - Description
+     - Allowed Values
+     - Required Field
+   * - ``layout``
+     - Specifies the filesystem tree used for the test
+     - List of objects
+     - Yes
+   * - ``layout:[0]:executables``
+     - Relative paths for the mock executables to be created
+     - List of strings
+     - Yes
+   * - ``layout:[0]:script``
+     - Mock logic for the executable
+     - Any valid shell script
+     - Yes
+   * - ``results``
+     - List of expected results
+     - List of objects (empty if no result is expected)
+     - Yes
+   * - ``results:[0]:spec``
+     - A spec that is expected from detection
+     - Any valid spec
+     - Yes
+
+"""""""""""""""""""""""""""""""
+Reuse tests from other packages
+"""""""""""""""""""""""""""""""
+
+When using a custom repository, it is possible to customize a package that already exists in ``builtin``
+and reuse its external tests. To do so, just write a ``detection_test.yaml`` alongside the customized
+``package.py`` with an ``includes`` attribute. For instance, the ``detection_test.yaml`` for
+``myrepo.llvm`` might look like:
+
+.. code-block:: yaml
+
+   includes:
+   - "builtin.llvm"
+
+This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
+those locally defined in the file.
 
 -----------------------------
 Style guidelines for packages
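Putting the two snippets above together, a combined ``detection_test.yaml`` for the
hypothetical ``myrepo.llvm`` customization could inherit the ``builtin.llvm`` tests and
add one local ``paths`` test. This is only a sketch that reuses values already shown in
this section; ``myrepo`` is an assumed repository name:

.. code-block:: yaml

   # Hypothetical detection_test.yaml for a customized myrepo.llvm package:
   # run all tests inherited from builtin.llvm, plus one local test.
   includes:
   - "builtin.llvm"
   paths:
   - layout:
     - executables:
       - "bin/clang-3.9"
       - "bin/clang++-3.9"
       script: |
         echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
         echo "Target: x86_64-pc-linux-gnu"
         echo "Thread model: posix"
         echo "InstalledDir: /usr/bin"
     results:
     - spec: 'llvm@3.9.1 +clang~lld~lldb'

With such a file, ``spack audit externals llvm`` would run both the inherited and the
locally defined test cases.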
diff --git a/lib/spack/docs/replace_conda_homebrew.rst b/lib/spack/docs/replace_conda_homebrew.rst
index 42a3561300eab4..c0d2060c703b96 100644
--- a/lib/spack/docs/replace_conda_homebrew.rst
+++ b/lib/spack/docs/replace_conda_homebrew.rst
@@ -4,7 +4,7 @@
    SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 =====================================
-Using Spack to Replace Homebrew/Conda
+Spack for Homebrew/Conda Users
 =====================================
 
 Spack is an incredibly powerful package manager, designed for supercomputers
@@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package
 where possible instead of trying to optimize for reuse of existing installed
 packages.
 
-The ``--force`` flag in addition tells Spack to overwrite its previous 
-concretization decisions, allowing you to choose a new version of Python. 
-If any of the new packages like Bash are already installed, ``spack install`` 
+The ``--force`` flag in addition tells Spack to overwrite its previous
+concretization decisions, allowing you to choose a new version of Python.
+If any of the new packages like Bash are already installed, ``spack install``
 won't re-install them, it will keep the symlinks in place.
 
 -----------------------------------
 Updating & Cleaning Up Old Packages
 -----------------------------------
 
-If you're looking to mimic the behavior of Homebrew, you may also want to 
-clean up out-of-date packages from your environment after an upgrade. To 
-upgrade your entire software stack within an environment and clean up old 
+If you're looking to mimic the behavior of Homebrew, you may also want to
+clean up out-of-date packages from your environment after an upgrade. To
+upgrade your entire software stack within an environment and clean up old
 package versions, simply run the following commands:
 
 .. code-block:: console
@@ -212,9 +212,9 @@ package versions, simply run the following commands:
    $ spack mark -i --all
    $ spack concretize --fresh --force
    $ spack install
    $ spack gc
- 
-Running ``spack mark -i --all`` tells Spack to mark all of the existing 
-packages within an environment as "implicitly" installed. This tells 
+
+Running ``spack mark -i --all`` tells Spack to mark all of the existing
+packages within an environment as "implicitly" installed. This tells
 spack's garbage collection system that these packages should be cleaned up.
 
 Don't worry however, this will not remove your entire environment.
@@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain
 installed as "explicitly" installed.
 
 **Note:** if you use multiple spack environments you should re-run ``spack install`` 
-in each of your environments prior to running ``spack gc`` to prevent spack 
-from uninstalling any shared packages that are no longer required by the 
+in each of your environments prior to running ``spack gc`` to prevent spack
+from uninstalling any shared packages that are no longer required by the
 environment you just upgraded.
 
 --------------
diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py
index c3a028a72b1485..176c45487f51d0 100644
--- a/lib/spack/spack/audit.py
+++ b/lib/spack/spack/audit.py
@@ -38,10 +38,13 @@ def _search_duplicate_compilers(error_cls):
 import ast
 import collections
 import collections.abc
+import glob
 import inspect
 import itertools
+import pathlib
 import pickle
 import re
+import warnings
 from urllib.request import urlopen
 
 import llnl.util.lang
@@ -798,3 +801,76 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
             errors.append(err)
 
     return errors
+
+
+#: Sanity checks on the detection of external software
+external_detection = AuditClass(
+    group="externals",
+    tag="PKG-EXTERNALS",
+    description="Sanity checks for external software detection",
+    kwargs=("pkgs",),
+)
+
+
+def packages_with_detection_tests():
+    """Return the list of packages with a corresponding detection_test.yaml file."""
+    import spack.config
+    import spack.util.path
+
+    to_be_tested = []
+    for current_repo in spack.repo.PATH.repos:
+        namespace = current_repo.namespace
+        packages_dir = pathlib.PurePath(current_repo.packages_path)
+        pattern = packages_dir / "**" / "detection_test.yaml"
+        pkgs_with_tests = [
+            f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
+        ]
+        to_be_tested.extend(pkgs_with_tests)
+
+    return to_be_tested
+
+
+@external_detection
+def _test_detection_by_executable(pkgs, error_cls):
+    """Run the detection tests for the packages passed as arguments"""
+    import spack.detection
+
+    errors = []
+
+    # Filter the packages and retain only the ones with detection tests
+    pkgs_with_tests = packages_with_detection_tests()
+    selected_pkgs = []
+    for current_package in pkgs_with_tests:
+        _, unqualified_name = spack.repo.partition_package_name(current_package)
+        # Check for both unqualified name and qualified name
+        if unqualified_name in pkgs or current_package in pkgs:
+            selected_pkgs.append(current_package)
+    selected_pkgs.sort()
+
+    if not selected_pkgs:
+        summary = "No detection test to run"
+        details = [f' "{p}" has no detection test' for p in pkgs]
+        warnings.warn("\n".join([summary] + details))
+        return errors
+
+    for pkg_name in selected_pkgs:
+        for idx, test_runner in enumerate(
+            spack.detection.detection_tests(pkg_name, spack.repo.PATH)
+        ):
+            specs = test_runner.execute()
+            expected_specs = test_runner.expected_specs
+
+            not_detected = set(expected_specs) - set(specs)
+            if not_detected:
+                summary = pkg_name + ": cannot detect some specs"
+                details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
+                errors.append(error_cls(summary=summary, details=details))
+
+            not_expected = set(specs) - set(expected_specs)
+            if not_expected:
+                summary = pkg_name + ": detected unexpected specs"
+                msg = '"{0}" was detected, but was not expected [test_id={1}]'
+                details = [msg.format(s, idx) for s in sorted(not_expected)]
+                errors.append(error_cls(summary=summary, details=details))
+
+    return errors
diff --git a/lib/spack/spack/cmd/audit.py b/lib/spack/spack/cmd/audit.py
index cd56dfadd95ef1..86eea9f7bc8b9a 100644
--- a/lib/spack/spack/cmd/audit.py
+++ b/lib/spack/spack/cmd/audit.py
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import llnl.util.tty as tty
+import llnl.util.tty.colify
 import llnl.util.tty.color as cl
 
 import spack.audit
@@ -20,6 +21,15 @@ def setup_parser(subparser):
     # Audit configuration files
     sp.add_parser("configs", help="audit configuration files")
 
+    # Audit detection of external packages
+    external_parser = sp.add_parser("externals", help="check external detection in packages")
+    external_parser.add_argument(
+        "--list",
+        action="store_true",
+        dest="list_externals",
+        help="if passed, list which packages have detection tests",
+    )
+
     # Https and other linting
     https_parser = sp.add_parser("packages-https", help="check https in packages")
     https_parser.add_argument(
@@ -29,7 +39,7 @@ def setup_parser(subparser):
 
     # Audit package recipes
     pkg_parser = sp.add_parser("packages", help="audit package recipes")
 
-    for group in [pkg_parser, https_parser]:
+    for group in [pkg_parser, https_parser, external_parser]:
         group.add_argument(
             "name",
             metavar="PKG",
@@ -62,6 +72,18 @@ def packages_https(parser, args):
     _process_reports(reports)
 
 
+def externals(parser, args):
+    if args.list_externals:
+        msg = "@*{The following packages have detection tests:}"
+        tty.msg(cl.colorize(msg))
+        llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2)
+        return
+
+    pkgs = args.name or spack.repo.PATH.all_package_names()
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    _process_reports(reports)
+
+
 def list(parser, args):
     for subcommand, check_tags in spack.audit.GROUPS.items():
         print(cl.colorize("@*b{" + subcommand + "}:"))
@@ -78,6 +100,7 @@ def list(parser, args):
 def audit(parser, args):
     subcommands = {
         "configs": configs,
+        "externals": externals,
         "packages": packages,
         "packages-https": packages_https,
         "list": list,
diff --git a/lib/spack/spack/cmd/external.py b/lib/spack/spack/cmd/external.py
index bf29787db9bc01..081ec8039438bd 100644
--- a/lib/spack/spack/cmd/external.py
+++ b/lib/spack/spack/cmd/external.py
@@ -5,6 +5,7 @@
 import argparse
 import errno
 import os
+import re
 import sys
 from typing import List, Optional
 
@@ -156,11 +157,20 @@ def packages_to_search_for(
 ):
     result = []
     for current_tag in tags:
-        result.extend(spack.repo.PATH.packages_with_tags(current_tag))
+        result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))
+
     if names:
-        result = [x for x in result if x in names]
+        # Match both fully qualified and unqualified
+        parts = [rf"(^{x}$|[.]{x}$)" for x in names]
+        select_re = re.compile("|".join(parts))
+        result = [x for x in result if select_re.search(x)]
+
     if exclude:
-        result = [x for x in result if x not in exclude]
+        # Match both fully qualified and unqualified
+        parts = [rf"(^{x}$|[.]{x}$)" for x in exclude]
+        select_re = re.compile("|".join(parts))
+        result = [x for x in result if not select_re.search(x)]
+
     return result
diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py
index 51f7b02e2bd987..a642960b7df522 100644
--- a/lib/spack/spack/compilers/aocc.py
+++ b/lib/spack/spack/compilers/aocc.py
@@ -112,6 +112,7 @@ def extract_version_from_output(cls, output):
         match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
         if match:
             return ".".join(match.groups())
+        return "unknown"
 
     @classmethod
     def fc_version(cls, fortran_compiler):
diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py
index ac40191d0f5f39..48ec52d782d3bb 100644
--- a/lib/spack/spack/cray_manifest.py
+++ b/lib/spack/spack/cray_manifest.py
@@ -4,6 +4,9 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import json
+import os
+import traceback
+import warnings
 
 import jsonschema
 import jsonschema.exceptions
@@ -46,9 +49,29 @@ def translated_compiler_name(manifest_compiler_name):
         )
 
 
-def compiler_from_entry(entry):
+def compiler_from_entry(entry: dict, manifest_path: str):
+    # Note that manifest_path is only passed here to compose a
+    # useful warning message when paths appear to be missing
     compiler_name = translated_compiler_name(entry["name"])
-    paths = entry["executables"]
+
+    if "prefix" in entry:
+        prefix = entry["prefix"]
+        paths = dict(
+            (lang, os.path.join(prefix, relpath))
+            for (lang, relpath) in entry["executables"].items()
+        )
+    else:
+        paths = entry["executables"]
+
+    # Do a check for missing paths. Note that this isn't possible for
+    # all compiler entries, since their "paths" might actually be
+    # exe names like "cc" that depend on modules being loaded. Cray
+    # manifest entries are always paths though.
+    missing_paths = []
+    for path in paths.values():
+        if not os.path.exists(path):
+            missing_paths.append(path)
+
     # to instantiate a compiler class we may need a concrete version:
     version = "={}".format(entry["version"])
     arch = entry["arch"]
@@ -57,8 +80,18 @@ def compiler_from_entry(entry):
 
     compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
     spec = spack.spec.CompilerSpec(compiler_cls.name, version)
-    paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
-    return compiler_cls(spec, operating_system, target, paths)
+    path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
+
+    if missing_paths:
+        warnings.warn(
+            "Manifest entry refers to nonexistent paths:\n\t"
+            + "\n\t".join(missing_paths)
+            + f"\nfor {str(spec)}"
+            + f"\nin {manifest_path}"
+            + "\nPlease report this issue"
+        )
+
+    return compiler_cls(spec, operating_system, target, path_list)
 
 
 def spec_from_entry(entry):
@@ -187,12 +220,21 @@ def read(path, apply_updates):
     tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
     compilers = list()
     if "compilers" in json_data:
-        compilers.extend(compiler_from_entry(x) for x in json_data["compilers"])
+        compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
     tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
     # Filter out the compilers that already appear in the configuration
     compilers = spack.compilers.select_new_compilers(compilers)
     if apply_updates and compilers:
-        spack.compilers.add_compilers_to_config(compilers, init_config=False)
+        for compiler in compilers:
+            try:
+                spack.compilers.add_compilers_to_config([compiler], init_config=False)
+            except Exception:
+                warnings.warn(
+                    f"Could not add compiler {str(compiler.spec)}: "
+                    f"\n\tfrom manifest: {path}"
+                    "\nPlease reexecute with 'spack -d' and include the stack trace"
+                )
+                tty.debug(f"Include this\n{traceback.format_exc()}")
     if apply_updates:
         for spec in specs.values():
             spack.store.STORE.db.add(spec, directory_layout=None)
diff --git a/lib/spack/spack/detection/__init__.py b/lib/spack/spack/detection/__init__.py
index 73ae34ce639fb0..7c54fb9d49ba76 100644
--- a/lib/spack/spack/detection/__init__.py
+++ b/lib/spack/spack/detection/__init__.py
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from .common import DetectedPackage, executable_prefix, update_configuration
 from .path import by_path, executables_in_path
+from .test import detection_tests
 
 __all__ = [
     "DetectedPackage",
@@ -11,4 +12,5 @@
     "executables_in_path",
     "executable_prefix",
     "update_configuration",
+    "detection_tests",
 ]
diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py
index 50a3a2695a8346..0e873c3f555095 100644
--- a/lib/spack/spack/detection/common.py
+++ b/lib/spack/spack/detection/common.py
@@ -299,36 +299,36 @@ def find_windows_compiler_bundled_packages() -> List[str]:
 
 
 class WindowsKitExternalPaths:
-    plat_major_ver = None
-    if sys.platform == "win32":
-        plat_major_ver = str(winOs.windows_version()[0])
-
     @staticmethod
-    def find_windows_kit_roots() -> Optional[str]:
+    def find_windows_kit_roots() -> List[str]:
         """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
         if sys.platform != "win32":
-            return None
+            return []
         program_files = os.environ["PROGRAMFILES(x86)"]
-        kit_base = os.path.join(
-            program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
-        )
-        return kit_base
+        kit_base = os.path.join(program_files, "Windows Kits", "**")
+        return glob.glob(kit_base)
 
     @staticmethod
     def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit bin directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
-        assert kit_base is not None, "unexpected value for kit_base"
-        kit_bin = os.path.join(kit_base, "bin")
-        return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
+        assert kit_base, "Unexpectedly empty value for Windows kit base path"
+        kit_paths = []
+        for kit in kit_base:
+            kit_bin = os.path.join(kit, "bin")
+            kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")))
+        return kit_paths
 
     @staticmethod
     def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit lib directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
-        assert kit_base is not None, "unexpected value for kit_base"
-        kit_lib = os.path.join(kit_base, "Lib")
-        return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
+        assert kit_base, "Unexpectedly empty value for Windows kit base path"
+        kit_paths = []
+        for kit in kit_base:
+            kit_lib = os.path.join(kit, "Lib")
+            kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")))
+        return kit_paths
 
     @staticmethod
     def find_windows_driver_development_kit_paths() -> List[str]:
@@ -347,23 +347,30 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]:
         if not reg:
             # couldn't find key, return empty list
             return []
-        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
-            reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
-        )
+        kit_root_reg = re.compile(r"KitsRoot[0-9]+")
+        root_paths = []
+        for kit_root in filter(kit_root_reg.match, reg.get_values().keys()):
+            root_paths.extend(
+                WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value)
+            )
+        return root_paths
 
     @staticmethod
     def find_windows_kit_reg_sdk_paths() -> List[str]:
-        reg = spack.util.windows_registry.WindowsRegistryView(
-            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
-            % WindowsKitExternalPaths.plat_major_ver,
+        sdk_paths = []
+        sdk_regex = re.compile(r"v[0-9]+.[0-9]+")
+        windows_reg = spack.util.windows_registry.WindowsRegistryView(
+            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows",
             root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
         )
-        if not reg:
-            # couldn't find key, return empty list
-            return []
-        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
-            reg.get_value("InstallationFolder").value
-        )
+        for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]):
+            reg = windows_reg.get_subkey(key)
+            sdk_paths.extend(
+                WindowsKitExternalPaths.find_windows_kit_lib_paths(
+                    reg.get_value("InstallationFolder").value
+                )
+            )
+        return sdk_paths
 
 
 def find_win32_additional_install_paths() -> List[str]:
diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py
index 4a085aacd06f69..4de703ac97b0f3 100644
--- a/lib/spack/spack/detection/path.py
+++ b/lib/spack/spack/detection/path.py
@@ -2,7 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Detection of software installed in the system based on paths inspections
+"""Detection of software installed in the system, based on path inspections
 and running executables.
 """
 import collections
@@ -322,12 +322,14 @@ def by_path(
     path_hints: Optional[List[str]] = None,
     max_workers: Optional[int] = None,
 ) -> Dict[str, List[DetectedPackage]]:
-    """Return the list of packages that have been detected on the system,
-    searching by path.
+    """Return the list of packages that have been detected on the system, keyed by
+    unqualified package name.
 
     Args:
-        packages_to_search: list of package classes to be detected
+        packages_to_search: list of packages to be detected. Each package can be either
+            unqualified or fully qualified
         path_hints: initial list of paths to be searched
+        max_workers: maximum number of workers to search for packages in parallel
     """
     # TODO: Packages should be able to define both .libraries and .executables in the future
     # TODO: determine_spec_details should get all relevant libraries and executables in one call
@@ -355,7 +357,8 @@ def by_path(
         try:
             detected = future.result(timeout=DETECTION_TIMEOUT)
             if detected:
-                result[pkg_name].extend(detected)
+                _, unqualified_name = spack.repo.partition_package_name(pkg_name)
+                result[unqualified_name].extend(detected)
         except Exception:
             llnl.util.tty.debug(
                 f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
diff --git a/lib/spack/spack/detection/test.py b/lib/spack/spack/detection/test.py
new file mode 100644
index 00000000000000..f33040f2929e86
--- /dev/null
+++ b/lib/spack/spack/detection/test.py
@@ -0,0 +1,187 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+"""Create and run mock e2e tests for package detection."""
+import collections
+import contextlib
+import pathlib
+import tempfile
+from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple
+
+import jinja2
+
+from llnl.util import filesystem
+
+import spack.repo
+import spack.spec
+from spack.util import spack_yaml
+
+from .path import by_path
+
+
+class MockExecutables(NamedTuple):
+    """Mock executables to be used in detection tests"""
+
+    #: Relative paths for mock executables to be created
+    executables: List[str]
+    #: Shell script for the mock executable
+    script: str
+
+
+class ExpectedTestResult(NamedTuple):
+    """Data structure to model assertions on detection tests"""
+
+    #: Spec to be detected
+    spec: str
+
+
+class DetectionTest(NamedTuple):
+    """Data structure to construct detection tests by PATH inspection.
+
+    Packages may have a YAML file containing the description of one or more detection tests
+    to be performed. Each test creates a few mock executable scripts in a temporary folder,
+    and checks that detection by PATH gives the expected results.
+    """
+
+    pkg_name: str
+    layout: List[MockExecutables]
+    results: List[ExpectedTestResult]
+
+
+class Runner:
+    """Runs an external detection test"""
+
+    def __init__(self, *, test: DetectionTest, repository: spack.repo.RepoPath) -> None:
+        self.test = test
+        self.repository = repository
+        self.tmpdir = tempfile.TemporaryDirectory()
+
+    def execute(self) -> List[spack.spec.Spec]:
+        """Executes a test and returns the specs that have been detected.
+
+        This function sets up a test in a temporary directory, according to the prescriptions
+        in the test layout, then performs detection by executables and returns the specs that
+        have been detected.
+        """
+        with self._mock_layout() as path_hints:
+            entries = by_path([self.test.pkg_name], path_hints=path_hints)
+            _, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
+            specs = set(x.spec for x in entries[unqualified_name])
+            return list(specs)
+
+    @contextlib.contextmanager
+    def _mock_layout(self) -> Generator[List[str], None, None]:
+        hints = set()
+        try:
+            for entry in self.test.layout:
+                exes = self._create_executable_scripts(entry)
+
+                for mock_executable in exes:
+                    hints.add(str(mock_executable.parent))
+
+            yield list(hints)
+        finally:
+            self.tmpdir.cleanup()
+
+    def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
+        relative_paths = mock_executables.executables
+        script = mock_executables.script
+        script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
+        result = []
+        for mock_exe_path in relative_paths:
+            rel_path = pathlib.Path(mock_exe_path)
+            abs_path = pathlib.Path(self.tmpdir.name) / rel_path
+            abs_path.parent.mkdir(parents=True, exist_ok=True)
+            abs_path.write_text(script_template.render(script=script))
+            filesystem.set_executable(abs_path)
+            result.append(abs_path)
+        return result
+
+    @property
+    def expected_specs(self) -> List[spack.spec.Spec]:
+        return [spack.spec.Spec(r.spec) for r in self.test.results]
+
+
+def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
+    """Returns a list of test runners for a given package.
+
+    Currently, detection tests are specified in a YAML file, called ``detection_test.yaml``,
+    alongside the ``package.py`` file.
+
+    This function reads that file to create a list of ``Runner`` objects.
+
+    Args:
+        pkg_name: name of the package to test
+        repository: repository where the package lives
+    """
+    result = []
+    detection_tests_content = read_detection_tests(pkg_name, repository)
+
+    tests_by_path = detection_tests_content.get("paths", [])
+    for single_test_data in tests_by_path:
+        mock_executables = []
+        for layout in single_test_data["layout"]:
+            mock_executables.append(
+                MockExecutables(executables=layout["executables"], script=layout["script"])
+            )
+        expected_results = []
+        for assertion in single_test_data["results"]:
+            expected_results.append(ExpectedTestResult(spec=assertion["spec"]))
+
+        current_test = DetectionTest(
+            pkg_name=pkg_name, layout=mock_executables, results=expected_results
+        )
+        result.append(Runner(test=current_test, repository=repository))
+
+    return result
+
+
+def read_detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> Dict[str, Any]:
+    """Returns the normalized content of the detection_test.yaml associated with the package
+    passed as input.
+
+    The content is merged with that of any package that is transitively included using the
+    "includes" attribute.
+
+    Args:
+        pkg_name: name of the package to test
+        repository: repository in which to search for packages
+    """
+    content_stack, seen = [], set()
+    included_packages: Deque[str] = collections.deque()
+
+    root_detection_yaml, result = _detection_tests_yaml(pkg_name, repository)
+    included_packages.extend(result.get("includes", []))
+    seen |= set(result.get("includes", []))
+
+    while included_packages:
+        current_package = included_packages.popleft()
+        try:
+            current_detection_yaml, content = _detection_tests_yaml(current_package, repository)
+        except FileNotFoundError as e:
+            msg = (
+                f"cannot read the detection tests from the '{current_package}' package, "
+                f"included by {root_detection_yaml}"
+            )
+            raise FileNotFoundError(msg + f"\n\n\t{e}\n")
+
+        content_stack.append((current_package, content))
+        included_packages.extend(x for x in content.get("includes", []) if x not in seen)
+        seen |= set(content.get("includes", []))
+
+    result.setdefault("paths", [])
+    for pkg_name, content in content_stack:
+        result["paths"].extend(content.get("paths", []))
+
+    return result
+
+
+def _detection_tests_yaml(
+    pkg_name: str, repository: spack.repo.RepoPath
+) -> Tuple[pathlib.Path, Dict[str, Any]]:
+    pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
+    detection_tests_yaml = pkg_dir / "detection_test.yaml"
+    with open(str(detection_tests_yaml)) as f:
+        content = spack_yaml.load(f)
+    return detection_tests_yaml, content
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 8578b110fceb39..aa96bbbe5106d9 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -734,7 +734,11 @@ def version_from_git(git_exe):
     @property
     def git(self):
         if not self._git:
-            self._git = spack.util.git.git()
+            try:
+                self._git = spack.util.git.git(required=True)
+            except CommandNotFoundError as exc:
+                tty.error(str(exc))
+                raise
 
             # Disable advice for a quieter fetch
             # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index 95fbae5847d73a..99ab7d45bd412c 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -847,10 +847,11 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
         else:
             cache_only = self.install_args.get("dependencies_cache_only")
 
-        # Include build dependencies if pkg is not installed and cache_only
-        # is False, or if build dependencies are explicitly called for
-        # by include_build_deps.
-        if include_build_deps or not (cache_only or pkg.spec.installed):
+        # Include build dependencies if pkg is going to be built from sources, or
+        # if build deps are explicitly requested.
+        if include_build_deps or not (
+            cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
+        ):
             depflag |= dt.BUILD
         if self.run_tests(pkg):
             depflag |= dt.TEST
diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py
index 65fe65020bce2f..16b20abf0b528e 100644
--- a/lib/spack/spack/repo.py
+++ b/lib/spack/spack/repo.py
@@ -24,7 +24,7 @@
 import traceback
 import types
 import uuid
-from typing import Any, Dict, List, Union
+from typing import Any, Dict, List, Tuple, Union
 
 import llnl.path
 import llnl.util.filesystem as fs
@@ -745,10 +745,18 @@ def all_package_paths(self):
         for name in self.all_package_names():
             yield self.package_path(name)
 
-    def packages_with_tags(self, *tags):
+    def packages_with_tags(self, *tags, full=False):
+        """Returns a list of packages matching any of the given tags.
+
+        Args:
+            full: if True, the package names in the output are fully qualified
+        """
         r = set()
         for repo in self.repos:
-            r |= set(repo.packages_with_tags(*tags))
+            current = repo.packages_with_tags(*tags)
+            if full:
+                current = [f"{repo.namespace}.{x}" for x in current]
+            r |= set(current)
         return sorted(r)
 
     def all_package_classes(self):
@@ -1129,7 +1137,8 @@ def extensions_for(self, extendee_spec):
     def dirname_for_package_name(self, pkg_name):
         """Get the directory name for a particular package. This is the
        directory that contains its package.py file."""
-        return os.path.join(self.packages_path, pkg_name)
+        _, unqualified_name = self.partition_package_name(pkg_name)
+        return os.path.join(self.packages_path, unqualified_name)
 
     def filename_for_package_name(self, pkg_name):
         """Get the filename for the module we should load for a particular
@@ -1227,15 +1236,10 @@ def get_pkg_class(self, pkg_name):
         package. Then extracts the package class from the module
         according to Spack's naming convention.
         """
-        namespace, _, pkg_name = pkg_name.rpartition(".")
-        if namespace and (namespace != self.namespace):
-            raise InvalidNamespaceError(
-                "Invalid namespace for %s repo: %s" % (self.namespace, namespace)
-            )
-
+        namespace, pkg_name = self.partition_package_name(pkg_name)
         class_name = nm.mod_to_class(pkg_name)
+        fullname = f"{self.full_namespace}.{pkg_name}"
 
-        fullname = "{0}.{1}".format(self.full_namespace, pkg_name)
         try:
             module = importlib.import_module(fullname)
         except ImportError:
@@ -1246,7 +1250,7 @@ def get_pkg_class(self, pkg_name):
 
         cls = getattr(module, class_name)
         if not inspect.isclass(cls):
-            tty.die("%s.%s is not a class" % (pkg_name, class_name))
+            tty.die(f"{pkg_name}.{class_name} is not a class")
 
         new_cfg_settings = (
             spack.config.get("packages").get(pkg_name, {}).get("package_attributes", {})
@@ -1285,6 +1289,15 @@ def get_pkg_class(self, pkg_name):
 
         return cls
 
+    def partition_package_name(self, pkg_name: str) -> Tuple[str, str]:
+        namespace, pkg_name = partition_package_name(pkg_name)
+        if namespace and (namespace != self.namespace):
+            raise InvalidNamespaceError(
+                f"Invalid namespace for the '{self.namespace}' repo: {namespace}"
+            )
+
+        return namespace, pkg_name
+
     def __str__(self):
         return "[Repo '%s' at '%s']" % (self.namespace, self.root)
 
@@ -1298,6 +1311,20 @@ def __contains__(self, pkg_name):
 RepoType = Union[Repo, RepoPath]
 
 
+def partition_package_name(pkg_name: str) -> Tuple[str, str]:
+    """Given a package name that might be fully qualified, returns the namespace part,
+    if present, and the unqualified package name.
+
+    If the package name is unqualified, the namespace is an empty string.
+
+    Args:
+        pkg_name: a package name, either unqualified like "llvm", or
+            fully qualified, like "builtin.llvm"
+    """
+    namespace, _, pkg_name = pkg_name.rpartition(".")
+    return namespace, pkg_name
+
+
 def create_repo(root, namespace=None, subdir=packages_dir_name):
     """Create a new repository in root with the specified namespace.
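As an aside, the qualified-name handling that ``repo.py`` gains above is easiest to see
in a short sketch. This is illustrative only; the tag value and the resulting names
depend on which repositories are actually configured:

.. code-block:: python

   import spack.repo

   # Module-level helper: splits an optionally qualified name at the last dot.
   assert spack.repo.partition_package_name("builtin.llvm") == ("builtin", "llvm")
   assert spack.repo.partition_package_name("llvm") == ("", "llvm")

   # RepoPath.packages_with_tags can now return fully qualified names, which is
   # what `spack external find` uses to match both spellings in names/--exclude.
   qualified = spack.repo.PATH.packages_with_tags("detectable", full=True)
   # e.g. ["builtin.cmake", ...] instead of ["cmake", ...]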
diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py
index 7d54057b46ee4b..e94d6efe5c4d4d 100644
--- a/lib/spack/spack/test/cmd/external.py
+++ b/lib/spack/spack/test/cmd/external.py
@@ -120,8 +120,9 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
     "names,tags,exclude,expected",
     [
         # find --all
-        (None, ["detectable"], [], ["find-externals1"]),
+        (None, ["detectable"], [], ["builtin.mock.find-externals1"]),
         # find --all --exclude find-externals1
+        (None, ["detectable"], ["builtin.mock.find-externals1"], []),
         (None, ["detectable"], ["find-externals1"], []),
         # find cmake (and cmake is not detectable)
         (["cmake"], ["detectable"], [], []),
@@ -202,19 +203,6 @@ def fail():
     assert "Skipping manifest and continuing" in output
 
 
-def test_find_external_nonempty_default_manifest_dir(
-    mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
-):
-    """The user runs 'spack external find'; the default manifest directory
-    contains a manifest file. Ensure that the specs are read.
-    """
-    monkeypatch.setenv("PATH", "")
-    monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
-    external("find")
-    specs = spack.store.STORE.db.query("hwloc")
-    assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)
-
-
 def test_find_external_merge(mutable_config, mutable_mock_repo):
     """Check that 'spack find external' doesn't overwrite an existing spec
     entry in packages.yaml.
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 25417de6f42d33..c4b3df92edf17f 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -1714,17 +1714,6 @@ def brand_new_binary_cache():
     )
 
 
-@pytest.fixture
-def directory_with_manifest(tmpdir):
-    """Create a manifest file in a directory. Used by 'spack external'."""
-    with tmpdir.as_cwd():
-        test_db_fname = "external-db.json"
-        with open(test_db_fname, "w") as db_file:
-            json.dump(spack.test.cray_manifest.create_manifest_content(), db_file)
-
-    yield str(tmpdir)
-
-
 @pytest.fixture()
 def noncyclical_dir_structure(tmpdir):
     """
diff --git a/lib/spack/spack/test/cray_manifest.py b/lib/spack/spack/test/cray_manifest.py
index f9e7ae8594b729..123e2ac3f12fd6 100644
--- a/lib/spack/spack/test/cray_manifest.py
+++ b/lib/spack/spack/test/cray_manifest.py
@@ -23,53 +23,6 @@
 import spack.store
 from spack.cray_manifest import compiler_from_entry, entries_to_specs
 
-example_x_json_str = """\
-{
-  "name": "packagex",
-  "hash": "hash-of-x",
-  "prefix": "/path/to/packagex-install/",
-  "version": "1.0",
-  "arch": {
-    "platform": "linux",
-    "platform_os": "centos8",
-    "target": {
-      "name": "haswell"
-    }
-  },
-  "compiler": {
-    "name": "gcc",
-    "version": "10.2.0.cray"
-  },
-  "dependencies": {
-    "packagey": {
-      "hash": "hash-of-y",
-      "type": ["link"]
-    }
-  },
-  "parameters": {
-    "precision": ["double", "float"]
-  }
-}
-"""
-
-
-example_compiler_entry = """\
-{
-  "name": "gcc",
-  "prefix": "/path/to/compiler/",
-  "version": "7.5.0",
-  "arch": {
-    "os": "centos8",
-    "target": "x86_64"
-  },
-  "executables": {
-    "cc": "/path/to/compiler/cc",
-    "cxx": "/path/to/compiler/cxx",
-    "fc": "/path/to/compiler/fc"
-  }
-}
-"""
-
 
 class JsonSpecEntry:
     def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters):
@@ -104,16 +57,19 @@ def __init__(self, platform, os, target):
         self.os = os
         self.target = target
 
-    def to_dict(self):
+    def spec_json(self):
         return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}}
 
+    def compiler_json(self):
+        return {"os": self.os, "target": self.target}
+
 
 class JsonCompilerEntry:
     def __init__(self, name, version, arch=None, executables=None):
         self.name = name
         self.version = version
         if not arch:
-            arch = {"os": "centos8", "target": "x86_64"}
+            arch = JsonArchEntry("anyplatform", "anyos", "anytarget")
         if not executables:
             executables = {
                 "cc": "/path/to/compiler/cc",
@@ -127,7 +83,7 @@ def compiler_json(self):
         return {
             "name": self.name,
             "version": self.version,
-            "arch": self.arch,
+            "arch": self.arch.compiler_json(),
             "executables": self.executables,
         }
 
@@ -138,22 +94,58 @@ def spec_json(self):
         return {"name": self.name, "version": self.version}
 
 
-_common_arch = JsonArchEntry(platform="linux", os="centos8", target="haswell").to_dict()
+@pytest.fixture
+def _common_arch(test_platform):
+    return JsonArchEntry(
+        platform=test_platform.name,
+        os=test_platform.front_os,
+        target=test_platform.target("fe").name,
+    )
+
+
+@pytest.fixture
+def _common_compiler(_common_arch):
+    return JsonCompilerEntry(
+        name="gcc",
+        version="10.2.0.2112",
+        arch=_common_arch,
+        executables={
+            "cc": "/path/to/compiler/cc",
+            "cxx": "/path/to/compiler/cxx",
+            "fc": "/path/to/compiler/fc",
+        },
+    )
+
+
+@pytest.fixture
+def _other_compiler(_common_arch):
+    return JsonCompilerEntry(
+        name="clang",
+        version="3.0.0",
+        arch=_common_arch,
+        executables={
+            "cc": "/path/to/compiler/clang",
+            "cxx": "/path/to/compiler/clang++",
+            "fc": "/path/to/compiler/flang",
+        },
+    )
+
 
-# Intended to match example_compiler_entry above
-_common_compiler = JsonCompilerEntry(
-    name="gcc",
-    version="10.2.0.cray",
-    arch={"os": "centos8", "target": "x86_64"},
-    executables={
-        "cc": "/path/to/compiler/cc",
-        "cxx": "/path/to/compiler/cxx",
-        "fc": "/path/to/compiler/fc",
-    },
-)
+@pytest.fixture
+def _raw_json_x(_common_arch):
+    return {
+        "name": "packagex",
+        "hash": "hash-of-x",
+        "prefix": "/path/to/packagex-install/",
+        "version": "1.0",
+        "arch": _common_arch.spec_json(),
+        "compiler": {"name": "gcc", "version": "10.2.0.2112"},
+        "dependencies": {"packagey": {"hash": "hash-of-y", "type": ["link"]}},
+        "parameters": {"precision": ["double", "float"]},
+    }
 
 
-def test_compatibility():
+def test_manifest_compatibility(_common_arch, _common_compiler, _raw_json_x):
     """Make sure that JsonSpecEntry outputs the expected JSON structure
     by comparing it with JSON parsed from an example string. This
     ensures that the testing objects like JsonSpecEntry produce the
@@ -164,7 +156,7 @@ def test_compatibility():
         hash="hash-of-y",
         prefix="/path/to/packagey-install/",
         version="1.0",
-        arch=_common_arch,
+        arch=_common_arch.spec_json(),
         compiler=_common_compiler.spec_json(),
         dependencies={},
         parameters={},
@@ -175,23 +167,44 @@ def test_compatibility():
         hash="hash-of-x",
         prefix="/path/to/packagex-install/",
         version="1.0",
-        arch=_common_arch,
+        arch=_common_arch.spec_json(),
         compiler=_common_compiler.spec_json(),
         dependencies=dict([y.as_dependency(deptypes=["link"])]),
         parameters={"precision": ["double", "float"]},
     )
     x_from_entry = x.to_dict()
-    x_from_str = json.loads(example_x_json_str)
-    assert x_from_entry == x_from_str
+    assert x_from_entry == _raw_json_x
 
 
 def test_compiler_from_entry():
-    compiler_data = json.loads(example_compiler_entry)
-    compiler_from_entry(compiler_data)
+    compiler_data = json.loads(
+        """\
+{
+  "name": "gcc",
+  "prefix": "/path/to/compiler/",
+  "version": "7.5.0",
+  "arch": {
+    "os": "centos8",
+    "target": "x86_64"
+  },
+  "executables": {
+    "cc": "/path/to/compiler/cc",
+    "cxx": "/path/to/compiler/cxx",
+    "fc": "/path/to/compiler/fc"
+  }
+}
+"""
+    )
+    compiler = compiler_from_entry(compiler_data, "/example/file")
+    assert compiler.cc == "/path/to/compiler/cc"
+    assert compiler.cxx == "/path/to/compiler/cxx"
+    assert compiler.fc == "/path/to/compiler/fc"
+    assert compiler.operating_system == "centos8"
 
 
-def generate_openmpi_entries():
+@pytest.fixture
+def generate_openmpi_entries(_common_arch, _common_compiler):
     """Generate two example JSON entries that refer to an OpenMPI
     installation and a hwloc dependency.
     """
@@ -202,7 +215,7 @@ def generate_openmpi_entries():
         hash="hwlocfakehashaaa",
         prefix="/path/to/hwloc-install/",
         version="2.0.3",
-        arch=_common_arch,
+        arch=_common_arch.spec_json(),
         compiler=_common_compiler.spec_json(),
         dependencies={},
         parameters={},
@@ -216,26 +229,25 @@ def generate_openmpi_entries():
         hash="openmpifakehasha",
         prefix="/path/to/openmpi-install/",
         version="4.1.0",
-        arch=_common_arch,
+        arch=_common_arch.spec_json(),
         compiler=_common_compiler.spec_json(),
         dependencies=dict([hwloc.as_dependency(deptypes=["link"])]),
         parameters={"internal-hwloc": False, "fabrics": ["psm"], "missing_variant": True},
     )
-    return [openmpi, hwloc]
+    return list(x.to_dict() for x in [openmpi, hwloc])
 
 
-def test_generate_specs_from_manifest():
+def test_generate_specs_from_manifest(generate_openmpi_entries):
     """Given JSON entries, check that we can form a set of Specs
     including dependency references.
""" - entries = list(x.to_dict() for x in generate_openmpi_entries()) - specs = entries_to_specs(entries) + specs = entries_to_specs(generate_openmpi_entries) (openmpi_spec,) = list(x for x in specs.values() if x.name == "openmpi") assert openmpi_spec["hwloc"] -def test_translate_cray_platform_to_linux(monkeypatch): +def test_translate_cray_platform_to_linux(monkeypatch, _common_compiler): """Manifests might list specs on newer Cray platforms as being "cray", but Spack identifies such platforms as "linux". Make sure we automaticaly transform these entries. @@ -247,13 +259,13 @@ def the_host_is_linux(): monkeypatch.setattr(spack.platforms, "host", the_host_is_linux) - cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64").to_dict() + cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64") spec_json = JsonSpecEntry( name="cray-mpich", hash="craympichfakehashaaa", prefix="/path/to/cray-mpich/", version="1.0.0", - arch=cray_arch, + arch=cray_arch.spec_json(), compiler=_common_compiler.spec_json(), dependencies={}, parameters={}, @@ -263,14 +275,15 @@ def the_host_is_linux(): assert spec.architecture.platform == "linux" -def test_translate_compiler_name(): +def test_translate_compiler_name(_common_arch): nvidia_compiler = JsonCompilerEntry( name="nvidia", version="19.1", + arch=_common_arch, executables={"cc": "/path/to/compiler/nvc", "cxx": "/path/to/compiler/nvc++"}, ) - compiler = compiler_from_entry(nvidia_compiler.compiler_json()) + compiler = compiler_from_entry(nvidia_compiler.compiler_json(), "/example/file") assert compiler.name == "nvhpc" spec_json = JsonSpecEntry( @@ -278,7 +291,7 @@ def test_translate_compiler_name(): hash="hwlocfakehashaaa", prefix="/path/to/hwloc-install/", version="2.0.3", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=nvidia_compiler.spec_json(), dependencies={}, parameters={}, @@ -288,18 +301,18 @@ def test_translate_compiler_name(): assert spec.compiler.name == "nvhpc" -def test_failed_translate_compiler_name(): +def test_failed_translate_compiler_name(_common_arch): unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0") with pytest.raises(spack.compilers.UnknownCompilerError): - compiler_from_entry(unknown_compiler.compiler_json()) + compiler_from_entry(unknown_compiler.compiler_json(), "/example/file") spec_json = JsonSpecEntry( name="packagey", hash="hash-of-y", prefix="/path/to/packagey-install/", version="1.0", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=unknown_compiler.spec_json(), dependencies={}, parameters={}, @@ -309,7 +322,8 @@ def test_failed_translate_compiler_name(): entries_to_specs([spec_json]) -def create_manifest_content(): +@pytest.fixture +def manifest_content(generate_openmpi_entries, _common_compiler, _other_compiler): return { # Note: the cray_manifest module doesn't use the _meta section right # now, but it is anticipated to be useful @@ -319,43 +333,70 @@ def create_manifest_content(): "schema-version": "1.3", "cpe-version": "22.06", }, - "specs": list(x.to_dict() for x in generate_openmpi_entries()), - "compilers": [_common_compiler.compiler_json()], + "specs": generate_openmpi_entries, + "compilers": [_common_compiler.compiler_json(), _other_compiler.compiler_json()], } -@pytest.mark.only_original( - "The ASP-based concretizer is currently picky about OS matching and will fail." 
-)
-def test_read_cray_manifest(tmpdir, mutable_config, mock_packages, mutable_database):
+def test_read_cray_manifest(
+    tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
+):
     """Check that (a) we can read the cray manifest and add it to the Spack
     Database and (b) we can concretize specs based on that.
     """
     with tmpdir.as_cwd():
         test_db_fname = "external-db.json"
         with open(test_db_fname, "w") as db_file:
-            json.dump(create_manifest_content(), db_file)
+            json.dump(manifest_content, db_file)
         cray_manifest.read(test_db_fname, True)
         query_specs = spack.store.STORE.db.query("openmpi")
         assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
 
         concretized_specs = spack.cmd.parse_specs(
-            "depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64" " ^/openmpifakehasha".split(),
-            concretize=True,
+            "depends-on-openmpi ^/openmpifakehasha".split(), concretize=True
         )
         assert concretized_specs[0]["hwloc"].dag_hash() == "hwlocfakehashaaa"
 
 
-@pytest.mark.only_original(
-    "The ASP-based concretizer is currently picky about OS matching and will fail."
-)
+def test_read_cray_manifest_add_compiler_failure(
+    tmpdir, mutable_config, mock_packages, mutable_database, manifest_content, monkeypatch
+):
+    """Check that cray manifest can be read even if some compilers cannot
+    be added.
+    """
+    orig_add_compilers_to_config = spack.compilers.add_compilers_to_config
+
+    class fail_for_clang:
+        def __init__(self):
+            self.called_with_clang = False
+
+        def __call__(self, compilers, **kwargs):
+            if any(x.name == "clang" for x in compilers):
+                self.called_with_clang = True
+                raise Exception()
+            return orig_add_compilers_to_config(compilers, **kwargs)
+
+    checker = fail_for_clang()
+    monkeypatch.setattr(spack.compilers, "add_compilers_to_config", checker)
+
+    with tmpdir.as_cwd():
+        test_db_fname = "external-db.json"
+        with open(test_db_fname, "w") as db_file:
+            json.dump(manifest_content, db_file)
+        cray_manifest.read(test_db_fname, True)
+        query_specs = spack.store.STORE.db.query("openmpi")
+        assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
+
+    assert checker.called_with_clang
+
+
 def test_read_cray_manifest_twice_no_compiler_duplicates(
-    tmpdir, mutable_config, mock_packages, mutable_database
+    tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
 ):
     with tmpdir.as_cwd():
         test_db_fname = "external-db.json"
         with open(test_db_fname, "w") as db_file:
-            json.dump(create_manifest_content(), db_file)
+            json.dump(manifest_content, db_file)
 
         # Read the manifest twice
         cray_manifest.read(test_db_fname, True)
@@ -363,7 +404,7 @@ def test_read_cray_manifest_twice_no_compiler_duplicates(
 
     compilers = spack.compilers.all_compilers()
     filtered = list(
-        c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.cray")
+        c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.2112")
     )
     assert len(filtered) == 1
 
@@ -423,3 +464,27 @@ def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable
     with pytest.raises(cray_manifest.ManifestValidationError) as e:
         cray_manifest.read(invalid_schema_path, True)
     str(e)
+
+
+@pytest.fixture
+def directory_with_manifest(tmpdir, manifest_content):
+    """Create a manifest file in a directory. Used by 'spack external'."""
+    with tmpdir.as_cwd():
+        test_db_fname = "external-db.json"
+        with open(test_db_fname, "w") as db_file:
+            json.dump(manifest_content, db_file)
+
+    yield str(tmpdir)
+
+
+def test_find_external_nonempty_default_manifest_dir(
+    mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
+):
+    """The user runs 'spack external find'; the default manifest directory
+    contains a manifest file. Ensure that the specs are read.
+    """
+    monkeypatch.setenv("PATH", "")
+    monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
+    spack.cmd.external._collect_and_consume_cray_manifest_files(ignore_default_dir=False)
+    specs = spack.store.STORE.db.query("hwloc")
+    assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)
diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py
index 91f695dd70ab6b..6b42e591eb9dbe 100644
--- a/lib/spack/spack/test/installer.py
+++ b/lib/spack/spack/test/installer.py
@@ -20,6 +20,7 @@
 import spack.concretize
 import spack.config
 import spack.database
+import spack.deptypes as dt
 import spack.installer as inst
 import spack.package_base
 import spack.package_prefs as prefs
@@ -1388,6 +1389,26 @@ def test_single_external_implicit_install(install_mockery, explicit_args, is_exp
     assert spack.store.STORE.db.get_record(pkg).explicit == is_explicit
 
 
+def test_overwrite_install_does_install_build_deps(install_mockery, mock_fetch):
+    """When overwrite installing something from sources, build deps should be installed."""
+    s = spack.spec.Spec("dtrun3").concretized()
+    create_installer([(s, {})]).install()
+
+    # Verify there is a pure build dep
+    edge = s.edges_to_dependencies(name="dtbuild3").pop()
+    assert edge.depflag == dt.BUILD
+    build_dep = edge.spec
+
+    # Uninstall the build dep
+    build_dep.package.do_uninstall()
+
+    # Overwrite install the root dtrun3
+    create_installer([(s, {"overwrite": [s.dag_hash()]})]).install()
+
+    # Verify that the build dep was also installed.
+    assert build_dep.installed
+
+
 @pytest.mark.parametrize("run_tests", [True, False])
 def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, run_tests):
     """Confirm printing of install log skipped if not run/no failures."""
diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py
index 7314beebb5cac7..eb6b12391625aa 100644
--- a/lib/spack/spack/test/repo.py
+++ b/lib/spack/spack/test/repo.py
@@ -181,3 +181,15 @@ def test_repository_construction_doesnt_use_globals(nullify_globals, repo_paths
     repo_path = spack.repo.RepoPath(*repo_paths)
     assert len(repo_path.repos) == len(namespaces)
     assert [x.namespace for x in repo_path.repos] == namespaces
+
+
+@pytest.mark.parametrize("method_name", ["dirname_for_package_name", "filename_for_package_name"])
+def test_path_computation_with_names(method_name, mock_repo_path):
+    """Tests that repositories can compute the correct paths when using both fully qualified
+    and unqualified names.
+ """ + repo_path = spack.repo.RepoPath(mock_repo_path) + method = getattr(repo_path, method_name) + unqualified = method("mpileaks") + qualified = method("builtin.mock.mpileaks") + assert qualified == unqualified diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index d7f8a38ef633d6..a9a9fee13c073f 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -827,16 +827,16 @@ e4s-cray-rhel-build: variables: SPACK_CI_STACK_NAME: e4s-cray-sles -# e4s-cray-sles-generate: -# extends: [ ".generate-cray-sles", ".e4s-cray-sles" ] +e4s-cray-sles-generate: + extends: [ ".generate-cray-sles", ".e4s-cray-sles" ] -# e4s-cray-sles-build: -# extends: [ ".build", ".e4s-cray-sles" ] -# trigger: -# include: -# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml -# job: e4s-cray-sles-generate -# strategy: depend -# needs: -# - artifacts: True -# job: e4s-cray-sles-generate +e4s-cray-sles-build: + extends: [ ".build", ".e4s-cray-sles" ] + trigger: + include: + - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml + job: e4s-cray-sles-generate + strategy: depend + needs: + - artifacts: True + job: e4s-cray-sles-generate diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml index d42881f68ef2be..dace63659ec54f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml @@ -20,42 +20,156 @@ spack: target: [zen4] variants: +mpi + tbb: + require: "intel-tbb" binutils: variants: +ld +gold +headers +libiberty ~nls + boost: + variants: +python +filesystem +iostreams +system + cuda: + version: [11.7.0] + elfutils: + variants: +bzip2 ~nls +xz + require: "%gcc" hdf5: variants: +fortran +hl +shared + libfabric: + variants: fabrics=sockets,tcp,udp,rxm libunwind: variants: +pic +xz + mpich: + variants: ~wrapperrpath ncurses: - require: '@6.3 +termlib' - openblas: - require: '@0.3.20' - variants: threads=openmp + variants: +termlib + paraview: + # Don't build GUI support or GLX rendering for HPC/container deployments + require: "@5.11 ~qt+osmesa" + python: + version: [3.8.13] + trilinos: + require: + - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack + +intrepid +intrepid2 +isorropia +kokkos +minitensor +nox +piro +phalanx + +rol +rythmos +sacado +stk +shards +stratimikos +tempus +tpetra + +trilinoscouplings +zoltan] + - one_of: [gotype=long_long, gotype=all] + - one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko] xz: variants: +pic - elfutils: - variants: +bzip2 ~nls +xz - require: '%gcc' + mesa: + version: [21.3.8] unzip: - require: '%gcc' + require: "%gcc" specs: - - adios2 - - amrex + # CPU + - adios + - aml + - arborx + - argobots + - bolt - butterflypack + - boost +python +filesystem +iostreams +system + - cabana + - chai ~benchmarks ~tests - conduit + - datatransferkit + - flecsi + - fortrilinos + - ginkgo + - globalarrays + - gmp + - gotcha - h5bench - hdf5-vol-async - hdf5-vol-cache - hdf5-vol-log + - heffte +fftw - hypre - - kokkos - - kokkos-kernels + - kokkos +openmp + - kokkos-kernels +openmp + - lammps - legion + - libnrm + - libquo + - libunwind + - mercury + - metall - mfem + - mgard +serial +openmp +timing +unstructured ~cuda + - mpark-variant + - mpifileutils ~xattr + - nccmp + - nco + - netlib-scalapack + - omega-h + - openmpi + - openpmd-api + - papi + - 
papyrus + - pdt + - pumi + - qthreads scheduler=distrib - raja + - slate ~cuda + - stc + - sundials + - superlu - superlu-dist - # - flux-core # python cray sles issue + - swig + - swig@4.0.2-fortran + - sz3 + - tasmanian + - trilinos +belos +ifpack2 +stokhos + - turbine + - umap + - umpire + - veloc + - wannier90 + + # ERRORS + # - caliper # caliper: ModuleNotFoundError: No module named 'math'; src/mpi/services/mpiwrap/CMakeFiles/caliper-mpiwrap.dir/build.make:77: src/mpi/services/mpiwrap/Wrapper.cpp] Error 1 + # - charliecloud # python: Could not find platform dependent libraries + # - flit # python: Could not find platform dependent libraries + # - flux-core # python: Could not find platform dependent libraries + # - hpx max_cpu_count=512 networking=mpi # python: Could not find platform dependent libraries + # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # python: Could not find platform dependent libraries + # - petsc # petsc: SyntaxError: (unicode error) \N escapes not supported (can't load unicodedata module) + # - plumed # python: Could not find platform dependent libraries + # - precice # petsc: SyntaxError: (unicode error) \N escapes not supported (can't load unicodedata module) + # - py-h5py +mpi # python: Could not find platform dependent libraries + # - py-h5py ~mpi # python: Could not find platform dependent libraries + # - py-libensemble +mpi +nlopt # python: Could not find platform dependent libraries + # - py-petsc4py # python: Could not find platform dependent libraries + # - slepc # petsc: SyntaxError: (unicode error) \N escapes not supported (can't load unicodedata module) + # - tau +mpi +python # tau: ERROR: Cannot find python library (libpython*.[so|dylib] + + # HOLDING THESE BACK UNTIL CRAY SLES CAPACITY IS EXPANDED AT UO + # - alquimia + # - amrex + # - archer + # - axom + # - bricks + # - dealii + # - dyninst + # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14 # llvm@14.0.6: ?; + # - exaworks + # - gasnet + # - gptune + # - hpctoolkit + # - nrm + # - nvhpc + # - parsec ~cuda + # - phist + # - plasma + # - py-jupyterhub + # - py-warpx + # - quantum-espresso + # - scr + # - strumpack ~slate + # - upcxx + # - variorum + # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu mirrors: { "mirror": "s3://spack-binaries/develop/e4s-cray-sles" } diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 0ae044968cef8a..2c97d4e4bedf47 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -114,6 +114,7 @@ spack: - charliecloud - conduit - datatransferkit + - drishti - exaworks - flecsi - flit @@ -203,6 +204,7 @@ spack: # -- # - alquimia # pflotran: pflotran/hdf5_aux.F90(5): error #7013: This module file was not generated by any release of this compiler. 
[HDF5] # - dealii # intel-tbb: icpx: error: unknown argument: '-flifetime-dse=1' + # - dxt-explorer # r: https://github.com/spack/spack/issues/40257 # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # sz: hdf5-filter/H5Z-SZ/src/H5Z_SZ.c:24:9: error: call to undeclared function 'gettimeofday'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration] # - geopm # geopm: In file included from src/ProfileTable.cpp:34: ./src/ProfileTable.hpp:79:45: error: no type named 'string' in namespace 'std' # - ginkgo # ginkgo: icpx: error: clang frontend command failed with exit code 139 (use -v to see invocation) @@ -235,6 +237,9 @@ spack: # - sundials +sycl cxxstd=17 # sundials: include/sunmemory/sunmemory_sycl.h:20:10: fatal error: 'CL/sycl.hpp' file not found # - tau +mpi +opencl +level_zero ~pdt # builds ok in container, but needs libdrm, will update container + # Not necessarily E4S, but useful for E4S packages + - py-scipy + # SKIPPED # - nvhpc # - dyninst # only %gcc diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index d138c63c35a9e1..a45fbbb3db2b01 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -64,6 +64,8 @@ spack: - charliecloud - conduit - datatransferkit + - drishti + - dxt-explorer - dyninst - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o: /tmp/ccgvkIk5.s: Assembler messages: /tmp/ccgvkIk5.s:260012: Error: invalid machine `power10' - exaworks diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 7a07de57780896..a10b279f532fcc 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -69,6 +69,8 @@ spack: - conduit - datatransferkit - dealii + - drishti + - dxt-explorer - dyninst - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14 - exaworks diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 1983b960d5451a..b9521b8f0cfaad 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -423,7 +423,7 @@ _spack_audit() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="configs packages-https packages list" + SPACK_COMPREPLY="configs externals packages-https packages list" fi } @@ -431,6 +431,15 @@ _spack_audit_configs() { SPACK_COMPREPLY="-h --help" } +_spack_audit_externals() { + if $list_options + then + SPACK_COMPREPLY="-h --help --list" + else + SPACK_COMPREPLY="" + fi +} + _spack_audit_packages_https() { if $list_options then diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index c5da416817cba9..f4ac310adacdb4 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -508,6 +508,7 @@ complete -c spack -n '__fish_spack_using_command arch' -s b -l backend -d 'print # spack audit set -g __fish_spack_optspecs_spack_audit h/help complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a 
configs -d 'audit configuration files' +complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a externals -d 'check external detection in packages' complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a packages-https -d 'check https in packages' complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a packages -d 'audit package recipes' complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a list -d 'list available checks and exits' @@ -519,6 +520,14 @@ set -g __fish_spack_optspecs_spack_audit_configs h/help complete -c spack -n '__fish_spack_using_command audit configs' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command audit configs' -s h -l help -d 'show this help message and exit' +# spack audit externals +set -g __fish_spack_optspecs_spack_audit_externals h/help list +complete -c spack -n '__fish_spack_using_command_pos_remainder 0 audit externals' -f -a '(__fish_spack_packages)' +complete -c spack -n '__fish_spack_using_command audit externals' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command audit externals' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command audit externals' -l list -f -a list_externals +complete -c spack -n '__fish_spack_using_command audit externals' -l list -d 'if passed, list which packages have detection tests' + # spack audit packages-https set -g __fish_spack_optspecs_spack_audit_packages_https h/help all complete -c spack -n '__fish_spack_using_command_pos_remainder 0 audit packages-https' -f -a '(__fish_spack_packages)' diff --git a/var/spack/repos/builtin/packages/embree/package.py b/var/spack/repos/builtin/packages/embree/package.py index 4f8c8664ef10c2..3da828eb6b0f56 100644 --- a/var/spack/repos/builtin/packages/embree/package.py +++ b/var/spack/repos/builtin/packages/embree/package.py @@ -37,6 +37,17 @@ class Embree(CMakePackage): depends_on("tbb") + # official aarch64 support on macOS starting with 3.13.0, on Linux since 4.0.0 + # upstream patch for Linux/aarch64 applies cleanly to 3.13.5, and 3.13.3 works by chance + conflicts("@:3.12", when="target=aarch64:") + conflicts("@:3.13.2", when="target=aarch64: platform=linux") + conflicts("@3.13.4", when="target=aarch64: platform=linux") + patch( + "https://github.com/embree/embree/commit/82ca6b5ccb7abe0403a658a0e079926478f04cb1.patch?full_index=1", + sha256="3af5a65e8875549b4c930d4b0f2840660beba4a7f295d8c89068250a1df376f2", + when="@3.13.5", + ) + def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/gcc/detection_test.yaml b/var/spack/repos/builtin/packages/gcc/detection_test.yaml new file mode 100644 index 00000000000000..0930f82d936568 --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/detection_test.yaml @@ -0,0 +1,38 @@ +paths: + # Ubuntu 18.04, system compilers without Fortran + - layout: + - executables: + - "bin/gcc" + - "bin/g++" + script: "echo 7.5.0" + results: + - spec: "gcc@7.5.0 languages=c,c++" + # Mock a version < 7 of GCC that requires -dumpversion and + # errors with -dumpfullversion + - layout: + - executables: + - "bin/gcc-5" + - "bin/g++-5" + - "bin/gfortran-5" + script: | + if [[ "$1" == "-dumpversion" ]] ; then + echo "5.5.0" + else + echo "gcc-5: fatal error: no input files" + echo "compilation terminated." 
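+          # exit non-zero, as a real gcc < 7 does when it is passed the
+          # -dumpfullversion flag it does not recognize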
+ exit 1 + fi + results: + - spec: "gcc@5.5.0 languages=c,c++,fortran" + # Multiple compilers present at the same time + - layout: + - executables: + - "bin/x86_64-linux-gnu-gcc-6" + script: 'echo 6.5.0' + - executables: + - "bin/x86_64-linux-gnu-gcc-10" + - "bin/x86_64-linux-gnu-g++-10" + script: "echo 10.1.0" + results: + - spec: "gcc@6.5.0 languages=c" + - spec: "gcc@10.1.0 languages=c,c++" diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index 47a521955bc260..bf4ade6ce7baf7 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * +from spack.variant import _ConditionalVariantValues class Geant4(CMakePackage): @@ -43,16 +44,18 @@ class Geant4(CMakePackage): version("10.4.0", sha256="e919b9b0a88476e00c0b18ab65d40e6a714b55ee4778f66bac32a5396c22aa74") version("10.3.3", sha256="bcd36a453da44de9368d1d61b0144031a58e4b43a6d2d875e19085f2700a89d8") - _cxxstd_values = ("11", "14", "17") + _cxxstd_values = ( + conditional("11", "14", when="@:10"), + conditional("17", when="@10.4.1:"), + conditional("20", when="@10.7.0:"), + ) variant( "cxxstd", - default=_cxxstd_values[0], + default="11", values=_cxxstd_values, multi=False, description="Use the specified C++ standard when building.", ) - conflicts("cxxstd=11", when="@11:", msg="geant4@11: only supports cxxstd=17") - conflicts("cxxstd=14", when="@11:", msg="geant4@11: only supports cxxstd=17") variant("threads", default=True, description="Build with multithreading") variant("vecgeom", default=False, description="Enable vecgeom support") @@ -97,30 +100,39 @@ class Geant4(CMakePackage): depends_on("python@3:", when="+python") extends("python", when="+python") - for std in _cxxstd_values: - # CLHEP version requirements to be reviewed - depends_on("clhep@2.4.6.0: cxxstd=" + std, when="@11.1: cxxstd=" + std) - - depends_on("clhep@2.4.5.1: cxxstd=" + std, when="@11.0.0: cxxstd=" + std) - - depends_on("clhep@2.4.4.0: cxxstd=" + std, when="@10.7.0: cxxstd=" + std) + # CLHEP version requirements to be reviewed + depends_on("clhep@2.4.6.0:", when="@11.1:") + depends_on("clhep@2.4.5.1:", when="@11.0.0:") + depends_on("clhep@2.4.4.0:", when="@10.7.0:") + depends_on("clhep@2.3.3.0:", when="@10.3.3:10.6") + + # Vecgeom specific versions for each Geant4 version + with when("+vecgeom"): + depends_on("vecgeom@1.2.0:", when="@11.1:") + depends_on("vecgeom@1.1.18:1.1", when="@11.0.0:11.0") + depends_on("vecgeom@1.1.8:1.1", when="@10.7.0:10.7") + depends_on("vecgeom@1.1.5", when="@10.6.0:10.6") + depends_on("vecgeom@1.1.0", when="@10.5.0:10.5") + depends_on("vecgeom@0.5.2", when="@10.4.0:10.4") + depends_on("vecgeom@0.3rc", when="@10.3.0:10.3") + + def std_when(values): + for v in values: + if isinstance(v, _ConditionalVariantValues): + for c in v: + yield (c.value, c.when) + else: + yield (v, "") - depends_on("clhep@2.3.3.0: cxxstd=" + std, when="@10.3.3:10.6 cxxstd=" + std) + for _std, _when in std_when(_cxxstd_values): + depends_on(f"clhep cxxstd={_std}", when=f"{_when} cxxstd={_std}") + depends_on(f"vecgeom cxxstd={_std}", when=f"{_when} +vecgeom cxxstd={_std}") # Spack only supports Xerces-c 3 and above, so no version req - depends_on("xerces-c netaccessor=curl cxxstd=" + std, when="cxxstd=" + std) - - # Vecgeom specific versions for each Geant4 version - depends_on("vecgeom@1.2.0: cxxstd=" + std, when="@11.1: +vecgeom cxxstd=" + std) - 
depends_on("vecgeom@1.1.18:1.1 cxxstd=" + std, when="@11.0.0:11.0 +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.8:1.1 cxxstd=" + std, when="@10.7.0:10.7 +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.5 cxxstd=" + std, when="@10.6.0:10.6 +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.0 cxxstd=" + std, when="@10.5.0:10.5 +vecgeom cxxstd=" + std) - depends_on("vecgeom@0.5.2 cxxstd=" + std, when="@10.4.0:10.4 +vecgeom cxxstd=" + std) - depends_on("vecgeom@0.3rc cxxstd=" + std, when="@10.3.0:10.3 +vecgeom cxxstd=" + std) + depends_on(f"xerces-c netaccessor=curl cxxstd={_std}", when=f"{_when} cxxstd={_std}") # Boost.python, conflict handled earlier - depends_on("boost@1.70: +python cxxstd=" + std, when="+python cxxstd=" + std) + depends_on(f"boost@1.70: +python cxxstd={_std}", when=f"{_when} +python cxxstd={_std}") # Visualization driver dependencies depends_on("gl", when="+opengl") diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index a6ae716bb730ff..8d58956508a1d9 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -27,6 +27,8 @@ class Hpctoolkit(AutotoolsPackage): test_requires_compiler = True version("develop", branch="develop") + version("2023.08.stable", branch="release/2023.08") + version("2023.08.1", tag="2023.08.1", commit="753a72affd584a5e72fe153d1e8c47a394a3886e") version("2023.03.stable", branch="release/2023.03") version("2023.03.01", commit="9e0daf2ad169f6c7f6c60408475b3c2f71baebbf") version("2022.10.01", commit="e8a5cc87e8f5ddfd14338459a4106f8e0d162c83") diff --git a/var/spack/repos/builtin/packages/intel/detection_test.yaml b/var/spack/repos/builtin/packages/intel/detection_test.yaml new file mode 100644 index 00000000000000..076bfeaabac3bd --- /dev/null +++ b/var/spack/repos/builtin/packages/intel/detection_test.yaml @@ -0,0 +1,19 @@ +paths: + - layout: + - executables: + - "bin/intel64/icc" + script: | + echo "icc (ICC) 18.0.5 20180823" + echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved." + - executables: + - "bin/intel64/icpc" + script: | + echo "icpc (ICC) 18.0.5 20180823" + echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved." + - executables: + - "bin/intel64/ifort" + script: | + echo "ifort (IFORT) 18.0.5 20180823" + echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved." 
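+  # the icc, icpc and ifort mocks above share one installation prefix, so
+  # detection is expected to coalesce them into a single external spec: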
+ results: + - spec: 'intel@18.0.5' diff --git a/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py index 463f3132f41769..1ecc52340c5b4e 100644 --- a/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py +++ b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py @@ -19,6 +19,7 @@ class KokkosNvccWrapper(Package): maintainers("Rombur") + version("4.1.00", sha256="cf725ea34ba766fdaf29c884cfe2daacfdc6dc2d6af84042d1c78d0f16866275") version("4.0.01", sha256="bb942de8afdd519fd6d5d3974706bfc22b6585a62dd565c12e53bdb82cd154f0") version("4.0.00", sha256="1829a423883d4b44223c7c3a53d3c51671145aad57d7d23e6a1a4bebf710dcf6") version("3.7.02", sha256="5024979f06bc8da2fb696252a66297f3e0e67098595a0cc7345312b3b4aa0f54") diff --git a/var/spack/repos/builtin/packages/libcatalyst/package.py b/var/spack/repos/builtin/packages/libcatalyst/package.py index ed7aa445783730..9000ca137fc158 100644 --- a/var/spack/repos/builtin/packages/libcatalyst/package.py +++ b/var/spack/repos/builtin/packages/libcatalyst/package.py @@ -3,6 +3,10 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import subprocess + +import llnl.util.tty as tty + from spack.package import * @@ -14,25 +18,47 @@ class Libcatalyst(CMakePackage): git = "https://gitlab.kitware.com/paraview/catalyst.git" url = "https://gitlab.kitware.com/api/v4/projects/paraview%2Fcatalyst/packages/generic/catalyst/v2.0.0/catalyst-v2.0.0.tar.gz" - maintainers("mathstuf") - - version("2.0.0-rc3", sha256="8862bd0a4d0be2176b4272f9affda1ea4e5092087acbb99a2fe2621c33834e05") - - # master as of 2021-05-12 - version("0.20210512", commit="8456ccd6015142b5a7705f79471361d4f5644fa7") + maintainers("mathstuf", "ayenpure") + version("master", branch="master") + version("2.0.0-rc4", sha256="cb491e4ccd344156cc2494f65b9f38885598c16d12e1016c36e2ee0bc3640863") variant("mpi", default=False, description="Enable MPI support") + variant("conduit", default=False, description="Use external Conduit for Catalyst") depends_on("mpi", when="+mpi") - - # TODO: catalyst doesn't support an external conduit - # depends_on('conduit') + depends_on("conduit", when="+conduit") def cmake_args(self): """Populate cmake arguments for libcatalyst.""" args = [ "-DCATALYST_BUILD_TESTING=OFF", self.define_from_variant("CATALYST_USE_MPI", "mpi"), + self.define_from_variant("CATALYST_WITH_EXTERNAL_CONDUIT", "conduit"), ] return args + + def setup_run_environment(self, env): + spec = self.spec + if spec.satisfies("+conduit"): + env.prepend_path("CMAKE_PREFIX_PATH", spec["conduit"].prefix) + + @on_package_attributes(run_tests=True) + @run_after("install") + def build_test(self): + testdir = "smoke_test_build" + cmakeExampleDir = join_path(self.stage.source_path, "examples") + cmake_args = [ + cmakeExampleDir, + "-DBUILD_SHARED_LIBS=ON", + self.define("CMAKE_PREFIX_PATH", self.prefix), + ] + cmake = which(self.spec["cmake"].prefix.bin.cmake) + + with working_dir(testdir, create=True): + cmake(*cmake_args) + cmake(*(["--build", "."])) + tty.info("Running Catalyst test") + + res = subprocess.run(["adaptor0/adaptor0_test", "catalyst"]) + assert res.returncode == 0 diff --git a/var/spack/repos/builtin/packages/llvm/detection_test.yaml b/var/spack/repos/builtin/packages/llvm/detection_test.yaml new file mode 100644 index 00000000000000..48e9d6751af20f --- /dev/null +++ b/var/spack/repos/builtin/packages/llvm/detection_test.yaml @@ -0,0 +1,56 @@ +paths: + - layout: + - executables: + - "bin/clang-3.9" + script: | + echo "clang 
version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
+        echo "Target: x86_64-pc-linux-gnu"
+        echo "Thread model: posix"
+        echo "InstalledDir: /usr/bin"
+    - executables:
+      - "bin/clang++-3.9"
+      script: |
+        echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
+        echo "Target: x86_64-pc-linux-gnu"
+        echo "Thread model: posix"
+        echo "InstalledDir: /usr/bin"
+  results:
+    - spec: 'llvm@3.9.1 +clang~lld~lldb'
+  # Multiple LLVM packages in the same prefix
+  - layout:
+    - executables:
+      - "bin/clang-8"
+      - "bin/clang++-8"
+      script: |
+        echo "clang version 8.0.0-3~ubuntu18.04.2 (tags/RELEASE_800/final)"
+        echo "Target: x86_64-pc-linux-gnu"
+        echo "Thread model: posix"
+        echo "InstalledDir: /usr/bin"
+    - executables:
+      - "bin/ld.lld-8"
+      script: 'echo "LLD 8.0.0 (compatible with GNU linkers)"'
+    - executables:
+      - "bin/lldb"
+      script: 'echo "lldb version 8.0.0"'
+    - executables:
+      - "bin/clang-3.9"
+      - "bin/clang++-3.9"
+      script: |
+        echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
+        echo "Target: x86_64-pc-linux-gnu"
+        echo "Thread model: posix"
+        echo "InstalledDir: /usr/bin"
+  results:
+    - spec: 'llvm@8.0.0+clang+lld+lldb'
+    - spec: 'llvm@3.9.1+clang~lld~lldb'
+  # Apple Clang should not be detected
+  - layout:
+    - executables:
+      - "bin/clang"
+      - "bin/clang++"
+      script: |
+        echo "Apple clang version 11.0.0 (clang-1100.0.33.8)"
+        echo "Target: x86_64-apple-darwin19.5.0"
+        echo "Thread model: posix"
+        echo "InstalledDir: /Library/Developer/CommandLineTools/usr/bin"
+  results: []
diff --git a/var/spack/repos/builtin/packages/modeltest-ng/package.py b/var/spack/repos/builtin/packages/modeltest-ng/package.py
index 55d1bd1ff603d0..7a2b4dcc2c2d0e 100644
--- a/var/spack/repos/builtin/packages/modeltest-ng/package.py
+++ b/var/spack/repos/builtin/packages/modeltest-ng/package.py
@@ -13,8 +13,9 @@ class ModeltestNg(CMakePackage):
     url = "https://github.com/ddarriba/modeltest/archive/refs/tags/v0.1.7.tar.gz"
     git = "https://github.com/ddarriba/modeltest.git"
 
-    maintainers("dorton21")
+    maintainers("snehring")
 
+    version("20220721", commit="1066356b984100897b8bd38ac771c5c950984c01", submodules=True)
     version("0.1.7", commit="cc028888f1d4222aaa53b99c6b02cd934a279001", submodules=True)
 
     variant("mpi", default=False, description="Enable MPI")
@@ -24,5 +25,12 @@ class ModeltestNg(CMakePackage):
     depends_on("flex", type="build")
     depends_on("openmpi", when="+mpi")
 
+    # 40217: ICE with gcc-toolset-12 (gcc-12.2.1-7.4.el8.aarch64) on Rocky Linux 8.8:
+    conflicts("%gcc@12.2.0:12.2", when="target=aarch64:", msg="ICE with gcc@12.2 on aarch64")
+
+    requires(
+        "@20220721:", when="target=aarch64:", msg="aarch64 support was added in the 20220721 snapshot."
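+        # requires() rather than conflicts(): releases before 20220721 do not build on aarch64 at all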
+ ) + def cmake_args(self): return [self.define_from_variant("ENABLE_MPI", "mpi")] diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index e194a366e4778b..43c3dafdadeca5 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -274,7 +274,7 @@ def libs(self): search_shared = bool(spec.variants["shared"].value) suffix = spec.variants["symbol_suffix"].value if suffix != "none": - name += suffix + name = [x + suffix for x in name] return find_libraries(name, spec.prefix, shared=search_shared, recursive=True) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index f03607582c4849..e7ed9e7e60da4c 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -136,3 +136,9 @@ class PyPandas(PythonPackage): depends_on("py-setuptools@24.2:", when="@:1.2", type="build") skip_modules = ["pandas.tests", "pandas.plotting._matplotlib", "pandas.core._numba.kernels"] + + def flag_handler(self, name, flags): + if name == "cflags": + if self.spec.satisfies("@0.24.2 %oneapi"): + flags.append("-Wno-error=implicit-function-declaration") + return (flags, None, None) diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index c1ad924907c408..4a07657d80d7bb 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -140,7 +140,7 @@ class PyScipy(PythonPackage): # https://github.com/mesonbuild/meson/pull/10909#issuecomment-1282241479 # Intel OneAPI ifx claims to support -fvisibility, but this does not work. # Meson adds this flag for all Python extensions which include Fortran code. - conflicts("%oneapi", when="@1.9:") + conflicts("%oneapi@:2023.0", when="@1.9:") # https://github.com/scipy/scipy/issues/12860 patch( diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index daf4933072b6b9..971a3c25052a33 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -689,9 +689,12 @@ def configure(self, spec, prefix): # Errors on bluetooth even when bluetooth is disabled... 
# at least on apple-clang%12 config_args.extend(["-skip", "connectivity"]) - elif version < Version("5.15") and "+gui" in spec: + elif "+gui" in spec: # Linux-only QT5 dependencies - config_args.append("-system-xcb") + if version < Version("5.9.9"): + config_args.append("-system-xcb") + else: + config_args.append("-xcb") if "+opengl" in spec: config_args.append("-I{0}/include".format(spec["libx11"].prefix)) config_args.append("-I{0}/include".format(spec["xproto"].prefix)) diff --git a/var/spack/repos/builtin/packages/vecgeom/package.py b/var/spack/repos/builtin/packages/vecgeom/package.py index 6e8b9be3ad763a..7a403bdd9ee93b 100644 --- a/var/spack/repos/builtin/packages/vecgeom/package.py +++ b/var/spack/repos/builtin/packages/vecgeom/package.py @@ -5,6 +5,7 @@ from spack.package import * +from spack.variant import _ConditionalVariantValues class Vecgeom(CMakePackage, CudaPackage): @@ -138,7 +139,7 @@ class Vecgeom(CMakePackage, CudaPackage): deprecated=True, ) - _cxxstd_values = ("11", "14", "17") + _cxxstd_values = (conditional("11", "14", when="@:1.1"), "17", conditional("20", when="@1.2:")) variant( "cxxstd", default="17", @@ -158,8 +159,6 @@ class Vecgeom(CMakePackage, CudaPackage): depends_on("veccore@0.4.2", when="@:1.0") conflicts("+cuda", when="@:1.1.5") - conflicts("cxxstd=14", when="@1.2:") - conflicts("cxxstd=11", when="@1.2:") # Fix missing CMAKE_CUDA_STANDARD patch( @@ -174,10 +173,18 @@ class Vecgeom(CMakePackage, CudaPackage): when="@1.1.18 +cuda ^cuda@:11.4", ) - for std in _cxxstd_values: - depends_on("geant4 cxxstd=" + std, when="+geant4 cxxstd=" + std) - depends_on("root cxxstd=" + std, when="+root cxxstd=" + std) - depends_on("xerces-c cxxstd=" + std, when="+gdml cxxstd=" + std) + def std_when(values): + for v in values: + if isinstance(v, _ConditionalVariantValues): + for c in v: + yield (c.value, c.when) + else: + yield (v, "") + + for _std, _when in std_when(_cxxstd_values): + depends_on(f"geant4 cxxstd={_std}", when=f"{_when} +geant4 cxxstd={_std}") + depends_on(f"root cxxstd={_std}", when=f"{_when} +root cxxstd={_std}") + depends_on(f"xerces-c cxxstd={_std}", when=f"{_when} +gdml cxxstd={_std}") def cmake_args(self): spec = self.spec
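A closing note on the std_when helper that the geant4 and vecgeom diffs above both define: it flattens a tuple that mixes plain strings with conditional(...) groups into (value, when) pairs, so a single loop can emit one depends_on per C++ standard carrying the matching version constraint. The sketch below is a self-contained approximation, not Spack code: StubValue, StubGroup and stub_conditional are hypothetical stand-ins for spack.variant._ConditionalVariantValues and conditional(), modeling only the (value, when) pairing the helper relies on.

class StubValue:
    def __init__(self, value, when):
        self.value = value
        self.when = when


class StubGroup:
    """Stand-in for spack.variant._ConditionalVariantValues (assumed shape)."""

    def __init__(self, *values, when):
        self._items = [StubValue(v, when) for v in values]

    def __iter__(self):
        return iter(self._items)


def stub_conditional(*values, when):
    """Stand-in for Spack's conditional(): groups values under one 'when'."""
    return StubGroup(*values, when=when)


def std_when(values):
    # Same shape as the helper in the diff, tied to the stub class:
    # conditional groups yield their (value, when) pairs, bare strings
    # yield an empty constraint.
    for v in values:
        if isinstance(v, StubGroup):
            for c in v:
                yield (c.value, c.when)
        else:
            yield (v, "")


# vecgeom's cxxstd table from the diff above:
_cxxstd_values = (
    stub_conditional("11", "14", when="@:1.1"),
    "17",
    stub_conditional("20", when="@1.2:"),
)

for _std, _when in std_when(_cxxstd_values):
    print(f'depends_on("geant4 cxxstd={_std}", when="{_when} +geant4 cxxstd={_std}")')
# Prints:
#   depends_on("geant4 cxxstd=11", when="@:1.1 +geant4 cxxstd=11")
#   depends_on("geant4 cxxstd=14", when="@:1.1 +geant4 cxxstd=14")
#   depends_on("geant4 cxxstd=17", when=" +geant4 cxxstd=17")
#   depends_on("geant4 cxxstd=20", when="@1.2: +geant4 cxxstd=20")

The unconditional "17" entry yields an empty when clause, so the interpolated constraint begins with a leading space; the recipes rely on Spack's spec parsing tolerating that extra whitespace.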