diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index dba4e4b1a7..833a4339f6 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -6,6 +6,8 @@ import os from functools import partial from importlib import import_module # noqa: F401 +from pathlib import Path +from typing import Iterable from conda import __version__ as CONDA_VERSION # noqa: F401 from conda.auxlib.packaging import ( # noqa: F401 @@ -53,7 +55,6 @@ human_bytes, input, install_actions, - is_linked, lchmod, linked, linked_data, @@ -75,7 +76,7 @@ ) from conda.models.channel import get_conda_build_local_url # noqa: F401 from conda.models.dist import Dist # noqa: F401 -from conda.models.records import PackageRecord +from conda.models.records import PackageRecord, PrefixRecord from .deprecations import deprecated @@ -125,46 +126,36 @@ class SignatureError(Exception): pass -@deprecated("3.28.0", "4.0.0") -def which_package(path): - """ - Given the path (of a (presumably) conda installed file) iterate over - the conda packages the file came from. Usually the iteration yields - only one package. - """ - from os.path import abspath, join +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda_build.inspect_pkg.which_package` instead.", +) +def which_package(path: str | os.PathLike | Path) -> Iterable[PrefixRecord]: + from .inspect_pkg import which_package - path = abspath(path) - prefix = which_prefix(path) - if prefix is None: - raise RuntimeError("could not determine conda prefix from: %s" % path) - for dist in linked(prefix): - meta = is_linked(prefix, dist) - if any(abspath(join(prefix, f)) == path for f in meta["files"]): - yield dist + return which_package(path, which_prefix(path)) @deprecated("3.28.0", "4.0.0") -def which_prefix(path): +def which_prefix(path: str | os.PathLike | Path) -> Path: """ Given the path (to a (presumably) conda installed file) return the environment prefix in which the file in located """ - from os.path import abspath, dirname, isdir, join + from conda.gateways.disk.test import is_conda_environment - prefix = abspath(path) - iteration = 0 - while iteration < 20: - if isdir(join(prefix, "conda-meta")): - # we found it, so let's return it - break - if prefix == dirname(prefix): + prefix = Path(path) + for _ in range(20): + if is_conda_environment(prefix): + return prefix + elif prefix == (parent := prefix.parent): # we cannot chop off any more directories, so we didn't find it - prefix = None break - prefix = dirname(prefix) - iteration += 1 - return prefix + else: + prefix = parent + + raise RuntimeError("could not determine conda prefix from: %s" % path) @deprecated("3.28.0", "4.0.0") diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index cbb60d4f25..e8b94978c1 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import json import os import re @@ -9,13 +11,19 @@ from functools import lru_cache from itertools import groupby from operator import itemgetter -from os.path import abspath, basename, dirname, exists, join, normcase +from os.path import abspath, basename, dirname, exists, join +from pathlib import Path +from typing import Iterable, Literal + +from conda.core.prefix_data import PrefixData +from conda.models.dist import Dist +from conda.models.records import PrefixRecord +from conda.resolve import MatchSpec from conda_build.conda_interface import ( display_actions, 
get_index, install_actions, - is_linked, linked_data, specs_from_args, ) @@ -34,32 +42,47 @@ rm_rf, ) +from .deprecations import deprecated +from .utils import on_mac, on_win + +@deprecated("3.28.0", "4.0.0") @lru_cache(maxsize=None) -def dist_files(prefix, dist): - meta = is_linked(prefix, dist) - return set(meta["files"]) if meta else set() +def dist_files(prefix: str | os.PathLike | Path, dist: Dist) -> set[str]: + if (prec := PrefixData(prefix).get(dist.name, None)) is None: + return set() + elif MatchSpec(dist).match(prec): + return set(prec["files"]) + else: + return set() -def which_package(in_prefix_path, prefix, avoid_canonical_channel_name=False): +@deprecated.argument("3.28.0", "4.0.0", "avoid_canonical_channel_name") +def which_package( + path: str | os.PathLike | Path, + prefix: str | os.PathLike | Path, +) -> Iterable[PrefixRecord]: """ - given the path of a conda installed file iterate over + Given the path (of a (presumably) conda installed file) iterate over the conda packages the file came from. Usually the iteration yields only one package. """ - norm_ipp = normcase(in_prefix_path.replace(os.sep, "/")) - from conda_build.utils import linked_data_no_multichannels + prefix = Path(prefix) + # historically, path was relative to prefix just to be safe we append to prefix + # (pathlib correctly handles this even if path is absolute) + path = prefix / path - if avoid_canonical_channel_name: - fn = linked_data_no_multichannels - else: - fn = linked_data - for dist in fn(prefix): - # dfiles = set(dist.get('files', [])) - dfiles = dist_files(prefix, dist) - # TODO :: This is completely wrong when the env is on a case-sensitive FS! - if any(norm_ipp == normcase(w) for w in dfiles): - yield dist + def samefile(path1: Path, path2: Path) -> bool: + try: + return path1.samefile(path2) + except FileNotFoundError: + # FileNotFoundError: path doesn't exist + return path1 == path2 + + for prec in PrefixData(str(prefix)).iter_records(): + for file in prec["files"]: + if samefile(prefix / file, path): + yield prec def print_object_info(info, key): @@ -106,25 +129,37 @@ def check_install( return None -def print_linkages(depmap, show_files=False): - # Print system and not found last - dist_depmap = {} - for k, v in depmap.items(): - if hasattr(k, "dist_name"): - k = k.dist_name - dist_depmap[k] = v +def print_linkages( + depmap: dict[ + PrefixRecord | Literal["not found" | "system" | "untracked"], + list[tuple[str, str, str]], + ], + show_files: bool = False, +) -> str: + # print system, not found, and untracked last + sort_order = { + # PrefixRecord: (0, PrefixRecord.name), + "system": (1, "system"), + "not found": (2, "not found"), + "untracked": (3, "untracked"), + # str: (4, str), + } - depmap = dist_depmap - k = sorted(set(depmap.keys()) - {"system", "not found"}) - all_deps = k if "not found" not in depmap.keys() else k + ["system", "not found"] output_string = "" - for dep in all_deps: - output_string += "%s:\n" % dep + for prec, links in sorted( + depmap.items(), + key=( + lambda key: (0, key[0].name) + if isinstance(key[0], PrefixRecord) + else sort_order.get(key[0], (4, key[0])) + ), + ): + output_string += "%s:\n" % prec if show_files: - for lib, path, binary in sorted(depmap[dep]): + for lib, path, binary in sorted(links): output_string += f" {lib} ({path}) from {binary}\n" else: - for lib, path in sorted(set(map(itemgetter(0, 1), depmap[dep]))): + for lib, path in sorted(set(map(itemgetter(0, 1), links))): output_string += f" {lib} ({path})\n" output_string += "\n" return 
output_string @@ -214,10 +249,9 @@ def test_installable(channel="defaults"): return success -def _installed(prefix): - installed = linked_data(prefix) - installed = {rec["name"]: dist for dist, rec in installed.items()} - return installed +@deprecated("3.28.0", "4.0.0") +def _installed(prefix: str | os.PathLike | Path) -> dict[str, Dist]: + return {dist.name: dist for dist in linked_data(str(prefix))} def _underlined_text(text): @@ -225,79 +259,66 @@ def _underlined_text(text): def inspect_linkages( - packages, - prefix=sys.prefix, - untracked=False, - all_packages=False, - show_files=False, - groupby="package", + packages: Iterable[str | _untracked_package], + prefix: str | os.PathLike | Path = sys.prefix, + untracked: bool = False, + all_packages: bool = False, + show_files: bool = False, + groupby: Literal["package" | "dependency"] = "package", sysroot="", ): - pkgmap = {} - - installed = _installed(prefix) - if not packages and not untracked and not all_packages: - raise ValueError( - "At least one package or --untracked or --all must be provided" - ) + sys.exit("At least one package or --untracked or --all must be provided") + elif on_win: + sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X") + + prefix = Path(prefix) + installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()} if all_packages: packages = sorted(installed.keys()) - + packages = ensure_list(packages) if untracked: packages.append(untracked_package) - for pkg in ensure_list(packages): - if pkg == untracked_package: - dist = untracked_package - elif pkg not in installed: - sys.exit(f"Package {pkg} is not installed in {prefix}") - else: - dist = installed[pkg] - - if not sys.platform.startswith(("linux", "darwin")): - sys.exit( - "Error: conda inspect linkages is only implemented in Linux and OS X" - ) - - if dist == untracked_package: + pkgmap: dict[str | _untracked_package, dict[str, list]] = {} + for name in packages: + if name == untracked_package: obj_files = get_untracked_obj_files(prefix) + elif name not in installed: + sys.exit(f"Package {name} is not installed in {prefix}") else: - obj_files = get_package_obj_files(dist, prefix) + obj_files = get_package_obj_files(installed[name], prefix) + linkages = get_linkages(obj_files, prefix, sysroot) - depmap = defaultdict(list) - pkgmap[pkg] = depmap - depmap["not found"] = [] - depmap["system"] = [] - for binary in linkages: - for lib, path in linkages[binary]: + pkgmap[name] = depmap = defaultdict(list) + for binary, paths in linkages.items(): + for lib, path in paths: path = ( replace_path(binary, path, prefix) if path not in {"", "not found"} else path ) - if path.startswith(prefix): - in_prefix_path = re.sub("^" + prefix + "/", "", path) - deps = list(which_package(in_prefix_path, prefix)) - if len(deps) > 1: - deps_str = [str(dep) for dep in deps] + try: + relative = str(Path(path).relative_to(prefix)) + except ValueError: + # ValueError: path is not relative to prefix + relative = None + if relative: + precs = list(which_package(relative, prefix)) + if len(precs) > 1: get_logger(__name__).warn( - "Warning: %s comes from multiple " "packages: %s", + "Warning: %s comes from multiple packages: %s", path, - comma_join(deps_str), + comma_join(map(str, precs)), ) - if not deps: + elif not precs: if exists(path): - depmap["untracked"].append( - (lib, path.split(prefix + "/", 1)[-1], binary) - ) + depmap["untracked"].append((lib, relative, binary)) else: - depmap["not found"].append( - (lib, path.split(prefix + "/", 
1)[-1], binary) - ) - for d in deps: - depmap[d].append((lib, path.split(prefix + "/", 1)[-1], binary)) + depmap["not found"].append((lib, relative, binary)) + for prec in precs: + depmap[prec].append((lib, relative, binary)) elif path == "not found": depmap["not found"].append((lib, path, binary)) else: @@ -330,27 +351,27 @@ def inspect_linkages( return output_string -def inspect_objects(packages, prefix=sys.prefix, groupby="package"): - installed = _installed(prefix) - - output_string = "" - for pkg in ensure_list(packages): - if pkg == untracked_package: - dist = untracked_package - elif pkg not in installed: - raise ValueError(f"Package {pkg} is not installed in {prefix}") - else: - dist = installed[pkg] - - output_string += _underlined_text(pkg) +def inspect_objects( + packages: Iterable[str], + prefix: str | os.PathLike | Path = sys.prefix, + groupby: str = "package", +): + if not on_mac: + sys.exit("Error: conda inspect objects is only implemented in OS X") - if not sys.platform.startswith("darwin"): - sys.exit("Error: conda inspect objects is only implemented in OS X") + prefix = Path(prefix) + installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()} - if dist == untracked_package: + output_string = "" + for name in ensure_list(packages): + if name == untracked_package: obj_files = get_untracked_obj_files(prefix) + elif name not in installed: + raise ValueError(f"Package {name} is not installed in {prefix}") else: - obj_files = get_package_obj_files(dist, prefix) + obj_files = get_package_obj_files(installed[name], prefix) + + output_string += _underlined_text(name) info = [] for f in obj_files: diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 32eea125a2..f3597d065a 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -1,15 +1,24 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import os import re import subprocess -import sys from functools import lru_cache -from os.path import basename, join +from os.path import basename +from pathlib import Path +from typing import Iterable + +from conda.models.records import PrefixRecord -from conda_build.conda_interface import linked_data, untracked +from conda_build.conda_interface import untracked from conda_build.os_utils.macho import otool from conda_build.os_utils.pyldd import codefile_class, inspect_linkages, machofile +from ..deprecations import deprecated +from ..utils import on_linux, on_mac + LDD_RE = re.compile(r"\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)") LDD_NOT_FOUND_RE = re.compile(r"\s*(.*?)\s*=>\s*not found") @@ -38,94 +47,85 @@ def ldd(path): return res -def get_linkages(obj_files, prefix, sysroot): - return _get_linkages(tuple(obj_files), prefix, sysroot) +def get_linkages( + obj_files: Iterable[str], + prefix: str | os.PathLike | Path, + sysroot, +) -> dict[str, list[tuple[str, str]]]: + return _get_linkages(tuple(obj_files), Path(prefix), sysroot) @lru_cache(maxsize=None) -def _get_linkages(obj_files, prefix, sysroot): - res = {} - - for f in obj_files: - path = join(prefix, f) - # ldd quite often fails on foreign architectures. - ldd_failed = False +def _get_linkages( + obj_files: tuple[str], + prefix: Path, + sysroot, +) -> dict[str, list[tuple[str, str]]]: + linkages = {} + for file in obj_files: # Detect the filetype to emulate what the system-native tool does. 
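# --- illustrative aside (not part of the patch) -----------------------------------
# The rewritten _get_linkages keeps the old strategy: always compute a pyldd-based
# emulation, prefer the platform tool (ldd on Linux, otool -L on macOS) when it
# succeeds, fall back to the emulation when it fails, and warn when the two results
# disagree. A minimal, self-contained sketch of that pattern; `native_tool` and
# `emulator` are hypothetical callables standing in for ldd()/otool() and
# pyldd.inspect_linkages(), not conda-build API.

def linkages_with_fallback(path, native_tool, emulator):
    """Return (libname, libpath) pairs, preferring the native tool's answer."""
    emulated = sorted(emulator(path))
    try:
        computed = sorted(native_tool(path))
    except Exception:
        # the native tool quite often fails on foreign architectures
        computed = emulated
    if set(computed) != set(emulated):
        print(f"WARNING: native tool and emulator disagree for {path}")
        print(f"  native only:   {set(computed) - set(emulated)}")
        print(f"  emulated only: {set(emulated) - set(computed)}")
    return computed
# -----------------------------------------------------------------------------------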
- klass = codefile_class(path) - if klass == machofile: + path = prefix / file + if codefile_class(path) == machofile: resolve_filenames = False recurse = False else: resolve_filenames = True recurse = True - try: - if sys.platform.startswith("linux"): - res[f] = ldd(path) - elif sys.platform.startswith("darwin"): - links = otool(path) - res[f] = [(basename(line["name"]), line["name"]) for line in links] - except: - ldd_failed = True - finally: - res_py = inspect_linkages( + ldd_emulate = [ + (basename(link), link) + for link in inspect_linkages( path, resolve_filenames=resolve_filenames, sysroot=sysroot, recurse=recurse, ) - res_py = [(basename(lp), lp) for lp in res_py] - if ldd_failed: - res[f] = res_py - else: - if set(res[f]) != set(res_py): - print( - "WARNING: pyldd disagrees with ldd/otool. This will not cause any" - ) - print("WARNING: problems for this build, but please file a bug at:") - print("WARNING: https://github.com/conda/conda-build") - print(f"WARNING: and (if possible) attach file {path}") - print( - "WARNING: \nldd/otool gives:\n{}\npyldd gives:\n{}\n".format( - "\n".join(str(e) for e in res[f]), - "\n".join(str(e) for e in res_py), - ) - ) - print(f"Diffs\n{set(res[f]) - set(res_py)}") - print(f"Diffs\n{set(res_py) - set(res[f])}") - return res - + ] + try: + if on_linux: + ldd_computed = ldd(path) + elif on_mac: + ldd_computed = [ + (basename(link["name"]), link["name"]) for link in otool(path) + ] + except: + # ldd quite often fails on foreign architectures, fallback to + ldd_computed = ldd_emulate + + if set(ldd_computed) != set(ldd_emulate): + print("WARNING: pyldd disagrees with ldd/otool. This will not cause any") + print("WARNING: problems for this build, but please file a bug at:") + print("WARNING: https://github.com/conda/conda-build") + print(f"WARNING: and (if possible) attach file {path}") + print("WARNING:") + print(" ldd/otool gives:") + print(" " + "\n ".join(map(str, ldd_computed))) + print(" pyldd gives:") + print(" " + "\n ".join(map(str, ldd_emulate))) + print(f"Diffs\n{set(ldd_computed) - set(ldd_emulate)}") + print(f"Diffs\n{set(ldd_emulate) - set(ldd_computed)}") + + linkages[file] = ldd_computed + return linkages + + +@deprecated("3.28.0", "4.0.0") @lru_cache(maxsize=None) -def get_package_files(dist, prefix): - files = [] - if hasattr(dist, "get"): - files = dist.get("files") - else: - data = linked_data(prefix).get(dist) - if data: - files = data.get("files", []) - return files +def get_package_files( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> list[str]: + return prec["files"] @lru_cache(maxsize=None) -def get_package_obj_files(dist, prefix): - res = [] - files = get_package_files(dist, prefix) - for f in files: - path = join(prefix, f) - if codefile_class(path): - res.append(f) - - return res +def get_package_obj_files( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> list[str]: + return [file for file in prec["files"] if codefile_class(Path(prefix, file))] @lru_cache(maxsize=None) -def get_untracked_obj_files(prefix): - res = [] - files = untracked(prefix) - for f in files: - path = join(prefix, f) - if codefile_class(path): - res.append(f) - - return res +def get_untracked_obj_files(prefix: str | os.PathLike | Path) -> list[str]: + return [ + file for file in untracked(str(prefix)) if codefile_class(Path(prefix, file)) + ] diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index 42b89711ae..90679409c9 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ 
-365,6 +365,7 @@ def do_file(file, lc_operation, off_sz, arch, results, *args): results.append(do_macho(file, 64, LITTLE_ENDIAN, lc_operation, *args)) +@deprecated("3.28.0", "4.0.0") def mach_o_change(path, arch, what, value): """ Replace a given name (what) in any LC_LOAD_DYLIB command found in @@ -1139,6 +1140,7 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"): return cf.uniqueness_key(), orig_names, resolved_names +@deprecated("3.28.0", "4.0.0") def inspect_rpaths( filename, resolve_dirnames=True, use_os_varnames=True, sysroot="", arch="native" ): @@ -1170,6 +1172,7 @@ def inspect_rpaths( return cf.rpaths_nontransitive +@deprecated("3.28.0", "4.0.0") def get_runpaths(filename, arch="native"): if not os.path.exists(filename): return [] @@ -1257,16 +1260,16 @@ def otool(*args): return 1 +@deprecated("3.28.0", "4.0.0") def otool_sys(*args): import subprocess - result = subprocess.check_output("/usr/bin/otool", args).decode(encoding="ascii") - return result + return subprocess.check_output("/usr/bin/otool", args).decode(encoding="ascii") +@deprecated("3.28.0", "4.0.0") def ldd_sys(*args): - result = [] - return result + return [] def ldd(*args): @@ -1297,12 +1300,11 @@ def main(argv): elif re.match(r".*otool(?:$|\.exe|\.py)", progname): return otool(*argv[2 - idx :]) elif os.path.isfile(progname): - klass = codefile_class(progname) - if not klass: + if not (codefile := codefile_class(progname)): return 1 - elif klass == elffile: + elif codefile == elffile: return ldd(*argv[1 - idx :]) - elif klass == machofile: + elif codefile == machofile: return otool("-L", *argv[1 - idx :]) return 1 diff --git a/conda_build/post.py b/conda_build/post.py index 7be43cbe21..93c761d3d0 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1,5 +1,8 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import json import locale import os import re @@ -27,12 +30,12 @@ sep, splitext, ) +from pathlib import Path from subprocess import CalledProcessError, call, check_output +from typing import Iterable, Literal -try: - from os import readlink -except ImportError: - readlink = False +from conda.core.prefix_data import PrefixData +from conda.models.records import PrefixRecord from conda_build import utils from conda_build.conda_interface import ( @@ -44,7 +47,6 @@ from conda_build.exceptions import OverDependingError, OverLinkingError, RunPathError from conda_build.inspect_pkg import which_package from conda_build.os_utils import external, macho -from conda_build.os_utils.ldd import get_package_files, get_package_obj_files from conda_build.os_utils.liefldd import ( get_exports_memoized, get_linkages_memoized, @@ -61,6 +63,7 @@ machofile, ) +from .deprecations import deprecated from .metadata import MetaData filetypes_for_platform = { @@ -648,33 +651,88 @@ def assert_relative_osx(path, host_prefix, build_prefix): ) +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead.", +) def determine_package_nature( - pkg, prefix, subdir, bldpkgs_dir, output_folder, channel_urls -): - run_exports = None - lib_prefix = pkg.name.startswith("lib") - codefiles = get_package_obj_files(pkg, prefix) - # get_package_obj_files already filters by extension and I'm not sure we need two. - dsos = [ - f for f in codefiles for ext in (".dylib", ".so", ".dll", ".pyd") if ext in f - ] - # TODO :: Is this package not in a channel somewhere at this point? 
It would be good not to be special - # casing like this. Clearly we aren't able to get run_exports for starters and that's not good - if not isinstance(pkg, FakeDist): - # we don't care about the actual run_exports value, just whether or not run_exports are present. - json_file = os.path.join(prefix, "conda-meta", pkg.dist_name + ".json") - import json - - assert os.path.isfile(json_file), f"conda-meta :: Not a file: {json_file}" - json_info = json.loads(open(json_file).read()) - epd = json_info["extracted_package_dir"] - run_exports_json = os.path.join(epd, "info", "run_exports.json") - if os.path.isfile(run_exports_json): - run_exports = json.loads(open(run_exports_json).read()) - return (dsos, run_exports, lib_prefix) - - -def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls): + prec: PrefixRecord, + prefix: str | os.PathLike | Path, + subdir, + bldpkgs_dir, + output_folder, + channel_urls, +) -> tuple[set[str], tuple[str, ...], bool]: + return ( + get_dsos(prec, prefix), + get_run_exports(prec, prefix), + prec.name.startswith("lib"), + ) + + +def get_dsos(prec: PrefixRecord, prefix: str | os.PathLike | Path) -> set[str]: + return { + file + for file in prec["files"] + if codefile_class(Path(prefix, file)) + # codefile_class already filters by extension/binary type, do we need this second filter? + for ext in (".dylib", ".so", ".dll", ".pyd") + if ext in file + } + + +def get_run_exports( + prec: PrefixRecord, + prefix: str | os.PathLike | Path, +) -> tuple[str, ...]: + json_file = Path( + prefix, + "conda-meta", + f"{prec.name}-{prec.version}-{prec.build}.json", + ) + try: + json_info = json.loads(json_file.read_text()) + except (FileNotFoundError, IsADirectoryError): + # FileNotFoundError: path doesn't exist + # IsADirectoryError: path is a directory + # raise CondaBuildException(f"Not a file: {json_file}") + # is this a "fake" PrefixRecord? + # i.e. this is the package being built and hasn't been "installed" to disk? + return () + + run_exports_json = Path( + json_info["extracted_package_dir"], + "info", + "run_exports.json", + ) + try: + return tuple(json.loads(run_exports_json.read_text())) + except (FileNotFoundError, IsADirectoryError): + # FileNotFoundError: path doesn't exist + # IsADirectoryError: path is a directory + return () + + +@deprecated.argument("3.28.0", "4.0.0", "subdir") +@deprecated.argument("3.28.0", "4.0.0", "bldpkgs_dirs") +@deprecated.argument("3.28.0", "4.0.0", "output_folder") +@deprecated.argument("3.28.0", "4.0.0", "channel_urls") +def library_nature( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> Literal[ + "interpreter (Python)" + | "interpreter (R)" + | "run-exports library" + | "dso library" + | "plugin library (Python,R)" + | "plugin library (Python)" + | "plugin library (R)" + | "interpreted library (Python,R)" + | "interpreted library (Python)" + | "interpreted library (R)" + | "non-library" +]: """ Result :: "non-library", "interpreted library (Python|R|Python,R)", @@ -685,55 +743,53 @@ def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_url "interpreter (Python)" .. in that order, i.e. if have both dsos and run_exports, it's a run_exports_library. 
""" - dsos, run_exports, _ = determine_package_nature( - pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls - ) - if pkg.name == "python": + if prec.name == "python": return "interpreter (Python)" - elif pkg.name == "r-base": + elif prec.name == "r-base": return "interpreter (R)" - if run_exports: + elif get_run_exports(prec, prefix): return "run-exports library" - elif len(dsos): + elif dsos := get_dsos(prec, prefix): # If all DSOs are under site-packages or R/lib/ - python_dsos = [dso for dso in dsos if "site-packages" in dso] - r_dsos = [dso for dso in dsos if "lib/R/library" in dso] - dsos_without_plugins = [dso for dso in dsos if dso not in r_dsos + python_dsos] - if len(dsos_without_plugins): + python_dsos = {dso for dso in dsos if "site-packages" in dso} + r_dsos = {dso for dso in dsos if "lib/R/library" in dso} + if dsos - python_dsos - r_dsos: return "dso library" - else: - if python_dsos and r_dsos: - return "plugin library (Python,R)" - elif python_dsos: - return "plugin library (Python)" - elif r_dsos: - return "plugin library (R)" + elif python_dsos and r_dsos: + return "plugin library (Python,R)" + elif python_dsos: + return "plugin library (Python)" + elif r_dsos: + return "plugin library (R)" else: - files = get_package_files(pkg, prefix) - python_files = [f for f in files if "site-packages" in f] - r_files = [f for f in files if "lib/R/library" in f] + python_files = {file for file in prec["files"] if "site-packages" in file} + r_files = {file for file in prec["files"] if "lib/R/library" in file} if python_files and r_files: return "interpreted library (Python,R)" elif python_files: return "interpreted library (Python)" elif r_files: return "interpreted library (R)" - return "non-library" -def dists_from_names(names, prefix): - results = [] +@deprecated( + "3.28.0", + "4.0.0", + addendum="Query `conda.core.prefix_data.PrefixData` instead.", +) +def dists_from_names(names: Iterable[str], prefix: str | os.PathLike | Path): from conda_build.utils import linked_data_no_multichannels - pkgs = linked_data_no_multichannels(prefix) - for name in names: - for pkg in pkgs: - if pkg.quad[0] == name: - results.append(pkg) - return results + names = utils.ensure_list(names) + return [prec for prec in linked_data_no_multichannels(prefix) if prec.name in names] +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda.models.records.PrefixRecord` instead.", +) class FakeDist: def __init__(self, name, version, build_number, build_str, channel, files): self.name = name @@ -922,9 +978,7 @@ def _map_file_to_package( if not len(owners): if any(rp == normpath(w) for w in files): owners.append(pkg_vendored_dist) - new_pkgs = list( - which_package(rp, prefix, avoid_canonical_channel_name=True) - ) + new_pkgs = list(which_package(rp, prefix)) # Cannot filter here as this means the DSO (eg libomp.dylib) will not be found in any package # [owners.append(new_pkg) for new_pkg in new_pkgs if new_pkg not in owners # and not any([fnmatch(new_pkg.name, i) for i in ignore_for_statics])] @@ -964,25 +1018,20 @@ def _map_file_to_package( return prefix_owners, contains_dsos, contains_static_libs, all_lib_exports +@deprecated( + "3.28.0", "4.0.0", addendum="Use `conda.models.records.PrefixRecord` instead." 
+) def _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number, channel, files): - pkg_vendoring_name = pkg_name - pkg_vendoring_version = str(pkg_version) - pkg_vendoring_build_str = build_str - pkg_vendoring_build_number = build_number - pkg_vendoring_key = "-".join( - [pkg_vendoring_name, pkg_vendoring_version, pkg_vendoring_build_str] - ) - return ( FakeDist( - pkg_vendoring_name, - pkg_vendoring_version, - pkg_vendoring_build_number, - pkg_vendoring_build_str, + pkg_name, + str(pkg_version), + build_number, + build_str, channel, files, ), - pkg_vendoring_key, + f"{pkg_name}-{pkg_version}-{build_str}", ) @@ -1121,20 +1170,18 @@ def _lookup_in_prefix_packages( in_prefix_dso = normpath(needed_dso) n_dso_p = "Needed DSO {}".format(in_prefix_dso.replace("\\", "/")) and_also = " (and also in this package)" if in_prefix_dso in files else "" - pkgs = list( - which_package(in_prefix_dso, run_prefix, avoid_canonical_channel_name=True) - ) - in_pkgs_in_run_reqs = [pkg for pkg in pkgs if pkg.quad[0] in requirements_run] + precs = list(which_package(in_prefix_dso, run_prefix)) + precs_in_reqs = [prec for prec in precs if prec.name in requirements_run] # TODO :: metadata build/inherit_child_run_exports (for vc, mro-base-impl). - for pkg in in_pkgs_in_run_reqs: - if pkg in lib_packages: - lib_packages_used.add(pkg) + for prec in precs_in_reqs: + if prec in lib_packages: + lib_packages_used.add(prec) in_whitelist = any([fnmatch(in_prefix_dso, w) for w in whitelist]) - if len(in_pkgs_in_run_reqs) == 1: + if len(precs_in_reqs) == 1: _print_msg( errors, "{}: {} found in {}{}".format( - info_prelude, n_dso_p, in_pkgs_in_run_reqs[0], and_also + info_prelude, n_dso_p, precs_in_reqs[0], and_also ), verbose=verbose, ) @@ -1144,11 +1191,11 @@ def _lookup_in_prefix_packages( f"{info_prelude}: {n_dso_p} found in the whitelist", verbose=verbose, ) - elif len(in_pkgs_in_run_reqs) == 0 and len(pkgs) > 0: + elif len(precs_in_reqs) == 0 and len(precs) > 0: _print_msg( errors, "{}: {} found in {}{}".format( - msg_prelude, n_dso_p, [p.quad[0] for p in pkgs], and_also + msg_prelude, n_dso_p, [prec.name for prec in precs], and_also ), verbose=verbose, ) @@ -1156,15 +1203,15 @@ def _lookup_in_prefix_packages( errors, "{}: .. but {} not in reqs/run, (i.e. 
it is overlinking)" " (likely) or a missing dependency (less likely)".format( - msg_prelude, [p.quad[0] for p in pkgs] + msg_prelude, [prec.name for prec in precs] ), verbose=verbose, ) - elif len(in_pkgs_in_run_reqs) > 1: + elif len(precs_in_reqs) > 1: _print_msg( errors, "{}: {} found in multiple packages in run/reqs: {}{}".format( - warn_prelude, in_prefix_dso, in_pkgs_in_run_reqs, and_also + warn_prelude, in_prefix_dso, precs_in_reqs, and_also ), verbose=verbose, ) @@ -1283,11 +1330,11 @@ def _show_linking_messages( def check_overlinking_impl( - pkg_name, - pkg_version, - build_str, - build_number, - subdir, + pkg_name: str, + pkg_version: str, + build_str: str, + build_number: int, + subdir: str, ignore_run_exports, requirements_run, requirements_build, @@ -1326,30 +1373,32 @@ def check_overlinking_impl( build_prefix_substitution = "$PATH" # Used to detect overlinking (finally) requirements_run = [req.split(" ")[0] for req in requirements_run] - packages = dists_from_names(requirements_run, run_prefix) + pd = PrefixData(run_prefix) + precs = [prec for req in requirements_run if (prec := pd.get(req, None))] local_channel = ( dirname(bldpkgs_dirs).replace("\\", "/") if utils.on_win else dirname(bldpkgs_dirs)[1:] ) - pkg_vendored_dist, pkg_vendoring_key = _get_fake_pkg_dist( - pkg_name, pkg_version, build_str, build_number, local_channel, files + pkg_vendored_dist = PrefixRecord( + name=pkg_name, + version=str(pkg_version), + build=build_str, + build_number=build_number, + channel=local_channel, + files=files, ) - packages.append(pkg_vendored_dist) + pkg_vendoring_key = f"{pkg_name}-{pkg_version}-{build_str}" + precs.append(pkg_vendored_dist) ignore_list = utils.ensure_list(ignore_run_exports) if subdir.startswith("linux"): ignore_list.append("libgcc-ng") - package_nature = { - package: library_nature( - package, run_prefix, subdir, bldpkgs_dirs, output_folder, channel_urls - ) - for package in packages - } + package_nature = {prec: library_nature(prec, run_prefix) for prec in precs} lib_packages = { - package - for package in packages - if package.quad[0] not in ignore_list and [package] != "non-library" + prec + for prec, nature in package_nature.items() + if prec.name not in ignore_list and nature != "non-library" } lib_packages_used = {pkg_vendored_dist} @@ -1712,14 +1761,12 @@ def post_build(m, files, build_python, host_prefix=None, is_already_linked=False def check_symlinks(files, prefix, croot): - if readlink is False: - return # Not on Unix system msgs = [] real_build_prefix = realpath(prefix) for f in files: path = join(real_build_prefix, f) if islink(path): - link_path = readlink(path) + link_path = os.readlink(path) real_link_path = realpath(path) # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused # because ld.so follows symlinks in RPATHS diff --git a/conda_build/utils.py b/conda_build/utils.py index 4f68a7f79e..9f41400990 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import contextlib import fnmatch import hashlib @@ -70,6 +72,9 @@ from conda.api import PackageCacheData # noqa from conda.base.constants import KNOWN_SUBDIRS +from conda.core.prefix_data import PrefixData +from conda.models.dist import Dist +from conda.models.records import PrefixRecord # NOQA because it is not used in this file. 
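# --- illustrative aside (not part of the patch) -----------------------------------
# Most call sites in this changeset replace conda_build.conda_interface.linked_data
# and is_linked with conda's PrefixData API imported above. A rough sketch of the
# query pattern the new which_package/linked_data_no_multichannels code relies on;
# `owners` is a hypothetical helper, and the file comparison in the actual patch is
# more careful (Path.samefile with an equality fallback for missing files).

def owners(prefix, relative_path):
    """Yield names of installed records whose file list contains `relative_path`."""
    from pathlib import Path

    from conda.core.prefix_data import PrefixData

    target = Path(prefix, relative_path)
    for prec in PrefixData(str(prefix)).iter_records():
        if any(Path(prefix, file) == target for file in prec["files"]):
            yield prec.name
# -----------------------------------------------------------------------------------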
from conda_build.conda_interface import rm_rf as _rm_rf # noqa @@ -1274,7 +1279,7 @@ def islist(arg, uniform=False, include_dict=True): # StopIteration: list is empty, an empty list is still uniform return True # check for explicit type match, do not allow the ambiguity of isinstance - uniform = lambda e: type(e) == etype # noqa: E721 + uniform = lambda e: type(e) == etype # noqa: E731 try: return all(uniform(e) for e in arg) @@ -2161,17 +2166,17 @@ def download_channeldata(channel_url): return data -def linked_data_no_multichannels(prefix): +def linked_data_no_multichannels( + prefix: str | os.PathLike | Path, +) -> dict[Dist, PrefixRecord]: """ Return a dictionary of the linked packages in prefix, with correct channels, hopefully. cc @kalefranz. """ - from conda.core.prefix_data import PrefixData - from conda.models.dist import Dist - + prefix = Path(prefix) return { Dist.from_string(prec.fn, channel_override=prec.channel.name): prec - for prec in PrefixData(prefix)._prefix_records.values() + for prec in PrefixData(str(prefix)).iter_records() } diff --git a/news/5041-refactor-which_package b/news/5041-refactor-which_package new file mode 100644 index 0000000000..0b060e3e51 --- /dev/null +++ b/news/5041-refactor-which_package @@ -0,0 +1,32 @@ +### Enhancements + +* Consolidate `which_package` implementations and replacing `conda.models.dist.Dist` usage in favor of `conda.models.records.PrefixRecords`. (#5041) + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.inspect_pkg.dist_files` as pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg.which_package(avoid_canonical_channel_name)` as pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg._installed` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.ldd.get_package_files` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.mach_o_change` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.inspect_rpath` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.get_runpaths` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.otool_sys` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.ldd_sys` as pending deprecation. (#5041) +* Mark `conda_build.post.determine_package_nature` as pending deprecation. Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead. (#5041) +* Mark `conda_build.post.library_nature(subdir, bldpkgs_dirs, output_folder, channel_urls)` as pending deprecation. (#5041) +* Mark `conda_build.post.dist_from_names` as pending deprecation. Query `conda.core.prefix_data.PrefixData` instead. (#5041) +* Mark `conda_build.post.FakeDist` as pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) +* Mark `conda_build.post._get_fake_pkg_dist` as pending deprecation. Use `conda.models.records.PrefixRecord` instead. 
(#5041) + +### Docs + +* + +### Other + +* diff --git a/tests/cli/test_main_inspect.py b/tests/cli/test_main_inspect.py index 94ede317dc..b8931b5220 100644 --- a/tests/cli/test_main_inspect.py +++ b/tests/cli/test_main_inspect.py @@ -22,7 +22,7 @@ def test_inspect_installable(testing_workdir): def test_inspect_linkages(testing_workdir, capfd): # get a package that has known object output args = ["linkages", "python"] - if sys.platform == "win32": + if on_win: with pytest.raises(SystemExit) as exc: main_inspect.execute(args) assert "conda inspect linkages is only implemented in Linux and OS X" in exc diff --git a/tests/test_api_build.py b/tests/test_api_build.py index e0c786dcc8..c3059f066d 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -1691,7 +1691,6 @@ def test_provides_features_metadata(testing_config): # using different MACOSX_DEPLOYMENT_TARGET in parallel causes some SDK race condition # https://github.com/conda/conda-build/issues/4708 @pytest.mark.serial -@pytest.mark.flaky(reruns=5, reruns_delay=2) def test_overlinking_detection(testing_config, variants_conda_build_sysroot): testing_config.activate = True testing_config.error_overlinking = True
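# --- illustrative migration aside (not part of the patch) --------------------------
# For downstream callers: conda_build.conda_interface.which_package (deprecated
# above, removal slated for 4.0.0) took only a path, derived the prefix itself, and
# historically yielded Dist objects, while the consolidated
# conda_build.inspect_pkg.which_package takes the prefix explicitly and yields
# PrefixRecord objects. A hedged before/after; the probed path is only an example
# and may legitimately resolve to no owning package.

import os
import sys

from conda_build.inspect_pkg import which_package

# before (still works until 4.0.0, now emits a deprecation warning):
#   from conda_build.conda_interface import which_package
#   owners = list(which_package(path))

path = os.path.join(sys.prefix, "conda-meta", "history")
owners = list(which_package(path, sys.prefix))
print([f"{prec.name} {prec.version} {prec.build}" for prec in owners])
# ------------------------------------------------------------------------------------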