diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 530c5143a6..3302aec421 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -69,10 +69,18 @@ jobs:
fail-fast: false
matrix:
# test all lower versions (w/ stable conda) and upper version (w/ canary conda)
- python-version: ['3.8', '3.9', '3.10']
+ python-version: ['3.9', '3.10']
conda-version: [release]
test-type: [serial, parallel]
include:
+ # minimum Python/conda combo
+ - python-version: '3.8'
+ conda-version: 22.11.0
+ test-type: serial
+ - python-version: '3.8'
+ conda-version: 22.11.0
+ test-type: parallel
+ # maximum Python/conda combo
- python-version: '3.11'
conda-version: canary
test-type: serial
@@ -81,6 +89,7 @@ jobs:
test-type: parallel
env:
CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }}
+ CONDA_VERSION: ${{ contains('canary,release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }}
REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}
REPLAY_DIR: ${{ github.workspace }}/pytest-replay
ALLURE_DIR: ${{ github.workspace }}/allure-results
@@ -115,7 +124,7 @@ jobs:
conda install -q -y -c defaults \
--file ./tests/requirements.txt \
--file ./tests/requirements-linux.txt \
- ${{ env.CONDA_CHANNEL_LABEL }}::conda
+ ${{ env.CONDA_CHANNEL_LABEL }}::${{ env.CONDA_VERSION }}
pip install -e . --no-deps
- name: Show info
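Note on the `CONDA_VERSION` expression added above: GitHub's `contains('canary,release', matrix.conda-version)` is a substring test, so the two label names select the unpinned `conda` package while anything else (e.g. `22.11.0`) becomes an exact pin. A minimal Python mirror of that selection logic (the helper name is hypothetical; the real logic lives in the Actions expression):

```python
def conda_install_spec(conda_version: str) -> str:
    # label names ("canary", "release") select the latest conda;
    # anything else is treated as an exact version pin
    if conda_version in ("canary", "release"):
        return "conda"
    return f"conda={conda_version}"

assert conda_install_spec("canary") == "conda"
assert conda_install_spec("22.11.0") == "conda=22.11.0"
```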
diff --git a/conda_build/api.py b/conda_build/api.py
index 2d5fa7ee7d..727240aece 100644
--- a/conda_build/api.py
+++ b/conda_build/api.py
@@ -8,6 +8,7 @@
Design philosophy: put variability into config. Make each function here accept kwargs,
but only use those kwargs in config. Config must change to support new features elsewhere.
"""
+from __future__ import annotations
import sys as _sys
@@ -76,8 +77,8 @@ def render(
raise
# remove outputs section from output objects for simplicity
- if not om.path and om.meta.get("outputs"):
- om.parent_outputs = om.meta["outputs"]
+ if not om.path and (outputs := om.get_section("outputs")):
+ om.parent_outputs = outputs
del om.meta["outputs"]
output_metas[
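The walrus pattern above fetches the section once and truth-tests it in the same expression. A standalone sketch, assuming `get_section` returns an empty container when the section is absent (which is what its usage throughout this diff implies):

```python
meta = {"outputs": [{"name": "a"}]}

def get_section(name: str):
    # assumed contract: a missing section comes back as an empty container
    return meta.get(name, [])

if outputs := get_section("outputs"):
    parent_outputs = outputs  # bound and tested in one expression
    del meta["outputs"]

assert parent_outputs == [{"name": "a"}] and "outputs" not in meta
```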
@@ -571,7 +572,7 @@ def debug(
test=False,
output_id=None,
config=None,
- verbose=True,
+ verbose: bool = True,
link_source_method="auto",
**kwargs,
):
@@ -587,6 +588,8 @@ def debug(
from conda_build.build import test as run_test
from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, LoggingContext, on_win
+ from .metadata import MetaData
+
is_package = False
default_config = get_or_merge_config(config, **kwargs)
args = {"set_build_id": False}
@@ -622,15 +625,13 @@ def debug(
config.channel_urls = get_channel_urls(kwargs)
- metadata_tuples = []
+ metadata_tuples: list[tuple[MetaData, bool, bool]] = []
best_link_source_method = "skip"
if isinstance(recipe_or_package_path_or_metadata_tuples, str):
if path_is_build_dir:
for metadata_conda_debug in metadatas_conda_debug:
best_link_source_method = "symlink"
- from conda_build.metadata import MetaData
-
metadata = MetaData(metadata_conda_debug, config, {})
metadata_tuples.append((metadata, False, True))
else:
@@ -681,10 +682,7 @@ def debug(
"local",
"src",
"conda",
- "{}-{}".format(
- metadata.get_value("package/name"),
- metadata.get_value("package/version"),
- ),
+ f"{metadata.name()}-{metadata.version()}",
)
link_target = os.path.dirname(metadata.meta_path)
try:
diff --git a/conda_build/build.py b/conda_build/build.py
index 1d66cf114f..134730138a 100644
--- a/conda_build/build.py
+++ b/conda_build/build.py
@@ -151,7 +151,7 @@ def log_stats(stats_dict, descriptor):
)
-def create_post_scripts(m):
+def create_post_scripts(m: MetaData):
"""
Create scripts to run after build step
"""
@@ -162,12 +162,9 @@ def create_post_scripts(m):
is_output = "package:" not in m.get_recipe_text()
scriptname = tp
if is_output:
- if m.meta.get("build", {}).get(tp, ""):
- scriptname = m.meta["build"][tp]
- else:
- scriptname = m.name() + "-" + tp
+ scriptname = m.get_value(f"build/{tp}", f"{m.name()}-{tp}")
scriptname += ext
- dst_name = "." + m.name() + "-" + tp + ext
+ dst_name = f".{m.name()}-{tp}{ext}"
src = join(m.path, scriptname)
if isfile(src):
dst_dir = join(
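`get_value("section/key", default)` collapses the old two-step dict lookup. A simplified sketch of the contract these call sites rely on (the real `MetaData.get_value` also handles list-valued sections such as `source` and `outputs`):

```python
meta = {"build": {"post-link": "my-post-link"}}

def get_value(field: str, default=None):
    # simplified: a "section/key" path with a fallback when missing or falsy
    section, _, key = field.partition("/")
    return meta.get(section, {}).get(key) or default

assert get_value("build/post-link") == "my-post-link"
assert get_value("build/pre-unlink", "pkg-pre-unlink") == "pkg-pre-unlink"
```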
@@ -1456,12 +1453,12 @@ def write_about_json(m):
json.dump(d, fo, indent=2, sort_keys=True)
-def write_info_json(m):
+def write_info_json(m: MetaData):
info_index = m.info_index()
if m.pin_depends:
        # With 'strict' depends, we will have pinned run deps during rendering
if m.pin_depends == "strict":
- runtime_deps = m.meta.get("requirements", {}).get("run", [])
+ runtime_deps = m.get_value("requirements/run", [])
info_index["depends"] = runtime_deps
else:
runtime_deps = environ.get_pinned_deps(m, "run")
@@ -1508,8 +1505,8 @@ def get_entry_point_script_names(entry_point_scripts):
return scripts
-def write_run_exports(m):
- run_exports = m.meta.get("build", {}).get("run_exports", {})
+def write_run_exports(m: MetaData):
+ run_exports = m.get_value("build/run_exports", {})
if run_exports:
with open(os.path.join(m.config.info_dir, "run_exports.json"), "w") as f:
if not hasattr(run_exports, "keys"):
@@ -1747,8 +1744,8 @@ def create_info_files_json_v1(m, info_dir, prefix, files, files_with_prefix):
return checksums
-def post_process_files(m, initial_prefix_files):
- package_name = m.get_value("package/name")
+def post_process_files(m: MetaData, initial_prefix_files):
+ package_name = m.name()
host_prefix = m.config.host_prefix
missing = []
for f in initial_prefix_files:
@@ -1778,7 +1775,7 @@ def post_process_files(m, initial_prefix_files):
)
post_process(
package_name,
- m.get_value("package/version"),
+ m.version(),
sorted(current_prefix_files - initial_prefix_files),
prefix=host_prefix,
config=m.config,
@@ -1839,7 +1836,7 @@ def post_process_files(m, initial_prefix_files):
return new_files
-def bundle_conda(output, metadata, env, stats, **kw):
+def bundle_conda(output, metadata: MetaData, env, stats, **kw):
log = utils.get_logger(__name__)
log.info("Packaging %s", metadata.dist())
get_all_replacements(metadata.config)
@@ -1911,7 +1908,7 @@ def bundle_conda(output, metadata, env, stats, **kw):
env_output["TOP_PKG_NAME"] = env["PKG_NAME"]
env_output["TOP_PKG_VERSION"] = env["PKG_VERSION"]
env_output["PKG_VERSION"] = metadata.version()
- env_output["PKG_NAME"] = metadata.get_value("package/name")
+ env_output["PKG_NAME"] = metadata.name()
env_output["RECIPE_DIR"] = metadata.path
env_output["MSYS2_PATH_TYPE"] = "inherit"
env_output["CHERE_INVOKING"] = "1"
@@ -2129,7 +2126,7 @@ def bundle_conda(output, metadata, env, stats, **kw):
return final_outputs
-def bundle_wheel(output, metadata, env, stats):
+def bundle_wheel(output, metadata: MetaData, env, stats):
ext = ".bat" if utils.on_win else ".sh"
with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir):
dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext)
@@ -2145,7 +2142,7 @@ def bundle_wheel(output, metadata, env, stats):
env["TOP_PKG_NAME"] = env["PKG_NAME"]
env["TOP_PKG_VERSION"] = env["PKG_VERSION"]
env["PKG_VERSION"] = metadata.version()
- env["PKG_NAME"] = metadata.get_value("package/name")
+ env["PKG_NAME"] = metadata.name()
interpreter_and_args = guess_interpreter(dest_file)
bundle_stats = {}
@@ -2317,7 +2314,7 @@ def _write_activation_text(script_path, m):
fh.write(data)
-def create_build_envs(m, notest):
+def create_build_envs(m: MetaData, notest):
build_ms_deps = m.ms_depends("build")
build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps]
host_ms_deps = m.ms_depends("host")
@@ -2371,11 +2368,12 @@ def create_build_envs(m, notest):
try:
if not notest:
utils.insert_variant_versions(
- m.meta.get("requirements", {}), m.config.variant, "run"
+ m.get_section("requirements"), m.config.variant, "run"
)
- test_run_ms_deps = utils.ensure_list(
- m.get_value("test/requires", [])
- ) + utils.ensure_list(m.get_value("requirements/run", []))
+ test_run_ms_deps = [
+ *utils.ensure_list(m.get_value("test/requires", [])),
+ *utils.ensure_list(m.get_value("requirements/run", [])),
+ ]
# make sure test deps are available before taking time to create build env
environ.get_install_actions(
m.config.test_prefix,
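The `[*a, *b]` spelling builds one fresh list from both `ensure_list` results instead of chaining `+`; neither input is mutated:

```python
requires = ["pytest"]
run = ["numpy >=1.21"]
test_run_ms_deps = [*requires, *run]  # new list; inputs untouched
assert test_run_ms_deps == ["pytest", "numpy >=1.21"]
```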
@@ -2424,7 +2422,7 @@ def create_build_envs(m, notest):
def build(
- m,
+ m: MetaData,
stats,
post=None,
need_source_download=True,
@@ -2516,7 +2514,7 @@ def build(
)
specs = [ms.spec for ms in m.ms_depends("build")]
- if any(out.get("type") == "wheel" for out in m.meta.get("outputs", [])):
+ if any(out.get("type") == "wheel" for out in m.get_section("outputs")):
specs.extend(["pip", "wheel"])
# TODO :: This is broken. It does not respect build/script for example and also if you need git
@@ -4099,7 +4097,7 @@ def handle_anaconda_upload(paths, config):
print(no_upload_message)
return
- if anaconda is None:
+ if not anaconda:
print(no_upload_message)
sys.exit(
"Error: cannot locate anaconda command (required for upload)\n"
diff --git a/conda_build/create_test.py b/conda_build/create_test.py
index 45cb20ebfe..35511ef503 100644
--- a/conda_build/create_test.py
+++ b/conda_build/create_test.py
@@ -47,7 +47,7 @@ def _get_output_script_name(
src_name = dst_name
if m.is_output:
src_name = "no-file"
- for out in m.meta.get("outputs", []):
+ for out in m.get_section("outputs"):
if m.name() == out.get("name"):
out_test_script = out.get("test", {}).get("script", "no-file")
if os.path.splitext(out_test_script)[1].lower() == ext:
@@ -103,7 +103,7 @@ def _create_test_files(
name = ""
# the way this works is that each output needs to explicitly define a test script to run
# They do not automatically pick up run_test.*, but can be pointed at that explicitly.
- for out in m.meta.get("outputs", []):
+ for out in m.get_section("outputs"):
if m.name() == out.get("name"):
out_test_script = out.get("test", {}).get("script", "no-file")
if out_test_script.endswith(ext):
diff --git a/conda_build/environ.py b/conda_build/environ.py
index 5afcf93c4d..9e128ad511 100644
--- a/conda_build/environ.py
+++ b/conda_build/environ.py
@@ -42,6 +42,7 @@
root_dir,
)
from .deprecations import deprecated
+from .metadata import MetaData
# these are things that we provide env vars for more explicitly. This list disables the
# pass-through of variant values to env vars for these keys.
@@ -388,7 +389,7 @@ def python_vars(metadata, prefix, escape_backslash):
}
build_or_host = "host" if metadata.is_cross else "build"
deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
- if "python" in deps or metadata.name(fail_ok=True) == "python":
+ if "python" in deps or metadata.name() == "python":
python_bin = metadata.config.python_bin(prefix, metadata.config.host_subdir)
if utils.on_win and escape_backslash:
@@ -417,7 +418,7 @@ def perl_vars(metadata, prefix, escape_backslash):
}
build_or_host = "host" if metadata.is_cross else "build"
deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
- if "perl" in deps or metadata.name(fail_ok=True) == "perl":
+ if "perl" in deps or metadata.name() == "perl":
perl_bin = metadata.config.perl_bin(prefix, metadata.config.host_subdir)
if utils.on_win and escape_backslash:
@@ -464,10 +465,7 @@ def r_vars(metadata, prefix, escape_backslash):
build_or_host = "host" if metadata.is_cross else "build"
deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
- if (
- any(r_pkg in deps for r_pkg in R_PACKAGES)
- or metadata.name(fail_ok=True) in R_PACKAGES
- ):
+ if any(r_pkg in deps for r_pkg in R_PACKAGES) or metadata.name() in R_PACKAGES:
r_bin = metadata.config.r_bin(prefix, metadata.config.host_subdir)
# set R_USER explicitly to prevent crosstalk with existing R_LIBS_USER packages
r_user = join(prefix, "Libs", "R")
@@ -484,7 +482,7 @@ def r_vars(metadata, prefix, escape_backslash):
return vars_
-def meta_vars(meta, skip_build_id=False):
+def meta_vars(meta: MetaData, skip_build_id=False):
d = {}
for var_name in ensure_list(meta.get_value("build/script_env", [])):
if "=" in var_name:
@@ -545,12 +543,11 @@ def meta_vars(meta, skip_build_id=False):
):
d.update(get_hg_build_info(hg_dir))
- # use `get_value` to prevent early exit while name is still unresolved during rendering
- d["PKG_NAME"] = meta.get_value("package/name")
+ d["PKG_NAME"] = meta.name()
d["PKG_VERSION"] = meta.version()
d["PKG_BUILDNUM"] = str(meta.build_number())
if meta.final and not skip_build_id:
- d["PKG_BUILD_STRING"] = str(meta.build_id())
+ d["PKG_BUILD_STRING"] = meta.build_id()
d["PKG_HASH"] = meta.hash_dependencies()
else:
d["PKG_BUILD_STRING"] = "placeholder"
diff --git a/conda_build/metadata.py b/conda_build/metadata.py
index d2d87912bf..7ad51c7880 100644
--- a/conda_build/metadata.py
+++ b/conda_build/metadata.py
@@ -13,10 +13,11 @@
from collections import OrderedDict
from functools import lru_cache
from os.path import isfile, join
+from typing import Literal
from bs4 import UnicodeDammit
-from conda_build import environ, exceptions, utils, variants
+from conda_build import exceptions, utils, variants
from conda_build.config import Config, get_or_merge_config
from conda_build.features import feature_list
from conda_build.license_family import ensure_valid_license_family
@@ -31,6 +32,7 @@
)
from .conda_interface import MatchSpec, envs_dirs, md5_file
+from .deprecations import deprecated
try:
import yaml
@@ -409,7 +411,7 @@ def ensure_matching_hashes(output_metadata):
for _, m in output_metadata.values():
for _, om in output_metadata.values():
if m != om:
- run_exports = om.meta.get("build", {}).get("run_exports", [])
+ run_exports = om.get_value("build/run_exports", [])
if hasattr(run_exports, "keys"):
run_exports_list = []
for export_type in utils.RUN_EXPORTS_TYPES:
@@ -550,7 +552,7 @@ def parse(data, config, path=None):
"provides_features": dict,
"force_use_keys": list,
"force_ignore_keys": list,
- "merge_build_host": bool,
+ "merge_build_host": None,
"pre-link": str,
"post-link": str,
"pre-unlink": str,
@@ -1123,33 +1125,28 @@ def __init__(self, path, config=None, variant=None):
# establish whether this recipe should squish build and host together
@property
- def is_cross(self):
- return bool(self.get_depends_top_and_out("host")) or "host" in self.meta.get(
- "requirements", {}
+ def is_cross(self) -> bool:
+ return bool(
+ self.get_depends_top_and_out("host")
+ or "host" in self.get_section("requirements")
)
@property
- def final(self):
- return self.get_value("extra/final")
+ def final(self) -> bool:
+ return bool(self.get_value("extra/final"))
@final.setter
- def final(self, boolean):
- extra = self.meta.get("extra", {})
- extra["final"] = boolean
- self.meta["extra"] = extra
+ def final(self, value: bool) -> None:
+ self.meta.setdefault("extra", {})["final"] = bool(value)
@property
- def disable_pip(self):
- return self.config.disable_pip or (
- "build" in self.meta and "disable_pip" in self.meta["build"]
- )
+ def disable_pip(self) -> bool:
+ return bool(self.config.disable_pip or self.get_value("build/disable_pip"))
@disable_pip.setter
- def disable_pip(self, value):
- self.config.disable_pip = value
- build = self.meta.get("build", {})
- build["disable_pip"] = value
- self.meta["build"] = build
+ def disable_pip(self, value: bool) -> None:
+ self.config.disable_pip = bool(value)
+ self.meta.setdefault("build", {})["disable_pip"] = bool(value)
def append_metadata_sections(
self, sections_file_or_dict, merge, raise_on_clobber=False
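The `setdefault(...)[key] = value` idiom used in these setters replaces the three-line get/mutate/reassign dance, because `setdefault` returns the nested dict (inserting it first if needed):

```python
meta = {}
meta.setdefault("build", {})["disable_pip"] = True  # inserts "build", then sets
meta.setdefault("build", {})["noarch"] = "python"   # reuses the same dict
assert meta == {"build": {"disable_pip": True, "noarch": "python"}}
```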
@@ -1175,10 +1172,9 @@ def append_metadata_sections(
)
@property
- def is_output(self):
- self_name = self.name(fail_ok=True)
- parent_name = self.meta.get("extra", {}).get("parent_recipe", {}).get("name")
- return bool(parent_name) and parent_name != self_name
+    def is_output(self) -> bool:
+        parent_name = self.get_value("extra/parent_recipe", {}).get("name")
+        return bool(parent_name) and parent_name != self.name()
def parse_again(
self,
@@ -1245,17 +1241,16 @@ def parse_again(
dependencies = _get_dependencies_from_environment(self.config.bootstrap)
self.append_metadata_sections(dependencies, merge=True)
- if "error_overlinking" in self.meta.get("build", {}):
+ if "error_overlinking" in self.get_section("build"):
self.config.error_overlinking = self.meta["build"]["error_overlinking"]
- if "error_overdepending" in self.meta.get("build", {}):
+ if "error_overdepending" in self.get_section("build"):
self.config.error_overdepending = self.meta["build"]["error_overdepending"]
self.validate_features()
self.ensure_no_pip_requirements()
def ensure_no_pip_requirements(self):
- keys = "requirements/build", "requirements/run", "test/requires"
- for key in keys:
+ for key in ("requirements/build", "requirements/run", "test/requires"):
if any(hasattr(item, "keys") for item in (self.get_value(key) or [])):
raise ValueError(
"Dictionaries are not supported as values in requirements sections"
@@ -1265,15 +1260,13 @@ def ensure_no_pip_requirements(self):
def append_requirements(self):
"""For dynamic determination of build or run reqs, based on configuration"""
- reqs = self.meta.get("requirements", {})
- run_reqs = reqs.get("run", [])
+ run_reqs = self.meta.setdefault("requirements", {}).setdefault("run", [])
if (
- bool(self.get_value("build/osx_is_app", False))
+ self.get_value("build/osx_is_app", False)
and self.config.platform == "osx"
+ and "python.app" not in run_reqs
):
- if "python.app" not in run_reqs:
- run_reqs.append("python.app")
- self.meta["requirements"] = reqs
+ run_reqs.append("python.app")
def parse_until_resolved(
self, allow_no_other_outputs=False, bypass_env_check=False
@@ -1436,26 +1429,28 @@ def check_field(key, section):
check_field(key_or_dict, section)
return True
- def name(self, fail_ok=False):
- res = self.meta.get("package", {}).get("name", "")
- if not res and not fail_ok:
+ @deprecated.argument("3.28.0", "4.0.0", "fail_ok")
+ def name(self) -> str:
+ name = self.get_value("package/name", "")
+ if not name and self.final:
sys.exit("Error: package/name missing in: %r" % self.meta_path)
- res = str(res)
- if res != res.lower():
- sys.exit("Error: package/name must be lowercase, got: %r" % res)
- check_bad_chrs(res, "package/name")
- return res
-
- def version(self):
- res = str(self.get_value("package/version"))
- if res is None:
+ name = str(name)
+ if name != name.lower():
+ sys.exit("Error: package/name must be lowercase, got: %r" % name)
+ check_bad_chrs(name, "package/name")
+ return name
+
+ def version(self) -> str:
+ version = self.get_value("package/version", "")
+ if not version and self.final:
sys.exit("Error: package/version missing in: %r" % self.meta_path)
- check_bad_chrs(res, "package/version")
- if self.final and res.startswith("."):
+ version = str(version)
+ check_bad_chrs(version, "package/version")
+ if self.final and version.startswith("."):
raise ValueError(
- "Fully-rendered version can't start with period - got %s", res
+ "Fully-rendered version can't start with period - got %s", version
)
- return res
+ return version
def build_number(self):
number = self.get_value("build/number")
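For context on `@deprecated.argument("3.28.0", "4.0.0", "fail_ok")`: in conda-style deprecation machinery the decorator intercepts the named keyword, emits a warning, and discards it before the body runs, so `name()` no longer needs a `fail_ok` parameter. A hedged sketch of that behavior (internals simplified; see `conda_build/deprecations.py` for the real handler):

```python
import functools
import warnings

def deprecated_argument(deprecate_in: str, remove_in: str, argument: str):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if argument in kwargs:
                kwargs.pop(argument)  # drop the deprecated keyword
                warnings.warn(
                    f"'{argument}' is deprecated in {deprecate_in} and will "
                    f"be removed in {remove_in}.",
                    DeprecationWarning,
                    stacklevel=2,
                )
            return func(*args, **kwargs)
        return wrapper
    return decorator
```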
@@ -1809,8 +1804,8 @@ def binary_relocation(self):
expand_globs(ret, self.config.host_prefix) if isinstance(ret, list) else ret
)
- def include_recipe(self):
- return self.get_value("build/include_recipe", True)
+ def include_recipe(self) -> bool:
+ return bool(self.get_value("build/include_recipe", True))
def binary_has_prefix_files(self):
ret = ensure_list(self.get_value("build/binary_has_prefix_files", []))
@@ -1826,8 +1821,8 @@ def binary_has_prefix_files(self):
)
return expand_globs(ret, self.config.host_prefix)
- def skip(self):
- return self.get_value("build/skip", False)
+ def skip(self) -> bool:
+ return bool(self.get_value("build/skip", False))
def _get_contents(
self,
@@ -1891,8 +1886,10 @@ def _get_contents(
loader = FilteredLoader(jinja2.ChoiceLoader(loaders), config=self.config)
env = jinja2.Environment(loader=loader, undefined=undefined_type)
+ from .environ import get_dict
+
env.globals.update(get_selectors(self.config))
- env.globals.update(environ.get_dict(m=self, skip_build_id=skip_build_id))
+ env.globals.update(get_dict(m=self, skip_build_id=skip_build_id))
env.globals.update({"CONDA_BUILD_STATE": "RENDER"})
env.globals.update(
context_processor(
@@ -1964,9 +1961,11 @@ def __repr__(self):
@property
def meta_path(self):
- meta_path = self._meta_path or self.meta.get("extra", {}).get(
- "parent_recipe", {}
- ).get("path", "")
+ meta_path = (
+ self._meta_path
+ # get the parent recipe path if this is a subpackage
+ or self.get_value("extra/parent_recipe", {}).get("path", "")
+ )
if meta_path and os.path.basename(meta_path) != self._meta_name:
meta_path = os.path.join(meta_path, self._meta_name)
return meta_path
@@ -2014,7 +2013,7 @@ def uses_jinja(self):
return len(matches) > 0
@property
- def uses_vcs_in_meta(self):
+    def uses_vcs_in_meta(self) -> Literal["git", "svn", "mercurial"] | None:
"""returns name of vcs used if recipe contains metadata associated with version control systems.
If this metadata is present, a download/copy will be forced in parse_or_try_download.
"""
@@ -2026,7 +2025,7 @@ def uses_vcs_in_meta(self):
meta_text = UnicodeDammit(f.read()).unicode_markup
for _vcs in vcs_types:
matches = re.findall(rf"{_vcs.upper()}_[^\.\s\'\"]+", meta_text)
- if len(matches) > 0 and _vcs != self.meta["package"]["name"]:
+ if len(matches) > 0 and _vcs != self.get_value("package/name"):
if _vcs == "hg":
_vcs = "mercurial"
vcs = _vcs
@@ -2034,7 +2033,7 @@ def uses_vcs_in_meta(self):
return vcs
@property
- def uses_vcs_in_build(self):
+    def uses_vcs_in_build(self) -> Literal["git", "svn", "mercurial"] | None:
# TODO :: Re-work this. Is it even useful? We can declare any vcs in our build deps.
build_script = "bld.bat" if on_win else "build.sh"
build_script = os.path.join(self.path, build_script)
@@ -2053,7 +2052,7 @@ def uses_vcs_in_build(self):
build_script,
flags=re.IGNORECASE,
)
- if len(matches) > 0 and vcs != self.meta["package"]["name"]:
+ if len(matches) > 0 and vcs != self.get_value("package/name"):
if vcs == "hg":
vcs = "mercurial"
return vcs
@@ -2155,15 +2154,14 @@ def extract_single_output_text(
return output
@property
- def numpy_xx(self):
+ def numpy_xx(self) -> bool:
"""This is legacy syntax that we need to support for a while. numpy x.x means
"pin run as build" for numpy. It was special-cased to only numpy."""
text = self.extract_requirements_text()
- uses_xx = bool(numpy_xx_re.search(text))
- return uses_xx
+ return bool(numpy_xx_re.search(text))
@property
- def uses_numpy_pin_compatible_without_xx(self):
+ def uses_numpy_pin_compatible_without_xx(self) -> tuple[bool, bool]:
text = self.extract_requirements_text()
compatible_search = numpy_compatible_re.search(text)
max_pin_search = None
@@ -2225,24 +2223,20 @@ def noarch(self):
return self.get_value("build/noarch")
@noarch.setter
- def noarch(self, value):
- build = self.meta.get("build", {})
- build["noarch"] = value
- self.meta["build"] = build
+ def noarch(self, value: str | None) -> None:
+ self.meta.setdefault("build", {})["noarch"] = value
if not self.noarch_python and not value:
self.config.reset_platform()
elif value:
self.config.host_platform = "noarch"
@property
- def noarch_python(self):
- return self.get_value("build/noarch_python")
+ def noarch_python(self) -> bool:
+ return bool(self.get_value("build/noarch_python"))
@noarch_python.setter
- def noarch_python(self, value):
- build = self.meta.get("build", {})
- build["noarch_python"] = value
- self.meta["build"] = build
+ def noarch_python(self, value: bool) -> None:
+ self.meta.setdefault("build", {})["noarch_python"] = value
if not self.noarch and not value:
self.config.reset_platform()
elif value:
@@ -2574,7 +2568,7 @@ def get_output_metadata_set(
)
output_d["requirements"] = output_d.get("requirements", {})
output_d["requirements"]["build"] = build_reqs
- m.meta["requirements"] = m.meta.get("requirements", {})
+ m.meta["requirements"] = m.get_section("requirements")
m.meta["requirements"]["build"] = build_reqs
non_conda_packages.append((output_d, m))
else:
@@ -2889,18 +2883,19 @@ def clean(self):
self.config.clean()
@property
- def activate_build_script(self):
- b = self.meta.get("build", {}) or {}
- should_activate = b.get("activate_in_script") is not False
- return bool(self.config.activate and should_activate)
+ def activate_build_script(self) -> bool:
+ return bool(
+ self.config.activate
+ and self.get_value("build/activate_in_script") is not False
+ )
@property
- def build_is_host(self):
+ def build_is_host(self) -> bool:
manual_overrides = (
- self.meta.get("build", {}).get("merge_build_host") is True
+ self.get_value("build/merge_build_host") is True
or self.config.build_is_host
)
- manually_disabled = self.meta.get("build", {}).get("merge_build_host") is False
+ manually_disabled = self.get_value("build/merge_build_host") is False
return manual_overrides or (
self.config.subdirs_same
and not manually_disabled
diff --git a/conda_build/post.py b/conda_build/post.py
index d335a33a39..954bdda275 100644
--- a/conda_build/post.py
+++ b/conda_build/post.py
@@ -62,6 +62,8 @@
machofile,
)
+from .metadata import MetaData
+
filetypes_for_platform = {
"win": (DLLfile, EXEfile),
"osx": (machofile,),
@@ -1584,33 +1586,27 @@ def check_overlinking_impl(
return dict()
-def check_overlinking(m, files, host_prefix=None):
- if not host_prefix:
- host_prefix = m.config.host_prefix
-
- overlinking_ignore_patterns = m.meta.get("build", {}).get(
- "overlinking_ignore_patterns"
- )
- if overlinking_ignore_patterns:
- files = [
- f
- for f in files
- if not any([fnmatch(f, p) for p in overlinking_ignore_patterns])
- ]
+def check_overlinking(m: MetaData, files, host_prefix=None):
+ patterns = m.get_value("build/overlinking_ignore_patterns", [])
+ files = [
+ file
+ for file in files
+        if not any(fnmatch(file, pattern) for pattern in patterns)
+ ]
return check_overlinking_impl(
- m.get_value("package/name"),
- m.get_value("package/version"),
- m.get_value("build/string"),
- m.get_value("build/number"),
+ m.name(),
+ m.version(),
+ m.build_id(),
+ m.build_number(),
m.config.target_subdir,
m.get_value("build/ignore_run_exports"),
- [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("run", [])],
- [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("build", [])],
- [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("host", [])],
- host_prefix,
+ [req.split(" ")[0] for req in m.get_value("requirements/run", [])],
+ [req.split(" ")[0] for req in m.get_value("requirements/build", [])],
+ [req.split(" ")[0] for req in m.get_value("requirements/host", [])],
+ host_prefix or m.config.host_prefix,
m.config.build_prefix,
- m.meta.get("build", {}).get("missing_dso_whitelist", []),
- m.meta.get("build", {}).get("runpath_whitelist", []),
+ m.get_value("build/missing_dso_whitelist", []),
+ m.get_value("build/runpath_whitelist", []),
m.config.error_overlinking,
m.config.error_overdepending,
m.config.verbose,
@@ -1618,7 +1614,7 @@ def check_overlinking(m, files, host_prefix=None):
files,
m.config.bldpkgs_dir,
m.config.output_folder,
- list(m.config.channel_urls) + ["local"],
+ [*m.config.channel_urls, "local"],
m.config.enable_static,
m.config.variant,
)
diff --git a/conda_build/render.py b/conda_build/render.py
index fa428e07f6..c0f1d8be73 100644
--- a/conda_build/render.py
+++ b/conda_build/render.py
@@ -13,7 +13,15 @@
import tempfile
from collections import OrderedDict, defaultdict
from functools import lru_cache
-from os.path import abspath, isdir, isfile
+from os.path import (
+ abspath,
+ dirname,
+ isabs,
+ isdir,
+ isfile,
+ join,
+ normpath,
+)
from pathlib import Path
import yaml
@@ -67,15 +75,17 @@ def bldpkg_path(m):
# the default case will switch over to conda_v2 at some point
if pkg_type == "conda":
- path = os.path.join(
+ path = join(
m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V1}"
)
elif pkg_type == "conda_v2":
- path = os.path.join(
+ path = join(
m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V2}"
)
else:
- path = f"{m.type} file for {m.name()} in: {os.path.join(m.config.output_folder, subdir)}"
+ path = (
+ f"{m.type} file for {m.name()} in: {join(m.config.output_folder, subdir)}"
+ )
return path
@@ -118,7 +128,7 @@ def _categorize_deps(m, specs, exclude_pattern, variant):
def get_env_dependencies(
- m,
+ m: MetaData,
env,
variant,
exclude_pattern=None,
@@ -178,7 +188,7 @@ def get_env_dependencies(
return (
utils.ensure_list(
(specs + subpackages + pass_through_deps)
- or m.meta.get("requirements", {}).get(env, [])
+ or m.get_value(f"requirements/{env}", [])
),
actions,
unsat,
@@ -278,19 +288,19 @@ def find_pkg_dir_or_file_in_pkgs_dirs(
@lru_cache(maxsize=None)
def _read_specs_from_package(pkg_loc, pkg_dist):
specs = {}
- if pkg_loc and os.path.isdir(pkg_loc):
- downstream_file = os.path.join(pkg_loc, "info/run_exports")
- if os.path.isfile(downstream_file):
+ if pkg_loc and isdir(pkg_loc):
+ downstream_file = join(pkg_loc, "info/run_exports")
+ if isfile(downstream_file):
with open(downstream_file) as f:
specs = {"weak": [spec.rstrip() for spec in f.readlines()]}
# a later attempt: record more info in the yaml file, to support "strong" run exports
- elif os.path.isfile(downstream_file + ".yaml"):
+ elif isfile(downstream_file + ".yaml"):
with open(downstream_file + ".yaml") as f:
specs = yaml.safe_load(f)
- elif os.path.isfile(downstream_file + ".json"):
+ elif isfile(downstream_file + ".json"):
with open(downstream_file + ".json") as f:
specs = json.load(f)
- if not specs and pkg_loc and os.path.isfile(pkg_loc):
+ if not specs and pkg_loc and isfile(pkg_loc):
# switching to json for consistency in conda-build 4
specs_yaml = utils.package_has_file(pkg_loc, "info/run_exports.yaml")
specs_json = utils.package_has_file(pkg_loc, "info/run_exports.json")
@@ -384,8 +394,8 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files
with utils.LoggingContext():
pfe.execute()
for pkg_dir in pkgs_dirs:
- _loc = os.path.join(pkg_dir, index.get(pkg, pkg).fn)
- if os.path.isfile(_loc):
+ _loc = join(pkg_dir, index.get(pkg, pkg).fn)
+ if isfile(_loc):
pkg_loc = _loc
break
pkg_files[pkg] = pkg_loc, pkg_dist
@@ -393,11 +403,10 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files
return pkg_files
-def get_upstream_pins(m, actions, env):
+def get_upstream_pins(m: MetaData, actions, env):
"""Download packages from specs, then inspect each downloaded package for additional
downstream dependency specs. Return these additional specs."""
-
- env_specs = m.meta.get("requirements", {}).get(env, [])
+ env_specs = m.get_value(f"requirements/{env}", [])
explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else []
linked_packages = actions.get("LINK", [])
linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]
@@ -427,7 +436,12 @@ def get_upstream_pins(m, actions, env):
return additional_specs
-def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_pattern):
+def _read_upstream_pin_files(
+ m: MetaData,
+ env,
+ permit_unsatisfiable_variants,
+ exclude_pattern,
+):
deps, actions, unsat = get_env_dependencies(
m,
env,
@@ -439,16 +453,16 @@ def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_patt
# vc feature activation to work correctly in the host env.
extra_run_specs = get_upstream_pins(m, actions, env)
return (
- list(set(deps)) or m.meta.get("requirements", {}).get(env, []),
+ list(set(deps)) or m.get_value(f"requirements/{env}", []),
unsat,
extra_run_specs,
)
-def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
+def add_upstream_pins(m: MetaData, permit_unsatisfiable_variants, exclude_pattern):
"""Applies run_exports from any build deps to host and run sections"""
# if we have host deps, they're more important than the build deps.
- requirements = m.meta.get("requirements", {})
+ requirements = m.get_section("requirements")
build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(
m, "build", permit_unsatisfiable_variants, exclude_pattern
)
@@ -464,7 +478,7 @@ def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
if not host_reqs:
matching_output = [
- out for out in m.meta.get("outputs", []) if out.get("name") == m.name()
+ out for out in m.get_section("outputs") if out.get("name") == m.name()
]
if matching_output:
requirements = utils.expand_reqs(
@@ -580,7 +594,11 @@ def _simplify_to_exact_constraints(metadata):
metadata.meta["requirements"] = requirements
-def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
+def finalize_metadata(
+ m: MetaData,
+ parent_metadata=None,
+ permit_unsatisfiable_variants=False,
+):
"""Fully render a recipe. Fill in versions for build/host dependencies."""
if not parent_metadata:
parent_metadata = m
@@ -605,7 +623,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
)
)
- parent_recipe = m.meta.get("extra", {}).get("parent_recipe", {})
+ parent_recipe = m.get_value("extra/parent_recipe", {})
# extract the topmost section where variables are defined, and put it on top of the
# requirements for a particular output
@@ -625,13 +643,9 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
requirements = utils.expand_reqs(output.get("requirements", {}))
m.meta["requirements"] = requirements
- if m.meta.get("requirements"):
- utils.insert_variant_versions(
- m.meta["requirements"], m.config.variant, "build"
- )
- utils.insert_variant_versions(
- m.meta["requirements"], m.config.variant, "host"
- )
+ if requirements := m.get_section("requirements"):
+ utils.insert_variant_versions(requirements, m.config.variant, "build")
+ utils.insert_variant_versions(requirements, m.config.variant, "host")
m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
build_unsat, host_unsat = add_upstream_pins(
@@ -639,7 +653,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
)
# getting this AFTER add_upstream_pins is important, because that function adds deps
# to the metadata.
- requirements = m.meta.get("requirements", {})
+ requirements = m.get_section("requirements")
# here's where we pin run dependencies to their build time versions. This happens based
# on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
@@ -700,34 +714,26 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
utils.ensure_valid_spec(spec, warn=True) for spec in versioned_test_deps
]
m.meta["test"]["requires"] = versioned_test_deps
- extra = m.meta.get("extra", {})
+ extra = m.get_section("extra")
extra["copy_test_source_files"] = m.config.copy_test_source_files
m.meta["extra"] = extra
# if source/path is relative, then the output package makes no sense at all. The next
# best thing is to hard-code the absolute path. This probably won't exist on any
# system other than the original build machine, but at least it will work there.
- if m.meta.get("source"):
- if "path" in m.meta["source"]:
- source_path = m.meta["source"]["path"]
- os.path.expanduser(source_path)
- if not os.path.isabs(source_path):
- m.meta["source"]["path"] = os.path.normpath(
- os.path.join(m.path, source_path)
- )
- elif "git_url" in m.meta["source"] and not (
- # absolute paths are not relative paths
- os.path.isabs(m.meta["source"]["git_url"])
- or
- # real urls are not relative paths
- ":" in m.meta["source"]["git_url"]
- ):
- m.meta["source"]["git_url"] = os.path.normpath(
- os.path.join(m.path, m.meta["source"]["git_url"])
- )
-
- if not m.meta.get("build"):
- m.meta["build"] = {}
+ if source_path := m.get_value("source/path"):
+ if not isabs(source_path):
+ m.meta["source"]["path"] = normpath(join(m.path, source_path))
+ elif (
+ (git_url := m.get_value("source/git_url"))
+ # absolute paths are not relative paths
+ and not isabs(git_url)
+ # real urls are not relative paths
+ and ":" not in git_url
+ ):
+ m.meta["source"]["git_url"] = normpath(join(m.path, git_url))
+
+ m.meta.setdefault("build", {})
_simplify_to_exact_constraints(m)
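The walrus-based rewrite preserves the old behavior: a relative `source/path` (or a relative, non-URL `source/git_url`) is anchored to the recipe directory. A standalone sketch with hypothetical paths:

```python
from os.path import isabs, join, normpath

recipe_dir = "/home/user/recipes/pkg"  # hypothetical m.path
source_path = "../../src"              # hypothetical source/path value
if not isabs(source_path):
    source_path = normpath(join(recipe_dir, source_path))
assert source_path == "/home/user/src"
```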
@@ -953,7 +959,7 @@ def render_recipe(
t.close()
need_cleanup = True
elif arg.endswith(".yaml"):
- recipe_dir = os.path.dirname(arg)
+ recipe_dir = dirname(arg)
need_cleanup = False
else:
print("Ignoring non-recipe: %s" % arg)
@@ -987,9 +993,9 @@ def render_recipe(
if m.final:
if not hasattr(m.config, "variants") or not m.config.variant:
m.config.ignore_system_variants = True
- if os.path.isfile(os.path.join(m.path, "conda_build_config.yaml")):
+ if isfile(join(m.path, "conda_build_config.yaml")):
m.config.variant_config_files = [
- os.path.join(m.path, "conda_build_config.yaml")
+ join(m.path, "conda_build_config.yaml")
]
m.config.variants = get_package_variants(m, variants=variants)
m.config.variant = m.config.variants[0]
@@ -1076,7 +1082,7 @@ def output_yaml(metadata, filename=None, suppress_outputs=False):
if filename:
if any(sep in filename for sep in ("\\", "/")):
try:
- os.makedirs(os.path.dirname(filename))
+ os.makedirs(dirname(filename))
except OSError:
pass
with open(filename, "w") as f:
diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py
index e3b22ef7d2..cd093e6d9e 100755
--- a/conda_build/skeletons/cran.py
+++ b/conda_build/skeletons/cran.py
@@ -3,7 +3,7 @@
"""
Tools for converting Cran packages to conda recipes.
"""
-
+from __future__ import annotations
import argparse
import copy
@@ -28,6 +28,7 @@
realpath,
relpath,
)
+from typing import Literal
import requests
import yaml
@@ -40,13 +41,15 @@
from conda.common.io import dashlist
-from conda_build import metadata, source
+from conda_build import source
from conda_build.conda_interface import TemporaryDirectory, cc_conda_build
from conda_build.config import get_or_merge_config
from conda_build.license_family import allowed_license_families, guess_license_family
from conda_build.utils import ensure_list, rm_rf
from conda_build.variants import DEFAULT_VARIANTS, get_package_variants
+from ..metadata import MetaData
+
SOURCE_META = """\
{archive_keys}
{git_url_key} {git_url}
@@ -736,7 +739,9 @@ def strip_end(string, end):
return string
-def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version=None):
+def package_to_inputs_dict(
+ output_dir, output_suffix, git_tag, package: str, version=None
+):
"""
Converts `package` (*) into a tuple of:
@@ -802,9 +807,10 @@ def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version=
location = existing_location = existing_recipe_dir(
output_dir, output_suffix, package, version
)
+ m: MetaData | None
if existing_location:
try:
- m = metadata.MetaData(existing_location)
+ m = MetaData(existing_location)
except:
# Happens when the folder exists but contains no recipe.
m = None
@@ -868,7 +874,7 @@ def skeletonize(
r_interp="r-base",
use_binaries_ver=None,
use_noarch_generic=False,
- use_when_no_binary="src",
+    use_when_no_binary: Literal["error", "src", "old", "old-src"] = "src",
use_rtools_win=False,
config=None,
variant_config_files=None,
@@ -884,6 +890,9 @@ def skeletonize(
):
print(f"ERROR: --use_when_no_binary={use_when_no_binary} not yet implemented")
sys.exit(1)
+
+ m: MetaData
+
output_dir = realpath(output_dir)
config = get_or_merge_config(config, variant_config_files=variant_config_files)
@@ -970,9 +979,7 @@ def skeletonize(
elif is_github_url or is_tarfile:
rm_rf(config.work_dir)
- m = metadata.MetaData.fromdict(
- {"source": {"git_url": location}}, config=config
- )
+ m = MetaData.fromdict({"source": {"git_url": location}}, config=config)
source.git_source(
m.get_section("source"), m.config.git_cache, m.config.work_dir
)
@@ -1088,7 +1095,7 @@ def skeletonize(
m, "extra/recipe-maintainers", add_maintainer
)
if m.version() == d["conda_version"]:
- build_number = int(m.get_value("build/number", 0))
+ build_number = m.build_number()
build_number += 1 if update_policy == "merge-incr-build-num" else 0
if add_maintainer:
new_maintainer = "{indent}{add_maintainer}".format(
@@ -1695,8 +1702,8 @@ def skeletonize(
)
-def version_compare(recipe_dir, newest_conda_version):
- m = metadata.MetaData(recipe_dir)
+def version_compare(recipe_dir: str, newest_conda_version):
+ m = MetaData(recipe_dir)
local_version = m.version()
package = basename(recipe_dir)
diff --git a/conda_build/variants.py b/conda_build/variants.py
index d7c6841238..319ace7fea 100644
--- a/conda_build/variants.py
+++ b/conda_build/variants.py
@@ -679,9 +679,13 @@ def filter_combined_spec_to_used_keys(combined_spec, specs):
# TODO: act here?
combined_spec = explode_variants(combined_spec)
+ # seen_keys makes sure that a setting from a lower-priority spec doesn't clobber
+ # the same setting that has been redefined in a higher-priority spec.
+ seen_keys = set()
+    # Specs are iterated in order from highest to lowest priority.
for source, source_specs in reversed(specs.items()):
for k, vs in source_specs.items():
- if k not in extend_keys:
+ if k not in extend_keys and k not in seen_keys:
# when filtering ends up killing off all variants, we just ignore that. Generally,
# this arises when a later variant config overrides, rather than selects a
# subspace of earlier configs
@@ -689,6 +693,7 @@ def filter_combined_spec_to_used_keys(combined_spec, specs):
filter_by_key_value(combined_spec, k, vs, source_name=source)
or combined_spec
)
+ seen_keys.add(k)
return combined_spec
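A condensed model of the fix: because specs are visited from highest to lowest priority and each key is recorded in `seen_keys` on first use, a lower-priority value can never overwrite one already taken from a higher-priority config. Sketch with hypothetical specs:

```python
specs = {  # ordered lowest -> highest priority, as combine_specs produces
    "internal_defaults": {"python": ["3.8"]},
    "user_cbc": {"python": ["3.11"]},
}
seen_keys = set()
resolved = {}
for source, source_spec in reversed(specs.items()):  # highest priority first
    for key, value in source_spec.items():
        if key not in seen_keys:
            resolved[key] = value
            seen_keys.add(key)
assert resolved == {"python": ["3.11"]}
```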
diff --git a/docs/source/_static/css/custom.css b/docs/source/_static/css/custom.css
index 95805e211b..f78cbde400 100644
--- a/docs/source/_static/css/custom.css
+++ b/docs/source/_static/css/custom.css
@@ -81,3 +81,7 @@ h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend {
/*color of nav at top when the window is narrow*/
background: #43B02A;
}
+
+.wy-table-responsive table td:not(:first-child), .wy-table-responsive table th:not(:first-child) {
+ white-space: normal;
+}
diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst
index a3f0c98ac9..0bcd3f929b 100644
--- a/docs/source/resources/package-spec.rst
+++ b/docs/source/resources/package-spec.rst
@@ -289,7 +289,7 @@ parts:
three parts, the second part must be the exact version.
.. list-table:: Version Special Characters
- :widths: 10, 40, 40
+ :widths: 10 40 40
:header-rows: 1
* - Symbol
@@ -297,9 +297,7 @@ parts:
- Example
* - <, >, <=, >=
- - Relational operators on versions,
-
- which are compared using `PEP-440 `_.
+     - Relational operators on versions, which are compared using `PEP-440 <https://peps.python.org/pep-0440/>`_.
- ``<=1.0`` matches 0.9, 0.9.1, and 1.0, but not 1.0.1.
* - ==, and !=
@@ -315,16 +313,12 @@ parts:
- ``1.0|1.2`` matches version 1.0 or 1.2.
* - \*
- - Matches 0 or more characters in the version string.
-
- In terms of regular expressions, it is the same as ``r'.*'``.
+ - Matches 0 or more characters in the version string. In terms of regular expressions, it is the same as ``r'.*'``.
- ``1.0|1.4*`` matches 1.0, 1.4 and 1.4.1b2, but not 1.2.
* - ,
- AND
- - ``>=2,<3`` matches all packages in the 2 series.
-
- 2.0, 2.1, and 2.9 all match, but 3.0 and 1.0 do not.
+ - ``>=2,<3`` matches all packages in the 2 series. 2.0, 2.1, and 2.9 all match, but 3.0 and 1.0 do not.
.. hint::
``,`` has higher precedence than \|, so >=1,<2|>3 means greater than or equal to 1 AND less than 2 or greater than 3, which matches 1, 1.3 and 3.0, but not 2.2.
@@ -380,17 +374,17 @@ the following characters: <, >, \*, or \|.
* - Example
- Meaning
- * - conda install numpy=1.11
+ * - ``conda install numpy=1.11``
- The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, 1.11.2, 1.11.18, and so on.
- * - conda install numpy==1.11
+ * - ``conda install numpy==1.11``
- The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, and so on.
- * - conda install "numpy=1.11.1|1.11.3"
+ * - ``conda install "numpy=1.11.1|1.11.3"``
- The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or 1.11.3.
- * - conda install "numpy>1.11"
+ * - ``conda install "numpy>1.11"``
- Any numpy version 1.12.0a or greater.
- * - conda install "numpy>=1.8,<2"
+ * - ``conda install "numpy>=1.8,<2"``
- The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but not 2.0.
diff --git a/news/5039-dont-clobber-multiple-config b/news/5039-dont-clobber-multiple-config
new file mode 100644
index 0000000000..630868093d
--- /dev/null
+++ b/news/5039-dont-clobber-multiple-config
@@ -0,0 +1,19 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+* Avoid clobbering of variants in high-priority cbc.yaml entries when they aren't present in lower-priority cbc.yamls. (#5039)
+
+### Deprecations
+
+*
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/news/5050-missing-anaconda-client-bugfix b/news/5050-missing-anaconda-client-bugfix
new file mode 100644
index 0000000000..99df06709d
--- /dev/null
+++ b/news/5050-missing-anaconda-client-bugfix
@@ -0,0 +1,19 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+* Fix the check for a missing anaconda-client so a useful error message is shown. (#5050)
+
+### Deprecations
+
+*
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml b/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml
index cbe6ac859b..4bc665ad7d 100644
--- a/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml
+++ b/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml
@@ -1,5 +1,6 @@
package:
name: pkg
+ version: 0.0.1
# build:
# merge_build_host: False
diff --git a/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml b/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml
index d4f463886f..8aae740991 100644
--- a/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml
+++ b/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml
@@ -1,5 +1,6 @@
package:
name: pkg
+ version: 0.0.1
build:
merge_build_host: False
diff --git a/tests/test-recipes/split-packages/_order/meta.yaml b/tests/test-recipes/split-packages/_order/meta.yaml
index df0c0db7b2..0db9f6bbce 100644
--- a/tests/test-recipes/split-packages/_order/meta.yaml
+++ b/tests/test-recipes/split-packages/_order/meta.yaml
@@ -1,5 +1,7 @@
package:
name: toplevel-ab
+ version: 0.0.1
+
outputs:
- name: a
version: 1
diff --git a/tests/test-recipes/variants/27_requirements_host/meta.yaml b/tests/test-recipes/variants/27_requirements_host/meta.yaml
index 0c4a833fa8..0ab071e56b 100644
--- a/tests/test-recipes/variants/27_requirements_host/meta.yaml
+++ b/tests/test-recipes/variants/27_requirements_host/meta.yaml
@@ -1,5 +1,6 @@
package:
name: cfastpm
+ version: 0.0.1
requirements:
host:
diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py
index 3df998fe1f..d0f97370fb 100644
--- a/tests/test_deprecations.py
+++ b/tests/test_deprecations.py
@@ -5,7 +5,8 @@
import sys
import pytest
-from conda.deprecations import DeprecatedError, DeprecationHandler
+
+from conda_build.deprecations import DeprecatedError, DeprecationHandler
@pytest.fixture(scope="module")
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index e122b45b4b..0fd89a22c3 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -7,7 +7,9 @@
import sys
import pytest
+from conda import __version__ as conda_version
from conda.base.context import context
+from packaging.version import Version
from pytest import MonkeyPatch
from conda_build import api
@@ -362,25 +364,20 @@ def test_yamlize_versions():
assert yml == ["1.2.3", "1.2.3.4"]
-OS_ARCH = (
+OS_ARCH: tuple[str, ...] = (
"aarch64",
"arm",
"arm64",
"armv6l",
"armv7l",
- "emscripten",
- "freebsd",
"linux",
"linux32",
"linux64",
"osx",
"ppc64",
"ppc64le",
- "riscv64",
"s390x",
"unix",
- "wasi",
- "wasm32",
"win",
"win32",
"win64",
@@ -390,6 +387,15 @@ def test_yamlize_versions():
"zos",
)
+if Version(conda_version) >= Version("23.3"):
+ OS_ARCH = (*OS_ARCH, "riscv64")
+
+if Version(conda_version) >= Version("23.7"):
+ OS_ARCH = (*OS_ARCH, "freebsd")
+
+if Version(conda_version) >= Version("23.9"):
+ OS_ARCH = (*OS_ARCH, "emscripten", "wasi", "wasm32")
+
@pytest.mark.parametrize(
(
diff --git a/tests/test_variants.py b/tests/test_variants.py
index 3e7ba621a5..819f39d793 100644
--- a/tests/test_variants.py
+++ b/tests/test_variants.py
@@ -16,6 +16,7 @@
from conda_build.variants import (
combine_specs,
dict_of_lists_to_list_of_dicts,
+ filter_combined_spec_to_used_keys,
get_package_variants,
validate_spec,
)
@@ -657,3 +658,45 @@ def test_variant_subkeys_retained():
m.final = False
outputs = m.get_output_metadata_set(permit_unsatisfiable_variants=False)
get_all_replacements(outputs[0][1].config.variant)
+
+
+@pytest.mark.parametrize(
+ "internal_defaults, low_prio_config, high_prio_config, expected",
+ [
+ pytest.param(
+ {"pkg_1": "1.0"},
+ {"pkg_1": "1.1"},
+ {"pkg_1": ["1.1", "1.2"], "pkg_2": ["1.1"]},
+ [{"pkg_1": "1.1", "pkg_2": "1.1"}, {"pkg_1": "1.2", "pkg_2": "1.1"}],
+ id="basic",
+ ),
+ pytest.param(
+ {"pkg_1": "1.0"},
+ {"pkg_1": "1.1"},
+ {
+ "pkg_1": ["1.1", "1.2"],
+ "pkg_2": ["1.1", "1.2"],
+ "zip_keys": [["pkg_1", "pkg_2"]],
+ },
+ [
+ {"pkg_1": "1.1", "pkg_2": "1.1", "zip_keys": [["pkg_1", "pkg_2"]]},
+ {"pkg_1": "1.2", "pkg_2": "1.2", "zip_keys": [["pkg_1", "pkg_2"]]},
+ ],
+ id="zip_keys",
+ ),
+ ],
+)
+def test_zip_key_filtering(
+ internal_defaults, low_prio_config, high_prio_config, expected
+):
+ combined_spec = {
+ **low_prio_config,
+ **high_prio_config,
+ }
+ specs = {
+ "internal_defaults": internal_defaults,
+ "low_prio_config": low_prio_config,
+ "high_prio_config": high_prio_config,
+ }
+
+ assert filter_combined_spec_to_used_keys(combined_spec, specs=specs) == expected