Merge branch 'main' into menuinst-validation
jaimergp authored Nov 6, 2023
2 parents 81f824c + 90aee03 commit 76dbc1e
Showing 21 changed files with 352 additions and 250 deletions.
13 changes: 11 additions & 2 deletions .github/workflows/tests.yml
@@ -69,10 +69,18 @@ jobs:
fail-fast: false
matrix:
# test all lower versions (w/ stable conda) and upper version (w/ canary conda)
- python-version: ['3.8', '3.9', '3.10']
+ python-version: ['3.9', '3.10']
conda-version: [release]
test-type: [serial, parallel]
include:
+ # minimum Python/conda combo
+ - python-version: '3.8'
+ conda-version: 22.11.0
+ test-type: serial
+ - python-version: '3.8'
+ conda-version: 22.11.0
+ test-type: parallel
+ # maximum Python/conda combo
- python-version: '3.11'
conda-version: canary
test-type: serial
@@ -81,6 +89,7 @@ jobs:
test-type: parallel
env:
CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }}
+ CONDA_VERSION: ${{ contains('canary,release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }}
REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}
REPLAY_DIR: ${{ github.workspace }}/pytest-replay
ALLURE_DIR: ${{ github.workspace }}/allure-results
@@ -115,7 +124,7 @@ jobs:
conda install -q -y -c defaults \
--file ./tests/requirements.txt \
--file ./tests/requirements-linux.txt \
- ${{ env.CONDA_CHANNEL_LABEL }}::conda
+ ${{ env.CONDA_CHANNEL_LABEL }}::${{ env.CONDA_VERSION }}
pip install -e . --no-deps
- name: Show info
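
Note: the new `CONDA_VERSION` expression emulates a ternary with `&&`/`||`, and the install step combines it with `CONDA_CHANNEL_LABEL` into a `channel::package` spec. A minimal Python sketch of the equivalent selection logic (the function name is illustrative, not part of the workflow):

```python
def conda_install_spec(conda_version: str) -> str:
    """Mirror the workflow's CONDA_CHANNEL_LABEL / CONDA_VERSION expressions."""
    # 'canary' pulls from the dev label; everything else uses defaults.
    channel = "conda-canary/label/dev" if conda_version == "canary" else "defaults"
    # contains('canary,release', ...) matches the two named streams;
    # any other value (e.g. '22.11.0') becomes an explicit version pin.
    package = "conda" if conda_version in ("canary", "release") else f"conda={conda_version}"
    return f"{channel}::{package}"


for v in ("release", "canary", "22.11.0"):
    print(v, "->", conda_install_spec(v))
# release -> defaults::conda
# canary  -> conda-canary/label/dev::conda
# 22.11.0 -> defaults::conda=22.11.0
```
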
18 changes: 8 additions & 10 deletions conda_build/api.py
@@ -8,6 +8,7 @@
Design philosophy: put variability into config. Make each function here accept kwargs,
but only use those kwargs in config. Config must change to support new features elsewhere.
"""
+ from __future__ import annotations

import sys as _sys

@@ -76,8 +77,8 @@ def render(
raise

# remove outputs section from output objects for simplicity
- if not om.path and om.meta.get("outputs"):
- om.parent_outputs = om.meta["outputs"]
+ if not om.path and (outputs := om.get_section("outputs")):
+ om.parent_outputs = outputs
del om.meta["outputs"]

output_metas[
@@ -571,7 +572,7 @@ def debug(
test=False,
output_id=None,
config=None,
- verbose=True,
+ verbose: bool = True,
link_source_method="auto",
**kwargs,
):
@@ -587,6 +588,8 @@
from conda_build.build import test as run_test
from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, LoggingContext, on_win

+ from .metadata import MetaData

is_package = False
default_config = get_or_merge_config(config, **kwargs)
args = {"set_build_id": False}
@@ -622,15 +625,13 @@

config.channel_urls = get_channel_urls(kwargs)

- metadata_tuples = []
+ metadata_tuples: list[tuple[MetaData, bool, bool]] = []

best_link_source_method = "skip"
if isinstance(recipe_or_package_path_or_metadata_tuples, str):
if path_is_build_dir:
for metadata_conda_debug in metadatas_conda_debug:
best_link_source_method = "symlink"
- from conda_build.metadata import MetaData
-
metadata = MetaData(metadata_conda_debug, config, {})
metadata_tuples.append((metadata, False, True))
else:
@@ -681,10 +682,7 @@
"local",
"src",
"conda",
"{}-{}".format(
metadata.get_value("package/name"),
metadata.get_value("package/version"),
),
f"{metadata.name()}-{metadata.version()}",
)
link_target = os.path.dirname(metadata.meta_path)
try:
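
The `render()` hunk above replaces direct `om.meta` dictionary access with the `get_section()` accessor plus an assignment expression (walrus operator), so the section is looked up once and bound in the same condition. A minimal sketch of the pattern, using a simplified stand-in for `MetaData` (assumed here to return an empty container for a missing section; the real accessor may differ in detail):

```python
class FakeMetaData:
    """Simplified stand-in for conda_build.metadata.MetaData (illustration only)."""

    def __init__(self, meta):
        self.meta = meta
        self.parent_outputs = []

    def get_section(self, name):
        # Assumed behaviour: return the section contents, or an empty list if absent.
        return self.meta.get(name) or []


om = FakeMetaData({"package": {"name": "demo"}, "outputs": [{"name": "demo-lib"}]})

# Bind and test in one expression instead of calling .get() twice.
if outputs := om.get_section("outputs"):
    om.parent_outputs = outputs
    del om.meta["outputs"]

print(om.parent_outputs)  # [{'name': 'demo-lib'}]
```
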
48 changes: 23 additions & 25 deletions conda_build/build.py
@@ -151,7 +151,7 @@ def log_stats(stats_dict, descriptor):
)


- def create_post_scripts(m):
+ def create_post_scripts(m: MetaData):
"""
Create scripts to run after build step
"""
@@ -162,12 +162,9 @@ def create_post_scripts(m):
is_output = "package:" not in m.get_recipe_text()
scriptname = tp
if is_output:
if m.meta.get("build", {}).get(tp, ""):
scriptname = m.meta["build"][tp]
else:
scriptname = m.name() + "-" + tp
scriptname = m.get_value(f"build/{tp}", f"{m.name()}-{tp}")
scriptname += ext
dst_name = "." + m.name() + "-" + tp + ext
dst_name = f".{m.name()}-{tp}{ext}"
src = join(m.path, scriptname)
if isfile(src):
dst_dir = join(
@@ -1456,12 +1453,12 @@ def write_about_json(m):
json.dump(d, fo, indent=2, sort_keys=True)


- def write_info_json(m):
+ def write_info_json(m: MetaData):
info_index = m.info_index()
if m.pin_depends:
# With 'strict' depends, we will have pinned run deps during rendering
if m.pin_depends == "strict":
- runtime_deps = m.meta.get("requirements", {}).get("run", [])
+ runtime_deps = m.get_value("requirements/run", [])
info_index["depends"] = runtime_deps
else:
runtime_deps = environ.get_pinned_deps(m, "run")
@@ -1508,8 +1505,8 @@ def get_entry_point_script_names(entry_point_scripts):
return scripts


- def write_run_exports(m):
- run_exports = m.meta.get("build", {}).get("run_exports", {})
+ def write_run_exports(m: MetaData):
+ run_exports = m.get_value("build/run_exports", {})
if run_exports:
with open(os.path.join(m.config.info_dir, "run_exports.json"), "w") as f:
if not hasattr(run_exports, "keys"):
@@ -1747,8 +1744,8 @@ def create_info_files_json_v1(m, info_dir, prefix, files, files_with_prefix):
return checksums


- def post_process_files(m, initial_prefix_files):
- package_name = m.get_value("package/name")
+ def post_process_files(m: MetaData, initial_prefix_files):
+ package_name = m.name()
host_prefix = m.config.host_prefix
missing = []
for f in initial_prefix_files:
@@ -1778,7 +1775,7 @@ def post_process_files(m, initial_prefix_files):
)
post_process(
package_name,
m.get_value("package/version"),
m.version(),
sorted(current_prefix_files - initial_prefix_files),
prefix=host_prefix,
config=m.config,
@@ -1839,7 +1836,7 @@ def post_process_files(m, initial_prefix_files):
return new_files


- def bundle_conda(output, metadata, env, stats, **kw):
+ def bundle_conda(output, metadata: MetaData, env, stats, **kw):
log = utils.get_logger(__name__)
log.info("Packaging %s", metadata.dist())
get_all_replacements(metadata.config)
@@ -1911,7 +1908,7 @@ def bundle_conda(output, metadata, env, stats, **kw):
env_output["TOP_PKG_NAME"] = env["PKG_NAME"]
env_output["TOP_PKG_VERSION"] = env["PKG_VERSION"]
env_output["PKG_VERSION"] = metadata.version()
env_output["PKG_NAME"] = metadata.get_value("package/name")
env_output["PKG_NAME"] = metadata.name()
env_output["RECIPE_DIR"] = metadata.path
env_output["MSYS2_PATH_TYPE"] = "inherit"
env_output["CHERE_INVOKING"] = "1"
@@ -2129,7 +2126,7 @@ def bundle_conda(output, metadata, env, stats, **kw):
return final_outputs


- def bundle_wheel(output, metadata, env, stats):
+ def bundle_wheel(output, metadata: MetaData, env, stats):
ext = ".bat" if utils.on_win else ".sh"
with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir):
dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext)
@@ -2145,7 +2142,7 @@ def bundle_wheel(output, metadata, env, stats):
env["TOP_PKG_NAME"] = env["PKG_NAME"]
env["TOP_PKG_VERSION"] = env["PKG_VERSION"]
env["PKG_VERSION"] = metadata.version()
env["PKG_NAME"] = metadata.get_value("package/name")
env["PKG_NAME"] = metadata.name()
interpreter_and_args = guess_interpreter(dest_file)

bundle_stats = {}
@@ -2317,7 +2314,7 @@ def _write_activation_text(script_path, m):
fh.write(data)


- def create_build_envs(m, notest):
+ def create_build_envs(m: MetaData, notest):
build_ms_deps = m.ms_depends("build")
build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps]
host_ms_deps = m.ms_depends("host")
@@ -2371,11 +2368,12 @@ def create_build_envs(m, notest):
try:
if not notest:
utils.insert_variant_versions(
m.meta.get("requirements", {}), m.config.variant, "run"
m.get_section("requirements"), m.config.variant, "run"
)
- test_run_ms_deps = utils.ensure_list(
- m.get_value("test/requires", [])
- ) + utils.ensure_list(m.get_value("requirements/run", []))
+ test_run_ms_deps = [
+ *utils.ensure_list(m.get_value("test/requires", [])),
+ *utils.ensure_list(m.get_value("requirements/run", [])),
+ ]
# make sure test deps are available before taking time to create build env
environ.get_install_actions(
m.config.test_prefix,
@@ -2424,7 +2422,7 @@ def create_build_envs(m, notest):


def build(
- m,
+ m: MetaData,
stats,
post=None,
need_source_download=True,
@@ -2516,7 +2514,7 @@ def build(
)

specs = [ms.spec for ms in m.ms_depends("build")]
if any(out.get("type") == "wheel" for out in m.meta.get("outputs", [])):
if any(out.get("type") == "wheel" for out in m.get_section("outputs")):
specs.extend(["pip", "wheel"])

# TODO :: This is broken. It does not respect build/script for example and also if you need git
@@ -4099,7 +4097,7 @@ def handle_anaconda_upload(paths, config):
print(no_upload_message)
return

- if anaconda is None:
+ if not anaconda:
print(no_upload_message)
sys.exit(
"Error: cannot locate anaconda command (required for upload)\n"
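
Several hunks in build.py swap nested `m.meta.get(...).get(...)` chains for `m.get_value("section/key", default)` and for the `m.name()` / `m.version()` accessors. A rough sketch of the `get_value` idea, written as a hypothetical stand-alone helper rather than the real `MetaData` method:

```python
def get_value(meta: dict, path: str, default=None):
    """Hypothetical helper: walk a 'section/key' path through a recipe dict,
    falling back to a default when the key is missing or empty."""
    section, _, key = path.partition("/")
    return meta.get(section, {}).get(key) or default


recipe = {"package": {"name": "demo", "version": "1.0"}, "build": {}}

# Old style: m.meta.get("build", {}).get("post-link", "") or m.name() + "-post-link"
# New style: m.get_value("build/post-link", f"{m.name()}-post-link")
print(get_value(recipe, "build/post-link", "demo-post-link"))  # demo-post-link
print(get_value(recipe, "package/version"))                    # 1.0
```
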
4 changes: 2 additions & 2 deletions conda_build/create_test.py
@@ -47,7 +47,7 @@ def _get_output_script_name(
src_name = dst_name
if m.is_output:
src_name = "no-file"
for out in m.meta.get("outputs", []):
for out in m.get_section("outputs"):
if m.name() == out.get("name"):
out_test_script = out.get("test", {}).get("script", "no-file")
if os.path.splitext(out_test_script)[1].lower() == ext:
@@ -103,7 +103,7 @@ def _create_test_files(
name = ""
# the way this works is that each output needs to explicitly define a test script to run
# They do not automatically pick up run_test.*, but can be pointed at that explicitly.
for out in m.meta.get("outputs", []):
for out in m.get_section("outputs"):
if m.name() == out.get("name"):
out_test_script = out.get("test", {}).get("script", "no-file")
if out_test_script.endswith(ext):
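
Both hunks in create_test.py replace `m.meta.get("outputs", [])` with `m.get_section("outputs")` while keeping the lookup logic: each output must declare its own test script; run_test.* is not picked up automatically. A small self-contained sketch of that lookup (the sample data and function name are invented for illustration):

```python
import os

outputs = [
    {"name": "demo-lib", "test": {"script": "run_lib_test.py"}},
    {"name": "demo-cli"},  # no explicit test script, so nothing is run for it
]


def output_test_script(outputs, output_name, ext=".py"):
    """Return the test script declared for a given output, or 'no-file'."""
    for out in outputs:
        if out.get("name") == output_name:
            script = out.get("test", {}).get("script", "no-file")
            if os.path.splitext(script)[1].lower() == ext:
                return script
    return "no-file"


print(output_test_script(outputs, "demo-lib"))  # run_lib_test.py
print(output_test_script(outputs, "demo-cli"))  # no-file
```
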
17 changes: 7 additions & 10 deletions conda_build/environ.py
@@ -42,6 +42,7 @@
root_dir,
)
from .deprecations import deprecated
+ from .metadata import MetaData

# these are things that we provide env vars for more explicitly. This list disables the
# pass-through of variant values to env vars for these keys.
@@ -388,7 +389,7 @@ def python_vars(metadata, prefix, escape_backslash):
}
build_or_host = "host" if metadata.is_cross else "build"
deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
if "python" in deps or metadata.name(fail_ok=True) == "python":
if "python" in deps or metadata.name() == "python":
python_bin = metadata.config.python_bin(prefix, metadata.config.host_subdir)

if utils.on_win and escape_backslash:
@@ -417,7 +418,7 @@ def perl_vars(metadata, prefix, escape_backslash):
}
build_or_host = "host" if metadata.is_cross else "build"
deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
if "perl" in deps or metadata.name(fail_ok=True) == "perl":
if "perl" in deps or metadata.name() == "perl":
perl_bin = metadata.config.perl_bin(prefix, metadata.config.host_subdir)

if utils.on_win and escape_backslash:
@@ -464,10 +465,7 @@ def r_vars(metadata, prefix, escape_backslash):

build_or_host = "host" if metadata.is_cross else "build"
deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)]
- if (
- any(r_pkg in deps for r_pkg in R_PACKAGES)
- or metadata.name(fail_ok=True) in R_PACKAGES
- ):
+ if any(r_pkg in deps for r_pkg in R_PACKAGES) or metadata.name() in R_PACKAGES:
r_bin = metadata.config.r_bin(prefix, metadata.config.host_subdir)
# set R_USER explicitly to prevent crosstalk with existing R_LIBS_USER packages
r_user = join(prefix, "Libs", "R")
@@ -484,7 +482,7 @@
return vars_


- def meta_vars(meta, skip_build_id=False):
+ def meta_vars(meta: MetaData, skip_build_id=False):
d = {}
for var_name in ensure_list(meta.get_value("build/script_env", [])):
if "=" in var_name:
@@ -545,12 +543,11 @@ def meta_vars(meta, skip_build_id=False):
):
d.update(get_hg_build_info(hg_dir))

- # use `get_value` to prevent early exit while name is still unresolved during rendering
- d["PKG_NAME"] = meta.get_value("package/name")
+ d["PKG_NAME"] = meta.name()
d["PKG_VERSION"] = meta.version()
d["PKG_BUILDNUM"] = str(meta.build_number())
if meta.final and not skip_build_id:
d["PKG_BUILD_STRING"] = str(meta.build_id())
d["PKG_BUILD_STRING"] = meta.build_id()
d["PKG_HASH"] = meta.hash_dependencies()
else:
d["PKG_BUILD_STRING"] = "placeholder"
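
The meta_vars hunk drops `get_value("package/name")` in favour of `meta.name()` (the comment about unresolved names during rendering goes away with it) and removes the `str()` wrapper around `build_id()`. A compact sketch of the resulting PKG_* assembly, with a fake metadata object standing in for `MetaData`:

```python
class FakeMeta:
    """Illustrative stand-in for MetaData; real values come from the rendered recipe."""

    final = True

    def name(self):
        return "demo"

    def version(self):
        return "1.0"

    def build_number(self):
        return 0

    def build_id(self):
        return "py39_0"

    def hash_dependencies(self):
        return "h1234567"


def pkg_env_vars(meta, skip_build_id=False):
    d = {
        "PKG_NAME": meta.name(),
        "PKG_VERSION": meta.version(),
        "PKG_BUILDNUM": str(meta.build_number()),
    }
    if meta.final and not skip_build_id:
        d["PKG_BUILD_STRING"] = meta.build_id()  # the diff drops the str() wrapper here
        d["PKG_HASH"] = meta.hash_dependencies()
    else:
        d["PKG_BUILD_STRING"] = "placeholder"
    return d


print(pkg_env_vars(FakeMeta()))
```
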
