Skip to content

Commit

Permalink
Merge branch 'main' into update-conda-inspect-channels
Browse files Browse the repository at this point in the history
  • Loading branch information
kenodegard authored Nov 6, 2023
2 parents fd96c8b + 90aee03 commit 44222cf
Show file tree
Hide file tree
Showing 48 changed files with 1,050 additions and 553 deletions.
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/1_feature.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ body:
id: what
attributes:
label: What should happen?
description: What should be the user experience with the feature? Describe from a user perpective what they would do and see.
description: What should be the user experience with the feature? Describe from a user perspective what they would do and see.
- type: textarea
id: context
attributes:
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/cla.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check CLA
uses: conda/actions/check-cla@v23.7.0
uses: conda/actions/check-cla@v23.10.0
with:
# [required]
# A token with ability to comment, label, and modify the commit status
Expand All @@ -31,6 +31,6 @@ jobs:
label: cla-signed

# [required]
# Token for opening singee PR in the provided `cla_repo`
# Token for opening signee PR in the provided `cla_repo`
# (`pull_request: write` for fine-grained PAT; `repo` and `workflow` for classic PAT)
cla_token: ${{ secrets.CLA_FORK_TOKEN }}
2 changes: 1 addition & 1 deletion .github/workflows/stale.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ jobs:
days-before-issue-stale: 90
days-before-issue-close: 21
steps:
- uses: conda/actions/read-yaml@v23.7.0
- uses: conda/actions/read-yaml@v23.10.0
id: read_yaml
with:
path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml
Expand Down
13 changes: 11 additions & 2 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -69,10 +69,18 @@ jobs:
fail-fast: false
matrix:
# test all lower versions (w/ stable conda) and upper version (w/ canary conda)
python-version: ['3.8', '3.9', '3.10']
python-version: ['3.9', '3.10']
conda-version: [release]
test-type: [serial, parallel]
include:
# minimum Python/conda combo
- python-version: '3.8'
conda-version: 22.11.0
test-type: serial
- python-version: '3.8'
conda-version: 22.11.0
test-type: parallel
# maximum Python/conda combo
- python-version: '3.11'
conda-version: canary
test-type: serial
Expand All @@ -81,6 +89,7 @@ jobs:
test-type: parallel
env:
CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }}
CONDA_VERSION: ${{ contains('canary,release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }}
REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}
REPLAY_DIR: ${{ github.workspace }}/pytest-replay
ALLURE_DIR: ${{ github.workspace }}/allure-results
Expand Down Expand Up @@ -115,7 +124,7 @@ jobs:
conda install -q -y -c defaults \
--file ./tests/requirements.txt \
--file ./tests/requirements-linux.txt \
${{ env.CONDA_CHANNEL_LABEL }}::conda
${{ env.CONDA_CHANNEL_LABEL }}::${{ env.CONDA_VERSION }}
pip install -e .
- name: Show info
Expand Down
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ exclude: |
repos:
# generic verification and formatting
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
# standard end of line/end of file cleanup
- id: mixed-line-ending
Expand Down Expand Up @@ -49,13 +49,13 @@ repos:
args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol]
exclude: ^conda_build/version.py
- repo: https://github.com/asottile/pyupgrade
rev: v3.13.0
rev: v3.15.0
hooks:
# upgrade standard Python codes
- id: pyupgrade
args: [--py38-plus]
- repo: https://github.com/psf/black
rev: 23.9.1
rev: 23.10.1
hooks:
# auto format Python codes
- id: black
Expand All @@ -66,7 +66,7 @@ repos:
- id: blacken-docs
additional_dependencies: [black]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.291
rev: v0.1.3
hooks:
- id: ruff
args: [--fix]
Expand Down
18 changes: 8 additions & 10 deletions conda_build/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
Design philosophy: put variability into config. Make each function here accept kwargs,
but only use those kwargs in config. Config must change to support new features elsewhere.
"""
from __future__ import annotations

import sys as _sys

Expand Down Expand Up @@ -76,8 +77,8 @@ def render(
raise

# remove outputs section from output objects for simplicity
if not om.path and om.meta.get("outputs"):
om.parent_outputs = om.meta["outputs"]
if not om.path and (outputs := om.get_section("outputs")):
om.parent_outputs = outputs
del om.meta["outputs"]

output_metas[
Expand Down Expand Up @@ -571,7 +572,7 @@ def debug(
test=False,
output_id=None,
config=None,
verbose=True,
verbose: bool = True,
link_source_method="auto",
**kwargs,
):
Expand All @@ -587,6 +588,8 @@ def debug(
from conda_build.build import test as run_test
from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, LoggingContext, on_win

from .metadata import MetaData

is_package = False
default_config = get_or_merge_config(config, **kwargs)
args = {"set_build_id": False}
Expand Down Expand Up @@ -622,15 +625,13 @@ def debug(

config.channel_urls = get_channel_urls(kwargs)

metadata_tuples = []
metadata_tuples: list[tuple[MetaData, bool, bool]] = []

best_link_source_method = "skip"
if isinstance(recipe_or_package_path_or_metadata_tuples, str):
if path_is_build_dir:
for metadata_conda_debug in metadatas_conda_debug:
best_link_source_method = "symlink"
from conda_build.metadata import MetaData

metadata = MetaData(metadata_conda_debug, config, {})
metadata_tuples.append((metadata, False, True))
else:
Expand Down Expand Up @@ -681,10 +682,7 @@ def debug(
"local",
"src",
"conda",
"{}-{}".format(
metadata.get_value("package/name"),
metadata.get_value("package/version"),
),
f"{metadata.name()}-{metadata.version()}",
)
link_target = os.path.dirname(metadata.meta_path)
try:
Expand Down
61 changes: 23 additions & 38 deletions conda_build/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,6 @@
env_path_backup_var_exists,
get_conda_channel,
get_rc_urls,
pkgs_dirs,
prefix_placeholder,
reset_context,
root_dir,
Expand Down Expand Up @@ -152,7 +151,7 @@ def log_stats(stats_dict, descriptor):
)


def create_post_scripts(m):
def create_post_scripts(m: MetaData):
"""
Create scripts to run after build step
"""
Expand All @@ -163,12 +162,9 @@ def create_post_scripts(m):
is_output = "package:" not in m.get_recipe_text()
scriptname = tp
if is_output:
if m.meta.get("build", {}).get(tp, ""):
scriptname = m.meta["build"][tp]
else:
scriptname = m.name() + "-" + tp
scriptname = m.get_value(f"build/{tp}", f"{m.name()}-{tp}")
scriptname += ext
dst_name = "." + m.name() + "-" + tp + ext
dst_name = f".{m.name()}-{tp}{ext}"
src = join(m.path, scriptname)
if isfile(src):
dst_dir = join(
Expand Down Expand Up @@ -1457,12 +1453,12 @@ def write_about_json(m):
json.dump(d, fo, indent=2, sort_keys=True)


def write_info_json(m):
def write_info_json(m: MetaData):
info_index = m.info_index()
if m.pin_depends:
        # With 'strict' depends, we will have pinned run deps during rendering
if m.pin_depends == "strict":
runtime_deps = m.meta.get("requirements", {}).get("run", [])
runtime_deps = m.get_value("requirements/run", [])
info_index["depends"] = runtime_deps
else:
runtime_deps = environ.get_pinned_deps(m, "run")
Expand Down Expand Up @@ -1509,8 +1505,8 @@ def get_entry_point_script_names(entry_point_scripts):
return scripts


def write_run_exports(m):
run_exports = m.meta.get("build", {}).get("run_exports", {})
def write_run_exports(m: MetaData):
run_exports = m.get_value("build/run_exports", {})
if run_exports:
with open(os.path.join(m.config.info_dir, "run_exports.json"), "w") as f:
if not hasattr(run_exports, "keys"):
Expand Down Expand Up @@ -1748,8 +1744,8 @@ def create_info_files_json_v1(m, info_dir, prefix, files, files_with_prefix):
return checksums


def post_process_files(m, initial_prefix_files):
package_name = m.get_value("package/name")
def post_process_files(m: MetaData, initial_prefix_files):
package_name = m.name()
host_prefix = m.config.host_prefix
missing = []
for f in initial_prefix_files:
Expand Down Expand Up @@ -1779,7 +1775,7 @@ def post_process_files(m, initial_prefix_files):
)
post_process(
package_name,
m.get_value("package/version"),
m.version(),
sorted(current_prefix_files - initial_prefix_files),
prefix=host_prefix,
config=m.config,
Expand Down Expand Up @@ -1840,7 +1836,7 @@ def post_process_files(m, initial_prefix_files):
return new_files


def bundle_conda(output, metadata, env, stats, **kw):
def bundle_conda(output, metadata: MetaData, env, stats, **kw):
log = utils.get_logger(__name__)
log.info("Packaging %s", metadata.dist())
get_all_replacements(metadata.config)
Expand Down Expand Up @@ -1912,7 +1908,7 @@ def bundle_conda(output, metadata, env, stats, **kw):
env_output["TOP_PKG_NAME"] = env["PKG_NAME"]
env_output["TOP_PKG_VERSION"] = env["PKG_VERSION"]
env_output["PKG_VERSION"] = metadata.version()
env_output["PKG_NAME"] = metadata.get_value("package/name")
env_output["PKG_NAME"] = metadata.name()
env_output["RECIPE_DIR"] = metadata.path
env_output["MSYS2_PATH_TYPE"] = "inherit"
env_output["CHERE_INVOKING"] = "1"
Expand Down Expand Up @@ -2130,7 +2126,7 @@ def bundle_conda(output, metadata, env, stats, **kw):
return final_outputs


def bundle_wheel(output, metadata, env, stats):
def bundle_wheel(output, metadata: MetaData, env, stats):
ext = ".bat" if utils.on_win else ".sh"
with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir):
dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext)
Expand All @@ -2146,7 +2142,7 @@ def bundle_wheel(output, metadata, env, stats):
env["TOP_PKG_NAME"] = env["PKG_NAME"]
env["TOP_PKG_VERSION"] = env["PKG_VERSION"]
env["PKG_VERSION"] = metadata.version()
env["PKG_NAME"] = metadata.get_value("package/name")
env["PKG_NAME"] = metadata.name()
interpreter_and_args = guess_interpreter(dest_file)

bundle_stats = {}
Expand Down Expand Up @@ -2318,7 +2314,7 @@ def _write_activation_text(script_path, m):
fh.write(data)


def create_build_envs(m, notest):
def create_build_envs(m: MetaData, notest):
build_ms_deps = m.ms_depends("build")
build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps]
host_ms_deps = m.ms_depends("host")
Expand Down Expand Up @@ -2372,11 +2368,12 @@ def create_build_envs(m, notest):
try:
if not notest:
utils.insert_variant_versions(
m.meta.get("requirements", {}), m.config.variant, "run"
m.get_section("requirements"), m.config.variant, "run"
)
test_run_ms_deps = utils.ensure_list(
m.get_value("test/requires", [])
) + utils.ensure_list(m.get_value("requirements/run", []))
test_run_ms_deps = [
*utils.ensure_list(m.get_value("test/requires", [])),
*utils.ensure_list(m.get_value("requirements/run", [])),
]
# make sure test deps are available before taking time to create build env
environ.get_install_actions(
m.config.test_prefix,
Expand Down Expand Up @@ -2425,7 +2422,7 @@ def create_build_envs(m, notest):


def build(
m,
m: MetaData,
stats,
post=None,
need_source_download=True,
Expand Down Expand Up @@ -2517,7 +2514,7 @@ def build(
)

specs = [ms.spec for ms in m.ms_depends("build")]
if any(out.get("type") == "wheel" for out in m.meta.get("outputs", [])):
if any(out.get("type") == "wheel" for out in m.get_section("outputs")):
specs.extend(["pip", "wheel"])

# TODO :: This is broken. It does not respect build/script for example and also if you need git
Expand Down Expand Up @@ -3420,18 +3417,6 @@ def test(
# folder destination
_extract_test_files_from_package(metadata)

# When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it.
# Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed
# I think we can remove this call to clean_pkg_cache().
in_pkg_cache = (
not hasattr(recipedir_or_package_or_metadata, "config")
and os.path.isfile(recipedir_or_package_or_metadata)
and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS)
and os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0]
)
if not in_pkg_cache:
environ.clean_pkg_cache(metadata.dist(), metadata.config)

copy_test_source_files(metadata, metadata.config.test_dir)
# this is also copying tests/source_files from work_dir to testing workdir

Expand Down Expand Up @@ -4112,7 +4097,7 @@ def handle_anaconda_upload(paths, config):
print(no_upload_message)
return

if anaconda is None:
if not anaconda:
print(no_upload_message)
sys.exit(
"Error: cannot locate anaconda command (required for upload)\n"
Expand Down
11 changes: 10 additions & 1 deletion conda_build/conda_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,10 +74,19 @@
win_path_to_unix,
)
from conda.models.channel import get_conda_build_local_url # noqa: F401
from conda.models.dist import Dist, IndexRecord # noqa: F401
from conda.models.dist import Dist # noqa: F401
from conda.models.records import PackageRecord

from .deprecations import deprecated

deprecated.constant(
"3.28.0",
"4.0.0",
"IndexRecord",
PackageRecord,
addendum="Use `conda.models.records.PackageRecord` instead.",
)

# TODO: Go to references of all properties below and import them from `context` instead
binstar_upload = context.binstar_upload
default_python = context.default_python
Expand Down
4 changes: 2 additions & 2 deletions conda_build/create_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def _get_output_script_name(
src_name = dst_name
if m.is_output:
src_name = "no-file"
for out in m.meta.get("outputs", []):
for out in m.get_section("outputs"):
if m.name() == out.get("name"):
out_test_script = out.get("test", {}).get("script", "no-file")
if os.path.splitext(out_test_script)[1].lower() == ext:
Expand Down Expand Up @@ -103,7 +103,7 @@ def _create_test_files(
name = ""
# the way this works is that each output needs to explicitly define a test script to run
# They do not automatically pick up run_test.*, but can be pointed at that explicitly.
for out in m.meta.get("outputs", []):
for out in m.get_section("outputs"):
if m.name() == out.get("name"):
out_test_script = out.get("test", {}).get("script", "no-file")
if out_test_script.endswith(ext):
Expand Down
Loading

0 comments on commit 44222cf

Please sign in to comment.