Use MetaData's get_section & get_value
kenodegard committed Nov 2, 2023
1 parent 558999b commit 3c376b1
Showing 6 changed files with 65 additions and 63 deletions.
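
Every hunk in this commit applies the same substitution: chained m.meta.get(...) lookups become MetaData.get_section / MetaData.get_value calls. As a reading aid, here is a minimal sketch of the accessor semantics the call sites below appear to assume; the class, its defaults, and the "/"-path handling are inferred from the diff, not taken from conda-build's actual implementation:

    # Hypothetical stand-in for conda_build.metadata.MetaData; only the two
    # accessors exercised by this commit are sketched, and their exact
    # semantics are assumptions inferred from the call sites.
    class MetaDataSketch:
        def __init__(self, meta: dict):
            self.meta = meta  # parsed meta.yaml as nested dicts/lists

        def get_section(self, name):
            # assumed: plural sections such as "outputs" default to a list,
            # everything else to a dict
            return self.meta.get(name) or ([] if name == "outputs" else {})

        def get_value(self, field, default=None):
            # assumed: "section/key" paths, e.g. "build/run_exports"
            section, _, key = field.partition("/")
            value = self.meta.get(section) or {}
            return value.get(key, default) if isinstance(value, dict) else default

    m = MetaDataSketch({"build": {"number": 0}})
    assert m.get_value("build/number") == 0
    assert m.get_value("requirements/run", []) == []
    assert m.get_section("outputs") == []
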
2 changes: 1 addition & 1 deletion conda_build/api.py
@@ -76,7 +76,7 @@ def render(
                 raise

         # remove outputs section from output objects for simplicity
-        if not om.path and om.meta.get("outputs"):
+        if not om.path and om.get_section("outputs"):
             om.parent_outputs = om.meta["outputs"]
             del om.meta["outputs"]
32 changes: 15 additions & 17 deletions conda_build/build.py
@@ -151,7 +151,7 @@ def log_stats(stats_dict, descriptor):
     )


-def create_post_scripts(m):
+def create_post_scripts(m: MetaData):
     """
     Create scripts to run after build step
     """
@@ -162,12 +162,9 @@ def create_post_scripts(m):
         is_output = "package:" not in m.get_recipe_text()
         scriptname = tp
         if is_output:
-            if m.meta.get("build", {}).get(tp, ""):
-                scriptname = m.meta["build"][tp]
-            else:
-                scriptname = m.name() + "-" + tp
+            scriptname = m.get_value(f"build/{tp}", f"{m.name()}-{tp}")
         scriptname += ext
-        dst_name = "." + m.name() + "-" + tp + ext
+        dst_name = f".{m.name()}-{tp}{ext}"
         src = join(m.path, scriptname)
         if isfile(src):
             dst_dir = join(
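
The create_post_scripts hunk collapses a truthiness-checked if/else into one get_value call with a computed fallback. A small before/after equivalence check, assuming get_value treats a missing or empty entry as absent (matching the old `.get(tp, "")` truthiness test); `meta` and `name` here are stand-ins for m.meta and m.name():

    meta = {"build": {}}          # recipe declares no pre-link script
    name, tp, ext = "mypkg", "pre-link", ".sh"

    # before: explicit truthiness check plus hand-built fallback
    if meta.get("build", {}).get(tp, ""):
        scriptname = meta["build"][tp]
    else:
        scriptname = name + "-" + tp

    # after (modeled): the fallback is supplied as the get_value default
    assert scriptname == (meta["build"].get(tp) or f"{name}-{tp}")
    assert scriptname + ext == "mypkg-pre-link.sh"
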
@@ -1456,12 +1453,12 @@ def write_about_json(m):
         json.dump(d, fo, indent=2, sort_keys=True)


-def write_info_json(m):
+def write_info_json(m: MetaData):
     info_index = m.info_index()
     if m.pin_depends:
         # Wtih 'strict' depends, we will have pinned run deps during rendering
         if m.pin_depends == "strict":
-            runtime_deps = m.meta.get("requirements", {}).get("run", [])
+            runtime_deps = m.get_value("requirements/run", [])
             info_index["depends"] = runtime_deps
         else:
             runtime_deps = environ.get_pinned_deps(m, "run")
@@ -1508,8 +1505,8 @@ def get_entry_point_script_names(entry_point_scripts):
     return scripts


-def write_run_exports(m):
-    run_exports = m.meta.get("build", {}).get("run_exports", {})
+def write_run_exports(m: MetaData):
+    run_exports = m.get_value("build/run_exports", {})
     if run_exports:
         with open(os.path.join(m.config.info_dir, "run_exports.json"), "w") as f:
             if not hasattr(run_exports, "keys"):
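
Context for the `hasattr(run_exports, "keys")` check above: a recipe's build/run_exports can be either a flat list of specs or a mapping keyed by export strength, so the writer branches on shape. A sketch with both shapes (the exact key names, e.g. "strong"/"weak", are an assumption here):

    flat = ["libfoo >=1.2"]                                   # list form: no .keys()
    keyed = {"strong": ["libfoo >=1.2"], "weak": ["libbar"]}  # mapping form

    for run_exports in (flat, keyed):
        if hasattr(run_exports, "keys"):
            specs = [spec for key in run_exports for spec in run_exports[key]]
        else:
            specs = list(run_exports)
        assert "libfoo >=1.2" in specs
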
@@ -2317,7 +2314,7 @@ def _write_activation_text(script_path, m):
             fh.write(data)


-def create_build_envs(m, notest):
+def create_build_envs(m: MetaData, notest):
     build_ms_deps = m.ms_depends("build")
     build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps]
     host_ms_deps = m.ms_depends("host")
@@ -2371,11 +2368,12 @@ def create_build_envs(m, notest):
     try:
         if not notest:
             utils.insert_variant_versions(
-                m.meta.get("requirements", {}), m.config.variant, "run"
+                m.get_section("requirements"), m.config.variant, "run"
             )
-            test_run_ms_deps = utils.ensure_list(
-                m.get_value("test/requires", [])
-            ) + utils.ensure_list(m.get_value("requirements/run", []))
+            test_run_ms_deps = [
+                *utils.ensure_list(m.get_value("test/requires", [])),
+                *utils.ensure_list(m.get_value("requirements/run", [])),
+            ]
             # make sure test deps are available before taking time to create build env
             environ.get_install_actions(
                 m.config.test_prefix,
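
The test_run_ms_deps rewrite above trades `list + list` concatenation for iterable unpacking; the two are equivalent for lists, and the spread form keeps each dependency source on its own line. A quick check, with plain lists standing in for the utils.ensure_list(...) results:

    test_requires = ["pytest"]
    run_requires = ["python >=3.8", "numpy"]

    assert test_requires + run_requires == [*test_requires, *run_requires]
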
@@ -2424,7 +2422,7 @@ def create_build_envs(m, notest):


 def build(
-    m,
+    m: MetaData,
     stats,
     post=None,
     need_source_download=True,
@@ -2516,7 +2514,7 @@ def build(
     )

     specs = [ms.spec for ms in m.ms_depends("build")]
-    if any(out.get("type") == "wheel" for out in m.meta.get("outputs", [])):
+    if any(out.get("type") == "wheel" for out in m.get_section("outputs")):
         specs.extend(["pip", "wheel"])

     # TODO :: This is broken. It does not respect build/script for example and also if you need git
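
The wheel check in the last hunk scans the outputs section for a wheel-type output before adding pip and wheel to the build specs. A sketch of the data it walks, with a hypothetical outputs list:

    outputs = [
        {"name": "mypkg", "type": "conda"},
        {"name": "mypkg-wheel", "type": "wheel"},  # hypothetical wheel output
    ]

    specs = ["python", "setuptools"]
    if any(out.get("type") == "wheel" for out in outputs):
        specs.extend(["pip", "wheel"])  # wheel builds need pip and wheel available
    assert specs[-2:] == ["pip", "wheel"]
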
4 changes: 2 additions & 2 deletions conda_build/create_test.py
@@ -47,7 +47,7 @@ def _get_output_script_name(
     src_name = dst_name
     if m.is_output:
         src_name = "no-file"
-        for out in m.meta.get("outputs", []):
+        for out in m.get_section("outputs"):
             if m.name() == out.get("name"):
                 out_test_script = out.get("test", {}).get("script", "no-file")
                 if os.path.splitext(out_test_script)[1].lower() == ext:
@@ -103,7 +103,7 @@ def _create_test_files(
     name = ""
     # the way this works is that each output needs to explicitly define a test script to run
     # They do not automatically pick up run_test.*, but can be pointed at that explicitly.
-    for out in m.meta.get("outputs", []):
+    for out in m.get_section("outputs"):
         if m.name() == out.get("name"):
             out_test_script = out.get("test", {}).get("script", "no-file")
             if out_test_script.endswith(ext):
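
Both create_test.py hunks resolve a per-output test script by matching the current output's name, falling back to "no-file" when nothing is declared. A self-contained sketch of that lookup, with a hypothetical outputs list:

    outputs = [
        {"name": "libfoo", "test": {"script": "run_test_libfoo.py"}},
        {"name": "libfoo-dev"},  # declares no test script
    ]

    def output_test_script(name, ext=".py"):
        for out in outputs:
            if out.get("name") == name:
                script = out.get("test", {}).get("script", "no-file")
                if script.endswith(ext):
                    return script
        return "no-file"

    assert output_test_script("libfoo") == "run_test_libfoo.py"
    assert output_test_script("libfoo-dev") == "no-file"
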
4 changes: 2 additions & 2 deletions conda_build/metadata.py
@@ -409,7 +409,7 @@ def ensure_matching_hashes(output_metadata):
     for _, m in output_metadata.values():
         for _, om in output_metadata.values():
             if m != om:
-                run_exports = om.meta.get("build", {}).get("run_exports", [])
+                run_exports = om.get_value("build/run_exports", [])
                 if hasattr(run_exports, "keys"):
                     run_exports_list = []
                     for export_type in utils.RUN_EXPORTS_TYPES:
@@ -2574,7 +2574,7 @@ def get_output_metadata_set(
                 )
                 output_d["requirements"] = output_d.get("requirements", {})
                 output_d["requirements"]["build"] = build_reqs
-                m.meta["requirements"] = m.meta.get("requirements", {})
+                m.meta["requirements"] = m.get_section("requirements")
                 m.meta["requirements"]["build"] = build_reqs
                 non_conda_packages.append((output_d, m))
             else:
44 changes: 20 additions & 24 deletions conda_build/post.py
@@ -61,6 +61,8 @@
     machofile,
 )

+from .metadata import MetaData
+
 filetypes_for_platform = {
     "win": (DLLfile, EXEfile),
     "osx": (machofile,),
@@ -1583,41 +1585,35 @@ def check_overlinking_impl(
     return dict()


-def check_overlinking(m, files, host_prefix=None):
-    if not host_prefix:
-        host_prefix = m.config.host_prefix
-
-    overlinking_ignore_patterns = m.meta.get("build", {}).get(
-        "overlinking_ignore_patterns"
-    )
-    if overlinking_ignore_patterns:
-        files = [
-            f
-            for f in files
-            if not any([fnmatch(f, p) for p in overlinking_ignore_patterns])
-        ]
+def check_overlinking(m: MetaData, files, host_prefix=None):
+    patterns = m.get_value("build/overlinking_ignore_patterns", [])
+    files = [
+        file
+        for file in files
+        if not any([fnmatch(file, pattern) for pattern in patterns])
+    ]
     return check_overlinking_impl(
-        m.get_value("package/name"),
-        m.get_value("package/version"),
-        m.get_value("build/string"),
-        m.get_value("build/number"),
+        m.name(),
+        m.version(),
+        m.build_id(),
+        m.build_number(),
         m.config.target_subdir,
         m.get_value("build/ignore_run_exports"),
-        [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("run", [])],
-        [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("build", [])],
-        [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("host", [])],
-        host_prefix,
+        [req.split(" ")[0] for req in m.get_value("requirements/run", [])],
+        [req.split(" ")[0] for req in m.get_value("requirements/build", [])],
+        [req.split(" ")[0] for req in m.get_value("requirements/host", [])],
+        host_prefix or m.config.host_prefix,
         m.config.build_prefix,
-        m.meta.get("build", {}).get("missing_dso_whitelist", []),
-        m.meta.get("build", {}).get("runpath_whitelist", []),
+        m.get_value("build/missing_dso_whitelist", []),
+        m.get_value("build/runpath_whitelist", []),
         m.config.error_overlinking,
         m.config.error_overdepending,
         m.config.verbose,
         True,
         files,
         m.config.bldpkgs_dir,
         m.config.output_folder,
-        list(m.config.channel_urls) + ["local"],
+        [*m.config.channel_urls, "local"],
         m.config.enable_static,
         m.config.variant,
     )
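
Two behavioral points in the check_overlinking rewrite are worth spelling out: the early `if not host_prefix` branch becomes an inline `or` fallback, and the filename filter now always runs but is a no-op when get_value returns the default `[]`. A short demonstration:

    from fnmatch import fnmatch

    # With no ignore patterns configured the comprehension keeps every file,
    # so dropping the old `if overlinking_ignore_patterns:` guard is safe.
    patterns = []                      # the get_value default
    files = ["lib/libfoo.so", "bin/tool"]
    assert [f for f in files if not any(fnmatch(f, p) for p in patterns)] == files

    # Inline fallback replacing the removed `if not host_prefix:` block
    # (config_host_prefix is a stand-in for m.config.host_prefix).
    config_host_prefix = "/opt/conda/envs/_h_env"
    host_prefix = None
    assert (host_prefix or config_host_prefix) == config_host_prefix
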
42 changes: 25 additions & 17 deletions conda_build/render.py
@@ -118,7 +118,7 @@ def _categorize_deps(m, specs, exclude_pattern, variant):


 def get_env_dependencies(
-    m,
+    m: MetaData,
     env,
     variant,
     exclude_pattern=None,
@@ -178,7 +178,7 @@ def get_env_dependencies(
     return (
         utils.ensure_list(
             (specs + subpackages + pass_through_deps)
-            or m.meta.get("requirements", {}).get(env, [])
+            or m.get_value(f"requirements/{env}", [])
         ),
         actions,
         unsat,
@@ -393,11 +393,11 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files
     return pkg_files


-def get_upstream_pins(m, actions, env):
+def get_upstream_pins(m: MetaData, actions, env):
     """Download packages from specs, then inspect each downloaded package for additional
     downstream dependency specs. Return these additional specs."""

-    env_specs = m.meta.get("requirements", {}).get(env, [])
+    env_specs = m.get_value(f"requirements/{env}", [])
     explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else []
     linked_packages = actions.get("LINK", [])
     linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]
@@ -427,7 +427,12 @@ def get_upstream_pins(m, actions, env):
     return additional_specs


-def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_pattern):
+def _read_upstream_pin_files(
+    m: MetaData,
+    env,
+    permit_unsatisfiable_variants,
+    exclude_pattern,
+):
     deps, actions, unsat = get_env_dependencies(
         m,
         env,
@@ -439,16 +444,16 @@ def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_patt
     # vc feature activation to work correctly in the host env.
     extra_run_specs = get_upstream_pins(m, actions, env)
     return (
-        list(set(deps)) or m.meta.get("requirements", {}).get(env, []),
+        list(set(deps)) or m.get_value(f"requirements/{env}", []),
         unsat,
         extra_run_specs,
     )


-def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
+def add_upstream_pins(m: MetaData, permit_unsatisfiable_variants, exclude_pattern):
     """Applies run_exports from any build deps to host and run sections"""
     # if we have host deps, they're more important than the build deps.
-    requirements = m.meta.get("requirements", {})
+    requirements = m.get_section("requirements")
     build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(
         m, "build", permit_unsatisfiable_variants, exclude_pattern
     )
Expand All @@ -464,7 +469,7 @@ def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):

if not host_reqs:
matching_output = [
out for out in m.meta.get("outputs", []) if out.get("name") == m.name()
out for out in m.get_section("outputs") if out.get("name") == m.name()
]
if matching_output:
requirements = utils.expand_reqs(
@@ -580,7 +585,11 @@ def _simplify_to_exact_constraints(metadata):
     metadata.meta["requirements"] = requirements


-def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
+def finalize_metadata(
+    m: MetaData,
+    parent_metadata=None,
+    permit_unsatisfiable_variants=False,
+):
     """Fully render a recipe. Fill in versions for build/host dependencies."""
     if not parent_metadata:
         parent_metadata = m
@@ -605,7 +614,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
             )
         )

-    parent_recipe = m.meta.get("extra", {}).get("parent_recipe", {})
+    parent_recipe = m.get_value("extra/parent_recipe", {})

     # extract the topmost section where variables are defined, and put it on top of the
     # requirements for a particular output
@@ -625,7 +634,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
         requirements = utils.expand_reqs(output.get("requirements", {}))
         m.meta["requirements"] = requirements

-        if m.meta.get("requirements"):
+        if m.get_section("requirements"):
             utils.insert_variant_versions(
                 m.meta["requirements"], m.config.variant, "build"
             )
@@ -639,7 +648,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
         )
         # getting this AFTER add_upstream_pins is important, because that function adds deps
         # to the metadata.
-        requirements = m.meta.get("requirements", {})
+        requirements = m.get_section("requirements")

         # here's where we pin run dependencies to their build time versions. This happens based
         # on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
@@ -700,14 +709,14 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
             utils.ensure_valid_spec(spec, warn=True) for spec in versioned_test_deps
         ]
         m.meta["test"]["requires"] = versioned_test_deps
-    extra = m.meta.get("extra", {})
+    extra = m.get_section("extra")
     extra["copy_test_source_files"] = m.config.copy_test_source_files
     m.meta["extra"] = extra

     # if source/path is relative, then the output package makes no sense at all. The next
     # best thing is to hard-code the absolute path. This probably won't exist on any
     # system other than the original build machine, but at least it will work there.
-    if m.meta.get("source"):
+    if m.get_section("source"):
         if "path" in m.meta["source"]:
             source_path = m.meta["source"]["path"]
             os.path.expanduser(source_path)
@@ -726,8 +735,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal
                 os.path.join(m.path, m.meta["source"]["git_url"])
             )

-    if not m.meta.get("build"):
-        m.meta["build"] = {}
+    m.meta.setdefault("build", {})

     _simplify_to_exact_constraints(m)
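
The final render.py hunk swaps an explicit existence check for dict.setdefault, which inserts the default only when the key is missing. One edge to note: the old form also replaced a key explicitly set to a falsy value (e.g. build: null in the recipe), while setdefault leaves it alone; presumably that case cannot occur by this point in finalization. For an absent key the two agree:

    a = {"package": {"name": "mypkg"}}
    b = {"package": {"name": "mypkg"}}

    # before
    if not a.get("build"):
        a["build"] = {}

    # after
    b.setdefault("build", {})

    assert a == b == {"package": {"name": "mypkg"}, "build": {}}
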
