diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5777412269..9377577710 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -26,6 +26,41 @@
 * Mark `conda_build.conda_interface.linked` as pending deprecation. (#5074)
 * Mark `conda_build.conda_interface.linked_data` as pending deprecation. (#5074)
 * Mark `conda_build.utils.linked_data_no_multichannels` as pending deprecation. (#5074)
+* Mark `conda_build.environ.get_install_actions` as pending deprecation in favor of `conda_build.environ.get_package_records`. (#5152)
+* Mark `conda_build.environ.create_env(specs_or_actions)` as pending deprecation in favor of `conda_build.environ.create_env(specs_or_precs)`. (#5152)
+* Mark `conda_build.index.channel_data` as pending deprecation. (#5152)
+* Mark `conda_build.index._determine_namespace` as pending deprecation. (#5152)
+* Mark `conda_build.index._make_seconds` as pending deprecation. (#5152)
+* Mark `conda_build.index.REPODATA_VERSION` as pending deprecation. (#5152)
+* Mark `conda_build.index.CHANNELDATA_VERSION` as pending deprecation. (#5152)
+* Mark `conda_build.index.REPODATA_JSON_FN` as pending deprecation. (#5152)
+* Mark `conda_build.index.REPODATA_FROM_PKGS_JSON_FN` as pending deprecation. (#5152)
+* Mark `conda_build.index.CHANNELDATA_FIELDS` as pending deprecation. (#5152)
+* Mark `conda_build.index._clear_newline_chars` as pending deprecation. (#5152)
+* Mark `conda_build.index._apply_instructions` as pending deprecation. (#5152)
+* Mark `conda_build.index._get_jinja2_environment` as pending deprecation. (#5152)
+* Mark `conda_build.index._maybe_write` as pending deprecation. (#5152)
+* Mark `conda_build.index._make_build_string` as pending deprecation. (#5152)
+* Mark `conda_build.index._warn_on_missing_dependencies` as pending deprecation. (#5152)
+* Mark `conda_build.index._cache_post_install_details` as pending deprecation. (#5152)
+* Mark `conda_build.index._cache_recipe` as pending deprecation. (#5152)
+* Mark `conda_build.index._cache_run_exports` as pending deprecation. (#5152)
+* Mark `conda_build.index._cache_icon` as pending deprecation. (#5152)
+* Mark `conda_build.index._make_subdir_index_html` as pending deprecation. (#5152)
+* Mark `conda_build.index._make_channeldata_index_html` as pending deprecation. (#5152)
+* Mark `conda_build.index._get_source_repo_git_info` as pending deprecation. (#5152)
+* Mark `conda_build.index._cache_info_file` as pending deprecation. (#5152)
+* Mark `conda_build.index._alternate_file_extension` as pending deprecation. (#5152)
+* Mark `conda_build.index._get_resolve_object` as pending deprecation. (#5152)
+* Mark `conda_build.index._get_newest_versions` as pending deprecation. (#5152)
+* Mark `conda_build.index._add_missing_deps` as pending deprecation. (#5152)
+* Mark `conda_build.index._add_prev_ver_for_features` as pending deprecation. (#5152)
+* Mark `conda_build.index._shard_newest_packages` as pending deprecation. (#5152)
+* Mark `conda_build.index._build_current_repodata` as pending deprecation. (#5152)
+* Mark `conda_build.index.ChannelIndex` as pending deprecation. (#5152)
+* Mark `conda_build.render.actions_to_pins` as pending deprecation. (#5152)
+* Mark `conda_build.render.execute_download_actions(actions)` as pending deprecation in favor of `conda_build.render.execute_download_actions(precs)`. (#5152)
+* Mark `conda_build.render.get_upstream_pins(actions)` as pending deprecation in favor of `conda_build.render.get_upstream_pins(precs)`. (#5152)
 * Remove `conda_build.api.update_index`. (#5151)
 * Remove `conda_build.cli.main_build.main`. (#5151)
 * Remove `conda_build.cli.main_convert.main`. (#5151)
diff --git a/conda_build/build.py b/conda_build/build.py
index 45f64995f2..28ffc04a70 100644
--- a/conda_build/build.py
+++ b/conda_build/build.py
@@ -2308,7 +2308,7 @@ def create_build_envs(m: MetaData, notest):
         m.config._merge_build_host = m.build_is_host
 
     if m.is_cross and not m.build_is_host:
-        host_actions = environ.get_install_actions(
+        host_precs = environ.get_package_records(
             m.config.host_prefix,
             tuple(host_ms_deps),
             "host",
@@ -2325,7 +2325,7 @@ def create_build_envs(m: MetaData, notest):
         )
         environ.create_env(
             m.config.host_prefix,
-            host_actions,
+            host_precs,
             env="host",
             config=m.config,
             subdir=m.config.host_subdir,
@@ -2334,7 +2334,7 @@ def create_build_envs(m: MetaData, notest):
     if m.build_is_host:
         build_ms_deps.extend(host_ms_deps)
 
-    build_actions = environ.get_install_actions(
+    build_precs = environ.get_package_records(
         m.config.build_prefix,
         tuple(build_ms_deps),
         "build",
@@ -2360,7 +2360,7 @@ def create_build_envs(m: MetaData, notest):
                 *utils.ensure_list(m.get_value("requirements/run", [])),
             ]
             # make sure test deps are available before taking time to create build env
-            environ.get_install_actions(
+            environ.get_package_records(
                 m.config.test_prefix,
                 tuple(test_run_ms_deps),
                 "test",
@@ -2397,7 +2397,7 @@ def create_build_envs(m: MetaData, notest):
         ):
             environ.create_env(
                 m.config.build_prefix,
-                build_actions,
+                build_precs,
                 env="build",
                 config=m.config,
                 subdir=m.config.build_subdir,
@@ -2435,8 +2435,8 @@ def build(
         return default_return
 
     log = utils.get_logger(__name__)
-    host_actions = []
-    build_actions = []
+    host_precs = []
+    build_precs = []
     output_metas = []
 
     with utils.path_prepended(m.config.build_prefix):
@@ -2779,7 +2779,7 @@ def build(
         host_ms_deps = m.ms_depends("host")
         sub_build_ms_deps = m.ms_depends("build")
         if m.is_cross and not m.build_is_host:
-            host_actions = environ.get_install_actions(
+            host_precs = environ.get_package_records(
                 m.config.host_prefix,
                 tuple(host_ms_deps),
                 "host",
@@ -2796,7 +2796,7 @@ def build(
             )
            environ.create_env(
                 m.config.host_prefix,
-                host_actions,
+                host_precs,
                 env="host",
                 config=m.config,
                 subdir=subdir,
@@ -2806,7 +2806,7 @@ def build(
         else:
             # When not cross-compiling, the build deps aggregate 'build' and 'host'.
             sub_build_ms_deps.extend(host_ms_deps)
-            build_actions = environ.get_install_actions(
+            build_precs = environ.get_package_records(
                 m.config.build_prefix,
                 tuple(sub_build_ms_deps),
                 "build",
@@ -2823,7 +2823,7 @@ def build(
             )
             environ.create_env(
                 m.config.build_prefix,
-                build_actions,
+                build_precs,
                 env="build",
                 config=m.config,
                 subdir=m.config.build_subdir,
@@ -3481,7 +3481,7 @@ def test(
         utils.rm_rf(metadata.config.test_prefix)
 
         try:
-            actions = environ.get_install_actions(
+            precs = environ.get_package_records(
                 metadata.config.test_prefix,
                 tuple(specs),
                 "host",
@@ -3523,7 +3523,7 @@ def test(
     with env_var("CONDA_PATH_CONFLICT", conflict_verbosity, reset_context):
         environ.create_env(
             metadata.config.test_prefix,
-            actions,
+            precs,
             config=metadata.config,
             env="host",
             subdir=subdir,
@@ -3819,7 +3819,7 @@ def build_tree(
                         with TemporaryDirectory(
                             prefix="_", suffix=r_string
                         ) as tmpdir:
-                            actions = environ.get_install_actions(
+                            precs = environ.get_package_records(
                                 tmpdir,
                                 specs,
                                 env="run",
@@ -3839,7 +3839,7 @@ def build_tree(
                            # make sure to download that package to the local cache if not there
                            local_file = execute_download_actions(
                                meta,
-                               actions,
+                               precs,
                                "host",
                                package_subset=[dep],
                                require_files=True,
diff --git a/conda_build/environ.py b/conda_build/environ.py
index c363588e3f..3026f1bf60 100644
--- a/conda_build/environ.py
+++ b/conda_build/environ.py
@@ -43,6 +43,7 @@
     reset_context,
     root_dir,
 )
+from .deprecations import deprecated
 from .exceptions import BuildLockError, DependencyNeedsBuildingError
 from .features import feature_list
 from .index import get_build_index
@@ -849,7 +850,7 @@ def package_specs(self):
 last_index_ts = 0
 
 
-def get_install_actions(
+def get_package_records(
     prefix,
     specs,
     env,
@@ -996,12 +997,49 @@ def get_install_actions(
     utils.trim_empty_keys(actions)
     cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy()
     last_index_ts = index_ts
-    return actions
+    return actions.get(LINK_ACTION, [])
 
 
+@deprecated("24.1.0", "24.3.0", addendum="Use `get_package_records` instead.")
+def get_install_actions(
+    prefix,
+    specs,
+    env,
+    retries=0,
+    subdir=None,
+    verbose=True,
+    debug=False,
+    locking=True,
+    bldpkgs_dirs=None,
+    timeout=900,
+    disable_pip=False,
+    max_env_retry=3,
+    output_folder=None,
+    channel_urls=None,
+):
+    precs = get_package_records(
+        prefix=prefix,
+        specs=specs,
+        env=env,
+        retries=retries,
+        subdir=subdir,
+        verbose=verbose,
+        debug=debug,
+        locking=locking,
+        bldpkgs_dirs=bldpkgs_dirs,
+        timeout=timeout,
+        disable_pip=disable_pip,
+        max_env_retry=max_env_retry,
+        output_folder=output_folder,
+        channel_urls=channel_urls,
+    )
+    return {PREFIX_ACTION: prefix, LINK_ACTION: precs}
+
+
+@deprecated.argument("24.1.0", "24.3.0", "specs_or_actions", rename="specs_or_precs")
 def create_env(
     prefix,
-    specs_or_actions,
+    specs_or_precs,
     env,
     config,
     subdir,
@@ -1029,17 +1067,20 @@ def create_env(
     # if os.path.isdir(prefix):
     #     utils.rm_rf(prefix)
 
-    if specs_or_actions:  # Don't waste time if there is nothing to do
+    if specs_or_precs:  # Don't waste time if there is nothing to do
         log.debug("Creating environment in %s", prefix)
-        log.debug(str(specs_or_actions))
+        log.debug(str(specs_or_precs))
 
         if not locks:
             locks = utils.get_conda_operation_locks(config)
         try:
             with utils.try_acquire_locks(locks, timeout=config.timeout):
-                # input is a list - it's specs in MatchSpec format
-                if not hasattr(specs_or_actions, "keys"):
-                    specs = list(set(specs_or_actions))
+                # input is a list of specs in MatchSpec format
+                if not (
+                    hasattr(specs_or_precs, "keys")
+                    or isinstance(specs_or_precs[0], PackageRecord)
+                ):
+                    specs = list(set(specs_or_precs))
                     actions = get_install_actions(
                         prefix,
                         tuple(specs),
@@ -1056,7 +1097,10 @@ def create_env(
                         channel_urls=tuple(config.channel_urls),
                     )
                 else:
-                    actions = specs_or_actions
+                    if not hasattr(specs_or_precs, "keys"):
+                        actions = {LINK_ACTION: specs_or_precs}
+                    else:
+                        actions = specs_or_precs
                 index, _, _ = get_build_index(
                     subdir=subdir,
                     bldpkgs_dir=config.bldpkgs_dir,
@@ -1068,13 +1112,13 @@ def create_env(
                     timeout=config.timeout,
                 )
                 utils.trim_empty_keys(actions)
-                _display_actions(actions)
+                _display_actions(prefix, actions)
                 if utils.on_win:
                     for k, v in os.environ.items():
                         os.environ[k] = str(v)
                 with env_var("CONDA_QUIET", not config.verbose, reset_context):
                     with env_var("CONDA_JSON", not config.verbose, reset_context):
-                        _execute_actions(actions)
+                        _execute_actions(prefix, actions)
         except (
             SystemExit,
             PaddingError,
@@ -1134,7 +1178,7 @@ def create_env(
                     )
                     create_env(
                         prefix,
-                        specs_or_actions,
+                        specs_or_precs,
                         config=config,
                         subdir=subdir,
                         env=env,
@@ -1165,7 +1209,7 @@ def create_env(
                     )
                     create_env(
                         prefix,
-                        specs_or_actions,
+                        specs_or_precs,
                         config=config,
                         subdir=subdir,
                         env=env,
@@ -1203,7 +1247,7 @@ def create_env(
                     )
                     create_env(
                         prefix,
-                        specs_or_actions,
+                        specs_or_precs,
                         config=config,
                         subdir=subdir,
                         env=env,
@@ -1312,12 +1356,11 @@ def install_actions(prefix, index, specs):
 del install_actions
 
 
-def _execute_actions(actions):
+def _execute_actions(prefix, actions):
     # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575
     # but reduced to only the functionality actually used within conda-build.
 
-    assert PREFIX_ACTION in actions and actions[PREFIX_ACTION]
-    prefix = actions[PREFIX_ACTION]
+    assert prefix
 
     if LINK_ACTION not in actions:
         log.debug(f"action {LINK_ACTION} not in actions")
@@ -1346,11 +1389,10 @@ def _execute_actions(actions):
     unlink_link_transaction.execute()
 
 
-def _display_actions(actions):
+def _display_actions(prefix, actions):
     # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58
     # but reduced to only the functionality actually used within conda-build.
 
-    prefix = actions.get(PREFIX_ACTION)
     builder = ["", "## Package Plan ##\n"]
     if prefix:
         builder.append("  environment location: %s" % prefix)
diff --git a/conda_build/index.py b/conda_build/index.py
index aebc28fe21..229c5e1632 100644
--- a/conda_build/index.py
+++ b/conda_build/index.py
@@ -61,6 +61,7 @@
     human_bytes,
     url_path,
 )
+from .deprecations import deprecated
 from .utils import (
     CONDA_PACKAGE_EXTENSION_V1,
     CONDA_PACKAGE_EXTENSION_V2,
@@ -112,7 +113,8 @@ def map(self, func, *iterables):
 local_subdir = ""
 local_output_folder = ""
 cached_channels = []
-channel_data = {}
+_channel_data = {}
+deprecated.constant("24.1.0", "24.3.0", "channel_data", _channel_data)
 
 # TODO: support for libarchive seems to have broken ability to use multiple threads here.
@@ -151,7 +153,7 @@ def get_build_index(
     global local_output_folder
     global cached_index
     global cached_channels
-    global channel_data
+    global _channel_data
     mtime = 0
 
     channel_urls = list(utils.ensure_list(channel_urls))
@@ -248,7 +250,7 @@ def get_build_index(
                     while retry < max_retries:
                         try:
                             with open(channeldata_file, "r+") as f:
-                                channel_data[channel.name] = json.load(f)
+                                _channel_data[channel.name] = json.load(f)
                             break
                         except (OSError, JSONDecodeError):
                             time.sleep(0.2)
@@ -257,24 +259,24 @@ def get_build_index(
                 # download channeldata.json for url
                 if not context.offline:
                     try:
-                        channel_data[channel.name] = utils.download_channeldata(
+                        _channel_data[channel.name] = utils.download_channeldata(
                             channel.base_url + "/channeldata.json"
                         )
                     except CondaHTTPError:
                         continue
                 # collapse defaults metachannel back into one superchannel, merging channeldata
-                if channel.base_url in context.default_channels and channel_data.get(
+                if channel.base_url in context.default_channels and _channel_data.get(
                     channel.name
                 ):
                     packages = superchannel.get("packages", {})
-                    packages.update(channel_data[channel.name])
+                    packages.update(_channel_data[channel.name])
                     superchannel["packages"] = packages
-            channel_data["defaults"] = superchannel
+            _channel_data["defaults"] = superchannel
         local_index_timestamp = os.path.getmtime(index_file)
         local_subdir = subdir
         local_output_folder = output_folder
         cached_channels = channel_urls
-    return cached_index, local_index_timestamp, channel_data
+    return cached_index, local_index_timestamp, _channel_data
 
 
 def _ensure_valid_channel(local_folder, subdir):
@@ -328,6 +330,7 @@ def _delegated_update_index(
 # Everything below is deprecated to maintain API/feature compatibility.
 
 
+@deprecated("24.1.0", "24.3.0")
 def _determine_namespace(info):
     if info.get("namespace"):
         namespace = info["namespace"]
@@ -354,6 +357,7 @@
     return namespace, info.get("name_in_channel", info["name"]), info["name"]
 
 
+@deprecated("24.1.0", "24.3.0")
 def _make_seconds(timestamp):
     timestamp = int(timestamp)
     if timestamp > 253402300799:  # 9999-12-31
@@ -366,11 +370,11 @@ def _make_seconds(timestamp):
 # ==========================================================================
 
 
-REPODATA_VERSION = 1
-CHANNELDATA_VERSION = 1
-REPODATA_JSON_FN = "repodata.json"
-REPODATA_FROM_PKGS_JSON_FN = "repodata_from_packages.json"
-CHANNELDATA_FIELDS = (
+_REPODATA_VERSION = 1
+_CHANNELDATA_VERSION = 1
+_REPODATA_JSON_FN = "repodata.json"
+_REPODATA_FROM_PKGS_JSON_FN = "repodata_from_packages.json"
+_CHANNELDATA_FIELDS = (
     "description",
     "dev_url",
     "doc_url",
@@ -401,8 +405,16 @@ def _make_seconds(timestamp):
     "recipe_origin",
     "commits",
 )
+deprecated.constant("24.1.0", "24.3.0", "REPODATA_VERSION", _REPODATA_VERSION)
+deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_VERSION", _CHANNELDATA_VERSION)
+deprecated.constant("24.1.0", "24.3.0", "REPODATA_JSON_FN", _REPODATA_JSON_FN)
+deprecated.constant(
+    "24.1.0", "24.3.0", "REPODATA_FROM_PKGS_JSON_FN", _REPODATA_FROM_PKGS_JSON_FN
+)
+deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_FIELDS", _CHANNELDATA_FIELDS)
 
 
+@deprecated("24.1.0", "24.3.0")
 def _clear_newline_chars(record, field_name):
     if field_name in record:
         try:
@@ -412,6 +424,9 @@ def _clear_newline_chars(record, field_name):
             record[field_name] = record[field_name][0].strip().replace("\n", " ")
 
 
+@deprecated(
+    "24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead."
+)
 def _apply_instructions(subdir, repodata, instructions):
     repodata.setdefault("removed", [])
     utils.merge_or_update_dict(
@@ -460,6 +475,7 @@ def _apply_instructions(subdir, repodata, instructions):
     return repodata
 
 
+@deprecated("24.1.0", "24.3.0")
 def _get_jinja2_environment():
     def _filter_strftime(dt, dt_format):
         if isinstance(dt, Number):
@@ -489,6 +505,7 @@ def _filter_add_href(text, link, **kwargs):
     return environment
 
 
+@deprecated("24.1.0", "24.3.0")
 def _maybe_write(path, content, write_newline_end=False, content_is_binary=False):
     # Create the temp file next "path" so that we can use an atomic move, see
     # https://github.com/conda/conda-build/issues/3833
@@ -510,6 +527,7 @@ def _maybe_write(path, content, write_newline_end=False, content_is_binary=False
     return True
 
 
+@deprecated("24.1.0", "24.3.0")
 def _make_build_string(build, build_number):
     build_number_as_string = str(build_number)
     if build.endswith(build_number_as_string):
         build_string = build[: -len(build_number_as_string)]
@@ -519,6 +537,7 @@ def _make_build_string(build, build_number):
     return build_string
 
 
+@deprecated("24.1.0", "24.3.0")
 def _warn_on_missing_dependencies(missing_dependencies, patched_repodata):
     """
     The following dependencies do not exist in the channel and are not declared
@@ -553,6 +572,7 @@ def _warn_on_missing_dependencies(missing_dependencies, patched_repodata):
         log.warn("\n".join(builder))
 
 
+@deprecated("24.1.0", "24.3.0")
 def _cache_post_install_details(paths_cache_path, post_install_cache_path):
     post_install_details_json = {
         "binary_prefix": False,
@@ -591,6 +611,7 @@ def _cache_post_install_details(paths_cache_path, post_install_cache_path):
         json.dump(post_install_details_json, fh)
 
 
+@deprecated("24.1.0", "24.3.0")
 def _cache_recipe(tmpdir, recipe_cache_path):
     recipe_path_search_order = (
         "info/recipe/meta.yaml.rendered",
@@ -620,6 +641,7 @@ def _cache_recipe(tmpdir, recipe_cache_path):
     return recipe_json
 
 
+@deprecated("24.1.0", "24.3.0")
 def _cache_run_exports(tmpdir, run_exports_cache_path):
     run_exports = {}
     try:
@@ -635,6 +657,7 @@ def _cache_run_exports(tmpdir, run_exports_cache_path):
         json.dump(run_exports, fh)
 
 
+@deprecated("24.1.0", "24.3.0")
 def _cache_icon(tmpdir, recipe_json, icon_cache_path):
     # If a conda package contains an icon, also extract and cache that in an .icon/
     # directory.  The icon file name is the name of the package, plus the extension
@@ -651,6 +674,7 @@ def _cache_icon(tmpdir, recipe_json, icon_cache_path):
         utils.move_with_fallback(icon_path, icon_cache_path)
 
 
+@deprecated("24.1.0", "24.3.0")
 def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths):
     environment = _get_jinja2_environment()
     template = environment.get_template("subdir-index.html.j2")
@@ -663,6 +687,7 @@ def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths
     return rendered_html
 
 
+@deprecated("24.1.0", "24.3.0")
 def _make_channeldata_index_html(channel_name, channeldata):
     environment = _get_jinja2_environment()
     template = environment.get_template("channeldata-index.html.j2")
@@ -675,6 +700,7 @@ def _make_channeldata_index_html(channel_name, channeldata):
     return rendered_html
 
 
+@deprecated("24.1.0", "24.3.0")
 def _get_source_repo_git_info(path):
     is_repo = subprocess.check_output(
         ["git", "rev-parse", "--is-inside-work-tree"], cwd=path
@@ -697,12 +723,14 @@ def _get_source_repo_git_info(path):
     return commits
 
 
+@deprecated("24.1.0", "24.3.0")
 def _cache_info_file(tmpdir, info_fn, cache_path):
     info_path = os.path.join(tmpdir, "info", info_fn)
     if os.path.lexists(info_path):
         utils.move_with_fallback(info_path, cache_path)
 
 
+@deprecated("24.1.0", "24.3.0")
 def _alternate_file_extension(fn):
     cache_fn = fn
     for ext in CONDA_PACKAGE_EXTENSIONS:
@@ -711,6 +739,7 @@ def _alternate_file_extension(fn):
     return cache_fn + next(iter(other_ext))
 
 
+@deprecated("24.1.0", "24.3.0")
 def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None):
     packages = {}
     conda_packages = {}
@@ -745,6 +774,7 @@ def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None):
     return r
 
 
+@deprecated("24.1.0", "24.3.0")
 def _get_newest_versions(r, pins={}):
     groups = {}
     for g_name, g_recs in r.groups.items():
@@ -760,6 +790,7 @@ def _get_newest_versions(r, pins={}):
     return [pkg for group in groups.values() for pkg in group]
 
 
+@deprecated("24.1.0", "24.3.0")
 def _add_missing_deps(new_r, original_r):
     """For each package in new_r, if any deps are not satisfiable, backfill them
     from original_r."""
@@ -784,6 +815,7 @@ def _add_missing_deps(new_r, original_r):
     return [pkg for group in expanded_groups.values() for pkg in group]
 
 
+@deprecated("24.1.0", "24.3.0")
 def _add_prev_ver_for_features(new_r, orig_r):
     expanded_groups = copy.deepcopy(new_r.groups)
     for g_name in new_r.groups:
@@ -812,6 +844,7 @@ def _add_prev_ver_for_features(new_r, orig_r):
     return [pkg for group in expanded_groups.values() for pkg in group]
 
 
+@deprecated("24.1.0", "24.3.0")
 def _shard_newest_packages(subdir, r, pins=None):
     """Captures only the newest versions of software in the resolve object.
 
@@ -844,6 +877,7 @@ def _shard_newest_packages(subdir, r, pins=None):
     return set(_add_prev_ver_for_features(new_r, r))
 
 
+@deprecated("24.1.0", "24.3.0")
 def _build_current_repodata(subdir, repodata, pins):
     r = _get_resolve_object(subdir, repodata=repodata)
     keep_pkgs = _shard_newest_packages(subdir, r, pins)
@@ -871,6 +905,7 @@ def _build_current_repodata(subdir, repodata, pins):
     return new_repodata
 
 
+@deprecated("24.1.0", "24.3.0")
 class ChannelIndex:
     def __init__(
         self,
@@ -951,7 +986,7 @@ def index(
                     self._write_repodata(
                         subdir,
                         repodata_from_packages,
-                        REPODATA_FROM_PKGS_JSON_FN,
+                        _REPODATA_FROM_PKGS_JSON_FN,
                     )
 
                     # Step 3. Apply patch instructions.
@@ -968,7 +1003,7 @@ def index(
                         t2.set_description("Writing patched repodata")
                         t2.update()
                         self._write_repodata(
-                            subdir, patched_repodata, REPODATA_JSON_FN
+                            subdir, patched_repodata, _REPODATA_JSON_FN
                         )
                         t2.set_description("Building current_repodata subset")
                         t2.update()
@@ -1000,7 +1035,7 @@ def index(
     def index_subdir(self, subdir, index_file=None, verbose=False, progress=False):
         subdir_path = join(self.channel_root, subdir)
         self._ensure_dirs(subdir)
-        repodata_json_path = join(subdir_path, REPODATA_FROM_PKGS_JSON_FN)
+        repodata_json_path = join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN)
 
         if verbose:
             log.info("Building repodata for %s" % subdir_path)
@@ -1158,7 +1193,7 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False):
                     "info": {
                         "subdir": subdir,
                     },
-                    "repodata_version": REPODATA_VERSION,
+                    "repodata_version": _REPODATA_VERSION,
                     "removed": sorted(list(ignore_set)),
                 }
             finally:
@@ -1465,11 +1500,11 @@ def _add_extra_path(extra_paths, path):
             }
 
         extra_paths = OrderedDict()
-        _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN))
-        _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN + ".bz2"))
-        _add_extra_path(extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN))
+        _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN))
+        _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN + ".bz2"))
+        _add_extra_path(extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN))
         _add_extra_path(
-            extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN + ".bz2")
+            extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN + ".bz2")
         )
         # _add_extra_path(extra_paths, join(subdir_path, "repodata2.json"))
         _add_extra_path(extra_paths, join(subdir_path, "patch_instructions.json"))
@@ -1603,7 +1638,7 @@ def _replace_if_newer_and_present(pd, data, erec, data_newer, k):
 
         channel_data.update(
             {
-                "channeldata_version": CHANNELDATA_VERSION,
+                "channeldata_version": _CHANNELDATA_VERSION,
                 "subdirs": sorted(
                     list(set(channel_data.get("subdirs", []) + [subdir]))
                 ),
diff --git a/conda_build/render.py b/conda_build/render.py
index c75838a65b..a46130f4ed 100644
--- a/conda_build/render.py
+++ b/conda_build/render.py
@@ -35,6 +35,7 @@
     pkgs_dirs,
     specs_from_url,
 )
+from .deprecations import deprecated
 from .environ import LINK_ACTION
 from .exceptions import DependencyNeedsBuildingError
 from .index import get_build_index
@@ -90,6 +91,7 @@ def bldpkg_path(m):
     return path
 
 
+@deprecated("24.1.0", "24.3.0")
 def actions_to_pins(actions):
     if LINK_ACTION in actions:
         return [package_record_to_requirement(prec) for prec in actions[LINK_ACTION]]
@@ -182,7 +184,9 @@ def get_env_dependencies(
         else:
             raise
 
-    specs = actions_to_pins(actions)
+    specs = [
+        package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, [])
+    ]
     return (
         utils.ensure_list(
             (specs + subpackages + pass_through_deps)
@@ -325,7 +329,8 @@ def _read_specs_from_package(pkg_loc, pkg_dist):
     return specs
 
 
-def execute_download_actions(m, actions, env, package_subset=None, require_files=False):
+@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs")
+def execute_download_actions(m, precs, env, package_subset=None, require_files=False):
     subdir = getattr(m.config, f"{env}_subdir")
     index, _, _ = get_build_index(
         subdir=subdir,
@@ -354,7 +359,8 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files
 
     pkg_files = {}
 
-    precs = actions.get(LINK_ACTION, [])
+    if hasattr(precs, "keys"):
+        precs = precs.get(LINK_ACTION, [])
     if isinstance(package_subset, PackageRecord):
         package_subset = [package_subset]
     else:
@@ -403,18 +409,20 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files
     return pkg_files
 
 
-def get_upstream_pins(m: MetaData, actions, env):
+@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs")
+def get_upstream_pins(m: MetaData, precs, env):
     """Download packages from specs, then inspect each downloaded package for additional
     downstream dependency specs. Return these additional specs."""
     env_specs = m.get_value(f"requirements/{env}", [])
     explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else []
-    linked_packages = actions.get(LINK_ACTION, [])
-    linked_packages = [prec for prec in linked_packages if prec.name in explicit_specs]
+    if hasattr(precs, "keys"):
+        precs = precs.get(LINK_ACTION, [])
+    precs = [prec for prec in precs if prec.name in explicit_specs]
 
     ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from"))
     ignore_list = utils.ensure_list(m.get_value("build/ignore_run_exports"))
     additional_specs = {}
-    for prec in linked_packages:
+    for prec in precs:
         if any(prec.name in req.split(" ")[0] for req in ignore_pkgs_list):
             continue
         run_exports = None
@@ -428,7 +436,7 @@ def get_upstream_pins(m: MetaData, actions, env):
         if run_exports is None:
             loc, dist = execute_download_actions(
                 m,
-                actions,
+                precs,
                 env=env,
                 package_subset=[prec],
            )[prec]
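
Note on downstream usage: the net effect of these deprecations is that the old PREFIX_ACTION/LINK_ACTION "actions" dictionary is replaced by a plain list of PackageRecord objects. The snippet below is a minimal migration sketch, not part of the patch; it assumes conda-build >= 24.1 with a default Config and reachable channels, and the package specs shown are only illustrative.

    from conda_build.config import Config
    from conda_build import environ

    config = Config()  # assumed: default channels/settings are sufficient

    # Previously: actions = environ.get_install_actions(prefix, specs, "host", ...)
    # Now a list of PackageRecord objects is returned directly.
    precs = environ.get_package_records(
        config.host_prefix,
        ("python", "pip"),
        "host",
        subdir=config.host_subdir,
        channel_urls=tuple(config.channel_urls),
    )

    # create_env accepts the records list via the renamed `specs_or_precs`
    # parameter (formerly `specs_or_actions`); the old dict form still works
    # until the deprecation is finalized.
    environ.create_env(
        config.host_prefix,
        precs,
        env="host",
        config=config,
        subdir=config.host_subdir,
    )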