Skip to content

Commit

Permalink
Fix corrupted package cache for outputs in subpackage tests
Browse files Browse the repository at this point in the history
This re-introduces conda_build.environ.clean_pkg_cache with slight changes:
it no longer uses conda.models.dist.Dist, and it handles multiple pkgs_dirs
better.

Signed-off-by: Marcel Bargull <[email protected]>
  • Loading branch information
mbargull committed Feb 14, 2024
1 parent a379a91 commit 34552d5
Show file tree
Hide file tree
Showing 3 changed files with 68 additions and 1 deletion.
20 changes: 20 additions & 0 deletions conda_build/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
env_path_backup_var_exists,
get_conda_channel,
get_rc_urls,
pkgs_dirs,
prefix_placeholder,
reset_context,
root_dir,
Expand Down Expand Up @@ -3394,6 +3395,25 @@ def test(
# folder destination
_extract_test_files_from_package(metadata)

# Remove any previously cached build from the package cache to ensure we
# really test the requested build and not some clashing or corrupted build.
# (Corruption of the extracted package can happen, e.g., in multi-output
# builds if one of the subpackages overwrites files from the other.)
# Special case:
# If test is requested for .tar.bz2/.conda file from the pkgs dir itself,
# clean_pkg_cache() will remove it; don't call that function in this case.
in_pkg_cache = (
not hasattr(recipedir_or_package_or_metadata, "config")
and os.path.isfile(recipedir_or_package_or_metadata)
and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS)
and any(
os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dir
for pkgs_dir in pkgs_dirs
)
)
if not in_pkg_cache:
environ.clean_pkg_cache(metadata.dist(), metadata.config)

copy_test_source_files(metadata, metadata.config.test_dir)
# this is also copying tests/source_files from work_dir to testing workdir

Expand Down
30 changes: 29 additions & 1 deletion conda_build/environ.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,15 @@
from logging import getLogger
from os.path import join, normpath

from conda.base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL
from conda.base.constants import (
CONDA_PACKAGE_EXTENSIONS,
DEFAULTS_CHANNEL_NAME,
UNKNOWN_CHANNEL,
)
from conda.common.io import env_vars
from conda.core.index import LAST_CHANNEL_URLS
from conda.core.link import PrefixSetup, UnlinkLinkTransaction
from conda.core.package_cache_data import PackageCacheData
from conda.core.prefix_data import PrefixData
from conda.models.channel import prioritize_channels

Expand Down Expand Up @@ -1264,6 +1269,29 @@ def get_pkg_dirs_locks(dirs, config):
return [utils.get_lock(folder, timeout=config.timeout) for folder in dirs]


def clean_pkg_cache(dist, config):
    """Remove all cached traces of ``dist`` from every package cache directory.

    Used to guarantee a test run installs the freshly built package rather
    than a stale or corrupted copy left in the cache (e.g., from an earlier
    multi-output build).

    :param dist: package identifier; string-formatted into cached file names
        (``f"{dist}{ext}"``) and passed to ``PackageCacheData.query``
        — presumably a dist-name string, not a ``Dist`` object; TODO confirm
    :param config: build config supplying ``bldpkgs_dir``, ``debug``, and
        ``timeout`` used for logging and lock acquisition
    """
    # Silence conda's chatter unless debugging was requested.
    with utils.LoggingContext(logging.DEBUG if config.debug else logging.WARN):
        # Lock the local build-output dir plus every configured cache dir so
        # no other process mutates the caches while we inspect them.
        locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config)
        with utils.try_acquire_locks(locks, timeout=config.timeout):
            for pkgs_dir in pkgs_dirs:
                # Check for the extracted package directory ("" extension) as
                # well as any archive form (.tar.bz2/.conda) of this dist.
                if any(
                    os.path.exists(os.path.join(pkgs_dir, f"{dist}{ext}"))
                    for ext in ("", *CONDA_PACKAGE_EXTENSIONS)
                ):
                    log.debug(
                        "Conda caching error: %s package remains in cache after removal",
                        dist,
                    )
                    log.debug("manually removing to compensate")
                    # Delegate the actual removal to conda's cache layer for
                    # this one cache directory.
                    package_cache = PackageCacheData.first_writable([pkgs_dir])
                    for cache_pkg_id in package_cache.query(dist):
                        package_cache.remove(cache_pkg_id)

        # Note that this call acquires the relevant locks, so this must be called
        # outside the lock context above.
        remove_existing_packages(pkgs_dirs, [dist], config)


def remove_existing_packages(dirs, fns, config):
locks = get_pkg_dirs_locks(dirs, config) if config.locking else []

Expand Down
19 changes: 19 additions & 0 deletions news/5184-fix-multi-output-package-corruption
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
### Enhancements

* <news item>

### Bug fixes

* Fix corrupted package cache for outputs in subpackage tests. (#5184)

### Deprecations

* <news item>

### Docs

* <news item>

### Other

* <news item>

0 comments on commit 34552d5

Please sign in to comment.